From a91cbb6bececf114e33e69317231b8ea93679c7f Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Mon, 26 Apr 2021 21:16:18 -0400 Subject: [PATCH 01/30] chore(revert): revert preventing normalization (#421) reverts previous commit for preventing normalization of versioning --- setup.py | 16 +--------------- 1 file changed, 1 insertion(+), 15 deletions(-) diff --git a/setup.py b/setup.py index aff482545..e1974b607 100644 --- a/setup.py +++ b/setup.py @@ -17,20 +17,6 @@ import setuptools -# Disable version normalization performed by setuptools.setup() -try: - # Try the approach of using sic(), added in setuptools 46.1.0 - from setuptools import sic -except ImportError: - # Try the approach of replacing packaging.version.Version - sic = lambda v: v - try: - # setuptools >=39.0.0 uses packaging from setuptools.extern - from setuptools.extern import packaging - except ImportError: - # setuptools <39.0.0 uses packaging from pkg_resources.extern - from pkg_resources.extern import packaging - packaging.version.Version = packaging.version.LegacyVersion # Package metadata. @@ -77,7 +63,7 @@ setuptools.setup( name=name, - version=sic(version), + version=version, description=description, long_description=readme, author="Google LLC", From 801206d9eaa490f298010b0d4901bc66397bee12 Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Wed, 28 Apr 2021 12:28:05 -0400 Subject: [PATCH 02/30] chore: use `gcp-sphinx-docfx-yaml` (#423) makes use of the updated plugin for generating DocFX YAMLs --- noxfile.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/noxfile.py b/noxfile.py index 5f3f73ebb..1e3e5ddd3 100644 --- a/noxfile.py +++ b/noxfile.py @@ -179,9 +179,7 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - # sphinx-docfx-yaml supports up to sphinx version 1.5.5. - # https://github.com/docascode/sphinx-docfx-yaml/issues/97 - session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") + session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( From 9777904b7237fd228d160fa65e7a419aefe414f6 Mon Sep 17 00:00:00 2001 From: "google-cloud-policy-bot[bot]" <80869356+google-cloud-policy-bot[bot]@users.noreply.github.com> Date: Fri, 30 Apr 2021 10:20:37 -0700 Subject: [PATCH 03/30] chore: add SECURITY.md (#424) Co-authored-by: google-cloud-policy-bot[bot] <80869356+google-cloud-policy-bot[bot]@users.noreply.github.com> --- SECURITY.md | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 SECURITY.md diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 000000000..8b58ae9c0 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. 
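For context on PATCH 01 above: `setuptools.setup()` normalizes the `version` argument into its canonical PEP 440 form, and the removed `sic()` shim existed only to suppress that normalization. A minimal sketch of the behavior the revert re-enables, assuming the `packaging` library is installed; the version string "1.38.dev" is a hypothetical example, not this package's actual version:

```python
# Sketch of the PEP 440 normalization that setuptools.setup() applies to
# `version` unless the value is wrapped in setuptools.sic().
# "1.38.dev" is a hypothetical, non-canonical version string.
from packaging.version import Version

raw = "1.38.dev"
print(str(Version(raw)))  # -> "1.38.dev0", the canonical PEP 440 form
```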
From 2e70a582f3b37dddfe7a2f11df81b118f4ac0292 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Wed, 5 May 2021 17:34:21 -0400
Subject: [PATCH 04/30] tests: fix unit tests broken by dependency drift (#430)

* tests: fix 'TestClient.test_ctor_wo_project'

Broken by https://github.com/googleapis/python-cloud-core/pull/51

Closes #427

* chore(deps): pin googleapis-common-protos' FBO python2.7

Closes #429
---
 setup.py                  |  1 +
 tests/unit/test_client.py | 14 ++++++--------
 2 files changed, 7 insertions(+), 8 deletions(-)

diff --git a/setup.py b/setup.py
index e1974b607..55863aabb 100644
--- a/setup.py
+++ b/setup.py
@@ -32,6 +32,7 @@
     "google-cloud-core >= 1.4.1, < 2.0dev",
     "google-resumable-media >= 1.2.0, < 2.0dev",
     "requests >= 2.18.0, < 3.0.0dev",
+    "googleapis-common-protos < 1.53.0; python_version<'3.0'",
 ]

 extras = {}

diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py
index df780c786..00a9b4913 100644
--- a/tests/unit/test_client.py
+++ b/tests/unit/test_client.py
@@ -39,9 +39,12 @@
 _FAKE_CREDENTIALS = Credentials.from_service_account_info(_SERVICE_ACCOUNT_JSON)


-def _make_credentials():
+def _make_credentials(project=None):
     import google.auth.credentials

+    if project is not None:
+        return mock.Mock(spec=google.auth.credentials.Credentials, project_id=project)
+
     return mock.Mock(spec=google.auth.credentials.Credentials)


@@ -174,14 +177,9 @@ def test_ctor_wo_project(self):
         from google.cloud.storage._http import Connection

         PROJECT = "PROJECT"
-        credentials = _make_credentials()
+        credentials = _make_credentials(project=PROJECT)

-        ddp_patch = mock.patch(
-            "google.cloud.client._determine_default_project", return_value=PROJECT
-        )
-
-        with ddp_patch:
-            client = self._make_one(credentials=credentials)
+        client = self._make_one(credentials=credentials)

         self.assertEqual(client.project, PROJECT)
         self.assertIsInstance(client._connection, Connection)

From 246a13bbfa97c7103b10485c2593f923b323ba5e Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Thu, 6 May 2021 13:56:38 -0400
Subject: [PATCH 05/30] tests: tweak systest deps install to avoid py27 conflict (#433)

Follow on to PR #430.
---
 noxfile.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/noxfile.py b/noxfile.py
index 1e3e5ddd3..3104fdac2 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -122,15 +122,19 @@ def system(session):

     # Install all test dependencies, then install this package into the
     # virtualenv's dist-packages.
+    # 2021-05-06: defer installing 'google-cloud-*' to after this package,
+    # in order to work around Python 2.7 googleapis-common-protos
+    # issue.
+    session.install(
+        "mock", "pytest",
+    )
+    session.install("-e", ".")
     session.install(
-        "mock",
-        "pytest",
         "google-cloud-testutils",
         "google-cloud-iam",
         "google-cloud-pubsub < 2.0.0",
         "google-cloud-kms < 2.0dev",
     )
-    session.install("-e", ".")

     # Run py.test against the system tests.
if system_test_exists: From 2532d506b44fc1ef0fa0a996822d29e7459c465a Mon Sep 17 00:00:00 2001 From: cojenco <59401799+cojenco@users.noreply.github.com> Date: Sat, 8 May 2021 00:58:43 -0700 Subject: [PATCH 06/30] fix: replace python lifecycle action parsing ValueError with warning (#437) * fix: replace python lifecycle action parsing ValueError with warning * fix lint * add client upgrade suggestion to unknown OLM rule warning * update warning message --- google/cloud/storage/bucket.py | 8 +++++++- tests/unit/test_bucket.py | 13 ++++++++++--- 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py index 889a65888..ac38208a3 100644 --- a/google/cloud/storage/bucket.py +++ b/google/cloud/storage/bucket.py @@ -2361,7 +2361,13 @@ def lifecycle_rules(self): elif action_type == "SetStorageClass": yield LifecycleRuleSetStorageClass.from_api_repr(rule) else: - raise ValueError("Unknown lifecycle rule: {}".format(rule)) + warnings.warn( + "Unknown lifecycle rule type received: {}. Please upgrade to the latest version of google-cloud-storage.".format( + rule + ), + UserWarning, + stacklevel=1, + ) @lifecycle_rules.setter def lifecycle_rules(self, rules): diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py index 4d776c365..22984a343 100644 --- a/tests/unit/test_bucket.py +++ b/tests/unit/test_bucket.py @@ -1783,15 +1783,22 @@ def test_iam_configuration_policy_w_entry(self): self.assertTrue(config.uniform_bucket_level_access_enabled) self.assertEqual(config.uniform_bucket_level_access_locked_time, now) - def test_lifecycle_rules_getter_unknown_action_type(self): + @mock.patch("warnings.warn") + def test_lifecycle_rules_getter_unknown_action_type(self, mock_warn): NAME = "name" BOGUS_RULE = {"action": {"type": "Bogus"}, "condition": {"age": 42}} rules = [BOGUS_RULE] properties = {"lifecycle": {"rule": rules}} bucket = self._make_one(name=NAME, properties=properties) - with self.assertRaises(ValueError): - list(bucket.lifecycle_rules) + list(bucket.lifecycle_rules) + mock_warn.assert_called_with( + "Unknown lifecycle rule type received: {}. 
Please upgrade to the latest version of google-cloud-storage.".format( + BOGUS_RULE + ), + UserWarning, + stacklevel=1, + ) def test_lifecycle_rules_getter(self): from google.cloud.storage.bucket import ( From 1f960699ce22d950132b2352ca7f89d75cb9606c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 12 May 2021 13:10:37 -0400 Subject: [PATCH 07/30] chore: add library type to .repo-metadata.json (#439) --- .repo-metadata.json | 1 + 1 file changed, 1 insertion(+) diff --git a/.repo-metadata.json b/.repo-metadata.json index 499d6158c..315fd7657 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -6,6 +6,7 @@ "issue_tracker": "https://issuetracker.google.com/savedsearches/559782", "release_level": "ga", "language": "python", + "library_type": "GAPIC_MANUAL", "repo": "googleapis/python-storage", "distribution_name": "google-cloud-storage", "api_id": "storage.googleapis.com", From 3fb5086d54ebe487e54cb5a254c0899c91faaf8e Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 18 May 2021 06:00:08 +0200 Subject: [PATCH 08/30] chore(deps): update precommit hook pre-commit/pre-commit-hooks to v4 (#444) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [pre-commit/pre-commit-hooks](https://togithub.com/pre-commit/pre-commit-hooks) | repository | major | `v3.4.0` -> `v4.0.1` | --- ### Release Notes
pre-commit/pre-commit-hooks ### [`v4.0.1`](https://togithub.com/pre-commit/pre-commit-hooks/releases/v4.0.1) [Compare Source](https://togithub.com/pre-commit/pre-commit-hooks/compare/v4.0.0...v4.0.1) ##### Fixes - `check-shebang-scripts-are-executable` fix entry point. - [#​602](https://togithub.com/pre-commit/pre-commit-hooks/issues/602) issue by [@​Person-93](https://togithub.com/Person-93). - [#​603](https://togithub.com/pre-commit/pre-commit-hooks/issues/603) PR by [@​scop](https://togithub.com/scop). ### [`v4.0.0`](https://togithub.com/pre-commit/pre-commit-hooks/releases/v4.0.0) [Compare Source](https://togithub.com/pre-commit/pre-commit-hooks/compare/v3.4.0...v4.0.0) ##### Features - `check-json`: report duplicate keys. - [#​558](https://togithub.com/pre-commit/pre-commit-hooks/issues/558) PR by [@​AdityaKhursale](https://togithub.com/AdityaKhursale). - [#​554](https://togithub.com/pre-commit/pre-commit-hooks/issues/554) issue by [@​adamchainz](https://togithub.com/adamchainz). - `no-commit-to-branch`: add `main` to default blocked branches. - [#​565](https://togithub.com/pre-commit/pre-commit-hooks/issues/565) PR by [@​ndevenish](https://togithub.com/ndevenish). - `check-case-conflict`: check conflicts in directory names as well. - [#​575](https://togithub.com/pre-commit/pre-commit-hooks/issues/575) PR by [@​slsyy](https://togithub.com/slsyy). - [#​70](https://togithub.com/pre-commit/pre-commit-hooks/issues/70) issue by [@​andyjack](https://togithub.com/andyjack). - `check-vcs-permalinks`: forbid other branch names. - [#​582](https://togithub.com/pre-commit/pre-commit-hooks/issues/582) PR by [@​jack1142](https://togithub.com/jack1142). - [#​581](https://togithub.com/pre-commit/pre-commit-hooks/issues/581) issue by [@​jack1142](https://togithub.com/jack1142). - `check-shebang-scripts-are-executable`: new hook which ensures shebang'd scripts are executable. - [#​545](https://togithub.com/pre-commit/pre-commit-hooks/issues/545) PR by [@​scop](https://togithub.com/scop). ##### Fixes - `check-executables-have-shebangs`: Short circuit shebang lookup on windows. 
- [#​544](https://togithub.com/pre-commit/pre-commit-hooks/issues/544) PR by [@​scop](https://togithub.com/scop). - `requirements-txt-fixer`: Fix comments which have indentation - [#​549](https://togithub.com/pre-commit/pre-commit-hooks/issues/549) PR by [@​greshilov](https://togithub.com/greshilov). - [#​548](https://togithub.com/pre-commit/pre-commit-hooks/issues/548) issue by [@​greshilov](https://togithub.com/greshilov). - `pretty-format-json`: write to stdout using UTF-8 encoding. - [#​571](https://togithub.com/pre-commit/pre-commit-hooks/issues/571) PR by [@​jack1142](https://togithub.com/jack1142). - [#​570](https://togithub.com/pre-commit/pre-commit-hooks/issues/570) issue by [@​jack1142](https://togithub.com/jack1142). - Use more inclusive language. - [#​599](https://togithub.com/pre-commit/pre-commit-hooks/issues/599) PR by [@​asottile](https://togithub.com/asottile). ##### Breaking changes - Remove deprecated hooks: `flake8`, `pyflakes`, `autopep8-wrapper`. - [#​597](https://togithub.com/pre-commit/pre-commit-hooks/issues/597) PR by [@​asottile](https://togithub.com/asottile).
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻️ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-storage). --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2a87c6d4d..41a6222b0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.4.0 + rev: v4.0.1 hooks: - id: trailing-whitespace - id: end-of-file-fixer From 179de2a7e0ca92b2cd849714a2d7801baaf1c551 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 20 May 2021 10:45:24 -0400 Subject: [PATCH 09/30] chore: migrate to owl bot (#412) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: migrate to owl bot * chore: run the post processor * 🦉 Updates from OwlBot Co-authored-by: Owl Bot --- .flake8 | 1 + .github/.OwlBot.lock.yaml | 4 + .github/.OwlBot.yaml | 19 +++ .github/header-checker-lint.yml | 15 +++ .gitignore | 4 +- .kokoro/build.sh | 26 ++++- .kokoro/docs/common.cfg | 2 +- .kokoro/docs/docs-presubmit.cfg | 11 ++ .kokoro/release.sh | 4 +- .kokoro/release/common.cfg | 14 +-- .kokoro/samples/python3.6/common.cfg | 6 + .kokoro/samples/python3.6/periodic-head.cfg | 11 ++ .kokoro/samples/python3.7/common.cfg | 6 + .kokoro/samples/python3.7/periodic-head.cfg | 11 ++ .kokoro/samples/python3.8/common.cfg | 6 + .kokoro/samples/python3.8/periodic-head.cfg | 11 ++ .kokoro/test-samples-against-head.sh | 28 +++++ .kokoro/test-samples-impl.sh | 102 ++++++++++++++++ .kokoro/test-samples.sh | 90 +++----------- .pre-commit-config.yaml | 16 ++- .trampolinerc | 1 + CODE_OF_CONDUCT.md | 123 ++++++++++++++------ LICENSE | 7 +- MANIFEST.in | 4 +- docs/_static/custom.css | 18 ++- docs/conf.py | 20 +++- noxfile.py | 6 +- synth.py => owlbot.py | 3 +- renovate.json | 6 +- synth.metadata | 83 ------------- 30 files changed, 426 insertions(+), 232 deletions(-) create mode 100644 .github/.OwlBot.lock.yaml create mode 100644 .github/.OwlBot.yaml create mode 100644 .github/header-checker-lint.yml create mode 100644 .kokoro/samples/python3.6/periodic-head.cfg create mode 100644 .kokoro/samples/python3.7/periodic-head.cfg create mode 100644 .kokoro/samples/python3.8/periodic-head.cfg create mode 100755 .kokoro/test-samples-against-head.sh create mode 100755 .kokoro/test-samples-impl.sh rename synth.py => owlbot.py (93%) delete mode 100644 synth.metadata diff --git a/.flake8 b/.flake8 index ed9316381..29227d4cf 100644 --- a/.flake8 +++ b/.flake8 @@ -26,6 +26,7 @@ exclude = *_pb2.py # Standard linting exemptions. 
+ **/.nox/** __pycache__, .git, *.pyc, diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml new file mode 100644 index 000000000..d49860b32 --- /dev/null +++ b/.github/.OwlBot.lock.yaml @@ -0,0 +1,4 @@ +docker: + digest: sha256:457583330eec64daa02aeb7a72a04d33e7be2428f646671ce4045dcbc0191b1e + image: gcr.io/repo-automation-bots/owlbot-python:latest + diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml new file mode 100644 index 000000000..c2e0f4b92 --- /dev/null +++ b/.github/.OwlBot.yaml @@ -0,0 +1,19 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +docker: + image: gcr.io/repo-automation-bots/owlbot-python:latest + +begin-after-commit-hash: 6acf4a0a797f1082027985c55c4b14b60f673dd7 + diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml new file mode 100644 index 000000000..6fe78aa79 --- /dev/null +++ b/.github/header-checker-lint.yml @@ -0,0 +1,15 @@ +{"allowedCopyrightHolders": ["Google LLC"], + "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], + "sourceFileExtensions": [ + "ts", + "js", + "java", + "sh", + "Dockerfile", + "yaml", + "py", + "html", + "txt" + ] +} \ No newline at end of file diff --git a/.gitignore b/.gitignore index b9daa52f1..b4243ced7 100644 --- a/.gitignore +++ b/.gitignore @@ -50,8 +50,10 @@ docs.metadata # Virtual environment env/ + +# Test logs coverage.xml -sponge_log.xml +*sponge_log.xml # System test environment variables. system_tests/local_test_setup diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 9e7febd82..9f144307d 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -15,7 +15,11 @@ set -eo pipefail -cd github/python-storage +if [[ -z "${PROJECT_ROOT:-}" ]]; then + PROJECT_ROOT="github/python-storage" +fi + +cd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -30,16 +34,26 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") # Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +python3 -m pip uninstall --yes --quiet nox-automation # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --upgrade --quiet nox +python3 -m nox --version + +# If this is a continuous build, send the test log to the FlakyBot. +# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + } + trap cleanup EXIT HUP +fi # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. 
if [[ -n "${NOX_SESSION:-}" ]]; then - python3.6 -m nox -s "${NOX_SESSION:-}" + python3 -m nox -s ${NOX_SESSION:-} else - python3.6 -m nox + python3 -m nox fi diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg index ea96964f1..308bf124b 100644 --- a/.kokoro/docs/common.cfg +++ b/.kokoro/docs/common.cfg @@ -30,7 +30,7 @@ env_vars: { env_vars: { key: "V2_STAGING_BUCKET" - value: "docs-staging-v2-staging" + value: "docs-staging-v2" } # It will upload the docker image after successful builds. diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg index 111810782..a5a723164 100644 --- a/.kokoro/docs/docs-presubmit.cfg +++ b/.kokoro/docs/docs-presubmit.cfg @@ -15,3 +15,14 @@ env_vars: { key: "TRAMPOLINE_IMAGE_UPLOAD" value: "false" } + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-storage/.kokoro/build.sh" +} + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "docs docfx" +} diff --git a/.kokoro/release.sh b/.kokoro/release.sh index 268407736..7970969eb 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password") +TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") cd github/python-storage python3 setup.py sdist bdist_wheel -twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/* +twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index b96ee4f07..58a3ff6b8 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -23,18 +23,8 @@ env_vars: { value: "github/python-storage/.kokoro/release.sh" } -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} \ No newline at end of file + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" +} diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg index 6287c9952..04e100210 100644 --- a/.kokoro/samples/python3.6/common.cfg +++ b/.kokoro/samples/python3.6/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.6" } +# Declare build specific Cloud project. 
+env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py36" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-storage/.kokoro/test-samples.sh" diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg new file mode 100644 index 000000000..f9cfcd33e --- /dev/null +++ b/.kokoro/samples/python3.6/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg index fb9bedb81..0089e9b79 100644 --- a/.kokoro/samples/python3.7/common.cfg +++ b/.kokoro/samples/python3.7/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.7" } +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py37" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-storage/.kokoro/test-samples.sh" diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg new file mode 100644 index 000000000..f9cfcd33e --- /dev/null +++ b/.kokoro/samples/python3.7/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg index 52a03a568..2f92d6c76 100644 --- a/.kokoro/samples/python3.8/common.cfg +++ b/.kokoro/samples/python3.8/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.8" } +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py38" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-storage/.kokoro/test-samples.sh" diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg new file mode 100644 index 000000000..f9cfcd33e --- /dev/null +++ b/.kokoro/samples/python3.8/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh new file mode 100755 index 000000000..aa5013db2 --- /dev/null +++ b/.kokoro/test-samples-against-head.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A customized test runner for samples. 
+#
+# For periodic builds, you can specify this file for testing against head.
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to the rightmost command to exit with a non-zero
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+cd github/python-storage
+
+exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
new file mode 100755
index 000000000..cf5de74c1
--- /dev/null
+++ b/.kokoro/test-samples-impl.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to the rightmost command to exit with a non-zero
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+# Exit early if samples directory doesn't exist
+if [ ! -d "./samples" ]; then
+    echo "No tests run. `./samples` not found"
+    exit 0
+fi
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+
+# Use secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+    gcloud auth activate-service-account \
+        --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+        --project="cloud-devrel-kokoro-resources"
+fi
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
+gcloud auth activate-service-account \
+    --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+# Find all requirements.txt in the samples directory (may break on whitespace).
+for file in samples/**/requirements.txt; do
+    cd "$ROOT"
+    # Navigate to the project folder.
+    file=$(dirname "$file")
+    cd "$file"
+
+    echo "------------------------------------------------------------"
+    echo "- testing $file"
+    echo "------------------------------------------------------------"
+
+    # Use nox to execute the tests for the project.
+    python3.6 -m nox -s "$RUN_TESTS_SESSION"
+    EXIT=$?
+
+    # If this is a periodic build, send the test log to the FlakyBot.
+    # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+    if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+        chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+        $KOKORO_GFILE_DIR/linux_amd64/flakybot
+    fi
+
+    if [[ $EXIT -ne 0 ]]; then
+        RTN=1
+        echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+    else
+        echo -e "\n Testing completed.\n"
+    fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index cdf5d4e32..421439bc8 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -13,6 +13,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+# The default test runner for samples.
+#
+# For periodic builds, we rewind the repo to the latest release, and
+# run test-samples-impl.sh.

 # `-e` enables the script to automatically fail when a command fails
 # `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero
@@ -24,81 +28,19 @@ cd github/python-storage

 # Run periodic samples tests at latest release
 if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+    # preserving the test runner implementation.
+    cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh"
+    echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+    echo "Now we rewind the repo back to the latest release..."
     LATEST_RELEASE=$(git describe --abbrev=0 --tags)
     git checkout $LATEST_RELEASE
-fi
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-# Debug: show build environment
-env | grep KOKORO
-
-# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-
-# Use secrets acessor service account to get secrets
-if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
-    gcloud auth activate-service-account \
-        --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
-        --project="cloud-devrel-kokoro-resources"
-fi
-
-# This script will create 3 files:
-# - testing/test-env.sh
-# - testing/service-account.json
-# - testing/client-secrets.json
-./scripts/decrypt-secrets.sh
-
-source ./testing/test-env.sh
-export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
-
-# For cloud-run session, we activate the service account for gcloud sdk.
-gcloud auth activate-service-account \
-    --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
-
-export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
-
-echo -e "\n******************** TESTING PROJECTS ********************"
-
-# Switch to 'fail at end' to allow all tests to complete before exiting.
-set +e
-# Use RTN to return a non-zero value if the test fails.
-RTN=0
-ROOT=$(pwd)
-# Find all requirements.txt in the samples directory (may break on whitespace).
-for file in samples/**/requirements.txt; do
-    cd "$ROOT"
-    # Navigate to the project folder.
-    file=$(dirname "$file")
-    cd "$file"
-
-    echo "------------------------------------------------------------"
-    echo "- testing $file"
-    echo "------------------------------------------------------------"
-
-    # Use nox to execute the tests for the project.
-    python3.6 -m nox -s "$RUN_TESTS_SESSION"
-    EXIT=$?
-
-    # If this is a periodic build, send the test log to the FlakyBot.
- # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. - if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot + echo "The current head is: " + echo $(git rev-parse --verify HEAD) + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + # move back the test runner implementation if there's no file. + if [ ! -f .kokoro/test-samples-impl.sh ]; then + cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh fi +fi - if [[ $EXIT -ne 0 ]]; then - RTN=1 - echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" - else - echo -e "\n Testing completed.\n" - fi - -done -cd "$ROOT" - -# Workaround for Kokoro permissions issue: delete secrets -rm testing/{test-env.sh,client-secrets.json,service-account.json} - -exit "$RTN" \ No newline at end of file +exec .kokoro/test-samples-impl.sh diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 41a6222b0..1bbd78783 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,8 +1,22 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# # See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.0.1 + rev: v3.4.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer diff --git a/.trampolinerc b/.trampolinerc index 995ee2911..383b6ec89 100644 --- a/.trampolinerc +++ b/.trampolinerc @@ -24,6 +24,7 @@ required_envvars+=( pass_down_envvars+=( "STAGING_BUCKET" "V2_STAGING_BUCKET" + "NOX_SESSION" ) # Prevent unintentional override on the default image. diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index b3d1f6029..039f43681 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -1,44 +1,95 @@ -# Contributor Code of Conduct +# Code of Conduct -As contributors and maintainers of this project, -and in the interest of fostering an open and welcoming community, -we pledge to respect all people who contribute through reporting issues, -posting feature requests, updating documentation, -submitting pull requests or patches, and other activities. +## Our Pledge -We are committed to making participation in this project -a harassment-free experience for everyone, -regardless of level of experience, gender, gender identity and expression, -sexual orientation, disability, personal appearance, -body size, race, ethnicity, age, religion, or nationality. 
+In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members Examples of unacceptable behavior by participants include: -* The use of sexualized language or imagery -* Personal attacks -* Trolling or insulting/derogatory comments -* Public or private harassment -* Publishing other's private information, -such as physical or electronic -addresses, without explicit permission -* Other unethical or unprofessional conduct. +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions -that are not aligned to this Code of Conduct. -By adopting this Code of Conduct, -project maintainers commit themselves to fairly and consistently -applying these principles to every aspect of managing this project. -Project maintainers who do not follow or enforce the Code of Conduct -may be permanently removed from the project team. - -This code of conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. - -Instances of abusive, harassing, or otherwise unacceptable behavior -may be reported by opening an issue -or contacting one or more of the project maintainers. - -This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, -available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. 
Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/LICENSE b/LICENSE index a8ee855de..d64569567 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,7 @@ - Apache License + + Apache License Version 2.0, January 2004 - https://www.apache.org/licenses/ + http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION @@ -192,7 +193,7 @@ you may not use this file except in compliance with the License. 
You may obtain a copy of the License at - https://www.apache.org/licenses/LICENSE-2.0 + http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, diff --git a/MANIFEST.in b/MANIFEST.in index e9e29d120..e783f4c62 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -16,10 +16,10 @@ # Generated by synthtool. DO NOT EDIT! include README.rst LICENSE -recursive-include google *.json *.proto +recursive-include google *.json *.proto py.typed recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ # Exclude scripts for samples readmegen -prune scripts/readme-gen \ No newline at end of file +prune scripts/readme-gen diff --git a/docs/_static/custom.css b/docs/_static/custom.css index 0abaf229f..b0a295464 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -1,4 +1,20 @@ div#python2-eol { border-color: red; border-width: medium; -} \ No newline at end of file +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/docs/conf.py b/docs/conf.py index 858ffec80..1d11a2dfc 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,4 +1,17 @@ # -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. # # google-cloud-storage documentation build configuration file # @@ -345,10 +358,11 @@ # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), - "grpc": ("https://grpc.io/grpc/python/", None), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), } diff --git a/noxfile.py b/noxfile.py index 3104fdac2..0b85dc8b0 100644 --- a/noxfile.py +++ b/noxfile.py @@ -45,15 +45,11 @@ def lint(session): session.run("flake8", "google", "tests") -@nox.session(python="3.6") +@nox.session(python=DEFAULT_PYTHON_VERSION) def blacken(session): """Run black. Format code to uniform standard. - - This currently uses Python 3.6 due to the automated Kokoro run of synthtool. - That run uses an image that doesn't have 3.6 installed. Before updating this - check the state of the `gcp_ubuntu_config` we use for that Kokoro run. """ session.install(BLACK_VERSION) session.run( diff --git a/synth.py b/owlbot.py similarity index 93% rename from synth.py rename to owlbot.py index 9774b1a01..945dfa91a 100644 --- a/synth.py +++ b/owlbot.py @@ -33,8 +33,9 @@ "google-cloud-kms < 2.0dev", ], ) + s.move( - templated_files, excludes=["docs/multiprocessing.rst"], + templated_files, excludes=["docs/multiprocessing.rst", "noxfile.py", "CONTRIBUTING.rst"], ) s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/renovate.json b/renovate.json index 4fa949311..c04895563 100644 --- a/renovate.json +++ b/renovate.json @@ -1,5 +1,9 @@ { "extends": [ "config:base", ":preserveSemverRanges" - ] + ], + "ignorePaths": [".pre-commit-config.yaml"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } } diff --git a/synth.metadata b/synth.metadata deleted file mode 100644 index 7fdf91454..000000000 --- a/synth.metadata +++ /dev/null @@ -1,83 +0,0 @@ -{ - "sources": [ - { - "git": { - "name": ".", - "remote": "https://github.com/googleapis/python-storage.git", - "sha": "dc3671963f25dde7ba393c8e3939225b5c61d158" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "f3c04883d6c43261ff13db1f52d03a283be06871" - } - } - ], - "generatedFiles": [ - ".coveragerc", - ".flake8", - ".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - ".kokoro/docker/docs/fetch_gpg_keys.sh", - 
".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - ".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", - ".kokoro/samples/python3.7/common.cfg", - ".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - ".kokoro/samples/python3.8/continuous.cfg", - ".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "docs/_static/custom.css", - "docs/_templates/layout.html", - "docs/conf.py", - "noxfile.py", - "renovate.json", - "scripts/decrypt-secrets.sh", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - "scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore" - ] -} \ No newline at end of file From 8344253a1969b9d04b81f87a6d7bddd3ddb55006 Mon Sep 17 00:00:00 2001 From: cojenco Date: Thu, 20 May 2021 08:26:02 -0700 Subject: [PATCH 10/30] fix: add ConnectionError to default retry (#445) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This adds python built-in [ConnectionError](https://docs.python.org/3/library/exceptions.html#ConnectionError) to default retryable types. ConnectionError was recently added in the [BigQuery library](https://github.com/googleapis/python-bigquery/pull/571) to allow retries. Fixes #426 🦕 --- google/cloud/storage/retry.py | 10 +++++++++- tests/unit/test_retry.py | 21 +++++++++++++++++++++ 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/google/cloud/storage/retry.py b/google/cloud/storage/retry.py index e17f3d5a0..7b9626ed5 100644 --- a/google/cloud/storage/retry.py +++ b/google/cloud/storage/retry.py @@ -21,7 +21,14 @@ import json -_RETRYABLE_TYPES = ( +# ConnectionError is a built-in exception only in Python3 and not in Python2. +try: + _RETRYABLE_STDLIB_TYPES = (ConnectionError,) +except NameError: + _RETRYABLE_STDLIB_TYPES = () + + +_RETRYABLE_TYPES = _RETRYABLE_STDLIB_TYPES + ( api_exceptions.TooManyRequests, # 429 api_exceptions.InternalServerError, # 500 api_exceptions.BadGateway, # 502 @@ -30,6 +37,7 @@ requests.ConnectionError, ) + # Some retriable errors don't have their own custom exception in api_core. 
_ADDITIONAL_RETRYABLE_STATUS_CODES = (408,) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 582fa8097..3111584cb 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -19,6 +19,14 @@ import mock +try: + ConnectionError +except NameError: + _HAS_STDLIB_CONNECTION_ERROR = False +else: + _HAS_STDLIB_CONNECTION_ERROR = True + + class Test_should_retry(unittest.TestCase): def _call_fut(self, exc): from google.cloud.storage import retry @@ -56,9 +64,22 @@ def test_w_google_api_call_error_miss(self): self.assertFalse(self._call_fut(exc)) def test_w_requests_connection_error(self): + import requests + + exc = requests.ConnectionError() + self.assertTrue(self._call_fut(exc)) + + def test_miss_w_stdlib_error(self): exc = ValueError("testing") self.assertFalse(self._call_fut(exc)) + @unittest.skipUnless( + _HAS_STDLIB_CONNECTION_ERROR, "No builtin 'ConnectionError' in Python 2", + ) + def test_w_stdlib_connection_error(self): + exc = ConnectionError() + self.assertTrue(self._call_fut(exc)) + class TestConditionalRetryPolicy(unittest.TestCase): def _make_one(self, retry_policy, conditional_predicate, required_kwargs): From 34ce17ef0961c47aebc1348e40cbeffa8cafddec Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 20 May 2021 15:12:02 -0400 Subject: [PATCH 11/30] chore: suppress renovate for googleapis-common-protos (#434) See #432, which undoes the work of #430. From c7c71cd3d0c4fa513bf224abf9f91d4db31d104a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 22 May 2021 09:18:24 +0000 Subject: [PATCH 12/30] chore: new owl bot post processor docker image (#448) gcr.io/repo-automation-bots/owlbot-python:latest@sha256:3c3a445b3ddc99ccd5d31edc4b4519729635d20693900db32c4f587ed51f7479 --- .github/.OwlBot.lock.yaml | 5 ++--- .pre-commit-config.yaml | 2 +- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index d49860b32..46e3f021c 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,4 +1,3 @@ docker: - digest: sha256:457583330eec64daa02aeb7a72a04d33e7be2428f646671ce4045dcbc0191b1e - image: gcr.io/repo-automation-bots/owlbot-python:latest - + image: gcr.io/repo-automation-bots/owlbot-python:latest + digest: sha256:3c3a445b3ddc99ccd5d31edc4b4519729635d20693900db32c4f587ed51f7479 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1bbd78783..4f00c7cff 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -26,6 +26,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.9.1 + rev: 3.9.2 hooks: - id: flake8 From fe87755e744436ef9f4dd5236a74a4955dcb1534 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 28 May 2021 11:42:02 +0000 Subject: [PATCH 13/30] chore: new owl bot post processor docker image (#452) Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:c66ba3c8d7bc8566f47df841f98cd0097b28fff0b1864c86f5817f4c8c3e8600 --- .github/.OwlBot.lock.yaml | 2 +- docs/conf.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 46e3f021c..da616c91a 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: 
sha256:3c3a445b3ddc99ccd5d31edc4b4519729635d20693900db32c4f587ed51f7479 + digest: sha256:c66ba3c8d7bc8566f47df841f98cd0097b28fff0b1864c86f5817f4c8c3e8600 diff --git a/docs/conf.py b/docs/conf.py index 1d11a2dfc..c61d72fcc 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -363,6 +363,7 @@ "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } From e0f1b7132a794b7bc12f3829b727d32058cc0f09 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 7 Jun 2021 10:32:02 -0400 Subject: [PATCH 14/30] refactor: add / use 'Client._get_resource' method (#431) Use an explicit helper client method for `GET` requests, rather than manipulating client's private `_connection.api_request`. As a benefit, tests get *way* clearer. Toward #38 ~~Based on top of the branch from #430. I will rebase when that PR merges.~~ --- google/cloud/storage/_helpers.py | 7 +- google/cloud/storage/acl.py | 18 +- google/cloud/storage/blob.py | 24 +- google/cloud/storage/bucket.py | 24 +- google/cloud/storage/client.py | 71 ++- google/cloud/storage/hmac_key.py | 16 +- google/cloud/storage/notification.py | 16 +- tests/unit/test__helpers.py | 193 ++++--- tests/unit/test_acl.py | 114 ++-- tests/unit/test_blob.py | 471 ++++++++-------- tests/unit/test_bucket.py | 811 +++++++++++++++------------ tests/unit/test_client.py | 499 ++++++++-------- tests/unit/test_hmac_key.py | 106 ++-- tests/unit/test_notification.py | 118 ++-- 14 files changed, 1348 insertions(+), 1140 deletions(-) diff --git a/google/cloud/storage/_helpers.py b/google/cloud/storage/_helpers.py index 338b79861..a126c9e02 100644 --- a/google/cloud/storage/_helpers.py +++ b/google/cloud/storage/_helpers.py @@ -215,14 +215,13 @@ def reload( if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, ) - api_response = client._connection.api_request( - method="GET", - path=self.path, + api_response = client._get_resource( + self.path, query_params=query_params, headers=self._encryption_headers(), - _target_object=self, timeout=timeout, retry=retry, + _target_object=self, ) self._set_properties(api_response) diff --git a/google/cloud/storage/acl.py b/google/cloud/storage/acl.py index 55c12c9b8..b80447b9d 100644 --- a/google/cloud/storage/acl.py +++ b/google/cloud/storage/acl.py @@ -85,6 +85,7 @@ """ from google.cloud.storage.constants import _DEFAULT_TIMEOUT +from google.cloud.storage.retry import DEFAULT_RETRY class _ACLEntity(object): @@ -206,6 +207,7 @@ class ACL(object): # Subclasses must override to provide these attributes (typically, # as properties). + client = None reload_path = None save_path = None user_project = None @@ -430,7 +432,7 @@ def _require_client(self, client): client = self.client return client - def reload(self, client=None, timeout=_DEFAULT_TIMEOUT): + def reload(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): """Reload the ACL data from Cloud Storage. If :attr:`user_project` is set, bills the API request to that project. 
@@ -445,6 +447,15 @@ def reload(self, client=None, timeout=_DEFAULT_TIMEOUT): Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. + + :type retry: :class:`~google.api_core.retry.Retry` + :param retry: (Optional) How to retry the RPC. + + A None value will disable retries. + + A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors + and configure backoff and timeout options. """ path = self.reload_path client = self._require_client(client) @@ -455,10 +466,11 @@ def reload(self, client=None, timeout=_DEFAULT_TIMEOUT): self.entities.clear() - found = client._connection.api_request( - method="GET", path=path, query_params=query_params, timeout=timeout, + found = client._get_resource( + path, query_params=query_params, timeout=timeout, retry=retry, ) self.loaded = True + for entry in found.get("items", ()): self.add_entity(self.entity_from_dict(entry)) diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index 66cc1d153..a105053de 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -704,20 +704,19 @@ def exists( try: # We intentionally pass `_target_object=None` since fields=name # would limit the local properties. - client._connection.api_request( - method="GET", - path=self.path, + client._get_resource( + self.path, query_params=query_params, - _target_object=None, timeout=timeout, retry=retry, + _target_object=None, ) + except NotFound: # NOTE: This will not fail immediately in a batch. However, when # Batch.finish() is called, the resulting `NotFound` will be # raised. - return True - except NotFound: return False + return True def delete( self, @@ -2829,13 +2828,12 @@ def get_iam_policy( if requested_policy_version is not None: query_params["optionsRequestedPolicyVersion"] = requested_policy_version - info = client._connection.api_request( - method="GET", - path="%s/iam" % (self.path,), + info = client._get_resource( + "%s/iam" % (self.path,), query_params=query_params, - _target_object=None, timeout=timeout, retry=retry, + _target_object=None, ) return Policy.from_api_repr(info) @@ -2970,12 +2968,12 @@ def test_iam_permissions( query_params["userProject"] = self.user_project path = "%s/iam/testPermissions" % (self.path,) - resp = client._connection.api_request( - method="GET", - path=path, + resp = client._get_resource( + path, query_params=query_params, timeout=timeout, retry=retry, + _target_object=None, ) return resp.get("permissions", []) diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py index ac38208a3..aacc2efe5 100644 --- a/google/cloud/storage/bucket.py +++ b/google/cloud/storage/bucket.py @@ -786,20 +786,19 @@ def exists( try: # We intentionally pass `_target_object=None` since fields=name # would limit the local properties. - client._connection.api_request( - method="GET", - path=self.path, + client._get_resource( + self.path, query_params=query_params, - _target_object=None, timeout=timeout, retry=retry, + _target_object=None, ) + except NotFound: # NOTE: This will not fail immediately in a batch. However, when # Batch.finish() is called, the resulting `NotFound` will be # raised. 
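# A minimal sketch of the deferred-failure behaviour the NOTE above
# describes (objects are hypothetical; ``client.batch()`` is the
# package's batching context manager):
#
#     with client.batch():
#         bucket.exists()    # no NotFound is raised here, even on a miss
#     # Batch.finish() runs on exiting the block and only then surfaces
#     # the NotFound from the batched GET.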
-            return True
-        except NotFound:
             return False
+        return True
 
     def create(
         self,
@@ -2882,13 +2881,12 @@ def get_iam_policy(
         if requested_policy_version is not None:
             query_params["optionsRequestedPolicyVersion"] = requested_policy_version
 
-        info = client._connection.api_request(
-            method="GET",
-            path="%s/iam" % (self.path,),
+        info = client._get_resource(
+            "%s/iam" % (self.path,),
             query_params=query_params,
-            _target_object=None,
             timeout=timeout,
             retry=retry,
+            _target_object=None,
         )
         return Policy.from_api_repr(info)
 
@@ -3008,12 +3006,12 @@ def test_iam_permissions(
             query_params["userProject"] = self.user_project
 
         path = "%s/iam/testPermissions" % (self.path,)
-        resp = client._connection.api_request(
-            method="GET",
-            path=path,
+        resp = client._get_resource(
+            path,
             query_params=query_params,
             timeout=timeout,
             retry=retry,
+            _target_object=None,
         )
         return resp.get("permissions", [])
 
diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py
index 858fecdce..848012725 100644
--- a/google/cloud/storage/client.py
+++ b/google/cloud/storage/client.py
@@ -284,10 +284,9 @@ def get_service_account_email(
         """
         if project is None:
             project = self.project
+
         path = "/projects/%s/serviceAccount" % (project,)
-        api_response = self._base_connection.api_request(
-            method="GET", path=path, timeout=timeout, retry=retry,
-        )
+        api_response = self._get_resource(path, timeout=timeout, retry=retry)
         return api_response["email_address"]
 
     def bucket(self, bucket_name, user_project=None):
@@ -321,6 +320,72 @@ def batch(self):
         """
         return Batch(client=self)
 
+    def _get_resource(
+        self,
+        path,
+        query_params=None,
+        headers=None,
+        timeout=_DEFAULT_TIMEOUT,
+        retry=DEFAULT_RETRY,
+        _target_object=None,
+    ):
+        """Helper for bucket / blob methods making API 'GET' calls.
+
+        Args:
+            path (str):
+                The path of the resource to fetch.
+
+            query_params (Optional[dict]):
+                HTTP query parameters to be passed.
+
+            headers (Optional[dict]):
+                HTTP headers to be passed.
+
+            timeout (Optional[Union[float, Tuple[float, float]]]):
+                The amount of time, in seconds, to wait for the server response.
+
+                Can also be passed as a tuple (connect_timeout, read_timeout).
+                See :meth:`requests.Session.request` documentation for details.
+
+            retry (Optional[Union[google.api_core.retry.Retry, google.cloud.storage.retry.ConditionalRetryPolicy]]):
+                How to retry the RPC. A None value will disable retries.
+                A google.api_core.retry.Retry value will enable retries, and the object will
+                define retriable response codes and errors and configure backoff and timeout options.
+
+                A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and
+                activates it only if certain conditions are met. This class exists to provide safe defaults
+                for RPC calls that are not technically safe to retry normally (due to potential data
+                duplication or other side-effects) but become safe to retry if a condition such as
+                if_metageneration_match is set.
+
+                See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for
+                information on retry types and how to configure them.
+
+            _target_object (Union[ \
+                :class:`~google.cloud.storage.bucket.Bucket`, \
+                :class:`~google.cloud.storage.blob.Blob`, \
+            ]):
+                Object to which future data is to be applied -- only relevant
+                in the context of a batch.
+
+        Returns:
+            dict
+                The JSON resource fetched.
+
+        Raises:
+            google.cloud.exceptions.NotFound
+                If the resource is not found.
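# A before/after sketch of the call-site change this helper standardizes
# across the package (the path and arguments are illustrative):
#
#     # before -- reaching into the client's private connection:
#     info = client._connection.api_request(
#         method="GET", path="/b/my-bucket", query_params=params,
#     )
#
#     # after -- one explicit helper with uniform timeout / retry plumbing:
#     info = client._get_resource("/b/my-bucket", query_params=params)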
+ """ + return self._connection.api_request( + method="GET", + path=path, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=_target_object, + ) + def get_bucket( self, bucket_or_name, diff --git a/google/cloud/storage/hmac_key.py b/google/cloud/storage/hmac_key.py index 3fd49079e..c6cc17a85 100644 --- a/google/cloud/storage/hmac_key.py +++ b/google/cloud/storage/hmac_key.py @@ -222,12 +222,8 @@ def exists(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): if self.user_project is not None: qs_params["userProject"] = self.user_project - self._client._connection.api_request( - method="GET", - path=self.path, - query_params=qs_params, - timeout=timeout, - retry=retry, + self._client._get_resource( + self.path, query_params=qs_params, timeout=timeout, retry=retry, ) except NotFound: return False @@ -266,12 +262,8 @@ def reload(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): if self.user_project is not None: qs_params["userProject"] = self.user_project - self._properties = self._client._connection.api_request( - method="GET", - path=self.path, - query_params=qs_params, - timeout=timeout, - retry=retry, + self._properties = self._client._get_resource( + self.path, query_params=qs_params, timeout=timeout, retry=retry, ) def update(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY_IF_ETAG_IN_JSON): diff --git a/google/cloud/storage/notification.py b/google/cloud/storage/notification.py index fde5e4559..5389ab51e 100644 --- a/google/cloud/storage/notification.py +++ b/google/cloud/storage/notification.py @@ -323,12 +323,8 @@ def exists(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): query_params["userProject"] = self.bucket.user_project try: - client._connection.api_request( - method="GET", - path=self.path, - query_params=query_params, - timeout=timeout, - retry=retry, + client._get_resource( + self.path, query_params=query_params, timeout=timeout, retry=retry, ) except NotFound: return False @@ -381,12 +377,8 @@ def reload(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): if self.bucket.user_project is not None: query_params["userProject"] = self.bucket.user_project - response = client._connection.api_request( - method="GET", - path=self.path, - query_params=query_params, - timeout=timeout, - retry=retry, + response = client._get_resource( + self.path, query_params=query_params, timeout=timeout, retry=retry, ) self._set_properties(response) diff --git a/tests/unit/test__helpers.py b/tests/unit/test__helpers.py index fa989f96e..f1c6b0436 100644 --- a/tests/unit/test__helpers.py +++ b/tests/unit/test__helpers.py @@ -66,6 +66,7 @@ def _derivedClass(self, path=None, user_project=None): class Derived(self._get_target_class()): client = None + _actual_encryption_headers = None @property def path(self): @@ -75,6 +76,9 @@ def path(self): def user_project(self): return user_project + def _encryption_headers(self): + return self._actual_encryption_headers or {} + return Derived def test_path_is_abstract(self): @@ -105,119 +109,130 @@ def test__query_params_w_user_project(self): derived = self._derivedClass("/path", user_project)() self.assertEqual(derived._query_params, {"userProject": user_project}) - def test_reload(self): - connection = _Connection({"foo": "Foo"}) - client = _Client(connection) - derived = self._derivedClass("/path")() + def test_reload_w_defaults(self): + path = "/path" + response = {"foo": "Foo"} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = response + 
derived = self._derivedClass(path)() # Make sure changes is not a set instance before calling reload # (which will clear / replace it with an empty set), checked below. derived._changes = object() - derived.reload(client=client, timeout=42) - self.assertEqual(derived._properties, {"foo": "Foo"}) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "GET", - "path": "/path", - "query_params": {"projection": "noAcl"}, - "headers": {}, - "_target_object": derived, - "timeout": 42, - "retry": DEFAULT_RETRY, - }, - ) + derived.client = client + + derived.reload() + + self.assertEqual(derived._properties, response) self.assertEqual(derived._changes, set()) - def test_reload_with_generation_match(self): - GENERATION_NUMBER = 9 - METAGENERATION_NUMBER = 6 + expected_query_params = {"projection": "noAcl"} + expected_headers = {} # no encryption headers by default + client._get_resource.assert_called_once_with( + path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=derived, + ) - connection = _Connection({"foo": "Foo"}) - client = _Client(connection) - derived = self._derivedClass("/path")() + def test_reload_w_generation_match_w_timeout(self): + generation_number = 9 + metageneration_number = 6 + path = "/path" + timeout = 42 + response = {"foo": "Foo"} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = response + derived = self._derivedClass(path)() # Make sure changes is not a set instance before calling reload # (which will clear / replace it with an empty set), checked below. derived._changes = object() + derived.client = client + derived.reload( - client=client, - timeout=42, - if_generation_match=GENERATION_NUMBER, - if_metageneration_match=METAGENERATION_NUMBER, - ) - self.assertEqual(derived._properties, {"foo": "Foo"}) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "GET", - "path": "/path", - "query_params": { - "projection": "noAcl", - "ifGenerationMatch": GENERATION_NUMBER, - "ifMetagenerationMatch": METAGENERATION_NUMBER, - }, - "headers": {}, - "_target_object": derived, - "timeout": 42, - "retry": DEFAULT_RETRY, - }, + if_generation_match=generation_number, + if_metageneration_match=metageneration_number, + timeout=timeout, ) + + self.assertEqual(derived._properties, response) self.assertEqual(derived._changes, set()) - def test_reload_w_user_project(self): + expected_query_params = { + "projection": "noAcl", + "ifGenerationMatch": generation_number, + "ifMetagenerationMatch": metageneration_number, + } + expected_headers = {} # no encryption headers by default + client._get_resource.assert_called_once_with( + path, + query_params=expected_query_params, + headers=expected_headers, + timeout=timeout, + retry=DEFAULT_RETRY, + _target_object=derived, + ) + + def test_reload_w_user_project_w_retry(self): user_project = "user-project-123" - connection = _Connection({"foo": "Foo"}) - client = _Client(connection) - derived = self._derivedClass("/path", user_project)() + path = "/path" + retry = mock.Mock(spec=[]) + response = {"foo": "Foo"} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = response + derived = self._derivedClass(path, user_project)() # Make sure changes is not a set instance before calling reload # (which will clear / replace it with an empty set), checked below. 
derived._changes = object() - derived.reload(client=client) - self.assertEqual(derived._properties, {"foo": "Foo"}) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "GET", - "path": "/path", - "query_params": {"projection": "noAcl", "userProject": user_project}, - "headers": {}, - "_target_object": derived, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - }, - ) + derived.client = client + + derived.reload(retry=retry) + + self.assertEqual(derived._properties, response) self.assertEqual(derived._changes, set()) - def test_reload_w_projection(self): - connection = _Connection({"foo": "Foo"}) - client = _Client(connection) - derived = self._derivedClass("/path")() + expected_query_params = { + "projection": "noAcl", + "userProject": user_project, + } + expected_headers = {} # no encryption headers by default + client._get_resource.assert_called_once_with( + path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=retry, + _target_object=derived, + ) + + def test_reload_w_projection_w_explicit_client_w_enc_header(self): + path = "/path" + response = {"foo": "Foo"} + encryption_headers = {"bar": "Bar"} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = response + derived = self._derivedClass(path)() # Make sure changes is not a set instance before calling reload # (which will clear / replace it with an empty set), checked below. derived._changes = object() - derived.reload(projection="full", client=client, timeout=42) - self.assertEqual(derived._properties, {"foo": "Foo"}) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "GET", - "path": "/path", - "query_params": {"projection": "full"}, - "headers": {}, - "_target_object": derived, - "timeout": 42, - "retry": DEFAULT_RETRY, - }, - ) + derived._actual_encryption_headers = encryption_headers + + derived.reload(projection="full", client=client) + + self.assertEqual(derived._properties, response) self.assertEqual(derived._changes, set()) + expected_query_params = {"projection": "full"} + client._get_resource.assert_called_once_with( + path, + query_params=expected_query_params, + headers=encryption_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=derived, + ) + def test__set_properties(self): mixin = self._make_one() self.assertEqual(mixin._properties, {}) diff --git a/tests/unit/test_acl.py b/tests/unit/test_acl.py index 47400f1ef..7cea5c8fc 100644 --- a/tests/unit/test_acl.py +++ b/tests/unit/test_acl.py @@ -14,6 +14,10 @@ import unittest +import mock + +from google.cloud.storage.retry import DEFAULT_RETRY + class Test_ACLEntity(unittest.TestCase): @staticmethod @@ -530,78 +534,82 @@ def test_get_entities_nonempty(self): entity = acl.entity(TYPE, ID) self.assertEqual(acl.get_entities(), [entity]) - def test_reload_missing(self): + def test_reload_missing_w_defaults(self): # https://github.com/GoogleCloudPlatform/google-cloud-python/issues/652 - ROLE = "role" - connection = _Connection({}) - client = _Client(connection) - acl = self._make_one() - acl.reload_path = "/testing/acl" + class Derived(self._get_target_class()): + client = None + + role = "role" + reload_path = "/testing/acl" + api_response = {} + acl = Derived() + acl.reload_path = reload_path acl.loaded = True - acl.entity("allUsers", ROLE) - acl.reload(client=client, 
timeout=42) + acl.entity("allUsers", role) + client = acl.client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + + acl.reload() + self.assertEqual(list(acl), []) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "GET", - "path": "/testing/acl", - "query_params": {}, - "timeout": 42, - }, + + expected_query_params = {} + client._get_resource.assert_called_once_with( + reload_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, ) - def test_reload_empty_result_clears_local(self): - ROLE = "role" - connection = _Connection({"items": []}) - client = _Client(connection) + def test_reload_w_empty_result_w_timeout_w_retry_w_explicit_client(self): + role = "role" + reload_path = "/testing/acl" + timeout = 42 + retry = mock.Mock(spec=[]) + api_response = {"items": []} acl = self._make_one() - acl.reload_path = "/testing/acl" + acl.reload_path = reload_path acl.loaded = True - acl.entity("allUsers", ROLE) + acl.entity("allUsers", role) + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response - acl.reload(client=client) + acl.reload(client=client, timeout=timeout, retry=retry) self.assertTrue(acl.loaded) self.assertEqual(list(acl), []) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "GET", - "path": "/testing/acl", - "query_params": {}, - "timeout": self._get_default_timeout(), - }, + + expected_query_params = {} + client._get_resource.assert_called_once_with( + reload_path, + query_params=expected_query_params, + timeout=timeout, + retry=retry, ) - def test_reload_nonempty_result_w_user_project(self): - ROLE = "role" - USER_PROJECT = "user-project-123" - connection = _Connection({"items": [{"entity": "allUsers", "role": ROLE}]}) - client = _Client(connection) + def test_reload_w_nonempty_result_w_user_project(self): + role = "role" + reload_path = "/testing/acl" + user_project = "user-project-123" + api_response = {"items": [{"entity": "allUsers", "role": role}]} acl = self._make_one() - acl.reload_path = "/testing/acl" + acl.reload_path = reload_path acl.loaded = True - acl.user_project = USER_PROJECT + acl.user_project = user_project + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response acl.reload(client=client) self.assertTrue(acl.loaded) - self.assertEqual(list(acl), [{"entity": "allUsers", "role": ROLE}]) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "GET", - "path": "/testing/acl", - "query_params": {"userProject": USER_PROJECT}, - "timeout": self._get_default_timeout(), - }, + self.assertEqual(list(acl), [{"entity": "allUsers", "role": role}]) + + expected_query_params = {"userProject": user_project} + client._get_resource.assert_called_once_with( + reload_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, ) def test_save_none_set_none_passed(self): diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index 50732a7f0..6d8fc7b33 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -688,117 +688,109 @@ def test_generate_signed_url_v4_w_credentials(self): credentials = object() self._generate_signed_url_v4_helper(credentials=credentials) - def test_exists_miss(self): - NONESUCH = "nonesuch" - not_found_response = ({"status": http_client.NOT_FOUND}, b"") - connection = 
_Connection(not_found_response) - client = _Client(connection) + def test_exists_miss_w_defaults(self): + from google.cloud.exceptions import NotFound + + blob_name = "nonesuch" + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.side_effect = NotFound("testing") bucket = _Bucket(client) - blob = self._make_one(NONESUCH, bucket=bucket) - self.assertFalse(blob.exists(timeout=42)) - self.assertEqual(len(connection._requested), 1) - self.assertEqual( - connection._requested[0], - { - "method": "GET", - "path": "/b/name/o/{}".format(NONESUCH), - "query_params": {"fields": "name"}, - "_target_object": None, - "timeout": 42, - "retry": DEFAULT_RETRY, - }, + blob = self._make_one(blob_name, bucket=bucket) + + self.assertFalse(blob.exists()) + + expected_query_params = {"fields": "name"} + client._get_resource.assert_called_once_with( + blob.path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, ) - def test_exists_hit_w_user_project(self): - BLOB_NAME = "blob-name" - USER_PROJECT = "user-project-123" - found_response = ({"status": http_client.OK}, b"") - connection = _Connection(found_response) - client = _Client(connection) - bucket = _Bucket(client, user_project=USER_PROJECT) - blob = self._make_one(BLOB_NAME, bucket=bucket) - bucket._blobs[BLOB_NAME] = 1 - self.assertTrue(blob.exists()) - self.assertEqual(len(connection._requested), 1) - self.assertEqual( - connection._requested[0], - { - "method": "GET", - "path": "/b/name/o/{}".format(BLOB_NAME), - "query_params": {"fields": "name", "userProject": USER_PROJECT}, - "_target_object": None, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - }, + def test_exists_hit_w_user_project_w_timeout(self): + blob_name = "blob-name" + user_project = "user-project-123" + timeout = 42 + api_response = {"name": blob_name} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = _Bucket(client, user_project=user_project) + blob = self._make_one(blob_name, bucket=bucket) + + self.assertTrue(blob.exists(timeout=timeout)) + + expected_query_params = {"fields": "name", "userProject": user_project} + client._get_resource.assert_called_once_with( + blob.path, + query_params=expected_query_params, + timeout=timeout, + retry=DEFAULT_RETRY, + _target_object=None, ) - def test_exists_hit_w_generation(self): - BLOB_NAME = "blob-name" - GENERATION = 123456 - found_response = ({"status": http_client.OK}, b"") - connection = _Connection(found_response) - client = _Client(connection) + def test_exists_hit_w_generation_w_retry(self): + blob_name = "blob-name" + generation = 123456 + api_response = {"name": blob_name} + retry = mock.Mock(spec=[]) + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response bucket = _Bucket(client) - blob = self._make_one(BLOB_NAME, bucket=bucket, generation=GENERATION) - bucket._blobs[BLOB_NAME] = 1 - self.assertTrue(blob.exists()) - self.assertEqual(len(connection._requested), 1) - self.assertEqual( - connection._requested[0], - { - "method": "GET", - "path": "/b/name/o/{}".format(BLOB_NAME), - "query_params": {"fields": "name", "generation": GENERATION}, - "_target_object": None, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - }, + blob = self._make_one(blob_name, bucket=bucket, generation=generation) + + self.assertTrue(blob.exists(retry=retry)) + + expected_query_params = {"fields": "name", "generation": generation} + 
client._get_resource.assert_called_once_with( + blob.path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=retry, + _target_object=None, ) def test_exists_w_generation_match(self): - BLOB_NAME = "blob-name" - GENERATION_NUMBER = 123456 - METAGENERATION_NUMBER = 6 - - found_response = ({"status": http_client.OK}, b"") - connection = _Connection(found_response) - client = _Client(connection) + blob_name = "blob-name" + generation_number = 123456 + metageneration_number = 6 + api_response = {"name": blob_name} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response bucket = _Bucket(client) - blob = self._make_one(BLOB_NAME, bucket=bucket) - bucket._blobs[BLOB_NAME] = 1 + blob = self._make_one(blob_name, bucket=bucket) + self.assertTrue( blob.exists( - if_generation_match=GENERATION_NUMBER, - if_metageneration_match=METAGENERATION_NUMBER, + if_generation_match=generation_number, + if_metageneration_match=metageneration_number, + retry=None, ) ) - self.assertEqual(len(connection._requested), 1) - self.assertEqual( - connection._requested[0], - { - "method": "GET", - "path": "/b/name/o/{}".format(BLOB_NAME), - "query_params": { - "fields": "name", - "ifGenerationMatch": GENERATION_NUMBER, - "ifMetagenerationMatch": METAGENERATION_NUMBER, - }, - "_target_object": None, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - }, + + expected_query_params = { + "fields": "name", + "ifGenerationMatch": generation_number, + "ifMetagenerationMatch": metageneration_number, + } + client._get_resource.assert_called_once_with( + blob.path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=None, + _target_object=None, ) def test_delete_wo_generation(self): BLOB_NAME = "blob-name" - not_found_response = ({"status": http_client.NOT_FOUND}, b"") - connection = _Connection(not_found_response) + connection = _Connection() # no requests will be made client = _Client(connection) bucket = _Bucket(client) blob = self._make_one(BLOB_NAME, bucket=bucket) bucket._blobs[BLOB_NAME] = 1 + blob.delete() - self.assertFalse(blob.exists()) + self.assertEqual( bucket._deleted, [ @@ -819,14 +811,14 @@ def test_delete_wo_generation(self): def test_delete_w_generation(self): BLOB_NAME = "blob-name" GENERATION = 123456 - not_found_response = ({"status": http_client.NOT_FOUND}, b"") - connection = _Connection(not_found_response) + connection = _Connection() # no requests will be made client = _Client(connection) bucket = _Bucket(client) blob = self._make_one(BLOB_NAME, bucket=bucket, generation=GENERATION) bucket._blobs[BLOB_NAME] = 1 + blob.delete(timeout=42) - self.assertFalse(blob.exists()) + self.assertEqual( bucket._deleted, [ @@ -847,14 +839,14 @@ def test_delete_w_generation(self): def test_delete_w_generation_match(self): BLOB_NAME = "blob-name" GENERATION = 123456 - not_found_response = ({"status": http_client.NOT_FOUND}, b"") - connection = _Connection(not_found_response) + connection = _Connection() # no requests will be made client = _Client(connection) bucket = _Bucket(client) blob = self._make_one(BLOB_NAME, bucket=bucket, generation=GENERATION) bucket._blobs[BLOB_NAME] = 1 + blob.delete(timeout=42, if_generation_match=GENERATION) - self.assertFalse(blob.exists()) + self.assertEqual( bucket._deleted, [ @@ -3146,139 +3138,128 @@ def test_create_resumable_upload_session_with_failure(self): self.assertIn(message, exc_info.exception.message) self.assertEqual(exc_info.exception.errors, []) - def 
test_get_iam_policy(self): + def test_get_iam_policy_defaults(self): from google.cloud.storage.iam import STORAGE_OWNER_ROLE from google.cloud.storage.iam import STORAGE_EDITOR_ROLE from google.cloud.storage.iam import STORAGE_VIEWER_ROLE from google.api_core.iam import Policy - BLOB_NAME = "blob-name" - PATH = "/b/name/o/%s" % (BLOB_NAME,) - ETAG = "DEADBEEF" - VERSION = 1 - OWNER1 = "user:phred@example.com" - OWNER2 = "group:cloud-logs@google.com" - EDITOR1 = "domain:google.com" - EDITOR2 = "user:phred@example.com" - VIEWER1 = "serviceAccount:1234-abcdef@service.example.com" - VIEWER2 = "user:phred@example.com" - RETURNED = { - "resourceId": PATH, - "etag": ETAG, - "version": VERSION, + blob_name = "blob-name" + path = "/b/name/o/%s" % (blob_name,) + etag = "DEADBEEF" + version = 1 + owner1 = "user:phred@example.com" + owner2 = "group:cloud-logs@google.com" + editor1 = "domain:google.com" + editor2 = "user:phred@example.com" + viewer1 = "serviceAccount:1234-abcdef@service.example.com" + viewer2 = "user:phred@example.com" + api_response = { + "resourceId": path, + "etag": etag, + "version": version, "bindings": [ - {"role": STORAGE_OWNER_ROLE, "members": [OWNER1, OWNER2]}, - {"role": STORAGE_EDITOR_ROLE, "members": [EDITOR1, EDITOR2]}, - {"role": STORAGE_VIEWER_ROLE, "members": [VIEWER1, VIEWER2]}, + {"role": STORAGE_OWNER_ROLE, "members": [owner1, owner2]}, + {"role": STORAGE_EDITOR_ROLE, "members": [editor1, editor2]}, + {"role": STORAGE_VIEWER_ROLE, "members": [viewer1, viewer2]}, ], } - after = ({"status": http_client.OK}, RETURNED) - EXPECTED = { - binding["role"]: set(binding["members"]) for binding in RETURNED["bindings"] + expected_policy = { + binding["role"]: set(binding["members"]) + for binding in api_response["bindings"] } - connection = _Connection(after) - client = _Client(connection) + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response bucket = _Bucket(client=client) - blob = self._make_one(BLOB_NAME, bucket=bucket) + blob = self._make_one(blob_name, bucket=bucket) - policy = blob.get_iam_policy(timeout=42) + policy = blob.get_iam_policy() self.assertIsInstance(policy, Policy) - self.assertEqual(policy.etag, RETURNED["etag"]) - self.assertEqual(policy.version, RETURNED["version"]) - self.assertEqual(dict(policy), EXPECTED) - - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "GET", - "path": "%s/iam" % (PATH,), - "query_params": {}, - "_target_object": None, - "timeout": 42, - "retry": DEFAULT_RETRY, - }, + self.assertEqual(policy.etag, api_response["etag"]) + self.assertEqual(policy.version, api_response["version"]) + self.assertEqual(dict(policy), expected_policy) + + expected_path = "%s/iam" % (path,) + expected_query_params = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, ) - def test_get_iam_policy_w_requested_policy_version(self): - from google.cloud.storage.iam import STORAGE_OWNER_ROLE + def test_get_iam_policy_w_user_project_w_timeout(self): + from google.api_core.iam import Policy - BLOB_NAME = "blob-name" - PATH = "/b/name/o/%s" % (BLOB_NAME,) - ETAG = "DEADBEEF" - VERSION = 1 - OWNER1 = "user:phred@example.com" - OWNER2 = "group:cloud-logs@google.com" - RETURNED = { - "resourceId": PATH, - "etag": ETAG, - "version": VERSION, - "bindings": [{"role": STORAGE_OWNER_ROLE, "members": [OWNER1, OWNER2]}], + blob_name = "blob-name" 
+ user_project = "user-project-123" + timeout = 42 + path = "/b/name/o/%s" % (blob_name,) + etag = "DEADBEEF" + version = 1 + api_response = { + "resourceId": path, + "etag": etag, + "version": version, + "bindings": [], } - after = ({"status": http_client.OK}, RETURNED) - connection = _Connection(after) - client = _Client(connection) - bucket = _Bucket(client=client) - blob = self._make_one(BLOB_NAME, bucket=bucket) + expected_policy = {} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = _Bucket(client=client, user_project=user_project) + blob = self._make_one(blob_name, bucket=bucket) - blob.get_iam_policy(requested_policy_version=3) + policy = blob.get_iam_policy(timeout=42) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "GET", - "path": "%s/iam" % (PATH,), - "query_params": {"optionsRequestedPolicyVersion": 3}, - "_target_object": None, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - }, + self.assertIsInstance(policy, Policy) + self.assertEqual(policy.etag, api_response["etag"]) + self.assertEqual(policy.version, api_response["version"]) + self.assertEqual(dict(policy), expected_policy) + + expected_path = "%s/iam" % (path,) + expected_query_params = {"userProject": user_project} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=timeout, + retry=DEFAULT_RETRY, + _target_object=None, ) - def test_get_iam_policy_w_user_project(self): - from google.api_core.iam import Policy + def test_get_iam_policy_w_requested_policy_version(self): + from google.cloud.storage.iam import STORAGE_OWNER_ROLE - BLOB_NAME = "blob-name" - USER_PROJECT = "user-project-123" - PATH = "/b/name/o/%s" % (BLOB_NAME,) - ETAG = "DEADBEEF" - VERSION = 1 - RETURNED = { - "resourceId": PATH, - "etag": ETAG, - "version": VERSION, - "bindings": [], + blob_name = "blob-name" + path = "/b/name/o/%s" % (blob_name,) + etag = "DEADBEEF" + version = 3 + owner1 = "user:phred@example.com" + owner2 = "group:cloud-logs@google.com" + api_response = { + "resourceId": path, + "etag": etag, + "version": version, + "bindings": [{"role": STORAGE_OWNER_ROLE, "members": [owner1, owner2]}], } - after = ({"status": http_client.OK}, RETURNED) - EXPECTED = {} - connection = _Connection(after) - client = _Client(connection) - bucket = _Bucket(client=client, user_project=USER_PROJECT) - blob = self._make_one(BLOB_NAME, bucket=bucket) + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = _Bucket(client=client) + blob = self._make_one(blob_name, bucket=bucket) - policy = blob.get_iam_policy() + policy = blob.get_iam_policy(requested_policy_version=version) - self.assertIsInstance(policy, Policy) - self.assertEqual(policy.etag, RETURNED["etag"]) - self.assertEqual(policy.version, RETURNED["version"]) - self.assertEqual(dict(policy), EXPECTED) + self.assertEqual(policy.version, version) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "GET", - "path": "%s/iam" % (PATH,), - "query_params": {"userProject": USER_PROJECT}, - "_target_object": None, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - }, + expected_path = "%s/iam" % (path,) + expected_query_params = {"optionsRequestedPolicyVersion": version} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + 
timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, ) def test_set_iam_policy(self): @@ -3367,71 +3348,75 @@ def test_set_iam_policy_w_user_project(self): self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT}) self.assertEqual(kw[0]["data"], {"resourceId": PATH}) - def test_test_iam_permissions(self): + def test_test_iam_permissions_defaults(self): from google.cloud.storage.iam import STORAGE_OBJECTS_LIST from google.cloud.storage.iam import STORAGE_BUCKETS_GET from google.cloud.storage.iam import STORAGE_BUCKETS_UPDATE - BLOB_NAME = "blob-name" - PATH = "/b/name/o/%s" % (BLOB_NAME,) - PERMISSIONS = [ + blob_name = "blob-name" + permissions = [ STORAGE_OBJECTS_LIST, STORAGE_BUCKETS_GET, STORAGE_BUCKETS_UPDATE, ] - ALLOWED = PERMISSIONS[1:] - RETURNED = {"permissions": ALLOWED} - after = ({"status": http_client.OK}, RETURNED) - connection = _Connection(after) - client = _Client(connection) + expected = permissions[1:] + api_response = {"permissions": expected} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response bucket = _Bucket(client=client) - blob = self._make_one(BLOB_NAME, bucket=bucket) + blob = self._make_one(blob_name, bucket=bucket) - allowed = blob.test_iam_permissions(PERMISSIONS, timeout=42) + found = blob.test_iam_permissions(permissions) - self.assertEqual(allowed, ALLOWED) + self.assertEqual(found, expected) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "GET") - self.assertEqual(kw[0]["path"], "%s/iam/testPermissions" % (PATH,)) - self.assertEqual(kw[0]["query_params"], {"permissions": PERMISSIONS}) - self.assertEqual(kw[0]["timeout"], 42) + expected_path = "/b/name/o/%s/iam/testPermissions" % (blob_name,) + expected_query_params = {"permissions": permissions} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, + ) - def test_test_iam_permissions_w_user_project(self): + def test_test_iam_permissions_w_user_project_w_timeout_w_retry(self): from google.cloud.storage.iam import STORAGE_OBJECTS_LIST from google.cloud.storage.iam import STORAGE_BUCKETS_GET from google.cloud.storage.iam import STORAGE_BUCKETS_UPDATE - BLOB_NAME = "blob-name" - USER_PROJECT = "user-project-123" - PATH = "/b/name/o/%s" % (BLOB_NAME,) - PERMISSIONS = [ + blob_name = "blob-name" + user_project = "user-project-123" + timeout = 42 + retry = mock.Mock(spec=[]) + permissions = [ STORAGE_OBJECTS_LIST, STORAGE_BUCKETS_GET, STORAGE_BUCKETS_UPDATE, ] - ALLOWED = PERMISSIONS[1:] - RETURNED = {"permissions": ALLOWED} - after = ({"status": http_client.OK}, RETURNED) - connection = _Connection(after) - client = _Client(connection) - bucket = _Bucket(client=client, user_project=USER_PROJECT) - blob = self._make_one(BLOB_NAME, bucket=bucket) + expected = permissions[1:] + api_response = {"permissions": expected} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = _Bucket(client=client, user_project=user_project) + blob = self._make_one(blob_name, bucket=bucket) - allowed = blob.test_iam_permissions(PERMISSIONS) + found = blob.test_iam_permissions(permissions, timeout=timeout, retry=retry) - self.assertEqual(allowed, ALLOWED) + self.assertEqual(found, expected) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "GET") - 
self.assertEqual(kw[0]["path"], "%s/iam/testPermissions" % (PATH,)) - self.assertEqual( - kw[0]["query_params"], - {"permissions": PERMISSIONS, "userProject": USER_PROJECT}, + expected_path = "/b/name/o/%s/iam/testPermissions" % (blob_name,) + expected_query_params = { + "permissions": permissions, + "userProject": user_project, + } + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + _target_object=None, ) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) def test_make_public(self): from google.cloud.storage.acl import _ACLEntity @@ -4899,11 +4884,7 @@ def _respond(self, **kw): return response def api_request(self, **kw): - from google.cloud.exceptions import NotFound - info, content = self._respond(**kw) - if info.get("status") == http_client.NOT_FOUND: - raise NotFound(info) return content diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py index 22984a343..5ff2209c7 100644 --- a/tests/unit/test_bucket.py +++ b/tests/unit/test_bucket.py @@ -650,95 +650,72 @@ def test_user_project(self): bucket._user_project = USER_PROJECT self.assertEqual(bucket.user_project, USER_PROJECT) - def test_exists_miss(self): + def test_exists_miss_w_defaults(self): from google.cloud.exceptions import NotFound - class _FakeConnection(object): + bucket_name = "bucket-name" + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.side_effect = NotFound("testing") + bucket = self._make_one(client, name=bucket_name) - _called_with = [] + self.assertFalse(bucket.exists()) - @classmethod - def api_request(cls, *args, **kwargs): - cls._called_with.append((args, kwargs)) - raise NotFound(args) - - BUCKET_NAME = "bucket-name" - bucket = self._make_one(name=BUCKET_NAME) - client = _Client(_FakeConnection) - self.assertFalse(bucket.exists(client=client, timeout=42)) - expected_called_kwargs = { - "method": "GET", - "path": bucket.path, - "query_params": {"fields": "name"}, - "_target_object": None, - "timeout": 42, - "retry": DEFAULT_RETRY, - } - expected_cw = [((), expected_called_kwargs)] - self.assertEqual(_FakeConnection._called_with, expected_cw) - - def test_exists_with_metageneration_match(self): - class _FakeConnection(object): - - _called_with = [] - - @classmethod - def api_request(cls, *args, **kwargs): - cls._called_with.append((args, kwargs)) - # exists() does not use the return value - return object() + expected_query_params = {"fields": "name"} + client._get_resource.assert_called_once_with( + bucket.path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, + ) - BUCKET_NAME = "bucket-name" - METAGENERATION_NUMBER = 6 + def test_exists_w_metageneration_match_w_timeout(self): + bucket_name = "bucket-name" + metageneration_number = 6 + timeout = 42 + api_response = {"name": bucket_name} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = self._make_one(client, name=bucket_name) - bucket = self._make_one(name=BUCKET_NAME) - client = _Client(_FakeConnection) self.assertTrue( - bucket.exists( - client=client, timeout=42, if_metageneration_match=METAGENERATION_NUMBER - ) + bucket.exists(timeout=42, if_metageneration_match=metageneration_number) ) - expected_called_kwargs = { - "method": "GET", - "path": bucket.path, - "query_params": { - "fields": "name", - "ifMetagenerationMatch": METAGENERATION_NUMBER, - }, - "_target_object": None, - "timeout": 42, - "retry": 
DEFAULT_RETRY, - } - expected_cw = [((), expected_called_kwargs)] - self.assertEqual(_FakeConnection._called_with, expected_cw) - - def test_exists_hit_w_user_project(self): - USER_PROJECT = "user-project-123" - class _FakeConnection(object): + expected_query_params = { + "fields": "name", + "ifMetagenerationMatch": metageneration_number, + } + client._get_resource.assert_called_once_with( + bucket.path, + query_params=expected_query_params, + timeout=timeout, + retry=DEFAULT_RETRY, + _target_object=None, + ) - _called_with = [] + def test_exists_hit_w_user_project_w_retry_w_explicit_client(self): + bucket_name = "bucket-name" + user_project = "user-project-123" + retry = mock.Mock(spec=[]) + api_response = {"name": bucket_name} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = self._make_one(name=bucket_name, user_project=user_project) - @classmethod - def api_request(cls, *args, **kwargs): - cls._called_with.append((args, kwargs)) - # exists() does not use the return value - return object() + self.assertTrue(bucket.exists(client=client, retry=retry)) - BUCKET_NAME = "bucket-name" - bucket = self._make_one(name=BUCKET_NAME, user_project=USER_PROJECT) - client = _Client(_FakeConnection) - self.assertTrue(bucket.exists(client=client)) - expected_called_kwargs = { - "method": "GET", - "path": bucket.path, - "query_params": {"fields": "name", "userProject": USER_PROJECT}, - "_target_object": None, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, + expected_query_params = { + "fields": "name", + "userProject": user_project, } - expected_cw = [((), expected_called_kwargs)] - self.assertEqual(_FakeConnection._called_with, expected_cw) + client._get_resource.assert_called_once_with( + bucket.path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=retry, + _target_object=None, + ) def test_acl_property(self): from google.cloud.storage.acl import BucketACL @@ -765,97 +742,173 @@ def test_path_w_name(self): bucket = self._make_one(name=NAME) self.assertEqual(bucket.path, "/b/%s" % NAME) - def test_get_blob_miss(self): - NAME = "name" - NONESUCH = "nonesuch" - connection = _Connection() - client = _Client(connection) - bucket = self._make_one(name=NAME) - result = bucket.get_blob(NONESUCH, client=client, timeout=42) + def test_get_blob_miss_w_defaults(self): + from google.cloud.exceptions import NotFound + from google.cloud.storage.blob import Blob + + name = "name" + blob_name = "nonesuch" + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.side_effect = NotFound("testing") + bucket = self._make_one(client, name=name) + + result = bucket.get_blob(blob_name) + self.assertIsNone(result) - (kw,) = connection._requested - self.assertEqual(kw["method"], "GET") - self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, NONESUCH)) - self.assertEqual(kw["timeout"], 42) + + expected_path = "/b/%s/o/%s" % (name, blob_name) + expected_query_params = {"projection": "noAcl"} + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=mock.ANY, + ) + + target = client._get_resource.call_args[1]["_target_object"] + self.assertIsInstance(target, Blob) + self.assertIs(target.bucket, bucket) + self.assertEqual(target.name, blob_name) def test_get_blob_hit_w_user_project(self): - NAME = "name" - BLOB_NAME = "blob-name" - 
USER_PROJECT = "user-project-123" - connection = _Connection({"name": BLOB_NAME}) - client = _Client(connection) - bucket = self._make_one(name=NAME, user_project=USER_PROJECT) - blob = bucket.get_blob(BLOB_NAME, client=client) + from google.cloud.storage.blob import Blob + + name = "name" + blob_name = "blob-name" + user_project = "user-project-123" + api_response = {"name": blob_name} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = self._make_one(client, name=name, user_project=user_project) + + blob = bucket.get_blob(blob_name, client=client) + + self.assertIsInstance(blob, Blob) self.assertIs(blob.bucket, bucket) - self.assertEqual(blob.name, BLOB_NAME) - (kw,) = connection._requested - expected_qp = {"userProject": USER_PROJECT, "projection": "noAcl"} - self.assertEqual(kw["method"], "GET") - self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) - self.assertEqual(kw["query_params"], expected_qp) - self.assertEqual(kw["timeout"], self._get_default_timeout()) + self.assertEqual(blob.name, blob_name) - def test_get_blob_hit_w_generation(self): - NAME = "name" - BLOB_NAME = "blob-name" - GENERATION = 1512565576797178 - connection = _Connection({"name": BLOB_NAME, "generation": GENERATION}) - client = _Client(connection) - bucket = self._make_one(name=NAME) - blob = bucket.get_blob(BLOB_NAME, client=client, generation=GENERATION) + expected_path = "/b/%s/o/%s" % (name, blob_name) + expected_query_params = { + "userProject": user_project, + "projection": "noAcl", + } + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=blob, + ) + + def test_get_blob_hit_w_generation_w_timeout(self): + from google.cloud.storage.blob import Blob + + name = "name" + blob_name = "blob-name" + generation = 1512565576797178 + timeout = 42 + api_response = {"name": blob_name, "generation": generation} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = self._make_one(client, name=name) + + blob = bucket.get_blob(blob_name, generation=generation, timeout=timeout) + + self.assertIsInstance(blob, Blob) self.assertIs(blob.bucket, bucket) - self.assertEqual(blob.name, BLOB_NAME) - self.assertEqual(blob.generation, GENERATION) - (kw,) = connection._requested - expected_qp = {"generation": GENERATION, "projection": "noAcl"} - self.assertEqual(kw["method"], "GET") - self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) - self.assertEqual(kw["query_params"], expected_qp) - self.assertEqual(kw["timeout"], self._get_default_timeout()) + self.assertEqual(blob.name, blob_name) + self.assertEqual(blob.generation, generation) - def test_get_blob_w_generation_match(self): - NAME = "name" - BLOB_NAME = "blob-name" - GENERATION = 1512565576797178 + expected_path = "/b/%s/o/%s" % (name, blob_name) + expected_query_params = { + "generation": generation, + "projection": "noAcl", + } + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=timeout, + retry=DEFAULT_RETRY, + _target_object=blob, + ) - connection = _Connection({"name": BLOB_NAME, "generation": GENERATION}) - client = _Client(connection) - bucket = self._make_one(name=NAME) - blob = bucket.get_blob(BLOB_NAME, client=client, if_generation_match=GENERATION) + 
def test_get_blob_w_generation_match_w_retry(self): + from google.cloud.storage.blob import Blob + name = "name" + blob_name = "blob-name" + generation = 1512565576797178 + retry = mock.Mock(spec=[]) + api_response = {"name": blob_name, "generation": generation} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = self._make_one(client, name=name) + + blob = bucket.get_blob(blob_name, if_generation_match=generation, retry=retry) + + self.assertIsInstance(blob, Blob) self.assertIs(blob.bucket, bucket) - self.assertEqual(blob.name, BLOB_NAME) - self.assertEqual(blob.generation, GENERATION) - (kw,) = connection._requested - expected_qp = {"ifGenerationMatch": GENERATION, "projection": "noAcl"} - self.assertEqual(kw["method"], "GET") - self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) - self.assertEqual(kw["query_params"], expected_qp) - self.assertEqual(kw["timeout"], self._get_default_timeout()) + self.assertEqual(blob.name, blob_name) + self.assertEqual(blob.generation, generation) - def test_get_blob_hit_with_kwargs(self): + expected_path = "/b/%s/o/%s" % (name, blob_name) + expected_query_params = { + "ifGenerationMatch": generation, + "projection": "noAcl", + } + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=retry, + _target_object=blob, + ) + + def test_get_blob_hit_with_kwargs_w_explicit_client(self): + from google.cloud.storage.blob import Blob from google.cloud.storage.blob import _get_encryption_headers - NAME = "name" - BLOB_NAME = "blob-name" - CHUNK_SIZE = 1024 * 1024 - KEY = b"01234567890123456789012345678901" # 32 bytes + name = "name" + blob_name = "blob-name" + chunk_size = 1024 * 1024 + key = b"01234567890123456789012345678901" # 32 bytes + api_response = {"name": blob_name} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = self._make_one(name=name) - connection = _Connection({"name": BLOB_NAME}) - client = _Client(connection) - bucket = self._make_one(name=NAME) blob = bucket.get_blob( - BLOB_NAME, client=client, encryption_key=KEY, chunk_size=CHUNK_SIZE + blob_name, client=client, encryption_key=key, chunk_size=chunk_size ) + + self.assertIsInstance(blob, Blob) self.assertIs(blob.bucket, bucket) - self.assertEqual(blob.name, BLOB_NAME) - (kw,) = connection._requested - self.assertEqual(kw["method"], "GET") - self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) - self.assertEqual(kw["headers"], _get_encryption_headers(KEY)) - self.assertEqual(kw["timeout"], self._get_default_timeout()) - self.assertEqual(blob.chunk_size, CHUNK_SIZE) - self.assertEqual(blob._encryption_key, KEY) + self.assertEqual(blob.name, blob_name) + self.assertEqual(blob.chunk_size, chunk_size) + self.assertEqual(blob._encryption_key, key) + + expected_path = "/b/%s/o/%s" % (name, blob_name) + expected_query_params = { + "projection": "noAcl", + } + expected_headers = _get_encryption_headers(key) + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=blob, + ) def test_list_blobs_defaults(self): NAME = "name" @@ -982,44 +1035,76 @@ def test_list_notifications(self): notification.payload_format, resource.get("payload_format") ) - def test_get_notification(self): + def 
test_get_notification_miss_w_defaults(self): + from google.cloud.exceptions import NotFound + + project = "my-project-123" + name = "name" + notification_id = "1" + + client = mock.Mock(spec=["_get_resource", "project"]) + client._get_resource.side_effect = NotFound("testing") + client.project = project + bucket = self._make_one(client=client, name=name) + + with self.assertRaises(NotFound): + bucket.get_notification(notification_id=notification_id) + + expected_path = "/b/{}/notificationConfigs/{}".format(name, notification_id) + expected_query_params = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) + + def test_get_notification_hit_w_explicit_w_user_project(self): + from google.cloud.storage.notification import BucketNotification from google.cloud.storage.notification import _TOPIC_REF_FMT from google.cloud.storage.notification import JSON_API_V1_PAYLOAD_FORMAT - NAME = "name" - ETAG = "FACECABB" - NOTIFICATION_ID = "1" - SELF_LINK = "https://example.com/notification/1" - resources = { + project = "my-project-123" + user_project = "user-project-456" + name = "name" + etag = "FACECABB" + notification_id = "1" + self_link = "https://example.com/notification/1" + api_response = { "topic": _TOPIC_REF_FMT.format("my-project-123", "topic-1"), - "id": NOTIFICATION_ID, - "etag": ETAG, - "selfLink": SELF_LINK, + "id": notification_id, + "etag": etag, + "selfLink": self_link, "payload_format": JSON_API_V1_PAYLOAD_FORMAT, } + timeout = 42 + retry = mock.Mock(spec=[]) + client = mock.Mock(spec=["_get_resource", "project"]) + client._get_resource.return_value = api_response + client.project = project + bucket = self._make_one(client=client, name=name, user_project=user_project) - connection = _make_connection(resources) - client = _Client(connection, project="my-project-123") - bucket = self._make_one(client=client, name=NAME) - notification = bucket.get_notification(notification_id=NOTIFICATION_ID) + notification = bucket.get_notification( + notification_id=notification_id, timeout=timeout, retry=retry, + ) - self.assertEqual(notification.notification_id, NOTIFICATION_ID) - self.assertEqual(notification.etag, ETAG) - self.assertEqual(notification.self_link, SELF_LINK) + self.assertIsInstance(notification, BucketNotification) + self.assertEqual(notification.notification_id, notification_id) + self.assertEqual(notification.etag, etag) + self.assertEqual(notification.self_link, self_link) self.assertIsNone(notification.custom_attributes) self.assertIsNone(notification.event_types) self.assertIsNone(notification.blob_name_prefix) self.assertEqual(notification.payload_format, JSON_API_V1_PAYLOAD_FORMAT) - def test_get_notification_miss(self): - from google.cloud.exceptions import NotFound - - response = NotFound("testing") - connection = _make_connection(response) - client = _Client(connection, project="my-project-123") - bucket = self._make_one(client=client, name="name") - with self.assertRaises(NotFound): - bucket.get_notification(notification_id="1") + expected_path = "/b/{}/notificationConfigs/{}".format(name, notification_id) + expected_query_params = {"userProject": user_project} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + ) def test_delete_miss(self): from 
google.cloud.exceptions import NotFound @@ -1374,27 +1459,32 @@ def test_delete_blobs_miss_w_on_error(self): self.assertEqual(kw[1]["timeout"], self._get_default_timeout()) self.assertEqual(kw[1]["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) - def test_reload_bucket_w_metageneration_match(self): - NAME = "name" - METAGENERATION_NUMBER = 9 - - connection = _Connection({}) - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) - - bucket.reload(if_metageneration_match=METAGENERATION_NUMBER) - - self.assertEqual(len(connection._requested), 1) - req = connection._requested[0] - self.assertEqual(req["method"], "GET") - self.assertEqual(req["path"], "/b/%s" % NAME) - self.assertEqual(req["timeout"], self._get_default_timeout()) - self.assertEqual( - req["query_params"], - {"projection": "noAcl", "ifMetagenerationMatch": METAGENERATION_NUMBER}, + def test_reload_w_metageneration_match(self): + name = "name" + metageneration_number = 9 + api_response = {"name": name} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = self._make_one(client, name=name) + + bucket.reload(if_metageneration_match=metageneration_number) + + expected_path = "/b/%s" % (name,) + expected_query_params = { + "projection": "noAcl", + "ifMetagenerationMatch": metageneration_number, + } + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=bucket, ) - def test_reload_bucket_w_generation_match(self): + def test_reload_w_generation_match(self): connection = _Connection({}) client = _Client(connection) bucket = self._make_one(client=client, name="name") @@ -2415,115 +2505,129 @@ def test_disable_website(self): bucket.disable_website() self.assertEqual(bucket._properties, UNSET) - def test_get_iam_policy(self): + def test_get_iam_policy_defaults(self): from google.cloud.storage.iam import STORAGE_OWNER_ROLE from google.cloud.storage.iam import STORAGE_EDITOR_ROLE from google.cloud.storage.iam import STORAGE_VIEWER_ROLE from google.api_core.iam import Policy - NAME = "name" - PATH = "/b/%s" % (NAME,) - ETAG = "DEADBEEF" - VERSION = 1 - OWNER1 = "user:phred@example.com" - OWNER2 = "group:cloud-logs@google.com" - EDITOR1 = "domain:google.com" - EDITOR2 = "user:phred@example.com" - VIEWER1 = "serviceAccount:1234-abcdef@service.example.com" - VIEWER2 = "user:phred@example.com" - RETURNED = { - "resourceId": PATH, - "etag": ETAG, - "version": VERSION, + bucket_name = "name" + path = "/b/%s" % (bucket_name,) + etag = "DEADBEEF" + version = 1 + owner1 = "user:phred@example.com" + owner2 = "group:cloud-logs@google.com" + editor1 = "domain:google.com" + editor2 = "user:phred@example.com" + viewer1 = "serviceAccount:1234-abcdef@service.example.com" + viewer2 = "user:phred@example.com" + api_response = { + "resourceId": path, + "etag": etag, + "version": version, "bindings": [ - {"role": STORAGE_OWNER_ROLE, "members": [OWNER1, OWNER2]}, - {"role": STORAGE_EDITOR_ROLE, "members": [EDITOR1, EDITOR2]}, - {"role": STORAGE_VIEWER_ROLE, "members": [VIEWER1, VIEWER2]}, + {"role": STORAGE_OWNER_ROLE, "members": [owner1, owner2]}, + {"role": STORAGE_EDITOR_ROLE, "members": [editor1, editor2]}, + {"role": STORAGE_VIEWER_ROLE, "members": [viewer1, viewer2]}, ], } - EXPECTED = { - binding["role"]: set(binding["members"]) for binding in RETURNED["bindings"] + expected_policy = { + 
binding["role"]: set(binding["members"]) + for binding in api_response["bindings"] } - connection = _Connection(RETURNED) - client = _Client(connection, None) - bucket = self._make_one(client=client, name=NAME) + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = self._make_one(client=client, name=bucket_name) - policy = bucket.get_iam_policy(timeout=42) + policy = bucket.get_iam_policy() self.assertIsInstance(policy, Policy) - self.assertEqual(policy.etag, RETURNED["etag"]) - self.assertEqual(policy.version, RETURNED["version"]) - self.assertEqual(dict(policy), EXPECTED) - - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "GET") - self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,)) - self.assertEqual(kw[0]["query_params"], {}) - self.assertEqual(kw[0]["timeout"], 42) + self.assertEqual(policy.etag, api_response["etag"]) + self.assertEqual(policy.version, api_response["version"]) + self.assertEqual(dict(policy), expected_policy) + + expected_path = "/b/%s/iam" % (bucket_name,) + expected_query_params = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, + ) - def test_get_iam_policy_w_user_project(self): + def test_get_iam_policy_w_user_project_w_timeout(self): from google.api_core.iam import Policy - NAME = "name" - USER_PROJECT = "user-project-123" - PATH = "/b/%s" % (NAME,) - ETAG = "DEADBEEF" - VERSION = 1 - RETURNED = { - "resourceId": PATH, - "etag": ETAG, - "version": VERSION, + bucket_name = "name" + timeout = 42 + user_project = "user-project-123" + path = "/b/%s" % (bucket_name,) + etag = "DEADBEEF" + version = 1 + api_response = { + "resourceId": path, + "etag": etag, + "version": version, "bindings": [], } - EXPECTED = {} - connection = _Connection(RETURNED) - client = _Client(connection, None) - bucket = self._make_one(client=client, name=NAME, user_project=USER_PROJECT) + expected_policy = {} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = self._make_one( + client=client, name=bucket_name, user_project=user_project + ) - policy = bucket.get_iam_policy() + policy = bucket.get_iam_policy(timeout=timeout) self.assertIsInstance(policy, Policy) - self.assertEqual(policy.etag, RETURNED["etag"]) - self.assertEqual(policy.version, RETURNED["version"]) - self.assertEqual(dict(policy), EXPECTED) - - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "GET") - self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,)) - self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT}) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) + self.assertEqual(policy.etag, api_response["etag"]) + self.assertEqual(policy.version, api_response["version"]) + self.assertEqual(dict(policy), expected_policy) + + expected_path = "/b/%s/iam" % (bucket_name,) + expected_query_params = {"userProject": user_project} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=timeout, + retry=DEFAULT_RETRY, + _target_object=None, + ) - def test_get_iam_policy_w_requested_policy_version(self): + def test_get_iam_policy_w_requested_policy_version_w_retry(self): from google.cloud.storage.iam import STORAGE_OWNER_ROLE - NAME = "name" - PATH = "/b/%s" % (NAME,) - ETAG = "DEADBEEF" - VERSION = 1 - OWNER1 = 
"user:phred@example.com" - OWNER2 = "group:cloud-logs@google.com" - RETURNED = { - "resourceId": PATH, - "etag": ETAG, - "version": VERSION, - "bindings": [{"role": STORAGE_OWNER_ROLE, "members": [OWNER1, OWNER2]}], + bucket_name = "name" + path = "/b/%s" % (bucket_name,) + etag = "DEADBEEF" + version = 3 + owner1 = "user:phred@example.com" + owner2 = "group:cloud-logs@google.com" + api_response = { + "resourceId": path, + "etag": etag, + "version": version, + "bindings": [{"role": STORAGE_OWNER_ROLE, "members": [owner1, owner2]}], } - connection = _Connection(RETURNED) - client = _Client(connection, None) - bucket = self._make_one(client=client, name=NAME) + retry = mock.Mock(spec=[]) + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = self._make_one(client=client, name=bucket_name) - policy = bucket.get_iam_policy(requested_policy_version=3) + policy = bucket.get_iam_policy(requested_policy_version=3, retry=retry) - self.assertEqual(policy.version, VERSION) + self.assertEqual(policy.version, version) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "GET") - self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,)) - self.assertEqual(kw[0]["query_params"], {"optionsRequestedPolicyVersion": 3}) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) + expected_path = "/b/%s/iam" % (bucket_name,) + expected_query_params = {"optionsRequestedPolicyVersion": version} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=retry, + _target_object=None, + ) def test_set_iam_policy(self): import operator @@ -2632,67 +2736,74 @@ def test_set_iam_policy_w_user_project(self): self.assertEqual(found["role"], expected["role"]) self.assertEqual(sorted(found["members"]), sorted(expected["members"])) - def test_test_iam_permissions(self): + def test_test_iam_permissions_defaults(self): from google.cloud.storage.iam import STORAGE_OBJECTS_LIST from google.cloud.storage.iam import STORAGE_BUCKETS_GET from google.cloud.storage.iam import STORAGE_BUCKETS_UPDATE - NAME = "name" - PATH = "/b/%s" % (NAME,) - PERMISSIONS = [ + name = "name" + permissions = [ STORAGE_OBJECTS_LIST, STORAGE_BUCKETS_GET, STORAGE_BUCKETS_UPDATE, ] - ALLOWED = PERMISSIONS[1:] - RETURNED = {"permissions": ALLOWED} - connection = _Connection(RETURNED) - client = _Client(connection, None) - bucket = self._make_one(client=client, name=NAME) + expected = permissions[1:] + api_response = {"permissions": expected} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = self._make_one(client=client, name=name) - allowed = bucket.test_iam_permissions(PERMISSIONS, timeout=42) + found = bucket.test_iam_permissions(permissions) - self.assertEqual(allowed, ALLOWED) + self.assertEqual(found, expected) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "GET") - self.assertEqual(kw[0]["path"], "%s/iam/testPermissions" % (PATH,)) - self.assertEqual(kw[0]["query_params"], {"permissions": PERMISSIONS}) - self.assertEqual(kw[0]["timeout"], 42) + expected_path = "/b/%s/iam/testPermissions" % (name,) + expected_query_params = {} + expected_query_params = {"permissions": permissions} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + 
_target_object=None, + ) - def test_test_iam_permissions_w_user_project(self): + def test_test_iam_permissions_w_user_project_w_timeout_w_retry(self): from google.cloud.storage.iam import STORAGE_OBJECTS_LIST from google.cloud.storage.iam import STORAGE_BUCKETS_GET from google.cloud.storage.iam import STORAGE_BUCKETS_UPDATE - NAME = "name" - USER_PROJECT = "user-project-123" - PATH = "/b/%s" % (NAME,) - PERMISSIONS = [ + name = "name" + user_project = "user-project-123" + timeout = 42 + retry = mock.Mock(spec=[]) + permissions = [ STORAGE_OBJECTS_LIST, STORAGE_BUCKETS_GET, STORAGE_BUCKETS_UPDATE, ] - ALLOWED = PERMISSIONS[1:] - RETURNED = {"permissions": ALLOWED} - connection = _Connection(RETURNED) - client = _Client(connection, None) - bucket = self._make_one(client=client, name=NAME, user_project=USER_PROJECT) + expected = permissions[1:] + api_response = {"permissions": expected} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = self._make_one(client=client, name=name, user_project=user_project) - allowed = bucket.test_iam_permissions(PERMISSIONS) + found = bucket.test_iam_permissions(permissions, timeout=timeout, retry=retry) - self.assertEqual(allowed, ALLOWED) + self.assertEqual(found, expected) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "GET") - self.assertEqual(kw[0]["path"], "%s/iam/testPermissions" % (PATH,)) - self.assertEqual( - kw[0]["query_params"], - {"permissions": PERMISSIONS, "userProject": USER_PROJECT}, + expected_path = "/b/%s/iam/testPermissions" % (name,) + expected_query_params = { + "permissions": permissions, + "userProject": user_project, + } + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + _target_object=None, ) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) def test_make_public_defaults(self): from google.cloud.storage.acl import _ACLEntity @@ -2723,15 +2834,12 @@ def _make_public_w_future_helper(self, default_object_acl_loaded=True): permissive = [{"entity": "allUsers", "role": _ACLEntity.READER_ROLE}] after1 = {"acl": permissive, "defaultObjectAcl": []} after2 = {"acl": permissive, "defaultObjectAcl": permissive} - if default_object_acl_loaded: - num_requests = 2 - connection = _Connection(after1, after2) - else: - num_requests = 3 - # We return the same value for default_object_acl.reload() - # to consume. 
- connection = _Connection(after1, after1, after2) + connection = _Connection(after1, after2) client = _Client(connection) + + # Temporary workaround until we use real mock client + client._get_resource = mock.Mock(return_value={"items": []}) + bucket = self._make_one(client=client, name=NAME) bucket.acl.loaded = True bucket.default_object_acl.loaded = default_object_acl_loaded @@ -2739,21 +2847,27 @@ def _make_public_w_future_helper(self, default_object_acl_loaded=True): self.assertEqual(list(bucket.acl), permissive) self.assertEqual(list(bucket.default_object_acl), permissive) kw = connection._requested - self.assertEqual(len(kw), num_requests) + self.assertEqual(len(kw), 2) self.assertEqual(kw[0]["method"], "PATCH") self.assertEqual(kw[0]["path"], "/b/%s" % NAME) self.assertEqual(kw[0]["data"], {"acl": permissive}) self.assertEqual(kw[0]["query_params"], {"projection": "full"}) self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) + self.assertEqual(kw[1]["method"], "PATCH") + self.assertEqual(kw[1]["path"], "/b/%s" % NAME) + self.assertEqual(kw[1]["data"], {"defaultObjectAcl": permissive}) + self.assertEqual(kw[1]["query_params"], {"projection": "full"}) + self.assertEqual(kw[1]["timeout"], self._get_default_timeout()) + if not default_object_acl_loaded: - self.assertEqual(kw[1]["method"], "GET") - self.assertEqual(kw[1]["path"], "/b/%s/defaultObjectAcl" % NAME) - # Last could be 1 or 2 depending on `default_object_acl_loaded`. - self.assertEqual(kw[-1]["method"], "PATCH") - self.assertEqual(kw[-1]["path"], "/b/%s" % NAME) - self.assertEqual(kw[-1]["data"], {"defaultObjectAcl": permissive}) - self.assertEqual(kw[-1]["query_params"], {"projection": "full"}) - self.assertEqual(kw[-1]["timeout"], self._get_default_timeout()) + expected_path = "/b/%s/defaultObjectAcl" % (NAME,) + expected_query_params = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) def test_make_public_w_future(self): self._make_public_w_future_helper(default_object_acl_loaded=True) @@ -2871,37 +2985,40 @@ def _make_private_w_future_helper(self, default_object_acl_loaded=True): no_permissions = [] after1 = {"acl": no_permissions, "defaultObjectAcl": []} after2 = {"acl": no_permissions, "defaultObjectAcl": no_permissions} - if default_object_acl_loaded: - num_requests = 2 - connection = _Connection(after1, after2) - else: - num_requests = 3 - # We return the same value for default_object_acl.reload() - # to consume. 
- connection = _Connection(after1, after1, after2) + connection = _Connection(after1, after2) client = _Client(connection) bucket = self._make_one(client=client, name=NAME) bucket.acl.loaded = True bucket.default_object_acl.loaded = default_object_acl_loaded + + # Temporary workaround until we use real mock client + client._get_resource = mock.Mock(return_value={"items": []}) + bucket.make_private(future=True) self.assertEqual(list(bucket.acl), no_permissions) self.assertEqual(list(bucket.default_object_acl), no_permissions) kw = connection._requested - self.assertEqual(len(kw), num_requests) + self.assertEqual(len(kw), 2) self.assertEqual(kw[0]["method"], "PATCH") self.assertEqual(kw[0]["path"], "/b/%s" % NAME) self.assertEqual(kw[0]["data"], {"acl": no_permissions}) self.assertEqual(kw[0]["query_params"], {"projection": "full"}) self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) + self.assertEqual(kw[1]["method"], "PATCH") + self.assertEqual(kw[1]["path"], "/b/%s" % NAME) + self.assertEqual(kw[1]["data"], {"defaultObjectAcl": no_permissions}) + self.assertEqual(kw[1]["query_params"], {"projection": "full"}) + self.assertEqual(kw[1]["timeout"], self._get_default_timeout()) + if not default_object_acl_loaded: - self.assertEqual(kw[1]["method"], "GET") - self.assertEqual(kw[1]["path"], "/b/%s/defaultObjectAcl" % NAME) - # Last could be 1 or 2 depending on `default_object_acl_loaded`. - self.assertEqual(kw[-1]["method"], "PATCH") - self.assertEqual(kw[-1]["path"], "/b/%s" % NAME) - self.assertEqual(kw[-1]["data"], {"defaultObjectAcl": no_permissions}) - self.assertEqual(kw[-1]["query_params"], {"projection": "full"}) - self.assertEqual(kw[-1]["timeout"], self._get_default_timeout()) + expected_path = "/b/%s/defaultObjectAcl" % (NAME,) + expected_query_params = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) def test_make_private_w_future(self): self._make_private_w_future_helper(default_object_acl_loaded=True) diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 00a9b4913..c6a98951e 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -409,339 +409,360 @@ def test_batch(self): self.assertIsInstance(batch, Batch) self.assertIs(batch._client, client) - def test_get_bucket_with_string_miss(self): + def test__get_resource_miss_w_defaults(self): from google.cloud.exceptions import NotFound PROJECT = "PROJECT" + PATH = "/path/to/something" CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - NONESUCH = "nonesuch" - http = _make_requests_session( - [_make_json_response({}, status=http_client.NOT_FOUND)] - ) - client._http_internal = http + client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + connection = client._base_connection = _make_connection() with self.assertRaises(NotFound): - client.get_bucket(NONESUCH, timeout=42) + client._get_resource(PATH) - http.request.assert_called_once_with( - method="GET", url=mock.ANY, data=mock.ANY, headers=mock.ANY, timeout=42 - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, - "/".join(["", "storage", client._connection.API_VERSION, "b", NONESUCH]), + connection.api_request.assert_called_once_with( + method="GET", + path=PATH, + query_params=None, + 
headers=None, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["projection"], "noAcl") - - def test_get_bucket_with_string_hit(self): - from google.cloud.storage.bucket import Bucket + def test__get_resource_hit_w_explicit(self): PROJECT = "PROJECT" + PATH = "/path/to/something" + QUERY_PARAMS = {"foo": "Foo"} + HEADERS = {"bar": "Bar"} + TIMEOUT = 100 + RETRY = mock.Mock(spec=[]) CREDENTIALS = _make_credentials() + client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + expected = mock.Mock(spec={}) + connection = client._base_connection = _make_connection(expected) + target = mock.Mock(spec={}) - BUCKET_NAME = "bucket-name" - data = {"name": BUCKET_NAME} - http = _make_requests_session([_make_json_response(data)]) - client._http_internal = http + found = client._get_resource( + PATH, + query_params=QUERY_PARAMS, + headers=HEADERS, + timeout=TIMEOUT, + retry=RETRY, + _target_object=target, + ) - bucket = client.get_bucket(BUCKET_NAME) + self.assertIs(found, expected) - self.assertIsInstance(bucket, Bucket) - self.assertEqual(bucket.name, BUCKET_NAME) - http.request.assert_called_once_with( + connection.api_request.assert_called_once_with( method="GET", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, - timeout=self._get_default_timeout(), + path=PATH, + query_params=QUERY_PARAMS, + headers=HEADERS, + timeout=TIMEOUT, + retry=RETRY, + _target_object=target, ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, - "/".join(["", "storage", client._connection.API_VERSION, "b", BUCKET_NAME]), + + def test_get_bucket_miss_w_string_w_defaults(self): + from google.cloud.exceptions import NotFound + from google.cloud.storage.bucket import Bucket + + project = "PROJECT" + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._get_resource = mock.Mock() + client._get_resource.side_effect = NotFound("testing") + bucket_name = "nonesuch" + + with self.assertRaises(NotFound): + client.get_bucket(bucket_name) + + expected_path = "/b/%s" % (bucket_name,) + expected_query_params = {"projection": "noAcl"} + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=mock.ANY, ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["projection"], "noAcl") - def test_get_bucket_with_metageneration_match(self): + target = client._get_resource.call_args[1]["_target_object"] + self.assertIsInstance(target, Bucket) + self.assertEqual(target.name, bucket_name) + + def test_get_bucket_hit_w_string_w_timeout(self): from google.cloud.storage.bucket import Bucket - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - METAGENERATION_NUMBER = 6 - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + project = "PROJECT" + bucket_name = "bucket-name" + timeout = 42 + api_response = {"name": bucket_name} + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._get_resource = mock.Mock(return_value=api_response) - BUCKET_NAME = "bucket-name" - data = {"name": BUCKET_NAME} - http = _make_requests_session([_make_json_response(data)]) - 
client._http_internal = http + bucket = client.get_bucket(bucket_name, timeout=timeout) - bucket = client.get_bucket( - BUCKET_NAME, if_metageneration_match=METAGENERATION_NUMBER - ) self.assertIsInstance(bucket, Bucket) - self.assertEqual(bucket.name, BUCKET_NAME) - http.request.assert_called_once_with( - method="GET", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, - timeout=self._get_default_timeout(), - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, - "/".join(["", "storage", client._connection.API_VERSION, "b", BUCKET_NAME]), + self.assertEqual(bucket.name, bucket_name) + + expected_path = "/b/%s" % (bucket_name,) + expected_query_params = {"projection": "noAcl"} + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=timeout, + retry=DEFAULT_RETRY, + _target_object=bucket, ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["ifMetagenerationMatch"], str(METAGENERATION_NUMBER)) - self.assertEqual(parms["projection"], "noAcl") - def test_get_bucket_with_object_miss(self): - from google.cloud.exceptions import NotFound + def test_get_bucket_hit_w_string_w_metageneration_match(self): from google.cloud.storage.bucket import Bucket project = "PROJECT" + bucket_name = "bucket-name" + metageneration_number = 6 + api_response = {"name": bucket_name} credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) + client._get_resource = mock.Mock(return_value=api_response) - nonesuch = "nonesuch" - bucket_obj = Bucket(client, nonesuch) - http = _make_requests_session( - [_make_json_response({}, status=http_client.NOT_FOUND)] + bucket = client.get_bucket( + bucket_name, if_metageneration_match=metageneration_number ) - client._http_internal = http - with self.assertRaises(NotFound): - client.get_bucket(bucket_obj) + self.assertIsInstance(bucket, Bucket) + self.assertEqual(bucket.name, bucket_name) - http.request.assert_called_once_with( - method="GET", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, + expected_path = "/b/%s" % (bucket_name,) + expected_query_params = { + "projection": "noAcl", + "ifMetagenerationMatch": metageneration_number, + } + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=bucket, ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, - "/".join(["", "storage", client._connection.API_VERSION, "b", nonesuch]), - ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["projection"], "noAcl") - def test_get_bucket_with_object_hit(self): + def test_get_bucket_miss_w_object_w_retry(self): + from google.cloud.exceptions import NotFound from google.cloud.storage.bucket import Bucket project = "PROJECT" + bucket_name = "nonesuch" + retry = mock.Mock(spec=[]) credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) - - bucket_name = "bucket-name" + client._get_resource = mock.Mock(side_effect=NotFound("testing")) bucket_obj = Bucket(client, bucket_name) - 
data = {"name": bucket_name} - http = _make_requests_session([_make_json_response(data)]) - client._http_internal = http - - bucket = client.get_bucket(bucket_obj) - self.assertIsInstance(bucket, Bucket) - self.assertEqual(bucket.name, bucket_name) - http.request.assert_called_once_with( - method="GET", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, + with self.assertRaises(NotFound): + client.get_bucket(bucket_obj, retry=retry) + + expected_path = "/b/%s" % (bucket_name,) + expected_query_params = {"projection": "noAcl"} + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, timeout=self._get_default_timeout(), + retry=retry, + _target_object=mock.ANY, ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, - "/".join(["", "storage", client._connection.API_VERSION, "b", bucket_name]), - ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["projection"], "noAcl") - def test_get_bucket_default_retry(self): - from google.cloud.storage.bucket import Bucket - from google.cloud.storage._http import Connection + target = client._get_resource.call_args[1]["_target_object"] + self.assertIsInstance(target, Bucket) + self.assertEqual(target.name, bucket_name) - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + def test_get_bucket_hit_w_object_defaults(self): + from google.cloud.storage.bucket import Bucket + project = "PROJECT" bucket_name = "bucket-name" + api_response = {"name": bucket_name} + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._get_resource = mock.Mock(return_value=api_response) bucket_obj = Bucket(client, bucket_name) - with mock.patch.object(Connection, "api_request") as req: - client.get_bucket(bucket_obj) + bucket = client.get_bucket(bucket_obj) - req.assert_called_once_with( - method="GET", - path=mock.ANY, - query_params=mock.ANY, - headers=mock.ANY, - _target_object=bucket_obj, - timeout=mock.ANY, + self.assertIsInstance(bucket, Bucket) + self.assertEqual(bucket.name, bucket_name) + + expected_path = "/b/%s" % (bucket_name,) + expected_query_params = {"projection": "noAcl"} + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), retry=DEFAULT_RETRY, + _target_object=bucket, ) - def test_get_bucket_respects_retry_override(self): + def test_get_bucket_hit_w_object_w_retry_none(self): from google.cloud.storage.bucket import Bucket - from google.cloud.storage._http import Connection - - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + project = "PROJECT" bucket_name = "bucket-name" + api_response = {"name": bucket_name} + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._get_resource = mock.Mock(return_value=api_response) bucket_obj = Bucket(client, bucket_name) - with mock.patch.object(Connection, "api_request") as req: - client.get_bucket(bucket_obj, retry=None) + bucket = client.get_bucket(bucket_obj, retry=None) - req.assert_called_once_with( - method="GET", - path=mock.ANY, - 
query_params=mock.ANY, - headers=mock.ANY, - _target_object=bucket_obj, - timeout=mock.ANY, + self.assertIsInstance(bucket, Bucket) + self.assertEqual(bucket.name, bucket_name) + + expected_path = "/b/%s" % (bucket_name,) + expected_query_params = {"projection": "noAcl"} + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), retry=None, + _target_object=bucket, ) - def test_lookup_bucket_miss(self): - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + def test_lookup_bucket_miss_w_defaults(self): + from google.cloud.exceptions import NotFound + from google.cloud.storage.bucket import Bucket - NONESUCH = "nonesuch" - http = _make_requests_session( - [_make_json_response({}, status=http_client.NOT_FOUND)] - ) - client._http_internal = http + project = "PROJECT" + bucket_name = "nonesuch" + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._get_resource = mock.Mock(side_effect=NotFound("testing")) - bucket = client.lookup_bucket(NONESUCH, timeout=42) + bucket = client.lookup_bucket(bucket_name) self.assertIsNone(bucket) - http.request.assert_called_once_with( - method="GET", url=mock.ANY, data=mock.ANY, headers=mock.ANY, timeout=42 - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, - "/".join(["", "storage", client._connection.API_VERSION, "b", NONESUCH]), + + expected_path = "/b/%s" % (bucket_name,) + expected_query_params = {"projection": "noAcl"} + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=mock.ANY, ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["projection"], "noAcl") - def test_lookup_bucket_hit(self): - from google.cloud.storage.bucket import Bucket + target = client._get_resource.call_args[1]["_target_object"] + self.assertIsInstance(target, Bucket) + self.assertEqual(target.name, bucket_name) - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + def test_lookup_bucket_hit_w_timeout(self): + from google.cloud.storage.bucket import Bucket - BUCKET_NAME = "bucket-name" - data = {"name": BUCKET_NAME} - http = _make_requests_session([_make_json_response(data)]) - client._http_internal = http + project = "PROJECT" + bucket_name = "bucket-name" + timeout = 42 + api_response = {"name": bucket_name} + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._get_resource = mock.Mock(return_value=api_response) - bucket = client.lookup_bucket(BUCKET_NAME) + bucket = client.lookup_bucket(bucket_name, timeout=timeout) self.assertIsInstance(bucket, Bucket) - self.assertEqual(bucket.name, BUCKET_NAME) - http.request.assert_called_once_with( - method="GET", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, - timeout=self._get_default_timeout(), - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), 
client._connection.API_BASE_URL) - self.assertEqual( - path, - "/".join(["", "storage", client._connection.API_VERSION, "b", BUCKET_NAME]), + self.assertEqual(bucket.name, bucket_name) + + expected_path = "/b/%s" % (bucket_name,) + expected_query_params = {"projection": "noAcl"} + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=timeout, + retry=DEFAULT_RETRY, + _target_object=bucket, ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["projection"], "noAcl") - def test_lookup_bucket_with_metageneration_match(self): + def test_lookup_bucket_hit_w_metageneration_match(self): from google.cloud.storage.bucket import Bucket - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - METAGENERATION_NUMBER = 6 - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - - BUCKET_NAME = "bucket-name" - data = {"name": BUCKET_NAME} - http = _make_requests_session([_make_json_response(data)]) - client._http_internal = http + project = "PROJECT" + bucket_name = "bucket-name" + api_response = {"name": bucket_name} + credentials = _make_credentials() + metageneration_number = 6 + client = self._make_one(project=project, credentials=credentials) + client._get_resource = mock.Mock(return_value=api_response) bucket = client.lookup_bucket( - BUCKET_NAME, if_metageneration_match=METAGENERATION_NUMBER + bucket_name, if_metageneration_match=metageneration_number ) + self.assertIsInstance(bucket, Bucket) - self.assertEqual(bucket.name, BUCKET_NAME) - http.request.assert_called_once_with( - method="GET", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, + self.assertEqual(bucket.name, bucket_name) + + expected_path = "/b/%s" % (bucket_name,) + expected_query_params = { + "projection": "noAcl", + "ifMetagenerationMatch": metageneration_number, + } + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=bucket, ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, - "/".join(["", "storage", client._connection.API_VERSION, "b", BUCKET_NAME]), - ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["projection"], "noAcl") - self.assertEqual(parms["ifMetagenerationMatch"], str(METAGENERATION_NUMBER)) - def test_lookup_bucket_default_retry(self): + def test_lookup_bucket_hit_w_retry(self): from google.cloud.storage.bucket import Bucket - from google.cloud.storage._http import Connection - - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + project = "PROJECT" bucket_name = "bucket-name" + api_response = {"name": bucket_name} + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._get_resource = mock.Mock(return_value=api_response) bucket_obj = Bucket(client, bucket_name) - with mock.patch.object(Connection, "api_request") as req: - client.lookup_bucket(bucket_obj) - req.assert_called_once_with( - method="GET", - path=mock.ANY, - query_params=mock.ANY, - headers=mock.ANY, - _target_object=bucket_obj, - timeout=mock.ANY, - retry=DEFAULT_RETRY, - ) + bucket = client.lookup_bucket(bucket_obj, retry=None) + 
+ self.assertIsInstance(bucket, Bucket) + self.assertEqual(bucket.name, bucket_name) + + expected_path = "/b/%s" % (bucket_name,) + expected_query_params = {"projection": "noAcl"} + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=None, + _target_object=bucket, + ) def test_create_bucket_w_missing_client_project(self): credentials = _make_credentials() diff --git a/tests/unit/test_hmac_key.py b/tests/unit/test_hmac_key.py index 5761f4a96..06b3a65c5 100644 --- a/tests/unit/test_hmac_key.py +++ b/tests/unit/test_hmac_key.py @@ -218,31 +218,29 @@ def test_path_w_access_id_w_explicit_project(self): expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id) self.assertEqual(metadata.path, expected_path) - def test_exists_miss_no_project_set(self): + def test_exists_miss_w_defaults(self): from google.cloud.exceptions import NotFound access_id = "ACCESS-ID" - connection = mock.Mock(spec=["api_request"]) - connection.api_request.side_effect = NotFound("testing") - client = _Client(connection) + project = "PROJECT" + client = mock.Mock(spec=["_get_resource", "project"]) + client._get_resource.side_effect = NotFound("testing") + client.project = project metadata = self._make_one(client) metadata._properties["accessId"] = access_id - self.assertFalse(metadata.exists(timeout=42)) + self.assertFalse(metadata.exists()) - expected_path = "/projects/{}/hmacKeys/{}".format( - client.DEFAULT_PROJECT, access_id + expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id) + expected_query_params = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, ) - expected_kwargs = { - "method": "GET", - "path": expected_path, - "query_params": {}, - "timeout": 42, - "retry": DEFAULT_RETRY, - } - connection.api_request.assert_called_once_with(**expected_kwargs) - def test_exists_hit_w_project_set(self): + def test_exists_hit_w_explicit_w_user_project(self): project = "PROJECT-ID" access_id = "ACCESS-ID" user_project = "billed-project" @@ -252,49 +250,47 @@ def test_exists_hit_w_project_set(self): "accessId": access_id, "serviceAccountEmail": email, } - connection = mock.Mock(spec=["api_request"]) - connection.api_request.return_value = resource - client = _Client(connection) + timeout = 42 + retry = mock.Mock(spec=[]) + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = resource metadata = self._make_one(client, user_project=user_project) metadata._properties["accessId"] = access_id metadata._properties["projectId"] = project - self.assertTrue(metadata.exists()) + self.assertTrue(metadata.exists(timeout=timeout, retry=retry)) expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id) - expected_kwargs = { - "method": "GET", - "path": expected_path, - "query_params": {"userProject": user_project}, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - } - connection.api_request.assert_called_once_with(**expected_kwargs) + expected_query_params = {"userProject": user_project} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + ) - def test_reload_miss_no_project_set(self): + def test_reload_miss_w_defaults(self): from google.cloud.exceptions import NotFound access_id = "ACCESS-ID" - connection = 
mock.Mock(spec=["api_request"]) - connection.api_request.side_effect = NotFound("testing") - client = _Client(connection) + project = "PROJECT" + client = mock.Mock(spec=["_get_resource", "project"]) + client._get_resource.side_effect = NotFound("testing") + client.project = project metadata = self._make_one(client) metadata._properties["accessId"] = access_id with self.assertRaises(NotFound): - metadata.reload(timeout=42) + metadata.reload() - expected_path = "/projects/{}/hmacKeys/{}".format( - client.DEFAULT_PROJECT, access_id + expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id) + expected_query_params = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, ) - expected_kwargs = { - "method": "GET", - "path": expected_path, - "query_params": {}, - "timeout": 42, - "retry": DEFAULT_RETRY, - } - connection.api_request.assert_called_once_with(**expected_kwargs) def test_reload_hit_w_project_set(self): project = "PROJECT-ID" @@ -306,26 +302,26 @@ def test_reload_hit_w_project_set(self): "accessId": access_id, "serviceAccountEmail": email, } - connection = mock.Mock(spec=["api_request"]) - connection.api_request.return_value = resource - client = _Client(connection) + timeout = 42 + retry = mock.Mock(spec=[]) + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = resource metadata = self._make_one(client, user_project=user_project) metadata._properties["accessId"] = access_id metadata._properties["projectId"] = project - metadata.reload() + metadata.reload(timeout=timeout, retry=retry) self.assertEqual(metadata._properties, resource) expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id) - expected_kwargs = { - "method": "GET", - "path": expected_path, - "query_params": {"userProject": user_project}, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - } - connection.api_request.assert_called_once_with(**expected_kwargs) + expected_query_params = {"userProject": user_project} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + ) def test_update_miss_no_project_set(self): from google.cloud.exceptions import NotFound diff --git a/tests/unit/test_notification.py b/tests/unit/test_notification.py index 7ecabfa3a..e8cee0478 100644 --- a/tests/unit/test_notification.py +++ b/tests/unit/test_notification.py @@ -327,104 +327,118 @@ def test_create_w_explicit_client(self): ) def test_exists_wo_notification_id(self): - client = self._make_client() + client = mock.Mock(spec=["_get_resource", "project"]) + client.project = self.BUCKET_PROJECT bucket = self._make_bucket(client) notification = self._make_one(bucket, self.TOPIC_NAME) with self.assertRaises(ValueError): notification.exists() - def test_exists_miss(self): + client._get_resource.assert_not_called() + + def test_exists_miss_w_defaults(self): from google.cloud.exceptions import NotFound - client = self._make_client() + client = mock.Mock(spec=["_get_resource", "project"]) + client._get_resource.side_effect = NotFound("testing") + client.project = self.BUCKET_PROJECT bucket = self._make_bucket(client) notification = self._make_one(bucket, self.TOPIC_NAME) notification._properties["id"] = self.NOTIFICATION_ID - api_request = client._connection.api_request - api_request.side_effect = NotFound("testing") - self.assertFalse(notification.exists(timeout=42)) + 
self.assertFalse(notification.exists())

-        api_request.assert_called_once_with(
-            method="GET",
-            path=self.NOTIFICATION_PATH,
-            query_params={},
-            timeout=42,
+        expected_query_params = {}
+        client._get_resource.assert_called_once_with(
+            self.NOTIFICATION_PATH,
+            query_params=expected_query_params,
+            timeout=self._get_default_timeout(),
             retry=DEFAULT_RETRY,
         )

-    def test_exists_hit(self):
-        USER_PROJECT = "user-project-123"
-        client = self._make_client()
-        bucket = self._make_bucket(client, user_project=USER_PROJECT)
-        notification = self._make_one(bucket, self.TOPIC_NAME)
-        notification._properties["id"] = self.NOTIFICATION_ID
-        api_request = client._connection.api_request
-        api_request.return_value = {
+    def test_exists_hit_w_explicit_w_user_project(self):
+        user_project = "user-project-123"
+        api_response = {
             "topic": self.TOPIC_REF,
             "id": self.NOTIFICATION_ID,
             "etag": self.ETAG,
             "selfLink": self.SELF_LINK,
         }
+        client = mock.Mock(spec=["_get_resource", "project"])
+        client._get_resource.return_value = api_response
+        client.project = self.BUCKET_PROJECT
+        bucket = self._make_bucket(client, user_project=user_project)
+        notification = self._make_one(bucket, self.TOPIC_NAME)
+        notification._properties["id"] = self.NOTIFICATION_ID
+        timeout = 42
+        retry = mock.Mock(spec=[])

-        self.assertTrue(notification.exists(client=client))
+        self.assertTrue(
+            notification.exists(client=client, timeout=timeout, retry=retry)
+        )

-        api_request.assert_called_once_with(
-            method="GET",
-            path=self.NOTIFICATION_PATH,
-            query_params={"userProject": USER_PROJECT},
-            timeout=self._get_default_timeout(),
-            retry=DEFAULT_RETRY,
+        expected_query_params = {"userProject": user_project}
+        client._get_resource.assert_called_once_with(
+            self.NOTIFICATION_PATH,
+            query_params=expected_query_params,
+            timeout=timeout,
+            retry=retry,
         )

     def test_reload_wo_notification_id(self):
-        client = self._make_client()
+        client = mock.Mock(spec=["_get_resource", "project"])
+        client.project = self.BUCKET_PROJECT
         bucket = self._make_bucket(client)
         notification = self._make_one(bucket, self.TOPIC_NAME)

         with self.assertRaises(ValueError):
             notification.reload()

-    def test_reload_miss(self):
+        client._get_resource.assert_not_called()
+
+    def test_reload_miss_w_defaults(self):
         from google.cloud.exceptions import NotFound

-        client = self._make_client()
+        client = mock.Mock(spec=["_get_resource", "project"])
+        client._get_resource.side_effect = NotFound("testing")
+        client.project = self.BUCKET_PROJECT
         bucket = self._make_bucket(client)
         notification = self._make_one(bucket, self.TOPIC_NAME)
         notification._properties["id"] = self.NOTIFICATION_ID
-        api_request = client._connection.api_request
-        api_request.side_effect = NotFound("testing")

         with self.assertRaises(NotFound):
-            notification.reload(timeout=42)
+            notification.reload()

-        api_request.assert_called_once_with(
-            method="GET",
-            path=self.NOTIFICATION_PATH,
-            query_params={},
-            timeout=42,
+        expected_query_params = {}
+        client._get_resource.assert_called_once_with(
+            self.NOTIFICATION_PATH,
+            query_params=expected_query_params,
+            timeout=self._get_default_timeout(),
             retry=DEFAULT_RETRY,
         )

-    def test_reload_hit(self):
+    def test_reload_hit_w_explicit_w_user_project(self):
         from google.cloud.storage.notification import NONE_PAYLOAD_FORMAT

-        USER_PROJECT = "user-project-123"
-        client = self._make_client()
-        bucket = self._make_bucket(client, user_project=USER_PROJECT)
-        notification = self._make_one(bucket, self.TOPIC_NAME)
-        notification._properties["id"] = self.NOTIFICATION_ID
-        api_request
= client._connection.api_request
-        api_request.return_value = {
+        user_project = "user-project-123"
+        api_response = {
             "topic": self.TOPIC_REF,
             "id": self.NOTIFICATION_ID,
             "etag": self.ETAG,
             "selfLink": self.SELF_LINK,
             "payload_format": NONE_PAYLOAD_FORMAT,
         }
+        client = mock.Mock(spec=["_get_resource", "project"])
+        client._get_resource.return_value = api_response
+        client.project = self.BUCKET_PROJECT
+        bucket = self._make_bucket(client, user_project=user_project)
+        notification = self._make_one(bucket, self.TOPIC_NAME)
+        notification._properties["id"] = self.NOTIFICATION_ID
+        timeout = 42
+        retry = mock.Mock(spec=[])

-        notification.reload(client=client)
+        notification.reload(client=client, timeout=timeout, retry=retry)

         self.assertEqual(notification.etag, self.ETAG)
         self.assertEqual(notification.self_link, self.SELF_LINK)
@@ -433,12 +447,12 @@ def test_reload_hit(self):
         self.assertIsNone(notification.blob_name_prefix)
         self.assertEqual(notification.payload_format, NONE_PAYLOAD_FORMAT)

-        api_request.assert_called_once_with(
-            method="GET",
-            path=self.NOTIFICATION_PATH,
-            query_params={"userProject": USER_PROJECT},
-            timeout=self._get_default_timeout(),
-            retry=DEFAULT_RETRY,
+        expected_query_params = {"userProject": user_project}
+        client._get_resource.assert_called_once_with(
+            self.NOTIFICATION_PATH,
+            query_params=expected_query_params,
+            timeout=timeout,
+            retry=retry,
         )

     def test_delete_wo_notification_id(self):

From e96f8528fe03555b4fd92fcc27ee3311ba649812 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Mon, 7 Jun 2021 12:01:04 -0400
Subject: [PATCH 15/30] refactor: add / use 'Client._patch_resource' method
 (#436)

Add retry support to 'ACL.save', 'ACL.save_predefined', and 'ACL.clear'.

Add 'timeout' and 'retry' support to 'Blob.make_public' and
'Blob.make_private'.

Toward #38.
---
 google/cloud/storage/_helpers.py |   7 +-
 google/cloud/storage/acl.py      |  70 ++++-
 google/cloud/storage/blob.py     |  56 +++-
 google/cloud/storage/bucket.py   |  12 +-
 google/cloud/storage/client.py   |  71 +++++
 tests/unit/test__helpers.py      | 160 +++++------
 tests/unit/test_acl.py           | 385 ++++++++++++++------------
 tests/unit/test_blob.py          | 114 ++++++--
 tests/unit/test_bucket.py        | 447 ++++++++++++++++++-------------
 tests/unit/test_client.py        | 111 ++++++--
 10 files changed, 936 insertions(+), 497 deletions(-)

diff --git a/google/cloud/storage/_helpers.py b/google/cloud/storage/_helpers.py
index a126c9e02..858f4bccb 100644
--- a/google/cloud/storage/_helpers.py
+++ b/google/cloud/storage/_helpers.py
@@ -331,10 +331,9 @@ def patch(
         update_properties = {key: self._properties[key] for key in self._changes}

         # Make the API call.
-        api_response = client._connection.api_request(
-            method="PATCH",
-            path=self.path,
-            data=update_properties,
+        api_response = client._patch_resource(
+            self.path,
+            update_properties,
             query_params=query_params,
             _target_object=self,
             timeout=timeout,
diff --git a/google/cloud/storage/acl.py b/google/cloud/storage/acl.py
index b80447b9d..ec6c5bed9 100644
--- a/google/cloud/storage/acl.py
+++ b/google/cloud/storage/acl.py
@@ -474,7 +474,9 @@ def reload(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY):
         for entry in found.get("items", ()):
             self.add_entity(self.entity_from_dict(entry))

-    def _save(self, acl, predefined, client, timeout=_DEFAULT_TIMEOUT):
+    def _save(
+        self, acl, predefined, client, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY,
+    ):
         """Helper for :meth:`save` and :meth:`save_predefined`.

         :type acl: :class:`google.cloud.storage.acl.ACL`, or a compatible list.
@@ -495,8 +497,19 @@ def _save(self, acl, predefined, client, timeout=_DEFAULT_TIMEOUT): Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. + + :type retry: :class:`~google.api_core.retry.Retry` + :param retry: (Optional) How to retry the RPC. + + A None value will disable retries. + + A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors + and configure backoff and timeout options. """ + client = self._require_client(client) query_params = {"projection": "full"} + if predefined is not None: acl = [] query_params[self._PREDEFINED_QUERY_PARAM] = predefined @@ -505,21 +518,25 @@ def _save(self, acl, predefined, client, timeout=_DEFAULT_TIMEOUT): query_params["userProject"] = self.user_project path = self.save_path - client = self._require_client(client) - result = client._connection.api_request( - method="PATCH", - path=path, - data={self._URL_PATH_ELEM: list(acl)}, + result = client._patch_resource( + path, + {self._URL_PATH_ELEM: list(acl)}, query_params=query_params, timeout=timeout, + retry=retry, ) + self.entities.clear() + for entry in result.get(self._URL_PATH_ELEM, ()): self.add_entity(self.entity_from_dict(entry)) + self.loaded = True - def save(self, acl=None, client=None, timeout=_DEFAULT_TIMEOUT): + def save( + self, acl=None, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY + ): """Save this ACL for the current bucket. If :attr:`user_project` is set, bills the API request to that project. @@ -538,6 +555,15 @@ def save(self, acl=None, client=None, timeout=_DEFAULT_TIMEOUT): Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. + + :type retry: :class:`~google.api_core.retry.Retry` + :param retry: (Optional) How to retry the RPC. + + A None value will disable retries. + + A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors + and configure backoff and timeout options. """ if acl is None: acl = self @@ -546,9 +572,11 @@ def save(self, acl=None, client=None, timeout=_DEFAULT_TIMEOUT): save_to_backend = True if save_to_backend: - self._save(acl, None, client, timeout=timeout) + self._save(acl, None, client, timeout=timeout, retry=retry) - def save_predefined(self, predefined, client=None, timeout=_DEFAULT_TIMEOUT): + def save_predefined( + self, predefined, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, + ): """Save this ACL for the current bucket using a predefined ACL. If :attr:`user_project` is set, bills the API request to that project. @@ -570,11 +598,20 @@ def save_predefined(self, predefined, client=None, timeout=_DEFAULT_TIMEOUT): Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. + + :type retry: :class:`~google.api_core.retry.Retry` + :param retry: (Optional) How to retry the RPC. + + A None value will disable retries. + + A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors + and configure backoff and timeout options. 
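+
+        Illustrative usage only (a sketch; the ``bucket`` handle and the
+        literal timeout value are placeholders, not part of this change)::
+
+            bucket.acl.all().grant_read()
+            bucket.acl.save(timeout=42, retry=None)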
""" predefined = self.validate_predefined(predefined) - self._save(None, predefined, client, timeout=timeout) + self._save(None, predefined, client, timeout=timeout, retry=retry) - def clear(self, client=None, timeout=_DEFAULT_TIMEOUT): + def clear(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): """Remove all ACL entries. If :attr:`user_project` is set, bills the API request to that project. @@ -594,8 +631,17 @@ def clear(self, client=None, timeout=_DEFAULT_TIMEOUT): Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. + + :type retry: :class:`~google.api_core.retry.Retry` + :param retry: (Optional) How to retry the RPC. + + A None value will disable retries. + + A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors + and configure backoff and timeout options. """ - self.save([], client=client, timeout=timeout) + self.save([], client=client, timeout=timeout, retry=retry) class BucketACL(ACL): diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index a105053de..0679c0222 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -2978,27 +2978,75 @@ def test_iam_permissions( return resp.get("permissions", []) - def make_public(self, client=None): + def make_public( + self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, + ): """Update blob's ACL, granting read access to anonymous users. :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType`` :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the blob's bucket. + + :type timeout: float or tuple + :param timeout: (Optional) The amount of time, in seconds, to wait + for the server response. The timeout applies to each underlying + request. + + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. + + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable retries. + A google.api_core.retry.Retry value will enable retries, and the object will + define retriable response codes and errors and configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and + activates it only if certain conditions are met. This class exists to provide safe defaults + for RPC calls that are not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a condition such as + if_metageneration_match is set. + + See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for + information on retry types and how to configure them. """ self.acl.all().grant_read() - self.acl.save(client=client) + self.acl.save(client=client, timeout=timeout, retry=retry) - def make_private(self, client=None): + def make_private( + self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, + ): """Update blob's ACL, revoking read access for anonymous users. :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType`` :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the blob's bucket. 
+ + :type timeout: float or tuple + :param timeout: (Optional) The amount of time, in seconds, to wait + for the server response. The timeout applies to each underlying + request. + + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. + + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable retries. + A google.api_core.retry.Retry value will enable retries, and the object will + define retriable response codes and errors and configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and + activates it only if certain conditions are met. This class exists to provide safe defaults + for RPC calls that are not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a condition such as + if_metageneration_match is set. + + See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for + information on retry types and how to configure them. """ self.acl.all().revoke_read() - self.acl.save(client=client) + self.acl.save(client=client, timeout=timeout, retry=retry) def compose( self, diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py index aacc2efe5..1407ffd88 100644 --- a/google/cloud/storage/bucket.py +++ b/google/cloud/storage/bucket.py @@ -1065,9 +1065,9 @@ def patch( # Call the superclass method. super(Bucket, self).patch( client=client, - timeout=timeout, if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, + timeout=timeout, retry=retry, ) @@ -1993,7 +1993,7 @@ def copy_blob( ) if not preserve_acl: - new_blob.acl.save(acl={}, client=client, timeout=timeout) + new_blob.acl.save(acl={}, client=client, timeout=timeout, retry=retry) new_blob._set_properties(copy_result) return new_blob @@ -3068,7 +3068,7 @@ def make_public( for each blob. """ self.acl.all().grant_read() - self.acl.save(client=client, timeout=timeout) + self.acl.save(client=client, timeout=timeout, retry=retry) if future: doa = self.default_object_acl @@ -3099,7 +3099,7 @@ def make_public( for blob in blobs: blob.acl.all().grant_read() - blob.acl.save(client=client, timeout=timeout) + blob.acl.save(client=client, timeout=timeout, retry=retry) def make_private( self, @@ -3155,7 +3155,7 @@ def make_private( for each blob. """ self.acl.all().revoke_read() - self.acl.save(client=client, timeout=timeout) + self.acl.save(client=client, timeout=timeout, retry=retry) if future: doa = self.default_object_acl @@ -3186,7 +3186,7 @@ def make_private( for blob in blobs: blob.acl.all().revoke_read() - blob.acl.save(client=client, timeout=timeout) + blob.acl.save(client=client, timeout=timeout, retry=retry) def generate_upload_policy(self, conditions, expiration=None, client=None): """Create a signed upload policy for uploading objects. diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py index 848012725..db81ac292 100644 --- a/google/cloud/storage/client.py +++ b/google/cloud/storage/client.py @@ -386,6 +386,77 @@ def _get_resource( _target_object=_target_object, ) + def _patch_resource( + self, + path, + data, + query_params=None, + headers=None, + timeout=_DEFAULT_TIMEOUT, + retry=DEFAULT_RETRY, + _target_object=None, + ): + """Helper for bucket / blob methods making API 'PATCH' calls. 
+ + Args: + path str: + The path of the resource to fetch. + + data dict: + The data to be patched. + + query_params Optional[dict]: + HTTP query parameters to be passed + + headers Optional[dict]: + HTTP headers to be passed + + timeout (Optional[Union[float, Tuple[float, float]]]): + The amount of time, in seconds, to wait for the server response. + + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. + + retry (Optional[Union[google.api_core.retry.Retry, google.cloud.storage.retry.ConditionalRetryPolicy]]): + How to retry the RPC. A None value will disable retries. + A google.api_core.retry.Retry value will enable retries, and the object will + define retriable response codes and errors and configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and + activates it only if certain conditions are met. This class exists to provide safe defaults + for RPC calls that are not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a condition such as + if_metageneration_match is set. + + See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for + information on retry types and how to configure them. + + _target_object (Union[ \ + :class:`~google.cloud.storage.bucket.Bucket`, \ + :class:`~google.cloud.storage.bucket.blob`, \ + ]): + Object to which future data is to be applied -- only relevant + in the context of a batch. + + Returns: + dict + The JSON resource fetched + + Raises: + google.cloud.exceptions.NotFound + If the bucket is not found. + """ + return self._connection.api_request( + method="PATCH", + path=path, + data=data, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=_target_object, + ) + def get_bucket( self, bucket_or_name, diff --git a/tests/unit/test__helpers.py b/tests/unit/test__helpers.py index f1c6b0436..143e6d239 100644 --- a/tests/unit/test__helpers.py +++ b/tests/unit/test__helpers.py @@ -245,106 +245,110 @@ def test__patch_property(self): derived._patch_property("foo", "Foo") self.assertEqual(derived._properties, {"foo": "Foo"}) - def test_patch(self): - connection = _Connection({"foo": "Foo"}) - client = _Client(connection) - derived = self._derivedClass("/path")() + def test_patch_w_defaults(self): + path = "/path" + api_response = {"foo": "Foo"} + derived = self._derivedClass(path)() # Make sure changes is non-empty, so we can observe a change. - BAR = object() - BAZ = object() - derived._properties = {"bar": BAR, "baz": BAZ} + bar = object() + baz = object() + derived._properties = {"bar": bar, "baz": baz} derived._changes = set(["bar"]) # Ignore baz. - derived.patch(client=client, timeout=42) - self.assertEqual(derived._properties, {"foo": "Foo"}) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "PATCH", - "path": "/path", - "query_params": {"projection": "full"}, - # Since changes does not include `baz`, we don't see it sent. - "data": {"bar": BAR}, - "_target_object": derived, - "timeout": 42, - "retry": DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, - }, - ) + client = derived.client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + + derived.patch() + + self.assertEqual(derived._properties, api_response) # Make sure changes get reset by patch(). 
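The helper gives every PATCH in the library a single choke point for timeout
and retry plumbing. Called directly it would look roughly like the sketch
below; the path and payload are hypothetical, and real callers normally go
through higher-level methods such as patch() or acl.save():

    from google.cloud import storage

    client = storage.Client()
    updated = client._patch_resource(
        "/b/my-bucket",                       # resource path (hypothetical)
        {"labels": {"env": "prod"}},          # fields to change
        query_params={"projection": "full"},
        timeout=60,
    )
    # 'updated' is the JSON resource returned by the API.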
self.assertEqual(derived._changes, set()) - def test_patch_with_metageneration_match(self): - GENERATION_NUMBER = 9 - METAGENERATION_NUMBER = 6 + expected_data = {"bar": bar} + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, + _target_object=derived, + ) - connection = _Connection({"foo": "Foo"}) - client = _Client(connection) - derived = self._derivedClass("/path")() + def test_patch_w_metageneration_match_w_timeout_w_retry(self): + path = "/path" + api_response = {"foo": "Foo"} + derived = self._derivedClass(path)() # Make sure changes is non-empty, so we can observe a change. - BAR = object() - BAZ = object() - derived._properties = {"bar": BAR, "baz": BAZ} + bar = object() + baz = object() + derived._properties = {"bar": bar, "baz": baz} derived._changes = set(["bar"]) # Ignore baz. + client = derived.client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + timeout = 42 + retry = mock.Mock(spec=[]) + generation_number = 9 + metageneration_number = 6 + derived.patch( - client=client, - timeout=42, - if_generation_match=GENERATION_NUMBER, - if_metageneration_match=METAGENERATION_NUMBER, + if_generation_match=generation_number, + if_metageneration_match=metageneration_number, + timeout=timeout, + retry=retry, ) + self.assertEqual(derived._properties, {"foo": "Foo"}) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "PATCH", - "path": "/path", - "query_params": { - "projection": "full", - "ifGenerationMatch": GENERATION_NUMBER, - "ifMetagenerationMatch": METAGENERATION_NUMBER, - }, - # Since changes does not include `baz`, we don't see it sent. - "data": {"bar": BAR}, - "_target_object": derived, - "timeout": 42, - "retry": DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, - }, - ) # Make sure changes get reset by patch(). self.assertEqual(derived._changes, set()) - def test_patch_w_user_project(self): + expected_data = {"bar": bar} + expected_query_params = { + "projection": "full", + "ifGenerationMatch": generation_number, + "ifMetagenerationMatch": metageneration_number, + } + client._patch_resource.assert_called_once_with( + path, + expected_data, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + _target_object=derived, + ) + + def test_patch_w_user_project_w_explicit_client(self): + path = "/path" user_project = "user-project-123" - connection = _Connection({"foo": "Foo"}) - client = _Client(connection) - derived = self._derivedClass("/path", user_project)() + api_response = {"foo": "Foo"} + derived = self._derivedClass(path, user_project)() # Make sure changes is non-empty, so we can observe a change. - BAR = object() - BAZ = object() - derived._properties = {"bar": BAR, "baz": BAZ} + bar = object() + baz = object() + derived._properties = {"bar": bar, "baz": baz} derived._changes = set(["bar"]) # Ignore baz. + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + derived.patch(client=client) + self.assertEqual(derived._properties, {"foo": "Foo"}) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "PATCH", - "path": "/path", - "query_params": {"projection": "full", "userProject": user_project}, - # Since changes does not include `baz`, we don't see it sent. 
- "data": {"bar": BAR}, - "_target_object": derived, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, - }, - ) # Make sure changes get reset by patch(). self.assertEqual(derived._changes, set()) + expected_data = {"bar": bar} + expected_query_params = { + "projection": "full", + "userProject": user_project, + } + client._patch_resource.assert_called_once_with( + path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, + _target_object=derived, + ) + def test_update(self): connection = _Connection({"foo": "Foo"}) client = _Client(connection) diff --git a/tests/unit/test_acl.py b/tests/unit/test_acl.py index 7cea5c8fc..a31b90840 100644 --- a/tests/unit/test_acl.py +++ b/tests/unit/test_acl.py @@ -613,195 +613,269 @@ def test_reload_w_nonempty_result_w_user_project(self): ) def test_save_none_set_none_passed(self): - connection = _Connection() - client = _Client(connection) + save_path = "/testing" + client = mock.Mock(spec=["_patch_resource"]) acl = self._make_one() - acl.save_path = "/testing" + acl.save_path = save_path + acl.save(client=client) - kw = connection._requested - self.assertEqual(len(kw), 0) - def test_save_existing_missing_none_passed(self): - connection = _Connection({}) - client = _Client(connection) - acl = self._make_one() - acl.save_path = "/testing" + client._patch_resource.assert_not_called() + + def test_save_w_empty_response_w_defaults(self): + class Derived(self._get_target_class()): + client = None + + save_path = "/testing" + api_response = {} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + acl = Derived() + acl.client = client + acl.save_path = save_path acl.loaded = True - acl.save(client=client, timeout=42) + + acl.save() + self.assertEqual(list(acl), []) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "PATCH") - self.assertEqual(kw[0]["path"], "/testing") - self.assertEqual(kw[0]["data"], {"acl": []}) - self.assertEqual(kw[0]["query_params"], {"projection": "full"}) - self.assertEqual(kw[0]["timeout"], 42) - - def test_save_no_acl(self): - ROLE = "role" - AFTER = [{"entity": "allUsers", "role": ROLE}] - connection = _Connection({"acl": AFTER}) - client = _Client(connection) + + expected_data = {"acl": []} + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + save_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) + + def test_save_no_acl_w_timeout(self): + save_path = "/testing" + role = "role" + expected_acl = [{"entity": "allUsers", "role": role}] + api_response = {"acl": expected_acl} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response acl = self._make_one() - acl.save_path = "/testing" + acl.save_path = save_path acl.loaded = True - acl.entity("allUsers").grant(ROLE) - acl.save(client=client) - self.assertEqual(list(acl), AFTER) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "PATCH") - self.assertEqual(kw[0]["path"], "/testing") - self.assertEqual( - kw[0], - { - "method": "PATCH", - "path": "/testing", - "query_params": {"projection": "full"}, - "data": {"acl": AFTER}, - "timeout": self._get_default_timeout(), - }, + acl.entity("allUsers").grant(role) + timeout = 42 + + acl.save(client=client, 
timeout=timeout) + + self.assertEqual(list(acl), expected_acl) + + expected_data = api_response + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + save_path, + expected_data, + query_params=expected_query_params, + timeout=timeout, + retry=DEFAULT_RETRY, ) - def test_save_w_acl_w_user_project(self): - ROLE1 = "role1" - ROLE2 = "role2" - STICKY = {"entity": "allUsers", "role": ROLE2} - USER_PROJECT = "user-project-123" - new_acl = [{"entity": "allUsers", "role": ROLE1}] - connection = _Connection({"acl": [STICKY] + new_acl}) - client = _Client(connection) + def test_save_w_acl_w_user_project_w_retry(self): + save_path = "/testing" + user_project = "user-project-123" + role1 = "role1" + role2 = "role2" + sticky = {"entity": "allUsers", "role": role2} + new_acl = [{"entity": "allUsers", "role": role1}] + api_response = {"acl": [sticky] + new_acl} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response acl = self._make_one() - acl.save_path = "/testing" + acl.save_path = save_path acl.loaded = True - acl.user_project = USER_PROJECT + acl.user_project = user_project + retry = mock.Mock(spec=[]) - acl.save(new_acl, client=client) + acl.save(new_acl, client=client, retry=retry) entries = list(acl) self.assertEqual(len(entries), 2) - self.assertTrue(STICKY in entries) + self.assertTrue(sticky in entries) self.assertTrue(new_acl[0] in entries) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "PATCH", - "path": "/testing", - "query_params": {"projection": "full", "userProject": USER_PROJECT}, - "data": {"acl": new_acl}, - "timeout": self._get_default_timeout(), - }, + + expected_data = {"acl": new_acl} + expected_query_params = {"projection": "full", "userProject": user_project} + client._patch_resource.assert_called_once_with( + save_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=retry, ) def test_save_prefefined_invalid(self): - connection = _Connection() - client = _Client(connection) + save_path = "/testing" + client = mock.Mock(spec=["_patch_resource"]) acl = self._make_one() - acl.save_path = "/testing" + acl.save_path = save_path acl.loaded = True + with self.assertRaises(ValueError): acl.save_predefined("bogus", client=client) - def test_save_predefined_valid(self): - PREDEFINED = "private" - connection = _Connection({"acl": []}) - client = _Client(connection) - acl = self._make_one() - acl.save_path = "/testing" + client._patch_resource.assert_not_called() + + def test_save_predefined_w_defaults(self): + class Derived(self._get_target_class()): + client = None + + save_path = "/testing" + predefined = "private" + api_response = {"acl": []} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + acl = Derived() + acl.save_path = save_path acl.loaded = True - acl.save_predefined(PREDEFINED, client=client, timeout=42) + acl.client = client + + acl.save_predefined(predefined) + entries = list(acl) self.assertEqual(len(entries), 0) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "PATCH", - "path": "/testing", - "query_params": {"projection": "full", "predefinedAcl": PREDEFINED}, - "data": {"acl": []}, - "timeout": 42, - }, + + expected_data = {"acl": []} + expected_query_params = { + "projection": "full", + "predefinedAcl": predefined, + } + 
client._patch_resource.assert_called_once_with( + save_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, ) - def test_save_predefined_w_XML_alias(self): - PREDEFINED_XML = "project-private" - PREDEFINED_JSON = "projectPrivate" - connection = _Connection({"acl": []}) - client = _Client(connection) + def test_save_predefined_w_XML_alias_w_timeout(self): + save_path = "/testing" + predefined_xml = "project-private" + predefined_json = "projectPrivate" + api_response = {"acl": []} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response acl = self._make_one() - acl.save_path = "/testing" + acl.save_path = save_path acl.loaded = True - acl.save_predefined(PREDEFINED_XML, client=client) + timeout = 42 + + acl.save_predefined(predefined_xml, client=client, timeout=timeout) + entries = list(acl) self.assertEqual(len(entries), 0) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "PATCH", - "path": "/testing", - "query_params": { - "projection": "full", - "predefinedAcl": PREDEFINED_JSON, - }, - "data": {"acl": []}, - "timeout": self._get_default_timeout(), - }, + + expected_data = {"acl": []} + expected_query_params = { + "projection": "full", + "predefinedAcl": predefined_json, + } + client._patch_resource.assert_called_once_with( + save_path, + expected_data, + query_params=expected_query_params, + timeout=timeout, + retry=DEFAULT_RETRY, ) - def test_save_predefined_valid_w_alternate_query_param(self): + def test_save_predefined_w_alternate_query_param_w_retry(self): # Cover case where subclass overrides _PREDEFINED_QUERY_PARAM - PREDEFINED = "publicRead" - connection = _Connection({"acl": []}) - client = _Client(connection) + save_path = "/testing" + predefined = "publicRead" + api_response = {"acl": []} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response acl = self._make_one() - acl.save_path = "/testing" + acl.save_path = save_path acl.loaded = True acl._PREDEFINED_QUERY_PARAM = "alternate" - acl.save_predefined(PREDEFINED, client=client) + retry = mock.Mock(spec=[]) + + acl.save_predefined(predefined, client=client, retry=retry) + entries = list(acl) self.assertEqual(len(entries), 0) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "PATCH", - "path": "/testing", - "query_params": {"projection": "full", "alternate": PREDEFINED}, - "data": {"acl": []}, - "timeout": self._get_default_timeout(), - }, + + expected_data = {"acl": []} + expected_query_params = { + "projection": "full", + "alternate": predefined, + } + client._patch_resource.assert_called_once_with( + save_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=retry, ) - def test_clear(self): - ROLE1 = "role1" - ROLE2 = "role2" - STICKY = {"entity": "allUsers", "role": ROLE2} - connection = _Connection({"acl": [STICKY]}) - client = _Client(connection) - acl = self._make_one() - acl.save_path = "/testing" - acl.loaded = True - acl.entity("allUsers", ROLE1) - acl.clear(client=client, timeout=42) - self.assertEqual(list(acl), [STICKY]) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "PATCH", - "path": "/testing", - "query_params": {"projection": "full"}, - "data": {"acl": []}, - "timeout": 42, - }, + def test_clear_w_defaults(self): + class 
Derived(self._get_target_class()): + client = None + + save_path = "/testing" + role1 = "role1" + role2 = "role2" + sticky = {"entity": "allUsers", "role": role2} + api_response = {"acl": [sticky]} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + acl = Derived() + acl.client = client + acl.save_path = save_path + acl.loaded = True + acl.entity("allUsers", role1) + + acl.clear() + + self.assertEqual(list(acl), [sticky]) + + expected_data = {"acl": []} + expected_query_params = { + "projection": "full", + } + client._patch_resource.assert_called_once_with( + save_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) + + def test_clear_w_explicit_client_w_timeout_w_retry(self): + save_path = "/testing" + role1 = "role1" + role2 = "role2" + sticky = {"entity": "allUsers", "role": role2} + api_response = {"acl": [sticky]} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + acl = self._make_one() + acl.save_path = save_path + acl.loaded = True + acl.entity("allUsers", role1) + timeout = 42 + retry = mock.Mock(spec=[]) + + acl.clear(client=client, timeout=timeout, retry=retry) + + self.assertEqual(list(acl), [sticky]) + + expected_data = {"acl": []} + expected_query_params = { + "projection": "full", + } + client._patch_resource.assert_called_once_with( + save_path, + expected_data, + query_params=expected_query_params, + timeout=timeout, + retry=retry, ) @@ -913,22 +987,3 @@ def __init__(self, name): @property def path(self): return "/b/%s" % self.name - - -class _Connection(object): - _delete_ok = False - - def __init__(self, *responses): - self._responses = responses - self._requested = [] - self._deleted = [] - - def api_request(self, **kw): - self._requested.append(kw) - response, self._responses = self._responses[0], self._responses[1:] - return response - - -class _Client(object): - def __init__(self, connection): - self._connection = connection diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index 6d8fc7b33..a24aaf36c 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -3418,43 +3418,109 @@ def test_test_iam_permissions_w_user_project_w_timeout_w_retry(self): _target_object=None, ) - def test_make_public(self): + def test_make_public_w_defaults(self): from google.cloud.storage.acl import _ACLEntity - BLOB_NAME = "blob-name" + blob_name = "blob-name" permissive = [{"entity": "allUsers", "role": _ACLEntity.READER_ROLE}] - after = ({"status": http_client.OK}, {"acl": permissive}) - connection = _Connection(after) - client = _Client(connection) + api_response = {"acl": permissive} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response bucket = _Bucket(client=client) - blob = self._make_one(BLOB_NAME, bucket=bucket) + blob = self._make_one(blob_name, bucket=bucket) blob.acl.loaded = True + blob.make_public() + self.assertEqual(list(blob.acl), permissive) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "PATCH") - self.assertEqual(kw[0]["path"], "/b/name/o/%s" % BLOB_NAME) - self.assertEqual(kw[0]["data"], {"acl": permissive}) - self.assertEqual(kw[0]["query_params"], {"projection": "full"}) - def test_make_private(self): - BLOB_NAME = "blob-name" + expected_patch_data = {"acl": permissive} + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + 
blob.path, + expected_patch_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) + + def test_make_public_w_timeout_w_retry(self): + from google.cloud.storage.acl import _ACLEntity + + blob_name = "blob-name" + permissive = [{"entity": "allUsers", "role": _ACLEntity.READER_ROLE}] + api_response = {"acl": permissive} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + bucket = _Bucket(client=client) + blob = self._make_one(blob_name, bucket=bucket) + blob.acl.loaded = True + timeout = 42 + retry = mock.Mock(spec=[]) + + blob.make_public(timeout=timeout, retry=retry) + + self.assertEqual(list(blob.acl), permissive) + + expected_patch_data = {"acl": permissive} + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + blob.path, + expected_patch_data, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + ) + + def test_make_private_w_defaults(self): + blob_name = "blob-name" no_permissions = [] - after = ({"status": http_client.OK}, {"acl": no_permissions}) - connection = _Connection(after) - client = _Client(connection) + api_response = {"acl": no_permissions} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response bucket = _Bucket(client=client) - blob = self._make_one(BLOB_NAME, bucket=bucket) + blob = self._make_one(blob_name, bucket=bucket) blob.acl.loaded = True + blob.make_private() + self.assertEqual(list(blob.acl), no_permissions) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "PATCH") - self.assertEqual(kw[0]["path"], "/b/name/o/%s" % BLOB_NAME) - self.assertEqual(kw[0]["data"], {"acl": no_permissions}) - self.assertEqual(kw[0]["query_params"], {"projection": "full"}) + + expected_patch_data = {"acl": no_permissions} + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + blob.path, + expected_patch_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) + + def test_make_private_w_timeout_w_retry(self): + blob_name = "blob-name" + no_permissions = [] + api_response = {"acl": no_permissions} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + bucket = _Bucket(client=client) + blob = self._make_one(blob_name, bucket=bucket) + blob.acl.loaded = True + timeout = 42 + retry = mock.Mock(spec=[]) + + blob.make_private(timeout=timeout, retry=retry) + + self.assertEqual(list(blob.acl), no_permissions) + + expected_patch_data = {"acl": no_permissions} + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + blob.path, + expected_patch_data, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + ) def test_compose_wo_content_type_set(self): SOURCE_1 = "source-1" diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py index 5ff2209c7..fa8f85f98 100644 --- a/tests/unit/test_bucket.py +++ b/tests/unit/test_bucket.py @@ -1623,41 +1623,49 @@ def test_copy_blobs_w_generation_match(self): def test_copy_blobs_preserve_acl(self): from google.cloud.storage.acl import ObjectACL - SOURCE = "source" - DEST = "dest" - BLOB_NAME = "blob-name" - NEW_NAME = "new_name" + source_name = "source" + dest_name = "dest" + blob_name = "blob-name" + new_name = "new_name" connection = _Connection({}, {}) client = _Client(connection) - source 
= self._make_one(client=client, name=SOURCE) - dest = self._make_one(client=client, name=DEST) - blob = self._make_blob(SOURCE, BLOB_NAME) + + # Temporary, until we get a real client in place. + client._patch_resource = mock.Mock(return_value={}) + + source = self._make_one(client=client, name=source_name) + dest = self._make_one(client=client, name=dest_name) + blob = self._make_blob(source_name, blob_name) new_blob = source.copy_blob( - blob, dest, NEW_NAME, client=client, preserve_acl=False + blob, dest, new_name, client=client, preserve_acl=False ) self.assertIs(new_blob.bucket, dest) - self.assertEqual(new_blob.name, NEW_NAME) + self.assertEqual(new_blob.name, new_name) self.assertIsInstance(new_blob.acl, ObjectACL) - kw1, kw2 = connection._requested - COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format( - SOURCE, BLOB_NAME, DEST, NEW_NAME + (kw1,) = connection._requested + copy_path = "/b/{}/o/{}/copyTo/b/{}/o/{}".format( + source_name, blob_name, dest_name, new_name ) - NEW_BLOB_PATH = "/b/{}/o/{}".format(DEST, NEW_NAME) - self.assertEqual(kw1["method"], "POST") - self.assertEqual(kw1["path"], COPY_PATH) + self.assertEqual(kw1["path"], copy_path) self.assertEqual(kw1["query_params"], {}) self.assertEqual(kw1["timeout"], self._get_default_timeout()) self.assertEqual(kw1["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) - self.assertEqual(kw2["method"], "PATCH") - self.assertEqual(kw2["path"], NEW_BLOB_PATH) - self.assertEqual(kw2["query_params"], {"projection": "full"}) - self.assertEqual(kw2["timeout"], self._get_default_timeout()) + expected_patch_path = "/b/{}/o/{}".format(dest_name, new_name) + expected_data = {"acl": []} + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + expected_patch_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + ) def test_copy_blobs_w_name_and_user_project(self): SOURCE = "source" @@ -2102,23 +2110,39 @@ def test_labels_setter_with_removal(self): self.assertEqual(bucket.labels, {"color": "red"}) # Make sure that a patch call correctly removes the flavor label. - client = mock.NonCallableMock(spec=("_connection",)) - client._connection = mock.NonCallableMock(spec=("api_request",)) + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = {} + bucket.patch(client=client) - client._connection.api_request.assert_called() - _, _, kwargs = client._connection.api_request.mock_calls[0] - self.assertEqual(len(kwargs["data"]["labels"]), 2) - self.assertEqual(kwargs["data"]["labels"]["color"], "red") - self.assertIsNone(kwargs["data"]["labels"]["flavor"]) - self.assertEqual(kwargs["timeout"], self._get_default_timeout()) + + expected_patch_data = { + "labels": {"color": "red", "flavor": None}, + } + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + bucket.path, + expected_patch_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, + _target_object=bucket, + ) # A second patch call should be a no-op for labels. 
- client._connection.api_request.reset_mock() + client._patch_resource.reset_mock() + bucket.patch(client=client, timeout=42) - client._connection.api_request.assert_called() - _, _, kwargs = client._connection.api_request.mock_calls[0] - self.assertNotIn("labels", kwargs["data"]) - self.assertEqual(kwargs["timeout"], 42) + + expected_patch_data = {} + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + bucket.path, + expected_patch_data, + query_params=expected_query_params, + timeout=42, + retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, + _target_object=bucket, + ) def test_location_type_getter_unset(self): bucket = self._make_one() @@ -2808,59 +2832,72 @@ def test_test_iam_permissions_w_user_project_w_timeout_w_retry(self): def test_make_public_defaults(self): from google.cloud.storage.acl import _ACLEntity - NAME = "name" + name = "name" permissive = [{"entity": "allUsers", "role": _ACLEntity.READER_ROLE}] - after = {"acl": permissive, "defaultObjectAcl": []} - connection = _Connection(after) - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) + api_response = {"acl": permissive, "defaultObjectAcl": []} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + bucket = self._make_one(client=client, name=name) bucket.acl.loaded = True bucket.default_object_acl.loaded = True + bucket.make_public() + self.assertEqual(list(bucket.acl), permissive) self.assertEqual(list(bucket.default_object_acl), []) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "PATCH") - self.assertEqual(kw[0]["path"], "/b/%s" % NAME) - self.assertEqual(kw[0]["data"], {"acl": after["acl"]}) - self.assertEqual(kw[0]["query_params"], {"projection": "full"}) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) + + expected_path = bucket.path + expected_data = {"acl": permissive} + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) def _make_public_w_future_helper(self, default_object_acl_loaded=True): from google.cloud.storage.acl import _ACLEntity - NAME = "name" + name = "name" + get_api_response = {"items": []} permissive = [{"entity": "allUsers", "role": _ACLEntity.READER_ROLE}] - after1 = {"acl": permissive, "defaultObjectAcl": []} - after2 = {"acl": permissive, "defaultObjectAcl": permissive} - connection = _Connection(after1, after2) - client = _Client(connection) - - # Temporary workaround until we use real mock client - client._get_resource = mock.Mock(return_value={"items": []}) + acl_patched_response = {"acl": permissive, "defaultObjectAcl": []} + dac_patched_response = {"acl": permissive, "defaultObjectAcl": permissive} + client = mock.Mock(spec=["_get_resource", "_patch_resource"]) + client._get_resource.return_value = get_api_response + client._patch_resource.side_effect = [ + acl_patched_response, + dac_patched_response, + ] - bucket = self._make_one(client=client, name=NAME) + bucket = self._make_one(client=client, name=name) bucket.acl.loaded = True bucket.default_object_acl.loaded = default_object_acl_loaded + bucket.make_public(future=True) + self.assertEqual(list(bucket.acl), permissive) self.assertEqual(list(bucket.default_object_acl), permissive) - kw = connection._requested - self.assertEqual(len(kw), 2) - 
self.assertEqual(kw[0]["method"], "PATCH") - self.assertEqual(kw[0]["path"], "/b/%s" % NAME) - self.assertEqual(kw[0]["data"], {"acl": permissive}) - self.assertEqual(kw[0]["query_params"], {"projection": "full"}) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) - self.assertEqual(kw[1]["method"], "PATCH") - self.assertEqual(kw[1]["path"], "/b/%s" % NAME) - self.assertEqual(kw[1]["data"], {"defaultObjectAcl": permissive}) - self.assertEqual(kw[1]["query_params"], {"projection": "full"}) - self.assertEqual(kw[1]["timeout"], self._get_default_timeout()) + + self.assertEqual(len(client._patch_resource.call_args_list), 2) + expected_acl_data = {"acl": permissive} + expected_dac_data = {"defaultObjectAcl": permissive} + expected_kw = { + "query_params": {"projection": "full"}, + "timeout": self._get_default_timeout(), + "retry": DEFAULT_RETRY, + } + client._patch_resource.assert_has_calls( + [ + ((bucket.path, expected_acl_data), expected_kw), + ((bucket.path, expected_dac_data), expected_kw), + ] + ) if not default_object_acl_loaded: - expected_path = "/b/%s/defaultObjectAcl" % (NAME,) + expected_path = "/b/%s/defaultObjectAcl" % (name,) expected_query_params = {} client._get_resource.assert_called_once_with( expected_path, @@ -2868,6 +2905,8 @@ def _make_public_w_future_helper(self, default_object_acl_loaded=True): timeout=self._get_default_timeout(), retry=DEFAULT_RETRY, ) + else: + client._get_resource.assert_not_called() def test_make_public_w_future(self): self._make_public_w_future_helper(default_object_acl_loaded=True) @@ -2898,120 +2937,150 @@ def all(self): def grant_read(self): self._granted = True - def save(self, client=None, timeout=None): + def save(self, client=None, timeout=None, retry=None): _saved.append( - (self._bucket, self._name, self._granted, client, timeout) + (self._bucket, self._name, self._granted, client, timeout, retry) ) - def item_to_blob(self, item): - return _Blob(self.bucket, item["name"]) - - NAME = "name" - BLOB_NAME = "blob-name" + name = "name" + blob_name = "blob-name" permissive = [{"entity": "allUsers", "role": _ACLEntity.READER_ROLE}] - after = {"acl": permissive, "defaultObjectAcl": []} - connection = _Connection(after, {"items": [{"name": BLOB_NAME}]}) - client = self._make_client() - client._base_connection = connection - bucket = self._make_one(client=client, name=NAME) + + patch_acl_response = {"acl": permissive, "defaultObjectAcl": []} + client = mock.Mock(spec=["list_blobs", "_patch_resource"]) + client._patch_resource.return_value = patch_acl_response + + bucket = self._make_one(client=client, name=name) bucket.acl.loaded = True bucket.default_object_acl.loaded = True - with mock.patch("google.cloud.storage.client._item_to_blob", new=item_to_blob): - bucket.make_public(recursive=True, timeout=42, retry=DEFAULT_RETRY) + list_blobs_response = iter([_Blob(bucket, blob_name)]) + client.list_blobs.return_value = list_blobs_response + + timeout = 42 + retry = mock.Mock(spec=[]) + + bucket.make_public(recursive=True, timeout=timeout, retry=retry) self.assertEqual(list(bucket.acl), permissive) self.assertEqual(list(bucket.default_object_acl), []) - self.assertEqual(_saved, [(bucket, BLOB_NAME, True, None, 42)]) - kw = connection._requested - self.assertEqual(len(kw), 2) - self.assertEqual(kw[0]["method"], "PATCH") - self.assertEqual(kw[0]["path"], "/b/%s" % NAME) - self.assertEqual(kw[0]["data"], {"acl": permissive}) - self.assertEqual(kw[0]["query_params"], {"projection": "full"}) - self.assertEqual(kw[0]["timeout"], 42) - 
self.assertEqual(kw[1]["method"], "GET") - self.assertEqual(kw[1]["path"], "/b/%s/o" % NAME) - self.assertEqual(kw[1]["retry"], DEFAULT_RETRY) - max_results = bucket._MAX_OBJECTS_FOR_ITERATION + 1 - self.assertEqual( - kw[1]["query_params"], {"maxResults": max_results, "projection": "full"} + self.assertEqual(_saved, [(bucket, blob_name, True, None, timeout, retry)]) + + expected_patch_data = {"acl": permissive} + expected_patch_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + bucket.path, + expected_patch_data, + query_params=expected_patch_query_params, + timeout=timeout, + retry=retry, ) - self.assertEqual(kw[1]["timeout"], 42) + client.list_blobs.assert_called_once() def test_make_public_recursive_too_many(self): from google.cloud.storage.acl import _ACLEntity - PERMISSIVE = [{"entity": "allUsers", "role": _ACLEntity.READER_ROLE}] - AFTER = {"acl": PERMISSIVE, "defaultObjectAcl": []} + permissive = [{"entity": "allUsers", "role": _ACLEntity.READER_ROLE}] - NAME = "name" - BLOB_NAME1 = "blob-name1" - BLOB_NAME2 = "blob-name2" - GET_BLOBS_RESP = {"items": [{"name": BLOB_NAME1}, {"name": BLOB_NAME2}]} - connection = _Connection(AFTER, GET_BLOBS_RESP) - client = self._make_client() - client._base_connection = connection - bucket = self._make_one(client=client, name=NAME) + name = "name" + blob1 = mock.Mock(spec=[]) + blob2 = mock.Mock(spec=[]) + patch_acl_response = {"acl": permissive, "defaultObjectAcl": []} + list_blobs_response = iter([blob1, blob2]) + client = mock.Mock(spec=["list_blobs", "_patch_resource"]) + client.list_blobs.return_value = list_blobs_response + client._patch_resource.return_value = patch_acl_response + bucket = self._make_one(client=client, name=name) bucket.acl.loaded = True bucket.default_object_acl.loaded = True # Make the Bucket refuse to make_public with 2 objects. 
bucket._MAX_OBJECTS_FOR_ITERATION = 1 - self.assertRaises(ValueError, bucket.make_public, recursive=True) + + with self.assertRaises(ValueError): + bucket.make_public(recursive=True) + + expected_path = bucket.path + expected_data = {"acl": permissive} + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) + + client.list_blobs.assert_called_once() def test_make_private_defaults(self): - NAME = "name" + name = "name" no_permissions = [] - after = {"acl": no_permissions, "defaultObjectAcl": []} - connection = _Connection(after) - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) + api_response = {"acl": no_permissions, "defaultObjectAcl": []} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + bucket = self._make_one(client=client, name=name) bucket.acl.loaded = True bucket.default_object_acl.loaded = True + bucket.make_private() + self.assertEqual(list(bucket.acl), no_permissions) self.assertEqual(list(bucket.default_object_acl), []) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "PATCH") - self.assertEqual(kw[0]["path"], "/b/%s" % NAME) - self.assertEqual(kw[0]["data"], {"acl": after["acl"]}) - self.assertEqual(kw[0]["query_params"], {"projection": "full"}) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) + + expected_path = bucket.path + expected_data = {"acl": no_permissions} + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) def _make_private_w_future_helper(self, default_object_acl_loaded=True): - NAME = "name" + name = "name" no_permissions = [] - after1 = {"acl": no_permissions, "defaultObjectAcl": []} - after2 = {"acl": no_permissions, "defaultObjectAcl": no_permissions} - connection = _Connection(after1, after2) - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) + get_api_response = {"items": []} + acl_patched_response = {"acl": no_permissions, "defaultObjectAcl": []} + dac_patched_response = { + "acl": no_permissions, + "defaultObjectAcl": no_permissions, + } + client = mock.Mock(spec=["_get_resource", "_patch_resource"]) + client._get_resource.return_value = get_api_response + client._patch_resource.side_effect = [ + acl_patched_response, + dac_patched_response, + ] + + bucket = self._make_one(client=client, name=name) bucket.acl.loaded = True bucket.default_object_acl.loaded = default_object_acl_loaded - # Temporary workaround until we use real mock client - client._get_resource = mock.Mock(return_value={"items": []}) - bucket.make_private(future=True) + self.assertEqual(list(bucket.acl), no_permissions) self.assertEqual(list(bucket.default_object_acl), no_permissions) - kw = connection._requested - self.assertEqual(len(kw), 2) - self.assertEqual(kw[0]["method"], "PATCH") - self.assertEqual(kw[0]["path"], "/b/%s" % NAME) - self.assertEqual(kw[0]["data"], {"acl": no_permissions}) - self.assertEqual(kw[0]["query_params"], {"projection": "full"}) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) - self.assertEqual(kw[1]["method"], "PATCH") - self.assertEqual(kw[1]["path"], "/b/%s" % NAME) - self.assertEqual(kw[1]["data"], 
{"defaultObjectAcl": no_permissions}) - self.assertEqual(kw[1]["query_params"], {"projection": "full"}) - self.assertEqual(kw[1]["timeout"], self._get_default_timeout()) + + self.assertEqual(len(client._patch_resource.call_args_list), 2) + expected_acl_data = {"acl": no_permissions} + expected_dac_data = {"defaultObjectAcl": no_permissions} + expected_kw = { + "query_params": {"projection": "full"}, + "timeout": self._get_default_timeout(), + "retry": DEFAULT_RETRY, + } + client._patch_resource.assert_has_calls( + [ + ((bucket.path, expected_acl_data), expected_kw), + ((bucket.path, expected_dac_data), expected_kw), + ] + ) if not default_object_acl_loaded: - expected_path = "/b/%s/defaultObjectAcl" % (NAME,) + expected_path = "/b/%s/defaultObjectAcl" % (name,) expected_query_params = {} client._get_resource.assert_called_once_with( expected_path, @@ -3019,6 +3088,8 @@ def _make_private_w_future_helper(self, default_object_acl_loaded=True): timeout=self._get_default_timeout(), retry=DEFAULT_RETRY, ) + else: + client._get_resource.assert_not_called() def test_make_private_w_future(self): self._make_private_w_future_helper(default_object_acl_loaded=True) @@ -3047,64 +3118,80 @@ def all(self): def revoke_read(self): self._granted = False - def save(self, client=None, timeout=None): + def save(self, client=None, timeout=None, retry=None): _saved.append( - (self._bucket, self._name, self._granted, client, timeout) + (self._bucket, self._name, self._granted, client, timeout, retry) ) - def item_to_blob(self, item): - return _Blob(self.bucket, item["name"]) - - NAME = "name" - BLOB_NAME = "blob-name" + name = "name" + blob_name = "blob-name" no_permissions = [] - after = {"acl": no_permissions, "defaultObjectAcl": []} - connection = _Connection(after, {"items": [{"name": BLOB_NAME}]}) - client = self._make_client() - client._base_connection = connection - bucket = self._make_one(client=client, name=NAME) + + patch_acl_response = {"acl": no_permissions, "defaultObjectAcl": []} + client = mock.Mock(spec=["list_blobs", "_patch_resource"]) + client._patch_resource.return_value = patch_acl_response + + bucket = self._make_one(client=client, name=name) bucket.acl.loaded = True bucket.default_object_acl.loaded = True - with mock.patch("google.cloud.storage.client._item_to_blob", new=item_to_blob): - bucket.make_private(recursive=True, timeout=42, retry=DEFAULT_RETRY) + list_blobs_response = iter([_Blob(bucket, blob_name)]) + client.list_blobs.return_value = list_blobs_response + + timeout = 42 + retry = mock.Mock(spec=[]) + + bucket.make_private(recursive=True, timeout=42, retry=retry) + self.assertEqual(list(bucket.acl), no_permissions) self.assertEqual(list(bucket.default_object_acl), []) - self.assertEqual(_saved, [(bucket, BLOB_NAME, False, None, 42)]) - kw = connection._requested - self.assertEqual(len(kw), 2) - self.assertEqual(kw[0]["method"], "PATCH") - self.assertEqual(kw[0]["path"], "/b/%s" % NAME) - self.assertEqual(kw[0]["data"], {"acl": no_permissions}) - self.assertEqual(kw[0]["query_params"], {"projection": "full"}) - self.assertEqual(kw[0]["timeout"], 42) - self.assertEqual(kw[1]["method"], "GET") - self.assertEqual(kw[1]["path"], "/b/%s/o" % NAME) - self.assertEqual(kw[1]["retry"], DEFAULT_RETRY) - max_results = bucket._MAX_OBJECTS_FOR_ITERATION + 1 - self.assertEqual( - kw[1]["query_params"], {"maxResults": max_results, "projection": "full"} + self.assertEqual(_saved, [(bucket, blob_name, False, None, timeout, retry)]) + + expected_patch_data = {"acl": no_permissions} + 
expected_patch_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + bucket.path, + expected_patch_data, + query_params=expected_patch_query_params, + timeout=timeout, + retry=retry, ) - self.assertEqual(kw[1]["timeout"], 42) + + client.list_blobs.assert_called_once() def test_make_private_recursive_too_many(self): - NO_PERMISSIONS = [] - AFTER = {"acl": NO_PERMISSIONS, "defaultObjectAcl": []} + no_permissions = [] - NAME = "name" - BLOB_NAME1 = "blob-name1" - BLOB_NAME2 = "blob-name2" - GET_BLOBS_RESP = {"items": [{"name": BLOB_NAME1}, {"name": BLOB_NAME2}]} - connection = _Connection(AFTER, GET_BLOBS_RESP) - client = self._make_client() - client._base_connection = connection - bucket = self._make_one(client=client, name=NAME) + name = "name" + blob1 = mock.Mock(spec=[]) + blob2 = mock.Mock(spec=[]) + patch_acl_response = {"acl": no_permissions, "defaultObjectAcl": []} + list_blobs_response = iter([blob1, blob2]) + client = mock.Mock(spec=["list_blobs", "_patch_resource"]) + client.list_blobs.return_value = list_blobs_response + client._patch_resource.return_value = patch_acl_response + bucket = self._make_one(client=client, name=name) bucket.acl.loaded = True bucket.default_object_acl.loaded = True # Make the Bucket refuse to make_private with 2 objects. bucket._MAX_OBJECTS_FOR_ITERATION = 1 - self.assertRaises(ValueError, bucket.make_private, recursive=True) + + with self.assertRaises(ValueError): + bucket.make_private(recursive=True) + + expected_path = bucket.path + expected_data = {"acl": no_permissions} + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) + + client.list_blobs.assert_called_once() def test_page_empty_response(self): from google.api_core import page_iterator diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index c6a98951e..337daa79f 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -412,19 +412,19 @@ def test_batch(self): def test__get_resource_miss_w_defaults(self): from google.cloud.exceptions import NotFound - PROJECT = "PROJECT" - PATH = "/path/to/something" - CREDENTIALS = _make_credentials() + project = "PROJECT" + path = "/path/to/something" + credentials = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + client = self._make_one(project=project, credentials=credentials) connection = client._base_connection = _make_connection() with self.assertRaises(NotFound): - client._get_resource(PATH) + client._get_resource(path) connection.api_request.assert_called_once_with( method="GET", - path=PATH, + path=path, query_params=None, headers=None, timeout=self._get_default_timeout(), @@ -433,25 +433,25 @@ def test__get_resource_miss_w_defaults(self): ) def test__get_resource_hit_w_explicit(self): - PROJECT = "PROJECT" - PATH = "/path/to/something" - QUERY_PARAMS = {"foo": "Foo"} - HEADERS = {"bar": "Bar"} - TIMEOUT = 100 - RETRY = mock.Mock(spec=[]) - CREDENTIALS = _make_credentials() + project = "PROJECT" + path = "/path/to/something" + query_params = {"foo": "Foo"} + headers = {"bar": "Bar"} + timeout = 100 + retry = mock.Mock(spec=[]) + credentials = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + client = self._make_one(project=project, credentials=credentials) expected = mock.Mock(spec={}) connection = client._base_connection = 
_make_connection(expected) target = mock.Mock(spec={}) found = client._get_resource( - PATH, - query_params=QUERY_PARAMS, - headers=HEADERS, - timeout=TIMEOUT, - retry=RETRY, + path, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, _target_object=target, ) @@ -459,11 +459,74 @@ def test__get_resource_hit_w_explicit(self): connection.api_request.assert_called_once_with( method="GET", - path=PATH, - query_params=QUERY_PARAMS, - headers=HEADERS, - timeout=TIMEOUT, - retry=RETRY, + path=path, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=target, + ) + + def test__patch_resource_miss_w_defaults(self): + from google.cloud.exceptions import NotFound + + project = "PROJECT" + path = "/path/to/something" + credentials = _make_credentials() + patched = {"baz": "Baz"} + + client = self._make_one(project=project, credentials=credentials) + connection = client._base_connection = _make_connection() + + with self.assertRaises(NotFound): + client._patch_resource(path, patched) + + connection.api_request.assert_called_once_with( + method="PATCH", + path=path, + data=patched, + query_params=None, + headers=None, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, + ) + + def test__patch_resource_hit_w_explicit(self): + project = "PROJECT" + path = "/path/to/something" + patched = {"baz": "Baz"} + query_params = {"foo": "Foo"} + headers = {"bar": "Bar"} + timeout = 100 + retry = mock.Mock(spec=[]) + credentials = _make_credentials() + + client = self._make_one(project=project, credentials=credentials) + expected = mock.Mock(spec={}) + connection = client._base_connection = _make_connection(expected) + target = mock.Mock(spec={}) + + found = client._patch_resource( + path, + patched, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=target, + ) + + self.assertIs(found, expected) + + connection.api_request.assert_called_once_with( + method="PATCH", + path=path, + data=patched, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, _target_object=target, ) From 3a8a92042c0d68e97570815c34be43bde8fb6cc6 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 7 Jun 2021 15:26:00 -0400 Subject: [PATCH 16/30] refactor: add / use 'Client._put_resource' method (#441) Toward #38. 
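The new helper mirrors the existing _get_resource / _patch_resource shape:
one choke point for JSON-API PUT calls, so timeout and retry handling lives
in the client rather than in every caller. As the diff below shows, call
sites shrink to a single invocation; in HMACKeyMetadata.update the body
becomes:

    self._properties = self._client._put_resource(
        self.path,
        {"state": self.state},
        query_params=qs_params,
        timeout=timeout,
        retry=retry,
    )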
--- google/cloud/storage/_helpers.py | 9 +- google/cloud/storage/blob.py | 10 +- google/cloud/storage/bucket.py | 12 +- google/cloud/storage/client.py | 71 +++++++++++ google/cloud/storage/hmac_key.py | 9 +- tests/unit/test__helpers.py | 148 ++++++++++++----------- tests/unit/test_blob.py | 127 +++++++++++--------- tests/unit/test_bucket.py | 200 +++++++++++++++++-------------- tests/unit/test_client.py | 75 +++++++++++- tests/unit/test_hmac_key.py | 60 +++++----- 10 files changed, 446 insertions(+), 275 deletions(-) diff --git a/google/cloud/storage/_helpers.py b/google/cloud/storage/_helpers.py index 858f4bccb..83ed10b87 100644 --- a/google/cloud/storage/_helpers.py +++ b/google/cloud/storage/_helpers.py @@ -416,14 +416,13 @@ def update( if_metageneration_not_match=if_metageneration_not_match, ) - api_response = client._connection.api_request( - method="PUT", - path=self.path, - data=self._properties, + api_response = client._put_resource( + self.path, + self._properties, query_params=query_params, - _target_object=self, timeout=timeout, retry=retry, + _target_object=self, ) self._set_properties(api_response) diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index 0679c0222..391ced253 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -2898,16 +2898,16 @@ def set_iam_policy( if self.user_project is not None: query_params["userProject"] = self.user_project + path = "{}/iam".format(self.path) resource = policy.to_api_repr() resource["resourceId"] = self.path - info = client._connection.api_request( - method="PUT", - path="%s/iam" % (self.path,), + info = client._put_resource( + path, + resource, query_params=query_params, - data=resource, - _target_object=None, timeout=timeout, retry=retry, + _target_object=None, ) return Policy.from_api_repr(info) diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py index 1407ffd88..651652619 100644 --- a/google/cloud/storage/bucket.py +++ b/google/cloud/storage/bucket.py @@ -2943,17 +2943,19 @@ def set_iam_policy( if self.user_project is not None: query_params["userProject"] = self.user_project + path = "{}/iam".format(self.path) resource = policy.to_api_repr() resource["resourceId"] = self.path - info = client._connection.api_request( - method="PUT", - path="%s/iam" % (self.path,), + + info = client._put_resource( + path, + resource, query_params=query_params, - data=resource, - _target_object=None, timeout=timeout, retry=retry, + _target_object=None, ) + return Policy.from_api_repr(info) def test_iam_permissions( diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py index db81ac292..51527c1ef 100644 --- a/google/cloud/storage/client.py +++ b/google/cloud/storage/client.py @@ -457,6 +457,77 @@ def _patch_resource( _target_object=_target_object, ) + def _put_resource( + self, + path, + data, + query_params=None, + headers=None, + timeout=_DEFAULT_TIMEOUT, + retry=DEFAULT_RETRY, + _target_object=None, + ): + """Helper for bucket / blob methods making API 'PUT' calls. + + Args: + path str: + The path of the resource to fetch. + + data dict: + The data to be patched. + + query_params Optional[dict]: + HTTP query parameters to be passed + + headers Optional[dict]: + HTTP headers to be passed + + timeout (Optional[Union[float, Tuple[float, float]]]): + The amount of time, in seconds, to wait for the server response. + + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. 
+
+            retry (Optional[Union[google.api_core.retry.Retry, google.cloud.storage.retry.ConditionalRetryPolicy]]):
+                How to retry the RPC. A None value will disable retries.
+                A google.api_core.retry.Retry value will enable retries, and the object will
+                define retriable response codes and errors and configure backoff and timeout options.
+
+                A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and
+                activates it only if certain conditions are met. This class exists to provide safe defaults
+                for RPC calls that are not technically safe to retry normally (due to potential data
+                duplication or other side-effects) but become safe to retry if a condition such as
+                if_metageneration_match is set.
+
+                See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for
+                information on retry types and how to configure them.
+
+            _target_object (Union[ \
+                :class:`~google.cloud.storage.bucket.Bucket`, \
+                :class:`~google.cloud.storage.blob.Blob`, \
+            ]):
+                Object to which future data is to be applied -- only relevant
+                in the context of a batch.
+
+        Returns:
+            dict
+                The JSON resource returned by the API.
+
+        Raises:
+            google.cloud.exceptions.NotFound
+                If the resource is not found.
+        """
+        return self._connection.api_request(
+            method="PUT",
+            path=path,
+            data=data,
+            query_params=query_params,
+            headers=headers,
+            timeout=timeout,
+            retry=retry,
+            _target_object=_target_object,
+        )
+
     def get_bucket(
         self,
         bucket_or_name,
diff --git a/google/cloud/storage/hmac_key.py b/google/cloud/storage/hmac_key.py
index c6cc17a85..ad1e50562 100644
--- a/google/cloud/storage/hmac_key.py
+++ b/google/cloud/storage/hmac_key.py
@@ -298,13 +298,8 @@ def update(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY_IF_ETAG_IN_JSON):
             qs_params["userProject"] = self.user_project
 
         payload = {"state": self.state}
-        self._properties = self._client._connection.api_request(
-            method="PUT",
-            path=self.path,
-            data=payload,
-            query_params=qs_params,
-            timeout=timeout,
-            retry=retry,
+        self._properties = self._client._put_resource(
+            self.path, payload, query_params=qs_params, timeout=timeout, retry=retry,
         )
 
     def delete(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY):
diff --git a/tests/unit/test__helpers.py b/tests/unit/test__helpers.py
index 143e6d239..275d01c60 100644
--- a/tests/unit/test__helpers.py
+++ b/tests/unit/test__helpers.py
@@ -349,82 +349,100 @@ def test_patch_w_user_project_w_explicit_client(self):
             _target_object=derived,
         )
 
-    def test_update(self):
-        connection = _Connection({"foo": "Foo"})
-        client = _Client(connection)
-        derived = self._derivedClass("/path")()
+    def test_update_w_defaults(self):
+        path = "/path"
+        api_response = {"foo": "Foo"}
+        derived = self._derivedClass(path)()
         # Make sure changes is non-empty, so we can observe a change.
-        BAR = object()
-        BAZ = object()
-        derived._properties = {"bar": BAR, "baz": BAZ}
+        bar = object()
+        baz = object()
+        expected_data = derived._properties = {"bar": bar, "baz": baz}
         derived._changes = set(["bar"])  # Update sends 'baz' anyway.
-        derived.update(client=client, timeout=42)
-        self.assertEqual(derived._properties, {"foo": "Foo"})
-        kw = connection._requested
-        self.assertEqual(len(kw), 1)
-        self.assertEqual(kw[0]["method"], "PUT")
-        self.assertEqual(kw[0]["path"], "/path")
-        self.assertEqual(kw[0]["query_params"], {"projection": "full"})
-        self.assertEqual(kw[0]["data"], {"bar": BAR, "baz": BAZ})
-        self.assertEqual(kw[0]["timeout"], 42)
-        self.assertEqual(kw[0]["retry"], DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED)
-        # Make sure changes get reset by patch().
+        client = derived.client = mock.Mock(spec=["_put_resource"])
+        client._put_resource.return_value = api_response
+
+        derived.update()
+
+        self.assertEqual(derived._properties, api_response)
+        # Make sure changes get reset by update().
         self.assertEqual(derived._changes, set())
 
-    def test_update_with_metageneration_not_match(self):
-        GENERATION_NUMBER = 6
+        expected_query_params = {"projection": "full"}
+        client._put_resource.assert_called_once_with(
+            path,
+            expected_data,
+            query_params=expected_query_params,
+            timeout=self._get_default_timeout(),
+            retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
+            _target_object=derived,
+        )
 
-        connection = _Connection({"foo": "Foo"})
-        client = _Client(connection)
-        derived = self._derivedClass("/path")()
+    def test_update_with_metageneration_not_match_w_timeout_w_retry(self):
+        path = "/path"
+        generation_number = 6
+        api_response = {"foo": "Foo"}
+        derived = self._derivedClass(path)()
         # Make sure changes is non-empty, so we can observe a change.
-        BAR = object()
-        BAZ = object()
-        derived._properties = {"bar": BAR, "baz": BAZ}
+        bar = object()
+        baz = object()
+        expected_data = derived._properties = {"bar": bar, "baz": baz}
         derived._changes = set(["bar"])  # Update sends 'baz' anyway.
+        client = derived.client = mock.Mock(spec=["_put_resource"])
+        client._put_resource.return_value = api_response
+        timeout = 42
+
         derived.update(
-            client=client, timeout=42, if_metageneration_not_match=GENERATION_NUMBER
+            if_metageneration_not_match=generation_number, timeout=timeout,
         )
+
         self.assertEqual(derived._properties, {"foo": "Foo"})
-        kw = connection._requested
-        self.assertEqual(len(kw), 1)
-        self.assertEqual(kw[0]["method"], "PUT")
-        self.assertEqual(kw[0]["path"], "/path")
-        self.assertEqual(
-            kw[0]["query_params"],
-            {"projection": "full", "ifMetagenerationNotMatch": GENERATION_NUMBER},
-        )
-        self.assertEqual(kw[0]["data"], {"bar": BAR, "baz": BAZ})
-        self.assertEqual(kw[0]["timeout"], 42)
-        self.assertEqual(kw[0]["retry"], DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED)
         # Make sure changes get reset by update().
         self.assertEqual(derived._changes, set())
 
-    def test_update_w_user_project(self):
+        expected_query_params = {
+            "projection": "full",
+            "ifMetagenerationNotMatch": generation_number,
+        }
+        client._put_resource.assert_called_once_with(
+            path,
+            expected_data,
+            query_params=expected_query_params,
+            timeout=timeout,
+            retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
+            _target_object=derived,
+        )
+
+    def test_update_w_user_project_w_retry_w_explicit_client(self):
         user_project = "user-project-123"
-        connection = _Connection({"foo": "Foo"})
-        client = _Client(connection)
-        derived = self._derivedClass("/path", user_project)()
+        path = "/path"
+        api_response = {"foo": "Foo"}
+        derived = self._derivedClass(path, user_project)()
         # Make sure changes is non-empty, so we can observe a change.
-        BAR = object()
-        BAZ = object()
-        derived._properties = {"bar": BAR, "baz": BAZ}
+        bar = object()
+        baz = object()
+        expected_data = derived._properties = {"bar": bar, "baz": baz}
         derived._changes = set(["bar"])  # Update sends 'baz' anyway.
 
-        derived.update(client=client)
-        self.assertEqual(derived._properties, {"foo": "Foo"})
-        kw = connection._requested
-        self.assertEqual(len(kw), 1)
-        self.assertEqual(kw[0]["method"], "PUT")
-        self.assertEqual(kw[0]["path"], "/path")
-        self.assertEqual(
-            kw[0]["query_params"], {"projection": "full", "userProject": user_project}
-        )
-        self.assertEqual(kw[0]["data"], {"bar": BAR, "baz": BAZ})
-        self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
-        self.assertEqual(kw[0]["retry"], DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED)
+        client = mock.Mock(spec=["_put_resource"])
+        client._put_resource.return_value = api_response
+        retry = mock.Mock(spec=[])
+
+        derived.update(client=client, retry=retry)
         # Make sure changes get reset by update().
         self.assertEqual(derived._changes, set())
 
+        expected_query_params = {
+            "projection": "full",
+            "userProject": user_project,
+        }
+        client._put_resource.assert_called_once_with(
+            path,
+            expected_data,
+            query_params=expected_query_params,
+            timeout=self._get_default_timeout(),
+            retry=retry,
+            _target_object=derived,
+        )
+
 
 class Test__scalar_property(unittest.TestCase):
     def _call_fut(self, fieldName):
@@ -575,17 +593,6 @@ def test_hostname_and_scheme(self):
         self.assertEqual(self._call_fut(host=HOST, scheme=SCHEME), EXPECTED_URL)
 
 
-class _Connection(object):
-    def __init__(self, *responses):
-        self._responses = responses
-        self._requested = []
-
-    def api_request(self, **kw):
-        self._requested.append(kw)
-        response, self._responses = self._responses[0], self._responses[1:]
-        return response
-
-
 class _MD5Hash(object):
     def __init__(self, digest_val):
         self.digest_val = digest_val
@@ -617,8 +624,3 @@ def __init__(self):
     def b64encode(self, value):
         self._called_b64encode.append(value)
         return value
-
-
-class _Client(object):
-    def __init__(self, connection):
-        self._connection = connection
diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py
index a24aaf36c..548ab8619 100644
--- a/tests/unit/test_blob.py
+++ b/tests/unit/test_blob.py
@@ -27,6 +27,7 @@
 from six.moves import http_client
 
 from google.cloud.storage.retry import DEFAULT_RETRY
+from google.cloud.storage.retry import DEFAULT_RETRY_IF_ETAG_IN_JSON
 from google.cloud.storage.retry import DEFAULT_RETRY_IF_GENERATION_SPECIFIED
 
 
@@ -3269,84 +3270,100 @@ def test_set_iam_policy(self):
         from google.cloud.storage.iam import STORAGE_VIEWER_ROLE
         from google.api_core.iam import Policy
 
-        BLOB_NAME = "blob-name"
-        PATH = "/b/name/o/%s" % (BLOB_NAME,)
-        ETAG = "DEADBEEF"
-        VERSION = 1
-        OWNER1 = "user:phred@example.com"
-        OWNER2 = "group:cloud-logs@google.com"
-        EDITOR1 = "domain:google.com"
-        EDITOR2 = "user:phred@example.com"
-        VIEWER1 = "serviceAccount:1234-abcdef@service.example.com"
-        VIEWER2 = "user:phred@example.com"
-        BINDINGS = [
-            {"role": STORAGE_OWNER_ROLE, "members": [OWNER1, OWNER2]},
-            {"role": STORAGE_EDITOR_ROLE, "members": [EDITOR1, EDITOR2]},
-            {"role": STORAGE_VIEWER_ROLE, "members": [VIEWER1, VIEWER2]},
+        blob_name = "blob-name"
+        path = "/b/name/o/%s" % (blob_name,)
+        etag = "DEADBEEF"
+        version = 1
+        owner1 = "user:phred@example.com"
+        owner2 = "group:cloud-logs@google.com"
+        editor1 = "domain:google.com"
+        editor2 = "user:phred@example.com"
+        viewer1 = "serviceAccount:1234-abcdef@service.example.com"
+        viewer2 = "user:phred@example.com"
+
bindings = [ + {"role": STORAGE_OWNER_ROLE, "members": [owner1, owner2]}, + {"role": STORAGE_EDITOR_ROLE, "members": [editor1, editor2]}, + {"role": STORAGE_VIEWER_ROLE, "members": [viewer1, viewer2]}, ] - RETURNED = {"etag": ETAG, "version": VERSION, "bindings": BINDINGS} - after = ({"status": http_client.OK}, RETURNED) + api_response = {"etag": etag, "version": version, "bindings": bindings} policy = Policy() - for binding in BINDINGS: + for binding in bindings: policy[binding["role"]] = binding["members"] - connection = _Connection(after) - client = _Client(connection) + client = mock.Mock(spec=["_put_resource"]) + client._put_resource.return_value = api_response bucket = _Bucket(client=client) - blob = self._make_one(BLOB_NAME, bucket=bucket) + blob = self._make_one(blob_name, bucket=bucket) - returned = blob.set_iam_policy(policy, timeout=42) + returned = blob.set_iam_policy(policy) - self.assertEqual(returned.etag, ETAG) - self.assertEqual(returned.version, VERSION) + self.assertEqual(returned.etag, etag) + self.assertEqual(returned.version, version) self.assertEqual(dict(returned), dict(policy)) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "PUT") - self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,)) - self.assertEqual(kw[0]["query_params"], {}) - self.assertEqual(kw[0]["timeout"], 42) - sent = kw[0]["data"] - self.assertEqual(sent["resourceId"], PATH) - self.assertEqual(len(sent["bindings"]), len(BINDINGS)) + expected_path = "%s/iam" % (path,) + expected_data = { + "resourceId": path, + "bindings": mock.ANY, + } + expected_query_params = {} + client._put_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_ETAG_IN_JSON, + _target_object=None, + ) + + sent_bindings = client._put_resource.call_args.args[1]["bindings"] key = operator.itemgetter("role") for found, expected in zip( - sorted(sent["bindings"], key=key), sorted(BINDINGS, key=key) + sorted(sent_bindings, key=key), sorted(bindings, key=key) ): self.assertEqual(found["role"], expected["role"]) self.assertEqual(sorted(found["members"]), sorted(expected["members"])) - def test_set_iam_policy_w_user_project(self): + def test_set_iam_policy_w_user_project_w_explicit_client_w_timeout_retry(self): from google.api_core.iam import Policy - BLOB_NAME = "blob-name" - USER_PROJECT = "user-project-123" - PATH = "/b/name/o/%s" % (BLOB_NAME,) - ETAG = "DEADBEEF" - VERSION = 1 - BINDINGS = [] - RETURNED = {"etag": ETAG, "version": VERSION, "bindings": BINDINGS} - after = ({"status": http_client.OK}, RETURNED) + blob_name = "blob-name" + user_project = "user-project-123" + path = "/b/name/o/%s" % (blob_name,) + etag = "DEADBEEF" + version = 1 + bindings = [] policy = Policy() - connection = _Connection(after) - client = _Client(connection) - bucket = _Bucket(client=client, user_project=USER_PROJECT) - blob = self._make_one(BLOB_NAME, bucket=bucket) + api_response = {"etag": etag, "version": version, "bindings": bindings} + client = mock.Mock(spec=["_put_resource"]) + client._put_resource.return_value = api_response + bucket = _Bucket(client=None, user_project=user_project) + blob = self._make_one(blob_name, bucket=bucket) + timeout = 42 + retry = mock.Mock(spec=[]) - returned = blob.set_iam_policy(policy) + returned = blob.set_iam_policy( + policy, client=client, timeout=timeout, retry=retry, + ) - self.assertEqual(returned.etag, ETAG) - self.assertEqual(returned.version, 
VERSION) + self.assertEqual(returned.etag, etag) + self.assertEqual(returned.version, version) self.assertEqual(dict(returned), dict(policy)) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "PUT") - self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,)) - self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT}) - self.assertEqual(kw[0]["data"], {"resourceId": PATH}) + expected_path = "%s/iam" % (path,) + expected_data = { # bindings omitted + "resourceId": path, + } + expected_query_params = {"userProject": user_project} + client._put_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + _target_object=None, + ) def test_test_iam_permissions_defaults(self): from google.cloud.storage.iam import STORAGE_OBJECTS_LIST diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py index fa8f85f98..c0ac843cf 100644 --- a/tests/unit/test_bucket.py +++ b/tests/unit/test_bucket.py @@ -19,6 +19,7 @@ import pytest from google.cloud.storage.retry import DEFAULT_RETRY +from google.cloud.storage.retry import DEFAULT_RETRY_IF_ETAG_IN_JSON from google.cloud.storage.retry import DEFAULT_RETRY_IF_GENERATION_SPECIFIED from google.cloud.storage.retry import DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED @@ -1492,34 +1493,39 @@ def test_reload_w_generation_match(self): with self.assertRaises(TypeError): bucket.reload(if_generation_match=6) - def test_update_bucket_w_metageneration_match(self): - NAME = "name" - METAGENERATION_NUMBER = 9 - - connection = _Connection({}) - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) + def test_update_w_metageneration_match(self): + name = "name" + metageneration_number = 9 + client = mock.Mock(spec=["_put_resource"]) + client._put_resource.return_value = {} + bucket = self._make_one(client=client, name=name) - bucket.update(if_metageneration_match=METAGENERATION_NUMBER) + bucket.update(if_metageneration_match=metageneration_number) - self.assertEqual(len(connection._requested), 1) - req = connection._requested[0] - self.assertEqual(req["method"], "PUT") - self.assertEqual(req["path"], "/b/%s" % NAME) - self.assertEqual(req["timeout"], self._get_default_timeout()) - self.assertEqual( - req["query_params"], - {"projection": "full", "ifMetagenerationMatch": METAGENERATION_NUMBER}, + expected_query_params = { + "projection": "full", + "ifMetagenerationMatch": metageneration_number, + } + client._put_resource.assert_called_once_with( + bucket.path, + bucket._properties, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, + _target_object=bucket, ) - self.assertEqual(req["retry"], DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED) - def test_update_bucket_w_generation_match(self): - connection = _Connection({}) - client = _Client(connection) - bucket = self._make_one(client=client, name="name") + def test_update_w_generation_match(self): + name = "name" + generation_number = 6 + client = mock.Mock(spec=["_put_resource"]) + client._put_resource.return_value = {} + bucket = self._make_one(client=client, name=name) with self.assertRaises(TypeError): - bucket.update(if_generation_match=6) + bucket.update(if_generation_match=generation_number) + + client._put_resource.assert_not_called() @staticmethod def _make_blob(bucket_name, blob_name): @@ -2653,109 +2659,125 @@ def test_get_iam_policy_w_requested_policy_version_w_retry(self): 
_target_object=None, ) - def test_set_iam_policy(self): + def test_set_iam_policy_w_defaults(self): import operator from google.cloud.storage.iam import STORAGE_OWNER_ROLE from google.cloud.storage.iam import STORAGE_EDITOR_ROLE from google.cloud.storage.iam import STORAGE_VIEWER_ROLE from google.api_core.iam import Policy - NAME = "name" - PATH = "/b/%s" % (NAME,) - ETAG = "DEADBEEF" - VERSION = 1 - OWNER1 = "user:phred@example.com" - OWNER2 = "group:cloud-logs@google.com" - EDITOR1 = "domain:google.com" - EDITOR2 = "user:phred@example.com" - VIEWER1 = "serviceAccount:1234-abcdef@service.example.com" - VIEWER2 = "user:phred@example.com" - BINDINGS = [ - {"role": STORAGE_OWNER_ROLE, "members": [OWNER1, OWNER2]}, - {"role": STORAGE_EDITOR_ROLE, "members": [EDITOR1, EDITOR2]}, - {"role": STORAGE_VIEWER_ROLE, "members": [VIEWER1, VIEWER2]}, + name = "name" + etag = "DEADBEEF" + version = 1 + owner1 = "user:phred@example.com" + owner2 = "group:cloud-logs@google.com" + editor1 = "domain:google.com" + editor2 = "user:phred@example.com" + viewer1 = "serviceAccount:1234-abcdef@service.example.com" + viewer2 = "user:phred@example.com" + bindings = [ + {"role": STORAGE_OWNER_ROLE, "members": [owner1, owner2]}, + {"role": STORAGE_EDITOR_ROLE, "members": [editor1, editor2]}, + {"role": STORAGE_VIEWER_ROLE, "members": [viewer1, viewer2]}, ] - RETURNED = {"etag": ETAG, "version": VERSION, "bindings": BINDINGS} policy = Policy() - for binding in BINDINGS: + for binding in bindings: policy[binding["role"]] = binding["members"] - connection = _Connection(RETURNED) - client = _Client(connection, None) - bucket = self._make_one(client=client, name=NAME) + api_response = {"etag": etag, "version": version, "bindings": bindings} + client = mock.Mock(spec=["_put_resource"]) + client._put_resource.return_value = api_response + bucket = self._make_one(client=client, name=name) - returned = bucket.set_iam_policy(policy, timeout=42) + returned = bucket.set_iam_policy(policy) - self.assertEqual(returned.etag, ETAG) - self.assertEqual(returned.version, VERSION) + self.assertEqual(returned.etag, etag) + self.assertEqual(returned.version, version) self.assertEqual(dict(returned), dict(policy)) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "PUT") - self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,)) - self.assertEqual(kw[0]["query_params"], {}) - self.assertEqual(kw[0]["timeout"], 42) - sent = kw[0]["data"] - self.assertEqual(sent["resourceId"], PATH) - self.assertEqual(len(sent["bindings"]), len(BINDINGS)) + expected_path = "%s/iam" % (bucket.path,) + expected_data = { + "resourceId": bucket.path, + "bindings": mock.ANY, + } + expected_query_params = {} + client._put_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_ETAG_IN_JSON, + _target_object=None, + ) + + sent_bindings = client._put_resource.call_args.args[1]["bindings"] key = operator.itemgetter("role") for found, expected in zip( - sorted(sent["bindings"], key=key), sorted(BINDINGS, key=key) + sorted(sent_bindings, key=key), sorted(bindings, key=key) ): self.assertEqual(found["role"], expected["role"]) self.assertEqual(sorted(found["members"]), sorted(expected["members"])) - def test_set_iam_policy_w_user_project(self): + def test_set_iam_policy_w_user_project_w_expl_client_w_timeout_retry(self): import operator from google.cloud.storage.iam import STORAGE_OWNER_ROLE from google.cloud.storage.iam 
import STORAGE_EDITOR_ROLE from google.cloud.storage.iam import STORAGE_VIEWER_ROLE from google.api_core.iam import Policy - NAME = "name" - USER_PROJECT = "user-project-123" - PATH = "/b/%s" % (NAME,) - ETAG = "DEADBEEF" - VERSION = 1 - OWNER1 = "user:phred@example.com" - OWNER2 = "group:cloud-logs@google.com" - EDITOR1 = "domain:google.com" - EDITOR2 = "user:phred@example.com" - VIEWER1 = "serviceAccount:1234-abcdef@service.example.com" - VIEWER2 = "user:phred@example.com" - BINDINGS = [ - {"role": STORAGE_OWNER_ROLE, "members": [OWNER1, OWNER2]}, - {"role": STORAGE_EDITOR_ROLE, "members": [EDITOR1, EDITOR2]}, - {"role": STORAGE_VIEWER_ROLE, "members": [VIEWER1, VIEWER2]}, + name = "name" + user_project = "user-project-123" + etag = "DEADBEEF" + version = 1 + owner1 = "user:phred@example.com" + owner2 = "group:cloud-logs@google.com" + editor1 = "domain:google.com" + editor2 = "user:phred@example.com" + viewer1 = "serviceAccount:1234-abcdef@service.example.com" + viewer2 = "user:phred@example.com" + bindings = [ + {"role": STORAGE_OWNER_ROLE, "members": [owner1, owner2]}, + {"role": STORAGE_EDITOR_ROLE, "members": [editor1, editor2]}, + {"role": STORAGE_VIEWER_ROLE, "members": [viewer1, viewer2]}, ] - RETURNED = {"etag": ETAG, "version": VERSION, "bindings": BINDINGS} policy = Policy() - for binding in BINDINGS: + for binding in bindings: policy[binding["role"]] = binding["members"] - connection = _Connection(RETURNED) - client = _Client(connection, None) - bucket = self._make_one(client=client, name=NAME, user_project=USER_PROJECT) + api_response = {"etag": etag, "version": version, "bindings": bindings} + client = mock.Mock(spec=["_put_resource"]) + client._put_resource.return_value = api_response + bucket = self._make_one(client=None, name=name, user_project=user_project) + timeout = 42 + retry = mock.Mock(spec=[]) - returned = bucket.set_iam_policy(policy) + returned = bucket.set_iam_policy( + policy, client=client, timeout=timeout, retry=retry + ) - self.assertEqual(returned.etag, ETAG) - self.assertEqual(returned.version, VERSION) + self.assertEqual(returned.etag, etag) + self.assertEqual(returned.version, version) self.assertEqual(dict(returned), dict(policy)) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "PUT") - self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,)) - self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT}) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) - sent = kw[0]["data"] - self.assertEqual(sent["resourceId"], PATH) - self.assertEqual(len(sent["bindings"]), len(BINDINGS)) + expected_path = "%s/iam" % (bucket.path,) + expected_data = { + "resourceId": bucket.path, + "bindings": mock.ANY, + } + expected_query_params = {"userProject": user_project} + client._put_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + _target_object=None, + ) + + sent_bindings = client._put_resource.call_args.args[1]["bindings"] key = operator.itemgetter("role") for found, expected in zip( - sorted(sent["bindings"], key=key), sorted(BINDINGS, key=key) + sorted(sent_bindings, key=key), sorted(bindings, key=key) ): self.assertEqual(found["role"], expected["role"]) self.assertEqual(sorted(found["members"]), sorted(expected["members"])) diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 337daa79f..77e3c7e1b 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -473,18 +473,18 @@ 
def test__patch_resource_miss_w_defaults(self): project = "PROJECT" path = "/path/to/something" credentials = _make_credentials() - patched = {"baz": "Baz"} + data = {"baz": "Baz"} client = self._make_one(project=project, credentials=credentials) connection = client._base_connection = _make_connection() with self.assertRaises(NotFound): - client._patch_resource(path, patched) + client._patch_resource(path, data) connection.api_request.assert_called_once_with( method="PATCH", path=path, - data=patched, + data=data, query_params=None, headers=None, timeout=self._get_default_timeout(), @@ -495,7 +495,7 @@ def test__patch_resource_miss_w_defaults(self): def test__patch_resource_hit_w_explicit(self): project = "PROJECT" path = "/path/to/something" - patched = {"baz": "Baz"} + data = {"baz": "Baz"} query_params = {"foo": "Foo"} headers = {"bar": "Bar"} timeout = 100 @@ -509,7 +509,7 @@ def test__patch_resource_hit_w_explicit(self): found = client._patch_resource( path, - patched, + data, query_params=query_params, headers=headers, timeout=timeout, @@ -522,7 +522,70 @@ def test__patch_resource_hit_w_explicit(self): connection.api_request.assert_called_once_with( method="PATCH", path=path, - data=patched, + data=data, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=target, + ) + + def test__put_resource_miss_w_defaults(self): + from google.cloud.exceptions import NotFound + + project = "PROJECT" + path = "/path/to/something" + credentials = _make_credentials() + data = {"baz": "Baz"} + + client = self._make_one(project=project, credentials=credentials) + connection = client._base_connection = _make_connection() + + with self.assertRaises(NotFound): + client._put_resource(path, data) + + connection.api_request.assert_called_once_with( + method="PUT", + path=path, + data=data, + query_params=None, + headers=None, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, + ) + + def test__put_resource_hit_w_explicit(self): + project = "PROJECT" + path = "/path/to/something" + data = {"baz": "Baz"} + query_params = {"foo": "Foo"} + headers = {"bar": "Bar"} + timeout = 100 + retry = mock.Mock(spec=[]) + credentials = _make_credentials() + + client = self._make_one(project=project, credentials=credentials) + expected = mock.Mock(spec={}) + connection = client._base_connection = _make_connection(expected) + target = mock.Mock(spec={}) + + found = client._put_resource( + path, + data, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=target, + ) + + self.assertIs(found, expected) + + connection.api_request.assert_called_once_with( + method="PUT", + path=path, + data=data, query_params=query_params, headers=headers, timeout=timeout, diff --git a/tests/unit/test_hmac_key.py b/tests/unit/test_hmac_key.py index 06b3a65c5..45fbf3769 100644 --- a/tests/unit/test_hmac_key.py +++ b/tests/unit/test_hmac_key.py @@ -323,34 +323,33 @@ def test_reload_hit_w_project_set(self): retry=retry, ) - def test_update_miss_no_project_set(self): + def test_update_miss_no_project_set_w_defaults(self): from google.cloud.exceptions import NotFound + project = "PROJECT" access_id = "ACCESS-ID" - connection = mock.Mock(spec=["api_request"]) - connection.api_request.side_effect = NotFound("testing") - client = _Client(connection) + client = mock.Mock(spec=["_put_resource", "project"]) + client._put_resource.side_effect = NotFound("testing") + client.project = project metadata = self._make_one(client) 
metadata._properties["accessId"] = access_id metadata.state = "INACTIVE" with self.assertRaises(NotFound): - metadata.update(timeout=42) + metadata.update() - expected_path = "/projects/{}/hmacKeys/{}".format( - client.DEFAULT_PROJECT, access_id + expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id) + expected_data = {"state": "INACTIVE"} + expected_query_params = {} + client._put_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_ETAG_IN_JSON, ) - expected_kwargs = { - "method": "PUT", - "path": expected_path, - "data": {"state": "INACTIVE"}, - "query_params": {}, - "timeout": 42, - "retry": DEFAULT_RETRY_IF_ETAG_IN_JSON, - } - connection.api_request.assert_called_once_with(**expected_kwargs) - def test_update_hit_w_project_set(self): + def test_update_hit_w_project_set_w_timeout_w_retry(self): project = "PROJECT-ID" access_id = "ACCESS-ID" user_project = "billed-project" @@ -361,28 +360,29 @@ def test_update_hit_w_project_set(self): "serviceAccountEmail": email, "state": "ACTIVE", } - connection = mock.Mock(spec=["api_request"]) - connection.api_request.return_value = resource - client = _Client(connection) + client = mock.Mock(spec=["_put_resource"]) + client._put_resource.return_value = resource metadata = self._make_one(client, user_project=user_project) metadata._properties["accessId"] = access_id metadata._properties["projectId"] = project metadata.state = "ACTIVE" + timeout = 42 + retry = mock.Mock(spec=[]) - metadata.update() + metadata.update(timeout=42, retry=retry) self.assertEqual(metadata._properties, resource) expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id) - expected_kwargs = { - "method": "PUT", - "path": expected_path, - "data": {"state": "ACTIVE"}, - "query_params": {"userProject": user_project}, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY_IF_ETAG_IN_JSON, - } - connection.api_request.assert_called_once_with(**expected_kwargs) + expected_data = {"state": "ACTIVE"} + expected_query_params = {"userProject": user_project} + client._put_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + ) def test_delete_not_inactive(self): metadata = self._make_one() From 94fd0cae43de46db7ffa7e74e8a0117db951a113 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 7 Jun 2021 16:12:31 -0400 Subject: [PATCH 17/30] refactor: add / use 'Client._delete_resource' method (#442) Also, forward 'retry' through when deleting blobs during 'Bucket.delete'. Toward #38. --- google/cloud/storage/bucket.py | 39 +- google/cloud/storage/client.py | 66 +++ google/cloud/storage/hmac_key.py | 8 +- google/cloud/storage/notification.py | 8 +- tests/unit/test_bucket.py | 733 +++++++++++++++------------ tests/unit/test_client.py | 58 +++ tests/unit/test_hmac_key.py | 61 +-- tests/unit/test_notification.py | 50 +- 8 files changed, 621 insertions(+), 402 deletions(-) diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py index 651652619..c019b2f12 100644 --- a/google/cloud/storage/bucket.py +++ b/google/cloud/storage/bucket.py @@ -1466,9 +1466,9 @@ def delete( self, force=False, client=None, - timeout=_DEFAULT_TIMEOUT, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, ): """Delete this bucket. 
@@ -1496,13 +1496,6 @@ def delete( :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the current bucket. - :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response on each request. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. - :type if_metageneration_match: long :param if_metageneration_match: (Optional) Make the operation conditional on whether the blob's current metageneration matches the given value. @@ -1511,6 +1504,13 @@ def delete( :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the blob's current metageneration does not match the given value. + :type timeout: float or tuple + :param timeout: (Optional) The amount of time, in seconds, to wait + for the server response on each request. + + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy :param retry: (Optional) How to retry the RPC. A None value will disable retries. A google.api_core.retry.Retry value will enable retries, and the object will @@ -1545,6 +1545,7 @@ def delete( max_results=self._MAX_OBJECTS_FOR_ITERATION + 1, client=client, timeout=timeout, + retry=retry, ) ) if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION: @@ -1558,19 +1559,22 @@ def delete( # Ignore 404 errors on delete. self.delete_blobs( - blobs, on_error=lambda blob: None, client=client, timeout=timeout + blobs, + on_error=lambda blob: None, + client=client, + timeout=timeout, + retry=retry, ) # We intentionally pass `_target_object=None` since a DELETE # request has no response value (whether in a standard request or # in a batch request). - client._connection.api_request( - method="DELETE", - path=self.path, + client._delete_resource( + self.path, query_params=query_params, - _target_object=None, timeout=timeout, retry=retry, + _target_object=None, ) def delete_blob( @@ -1677,13 +1681,12 @@ def delete_blob( # We intentionally pass `_target_object=None` since a DELETE # request has no response value (whether in a standard request or # in a batch request). - client._connection.api_request( - method="DELETE", - path=blob.path, + client._delete_resource( + blob.path, query_params=query_params, - _target_object=None, timeout=timeout, retry=retry, + _target_object=None, ) def delete_blobs( @@ -1802,11 +1805,11 @@ def delete_blobs( self.delete_blob( blob_name, client=client, - timeout=timeout, if_generation_match=next(if_generation_match, None), if_generation_not_match=next(if_generation_not_match, None), if_metageneration_match=next(if_metageneration_match, None), if_metageneration_not_match=next(if_metageneration_not_match, None), + timeout=timeout, retry=retry, ) except NotFound: diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py index 51527c1ef..effcddddd 100644 --- a/google/cloud/storage/client.py +++ b/google/cloud/storage/client.py @@ -528,6 +528,72 @@ def _put_resource( _target_object=_target_object, ) + def _delete_resource( + self, + path, + query_params=None, + headers=None, + timeout=_DEFAULT_TIMEOUT, + retry=DEFAULT_RETRY, + _target_object=None, + ): + """Helper for bucket / blob methods making API 'DELETE' calls. + + Args: + path str: + The path of the resource to delete. 
+
+            query_params Optional[dict]:
+                HTTP query parameters to be passed
+
+            headers Optional[dict]:
+                HTTP headers to be passed
+
+            timeout (Optional[Union[float, Tuple[float, float]]]):
+                The amount of time, in seconds, to wait for the server response.
+
+                Can also be passed as a tuple (connect_timeout, read_timeout).
+                See :meth:`requests.Session.request` documentation for details.
+
+            retry (Optional[Union[google.api_core.retry.Retry, google.cloud.storage.retry.ConditionalRetryPolicy]]):
+                How to retry the RPC. A None value will disable retries.
+                A google.api_core.retry.Retry value will enable retries, and the object will
+                define retriable response codes and errors and configure backoff and timeout options.
+
+                A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and
+                activates it only if certain conditions are met. This class exists to provide safe defaults
+                for RPC calls that are not technically safe to retry normally (due to potential data
+                duplication or other side-effects) but become safe to retry if a condition such as
+                if_metageneration_match is set.
+
+                See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for
+                information on retry types and how to configure them.
+
+            _target_object (Union[ \
+                :class:`~google.cloud.storage.bucket.Bucket`, \
+                :class:`~google.cloud.storage.blob.Blob`, \
+            ]):
+                Object to which future data is to be applied -- only relevant
+                in the context of a batch.
+
+        Returns:
+            dict
+                The JSON response from the 'DELETE' request (typically empty).
+
+        Raises:
+            google.cloud.exceptions.NotFound
+                If the resource is not found.
+        """
+        return self._connection.api_request(
+            method="DELETE",
+            path=path,
+            query_params=query_params,
+            headers=headers,
+            timeout=timeout,
+            retry=retry,
+            _target_object=_target_object,
+        )
+
     def get_bucket(
         self,
         bucket_or_name,
diff --git a/google/cloud/storage/hmac_key.py b/google/cloud/storage/hmac_key.py
index ad1e50562..e59960a1c 100644
--- a/google/cloud/storage/hmac_key.py
+++ b/google/cloud/storage/hmac_key.py
@@ -336,10 +336,6 @@ def delete(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY):
         if self.user_project is not None:
             qs_params["userProject"] = self.user_project
 
-        self._client._connection.api_request(
-            method="DELETE",
-            path=self.path,
-            query_params=qs_params,
-            timeout=timeout,
-            retry=retry,
+        self._client._delete_resource(
+            self.path, query_params=qs_params, timeout=timeout, retry=retry,
         )
diff --git a/google/cloud/storage/notification.py b/google/cloud/storage/notification.py
index 5389ab51e..2f5661fce 100644
--- a/google/cloud/storage/notification.py
+++ b/google/cloud/storage/notification.py
@@ -429,12 +429,8 @@ def delete(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY):
         if self.bucket.user_project is not None:
             query_params["userProject"] = self.bucket.user_project
 
-        client._connection.api_request(
-            method="DELETE",
-            path=self.path,
-            query_params=query_params,
-            timeout=timeout,
-            retry=retry,
+        client._delete_resource(
+            self.path, query_params=query_params, timeout=timeout, retry=retry,
         )
diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py
index c0ac843cf..410c9d9b6 100644
--- a/tests/unit/test_bucket.py
+++ b/tests/unit/test_bucket.py
@@ -1107,358 +1107,472 @@ def test_get_notification_hit_w_explicit_w_user_project(self):
             retry=retry,
         )
 
-    def test_delete_miss(self):
+    def test_delete_miss_w_defaults(self):
         from google.cloud.exceptions import NotFound
 
-        NAME = "name"
-        connection = _Connection()
-        client = _Client(connection)
-        bucket =
self._make_one(client=client, name=NAME) - self.assertRaises(NotFound, bucket.delete) - expected_cw = [ - { - "method": "DELETE", - "path": bucket.path, - "query_params": {}, - "_target_object": None, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - } - ] - self.assertEqual(connection._deleted_buckets, expected_cw) + name = "name" + client = mock.Mock(spec=["_delete_resource"]) + client._delete_resource.side_effect = NotFound("testing") + bucket = self._make_one(client=client, name=name) + + with self.assertRaises(NotFound): + bucket.delete() + + expected_query_params = {} + client._delete_resource.assert_called_once_with( + bucket.path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, + ) + + def test_delete_hit_w_metageneration_match_w_explicit_client(self): + name = "name" + metageneration_number = 6 + client = mock.Mock(spec=["_delete_resource"]) + client._delete_resource.return_value = None + bucket = self._make_one(client=None, name=name) + + result = bucket.delete( + client=client, if_metageneration_match=metageneration_number, + ) - def test_delete_hit_with_user_project(self): - NAME = "name" - USER_PROJECT = "user-project-123" - GET_BLOBS_RESP = {"items": []} - connection = _Connection(GET_BLOBS_RESP) - connection._delete_bucket = True - client = self._make_client() - client._base_connection = connection - bucket = self._make_one(client=client, name=NAME, user_project=USER_PROJECT) - result = bucket.delete(force=True, timeout=42) self.assertIsNone(result) - expected_cw = [ - { - "method": "DELETE", - "path": bucket.path, - "_target_object": None, - "query_params": {"userProject": USER_PROJECT}, - "timeout": 42, - "retry": DEFAULT_RETRY, - } - ] - self.assertEqual(connection._deleted_buckets, expected_cw) - def test_delete_force_delete_blobs(self): - NAME = "name" - BLOB_NAME1 = "blob-name1" - BLOB_NAME2 = "blob-name2" - GET_BLOBS_RESP = {"items": [{"name": BLOB_NAME1}, {"name": BLOB_NAME2}]} - DELETE_BLOB1_RESP = DELETE_BLOB2_RESP = {} - connection = _Connection(GET_BLOBS_RESP, DELETE_BLOB1_RESP, DELETE_BLOB2_RESP) - connection._delete_bucket = True - client = self._make_client() - client._base_connection = connection - bucket = self._make_one(client=client, name=NAME) - result = bucket.delete(force=True) + expected_query_params = {"ifMetagenerationMatch": metageneration_number} + client._delete_resource.assert_called_once_with( + bucket.path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, + ) + + def test_delete_hit_w_force_w_user_project_w_explicit_timeout_retry(self): + name = "name" + user_project = "user-project-123" + client = mock.Mock(spec=["_delete_resource"]) + client._delete_resource.return_value = None + bucket = self._make_one(client=client, name=name, user_project=user_project) + bucket.list_blobs = mock.Mock(return_value=iter([])) + bucket.delete_blobs = mock.Mock(return_value=None) + timeout = 42 + retry = mock.Mock(spec=[]) + + result = bucket.delete(force=True, timeout=timeout, retry=retry) + self.assertIsNone(result) - expected_cw = [ - { - "method": "DELETE", - "path": bucket.path, - "query_params": {}, - "_target_object": None, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - } - ] - self.assertEqual(connection._deleted_buckets, expected_cw) - def test_delete_with_metageneration_match(self): - NAME = "name" - BLOB_NAME1 = "blob-name1" - BLOB_NAME2 = "blob-name2" - 
GET_BLOBS_RESP = {"items": [{"name": BLOB_NAME1}, {"name": BLOB_NAME2}]} - DELETE_BLOB1_RESP = DELETE_BLOB2_RESP = {} - METAGENERATION_NUMBER = 6 - - connection = _Connection(GET_BLOBS_RESP, DELETE_BLOB1_RESP, DELETE_BLOB2_RESP) - connection._delete_bucket = True - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) - result = bucket.delete(if_metageneration_match=METAGENERATION_NUMBER) + bucket.list_blobs.assert_called_once_with( + max_results=bucket._MAX_OBJECTS_FOR_ITERATION + 1, + client=client, + timeout=timeout, + retry=retry, + ) + + bucket.delete_blobs.assert_called_once_with( + [], on_error=mock.ANY, client=client, timeout=timeout, retry=retry, + ) + + expected_query_params = {"userProject": user_project} + client._delete_resource.assert_called_once_with( + bucket.path, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + _target_object=None, + ) + + def test_delete_hit_w_force_delete_blobs(self): + name = "name" + client = mock.Mock(spec=["_delete_resource"]) + client._delete_resource.return_value = None + bucket = self._make_one(client=client, name=name) + blobs = [mock.Mock(spec=[]), mock.Mock(spec=[])] + bucket.list_blobs = mock.Mock(return_value=iter(blobs)) + bucket.delete_blobs = mock.Mock(return_value=None) + + result = bucket.delete(force=True) + self.assertIsNone(result) - expected_cw = [ - { - "method": "DELETE", - "path": bucket.path, - "query_params": {"ifMetagenerationMatch": METAGENERATION_NUMBER}, - "_target_object": None, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - } - ] - self.assertEqual(connection._deleted_buckets, expected_cw) - def test_delete_force_miss_blobs(self): - NAME = "name" - BLOB_NAME = "blob-name1" - GET_BLOBS_RESP = {"items": [{"name": BLOB_NAME}]} - # Note the connection does not have a response for the blob. 
- connection = _Connection(GET_BLOBS_RESP) - connection._delete_bucket = True - client = self._make_client() - client._base_connection = connection - bucket = self._make_one(client=client, name=NAME) + bucket.list_blobs.assert_called_once_with( + max_results=bucket._MAX_OBJECTS_FOR_ITERATION + 1, + client=client, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) + + bucket.delete_blobs.assert_called_once_with( + blobs, + on_error=mock.ANY, + client=client, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) + + expected_query_params = {} + client._delete_resource.assert_called_once_with( + bucket.path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, + ) + + def test_delete_w_force_w_user_project_w_miss_on_blob(self): + from google.cloud.exceptions import NotFound + + name = "name" + blob_name = "blob-name" + client = mock.Mock(spec=["_delete_resource"]) + client._delete_resource.return_value = None + bucket = self._make_one(client=client, name=name) + blob = mock.Mock(spec=["name"]) + blob.name = blob_name + blobs = [blob] + bucket.list_blobs = mock.Mock(return_value=iter(blobs)) + bucket.delete_blob = mock.Mock(side_effect=NotFound("testing")) + result = bucket.delete(force=True) + self.assertIsNone(result) - expected_cw = [ - { - "method": "DELETE", - "path": bucket.path, - "query_params": {}, - "_target_object": None, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - } - ] - self.assertEqual(connection._deleted_buckets, expected_cw) - def test_delete_too_many(self): - NAME = "name" - BLOB_NAME1 = "blob-name1" - BLOB_NAME2 = "blob-name2" - GET_BLOBS_RESP = {"items": [{"name": BLOB_NAME1}, {"name": BLOB_NAME2}]} - connection = _Connection(GET_BLOBS_RESP) - connection._delete_bucket = True - client = self._make_client() - client._base_connection = connection - bucket = self._make_one(client=client, name=NAME) + bucket.delete_blob.assert_called_once_with( + blob_name, + client=client, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) + + expected_query_params = {} + client._delete_resource.assert_called_once_with( + bucket.path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, + ) + def test_delete_w_too_many(self): + name = "name" + blob_name1 = "blob-name1" + blob_name2 = "blob-name2" + client = mock.Mock(spec=["_delete_resource"]) + client._delete_resource.return_value = None + bucket = self._make_one(client=client, name=name) + blob1 = mock.Mock(spec=["name"]) + blob1.name = blob_name1 + blob2 = mock.Mock(spec=["name"]) + blob2.name = blob_name2 + blobs = [blob1, blob2] + bucket.list_blobs = mock.Mock(return_value=iter(blobs)) + bucket.delete_blobs = mock.Mock() # Make the Bucket refuse to delete with 2 objects. 
bucket._MAX_OBJECTS_FOR_ITERATION = 1 - self.assertRaises(ValueError, bucket.delete, force=True) - self.assertEqual(connection._deleted_buckets, []) - def test_delete_blob_miss(self): + with self.assertRaises(ValueError): + bucket.delete(force=True) + + bucket.delete_blobs.assert_not_called() + + def test_delete_blob_miss_w_defaults(self): from google.cloud.exceptions import NotFound - NAME = "name" - NONESUCH = "nonesuch" - connection = _Connection() - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) - self.assertRaises(NotFound, bucket.delete_blob, NONESUCH) - (kw,) = connection._requested - self.assertEqual(kw["method"], "DELETE") - self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, NONESUCH)) - self.assertEqual(kw["query_params"], {}) - self.assertEqual(kw["timeout"], self._get_default_timeout()) + name = "name" + blob_name = "nonesuch" + client = mock.Mock(spec=["_delete_resource"]) + client._delete_resource.side_effect = NotFound("testing") + bucket = self._make_one(client=client, name=name) + + with self.assertRaises(NotFound): + bucket.delete_blob(blob_name) + + expected_path = "/b/%s/o/%s" % (name, blob_name) + expected_query_params = {} + client._delete_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=None, + ) + + def test_delete_blob_hit_w_user_project_w_timeout(self): + name = "name" + blob_name = "blob-name" + user_project = "user-project-123" + client = mock.Mock(spec=["_delete_resource"]) + client._delete_resource.return_value = None + bucket = self._make_one(client=client, name=name, user_project=user_project) + timeout = 42 + + result = bucket.delete_blob(blob_name, timeout=timeout) - def test_delete_blob_hit_with_user_project(self): - NAME = "name" - BLOB_NAME = "blob-name" - USER_PROJECT = "user-project-123" - connection = _Connection({}) - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME, user_project=USER_PROJECT) - result = bucket.delete_blob(BLOB_NAME, timeout=42) self.assertIsNone(result) - (kw,) = connection._requested - self.assertEqual(kw["method"], "DELETE") - self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) - self.assertEqual(kw["query_params"], {"userProject": USER_PROJECT}) - self.assertEqual(kw["timeout"], 42) - self.assertEqual(kw["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) - def test_delete_blob_hit_with_generation(self): - NAME = "name" - BLOB_NAME = "blob-name" - GENERATION = 1512565576797178 - connection = _Connection({}) - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) - result = bucket.delete_blob(BLOB_NAME, generation=GENERATION) + expected_path = "/b/%s/o/%s" % (name, blob_name) + expected_query_params = {"userProject": user_project} + client._delete_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=timeout, + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=None, + ) + + def test_delete_blob_hit_w_generation_w_retry(self): + name = "name" + blob_name = "blob-name" + generation = 1512565576797178 + client = mock.Mock(spec=["_delete_resource"]) + client._delete_resource.return_value = None + bucket = self._make_one(client=client, name=name) + retry = mock.Mock(spec=[]) + + result = bucket.delete_blob(blob_name, generation=generation, retry=retry) + self.assertIsNone(result) - (kw,) = connection._requested - 
self.assertEqual(kw["method"], "DELETE") - self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) - self.assertEqual(kw["query_params"], {"generation": GENERATION}) - self.assertEqual(kw["timeout"], self._get_default_timeout()) - self.assertEqual(kw["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) - def test_delete_blob_with_generation_match(self): - NAME = "name" - BLOB_NAME = "blob-name" - GENERATION = 6 - METAGENERATION = 9 + expected_path = "/b/%s/o/%s" % (name, blob_name) + expected_query_params = {"generation": generation} + client._delete_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=retry, + _target_object=None, + ) + + def test_delete_blob_hit_w_generation_match(self): + name = "name" + blob_name = "blob-name" + generation = 6 + metageneration = 9 + client = mock.Mock(spec=["_delete_resource"]) + client._delete_resource.return_value = None + bucket = self._make_one(client=client, name=name) - connection = _Connection({}) - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) result = bucket.delete_blob( - BLOB_NAME, - if_generation_match=GENERATION, - if_metageneration_match=METAGENERATION, + blob_name, + if_generation_match=generation, + if_metageneration_match=metageneration, ) self.assertIsNone(result) - (kw,) = connection._requested - self.assertEqual(kw["method"], "DELETE") - self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) - self.assertEqual( - kw["query_params"], - {"ifGenerationMatch": GENERATION, "ifMetagenerationMatch": METAGENERATION}, + + expected_path = "/b/%s/o/%s" % (name, blob_name) + expected_query_params = { + "ifGenerationMatch": generation, + "ifMetagenerationMatch": metageneration, + } + client._delete_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=None, ) - self.assertEqual(kw["timeout"], self._get_default_timeout()) - self.assertEqual(kw["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) def test_delete_blobs_empty(self): - NAME = "name" - connection = _Connection() - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) + name = "name" + bucket = self._make_one(client=None, name=name) + bucket.delete_blob = mock.Mock() + bucket.delete_blobs([]) - self.assertEqual(connection._requested, []) - def test_delete_blobs_hit_w_user_project(self): - NAME = "name" - BLOB_NAME = "blob-name" - USER_PROJECT = "user-project-123" - connection = _Connection({}) - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME, user_project=USER_PROJECT) - bucket.delete_blobs([BLOB_NAME], timeout=42) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "DELETE") - self.assertEqual(kw[0]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) - self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT}) - self.assertEqual(kw[0]["timeout"], 42) - self.assertEqual(kw[0]["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) - - def test_delete_blobs_w_generation_match(self): - NAME = "name" - BLOB_NAME = "blob-name" - BLOB_NAME2 = "blob-name2" - GENERATION_NUMBER = 6 - GENERATION_NUMBER2 = 9 + bucket.delete_blob.assert_not_called() - connection = _Connection({}, {}) - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) - bucket.delete_blobs( - [BLOB_NAME, BLOB_NAME2], - timeout=42, - 
if_generation_match=[GENERATION_NUMBER, GENERATION_NUMBER2], - ) - kw = connection._requested - self.assertEqual(len(kw), 2) + def test_delete_blobs_hit_w_explicit_client_w_timeout(self): + name = "name" + blob_name = "blob-name" + client = mock.Mock(spec=[]) + bucket = self._make_one(client=None, name=name) + bucket.delete_blob = mock.Mock() + timeout = 42 - self.assertEqual(kw[0]["method"], "DELETE") - self.assertEqual(kw[0]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) - self.assertEqual(kw[0]["timeout"], 42) - self.assertEqual( - kw[0]["query_params"], {"ifGenerationMatch": GENERATION_NUMBER} - ) - self.assertEqual(kw[0]["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) - self.assertEqual(kw[1]["method"], "DELETE") - self.assertEqual(kw[1]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME2)) - self.assertEqual(kw[1]["timeout"], 42) - self.assertEqual( - kw[1]["query_params"], {"ifGenerationMatch": GENERATION_NUMBER2} + bucket.delete_blobs([blob_name], client=client, timeout=timeout) + + bucket.delete_blob.assert_called_once_with( + blob_name, + client=client, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=timeout, + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, ) - self.assertEqual(kw[1]["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) def test_delete_blobs_w_generation_match_wrong_len(self): - NAME = "name" - BLOB_NAME = "blob-name" - BLOB_NAME2 = "blob-name2" - GENERATION_NUMBER = 6 + name = "name" + blob_name = "blob-name" + blob_name2 = "blob-name2" + generation_number = 6 + bucket = self._make_one(client=None, name=name) + bucket.delete_blob = mock.Mock() - connection = _Connection() - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) with self.assertRaises(ValueError): bucket.delete_blobs( - [BLOB_NAME, BLOB_NAME2], - timeout=42, - if_generation_not_match=[GENERATION_NUMBER], + [blob_name, blob_name2], if_generation_not_match=[generation_number], ) + bucket.delete_blob.assert_not_called() + + def test_delete_blobs_w_generation_match_w_retry(self): + name = "name" + blob_name = "blob-name" + blob_name2 = "blob-name2" + generation_number = 6 + generation_number2 = 9 + client = mock.Mock(spec=[]) + bucket = self._make_one(client=client, name=name) + bucket.delete_blob = mock.Mock() + retry = mock.Mock(spec=[]) + + bucket.delete_blobs( + [blob_name, blob_name2], + if_generation_match=[generation_number, generation_number2], + retry=retry, + ) + + call_1 = mock.call( + blob_name, + client=None, + if_generation_match=generation_number, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=self._get_default_timeout(), + retry=retry, + ) + call_2 = mock.call( + blob_name2, + client=None, + if_generation_match=generation_number2, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=self._get_default_timeout(), + retry=retry, + ) + bucket.delete_blob.assert_has_calls([call_1, call_2]) + def test_delete_blobs_w_generation_match_none(self): - NAME = "name" - BLOB_NAME = "blob-name" - BLOB_NAME2 = "blob-name2" - GENERATION_NUMBER = 6 - GENERATION_NUMBER2 = None + name = "name" + blob_name = "blob-name" + blob_name2 = "blob-name2" + generation_number = 6 + generation_number2 = None + client = mock.Mock(spec=[]) + bucket = self._make_one(client=client, name=name) + bucket.delete_blob = mock.Mock() - connection = _Connection({}, {}) - client = _Client(connection) - bucket = 
self._make_one(client=client, name=NAME) bucket.delete_blobs( - [BLOB_NAME, BLOB_NAME2], - timeout=42, - if_generation_match=[GENERATION_NUMBER, GENERATION_NUMBER2], + [blob_name, blob_name2], + if_generation_match=[generation_number, generation_number2], ) - kw = connection._requested - self.assertEqual(len(kw), 2) - self.assertEqual(kw[0]["method"], "DELETE") - self.assertEqual(kw[0]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) - self.assertEqual(kw[0]["timeout"], 42) - self.assertEqual( - kw[0]["query_params"], {"ifGenerationMatch": GENERATION_NUMBER} + call_1 = mock.call( + blob_name, + client=None, + if_generation_match=generation_number, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + ) + call_2 = mock.call( + blob_name2, + client=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, ) - self.assertEqual(kw[0]["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) - self.assertEqual(kw[1]["method"], "DELETE") - self.assertEqual(kw[1]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME2)) - self.assertEqual(kw[1]["timeout"], 42) - self.assertEqual(kw[1]["query_params"], {}) - self.assertEqual(kw[1]["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) + bucket.delete_blob.assert_has_calls([call_1, call_2]) - def test_delete_blobs_miss_no_on_error(self): + def test_delete_blobs_miss_wo_on_error(self): from google.cloud.exceptions import NotFound - NAME = "name" - BLOB_NAME = "blob-name" - NONESUCH = "nonesuch" - connection = _Connection({}) - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) - self.assertRaises(NotFound, bucket.delete_blobs, [BLOB_NAME, NONESUCH]) - kw = connection._requested - self.assertEqual(len(kw), 2) - self.assertEqual(kw[0]["method"], "DELETE") - self.assertEqual(kw[0]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) - self.assertEqual(kw[0]["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) - self.assertEqual(kw[1]["method"], "DELETE") - self.assertEqual(kw[1]["path"], "/b/%s/o/%s" % (NAME, NONESUCH)) - self.assertEqual(kw[1]["timeout"], self._get_default_timeout()) - self.assertEqual(kw[1]["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) + name = "name" + blob_name = "blob-name" + blob_name2 = "nonesuch" + client = mock.Mock(spec=[]) + bucket = self._make_one(client=client, name=name) + bucket.delete_blob = mock.Mock() + bucket.delete_blob.side_effect = [None, NotFound("testing")] + + with self.assertRaises(NotFound): + bucket.delete_blobs([blob_name, blob_name2]) + + call_1 = mock.call( + blob_name, + client=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + ) + call_2 = mock.call( + blob_name2, + client=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + ) + bucket.delete_blob.assert_has_calls([call_1, call_2]) def test_delete_blobs_miss_w_on_error(self): - NAME = "name" - BLOB_NAME = "blob-name" - NONESUCH = "nonesuch" - connection = _Connection({}) - client 
= _Client(connection) - bucket = self._make_one(client=client, name=NAME) + from google.cloud.exceptions import NotFound + + name = "name" + blob_name = "blob-name" + blob_name2 = "nonesuch" + client = mock.Mock(spec=[]) + bucket = self._make_one(client=client, name=name) + bucket.delete_blob = mock.Mock() + bucket.delete_blob.side_effect = [None, NotFound("testing")] + errors = [] - bucket.delete_blobs([BLOB_NAME, NONESUCH], errors.append) - self.assertEqual(errors, [NONESUCH]) - kw = connection._requested - self.assertEqual(len(kw), 2) - self.assertEqual(kw[0]["method"], "DELETE") - self.assertEqual(kw[0]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) - self.assertEqual(kw[0]["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) - self.assertEqual(kw[1]["method"], "DELETE") - self.assertEqual(kw[1]["path"], "/b/%s/o/%s" % (NAME, NONESUCH)) - self.assertEqual(kw[1]["timeout"], self._get_default_timeout()) - self.assertEqual(kw[1]["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) + bucket.delete_blobs([blob_name, blob_name2], on_error=errors.append) + + self.assertEqual(errors, [blob_name2]) + + call_1 = mock.call( + blob_name, + client=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + ) + call_2 = mock.call( + blob_name2, + client=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + ) + bucket.delete_blob.assert_has_calls([call_1, call_2]) def test_reload_w_metageneration_match(self): name = "name" @@ -3716,31 +3830,10 @@ def __init__(self, *responses): self._deleted_buckets = [] self.credentials = None - @staticmethod - def _is_bucket_path(path): - # Now just ensure the path only has /b/ and one more segment. 
- return path.startswith("/b/") and path.count("/") == 2 - def api_request(self, **kw): - from google.cloud.exceptions import NotFound - self._requested.append(kw) - - method = kw.get("method") - path = kw.get("path", "") - if method == "DELETE" and self._is_bucket_path(path): - self._deleted_buckets.append(kw) - if self._delete_bucket: - return - else: - raise NotFound("miss") - - try: - response, self._responses = self._responses[0], self._responses[1:] - except IndexError: - raise NotFound("miss") - else: - return response + response, self._responses = self._responses[0], self._responses[1:] + return response class _Client(object): diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 77e3c7e1b..6d34d935a 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -593,6 +593,64 @@ def test__put_resource_hit_w_explicit(self): _target_object=target, ) + def test__delete_resource_miss_w_defaults(self): + from google.cloud.exceptions import NotFound + + project = "PROJECT" + path = "/path/to/something" + credentials = _make_credentials() + + client = self._make_one(project=project, credentials=credentials) + connection = client._base_connection = _make_connection() + + with self.assertRaises(NotFound): + client._delete_resource(path) + + connection.api_request.assert_called_once_with( + method="DELETE", + path=path, + query_params=None, + headers=None, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, + ) + + def test__delete_resource_hit_w_explicit(self): + project = "PROJECT" + path = "/path/to/something" + query_params = {"foo": "Foo"} + headers = {"bar": "Bar"} + timeout = 100 + retry = mock.Mock(spec=[]) + credentials = _make_credentials() + + client = self._make_one(project=project, credentials=credentials) + expected = mock.Mock(spec={}) + connection = client._base_connection = _make_connection(expected) + target = mock.Mock(spec={}) + + found = client._delete_resource( + path, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=target, + ) + + self.assertIs(found, expected) + + connection.api_request.assert_called_once_with( + method="DELETE", + path=path, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=target, + ) + def test_get_bucket_miss_w_string_w_defaults(self): from google.cloud.exceptions import NotFound from google.cloud.storage.bucket import Bucket diff --git a/tests/unit/test_hmac_key.py b/tests/unit/test_hmac_key.py index 45fbf3769..60d0c135b 100644 --- a/tests/unit/test_hmac_key.py +++ b/tests/unit/test_hmac_key.py @@ -385,62 +385,65 @@ def test_update_hit_w_project_set_w_timeout_w_retry(self): ) def test_delete_not_inactive(self): - metadata = self._make_one() + client = mock.Mock(spec=["_delete_resource", "project"]) + client.project = "PROJECT" + metadata = self._make_one(client) + for state in ("ACTIVE", "DELETED"): metadata._properties["state"] = state with self.assertRaises(ValueError): metadata.delete() - def test_delete_miss_no_project_set(self): + client._delete_resource.assert_not_called() + + def test_delete_miss_no_project_set_w_defaults(self): from google.cloud.exceptions import NotFound access_id = "ACCESS-ID" - connection = mock.Mock(spec=["api_request"]) - connection.api_request.side_effect = NotFound("testing") - client = _Client(connection) + client = mock.Mock(spec=["_delete_resource", "project"]) + client._delete_resource.side_effect = NotFound("testing") + client.project = "PROJECT" 
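The 'Client._delete_resource' helper exercised by the 'test__delete_resource_*' cases above is not itself shown in this patch series. Judging only from those assertions (method="DELETE", every keyword forwarded unchanged, retry=DEFAULT_RETRY by default), it presumably mirrors the '_post_resource' helper added later in the series. The following is a sketch inferred from the test assertions, not the committed source:

    from google.cloud.storage.constants import _DEFAULT_TIMEOUT
    from google.cloud.storage.retry import DEFAULT_RETRY

    # Sketch of a 'Client' method, inferred from the assertions above.
    def _delete_resource(
        self,
        path,
        query_params=None,
        headers=None,
        timeout=_DEFAULT_TIMEOUT,
        retry=DEFAULT_RETRY,
        _target_object=None,
    ):
        # Thin pass-through: the unit tests assert only that the call
        # reaches the transport verbatim, with method="DELETE".
        return self._connection.api_request(
            method="DELETE",
            path=path,
            query_params=query_params,
            headers=headers,
            timeout=timeout,
            retry=retry,
            _target_object=_target_object,
        )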
metadata = self._make_one(client) metadata._properties["accessId"] = access_id metadata.state = "INACTIVE" with self.assertRaises(NotFound): - metadata.delete(timeout=42) + metadata.delete() - expected_path = "/projects/{}/hmacKeys/{}".format( - client.DEFAULT_PROJECT, access_id + expected_path = "/projects/{}/hmacKeys/{}".format(client.project, access_id) + expected_query_params = {} + client._delete_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, ) - expected_kwargs = { - "method": "DELETE", - "path": expected_path, - "query_params": {}, - "timeout": 42, - "retry": DEFAULT_RETRY, - } - connection.api_request.assert_called_once_with(**expected_kwargs) - def test_delete_hit_w_project_set(self): + def test_delete_hit_w_project_set_w_explicit_timeout_retry(self): project = "PROJECT-ID" access_id = "ACCESS-ID" user_project = "billed-project" - connection = mock.Mock(spec=["api_request"]) - connection.api_request.return_value = {} - client = _Client(connection) + client = mock.Mock(spec=["_delete_resource", "project"]) + client.project = "CLIENT-PROJECT" + client._delete_resource.return_value = {} metadata = self._make_one(client, user_project=user_project) metadata._properties["accessId"] = access_id metadata._properties["projectId"] = project metadata.state = "INACTIVE" + timeout = 42 + retry = mock.Mock(spec=[]) - metadata.delete() + metadata.delete(timeout=timeout, retry=retry) expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id) - expected_kwargs = { - "method": "DELETE", - "path": expected_path, - "query_params": {"userProject": user_project}, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - } - connection.api_request.assert_called_once_with(**expected_kwargs) + expected_query_params = {"userProject": user_project} + client._delete_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + ) class _Client(object): diff --git a/tests/unit/test_notification.py b/tests/unit/test_notification.py index e8cee0478..ae8924b08 100644 --- a/tests/unit/test_notification.py +++ b/tests/unit/test_notification.py @@ -456,51 +456,55 @@ def test_reload_hit_w_explicit_w_user_project(self): ) def test_delete_wo_notification_id(self): - client = self._make_client() + client = mock.Mock(spec=["_delete_resource", "project"]) + client.project = self.BUCKET_PROJECT bucket = self._make_bucket(client) notification = self._make_one(bucket, self.TOPIC_NAME) with self.assertRaises(ValueError): notification.delete() - def test_delete_miss(self): + client._delete_resource.assert_not_called() + + def test_delete_miss_w_defaults(self): from google.cloud.exceptions import NotFound - client = self._make_client() + client = mock.Mock(spec=["_delete_resource", "project"]) + client._delete_resource.side_effect = NotFound("testing") + client.project = self.BUCKET_PROJECT bucket = self._make_bucket(client) notification = self._make_one(bucket, self.TOPIC_NAME) notification._properties["id"] = self.NOTIFICATION_ID - api_request = client._connection.api_request - api_request.side_effect = NotFound("testing") with self.assertRaises(NotFound): - notification.delete(timeout=42) + notification.delete() - api_request.assert_called_once_with( - method="DELETE", - path=self.NOTIFICATION_PATH, + client._delete_resource.assert_called_once_with( + self.NOTIFICATION_PATH, query_params={}, - timeout=42, + 
timeout=self._get_default_timeout(), retry=DEFAULT_RETRY, ) - def test_delete_hit(self): - USER_PROJECT = "user-project-123" - client = self._make_client() - bucket = self._make_bucket(client, user_project=USER_PROJECT) + def test_delete_hit_w_explicit_client_timeout_retry(self): + user_project = "user-project-123" + client = mock.Mock(spec=["_delete_resource"]) + client._delete_resource.return_value = None + bucket_client = mock.Mock(spec=["project"]) + bucket_client.project = self.BUCKET_PROJECT + bucket = self._make_bucket(bucket_client, user_project=user_project) notification = self._make_one(bucket, self.TOPIC_NAME) notification._properties["id"] = self.NOTIFICATION_ID - api_request = client._connection.api_request - api_request.return_value = None + timeout = 42 + retry = mock.Mock(spec=[]) - notification.delete(client=client) + notification.delete(client=client, timeout=timeout, retry=retry) - api_request.assert_called_once_with( - method="DELETE", - path=self.NOTIFICATION_PATH, - query_params={"userProject": USER_PROJECT}, - timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, + client._delete_resource.assert_called_once_with( + self.NOTIFICATION_PATH, + query_params={"userProject": user_project}, + timeout=timeout, + retry=retry, ) From ef764acb55c077e42e7f46276e23841cd06930fb Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 8 Jun 2021 12:49:19 -0400 Subject: [PATCH 18/30] refactor: add / use 'Client._post_resource' method (#443) Toward #38. --- google/cloud/storage/blob.py | 19 +- google/cloud/storage/bucket.py | 34 +- google/cloud/storage/client.py | 103 ++- google/cloud/storage/notification.py | 25 +- tests/unit/test_blob.py | 1234 +++++++++++++------------- tests/unit/test_bucket.py | 618 ++++++------- tests/unit/test_client.py | 520 +++++------ tests/unit/test_notification.py | 83 +- 8 files changed, 1400 insertions(+), 1236 deletions(-) diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index 391ced253..73851ea02 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -3164,14 +3164,13 @@ def compose( "sourceObjects": source_objects, "destination": self._properties.copy(), } - api_response = client._connection.api_request( - method="POST", - path=self.path + "/compose", + api_response = client._post_resource( + "{}/compose".format(self.path), + request, query_params=query_params, - data=request, - _target_object=self, timeout=timeout, retry=retry, + _target_object=self, ) self._set_properties(api_response) @@ -3315,15 +3314,15 @@ def rewrite( if_source_metageneration_not_match=if_source_metageneration_not_match, ) - api_response = client._connection.api_request( - method="POST", - path=source.path + "/rewriteTo" + self.path, + path = "{}/rewriteTo{}".format(source.path, self.path) + api_response = client._post_resource( + path, + self._properties, query_params=query_params, - data=self._properties, headers=headers, - _target_object=self, timeout=timeout, retry=retry, + _target_object=self, ) rewritten = int(api_response["totalBytesRewritten"]) size = int(api_response["objectSize"]) diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py index c019b2f12..7703dc234 100644 --- a/google/cloud/storage/bucket.py +++ b/google/cloud/storage/bucket.py @@ -1986,13 +1986,13 @@ def copy_blob( new_blob = Blob(bucket=destination_bucket, name=new_name) api_path = blob.path + "/copyTo" + new_blob.path - copy_result = client._connection.api_request( - method="POST", - path=api_path, + copy_result = 
client._post_resource( + api_path, + None, query_params=query_params, - _target_object=new_blob, timeout=timeout, retry=retry, + _target_object=new_blob, ) if not preserve_acl: @@ -2006,7 +2006,6 @@ def rename_blob( blob, new_name, client=None, - timeout=_DEFAULT_TIMEOUT, if_generation_match=None, if_generation_not_match=None, if_metageneration_match=None, @@ -2015,6 +2014,7 @@ def rename_blob( if_source_generation_not_match=None, if_source_metageneration_match=None, if_source_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, ): """Rename the given blob using copy and delete operations. @@ -2044,14 +2044,6 @@ def rename_blob( :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the current bucket. - :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. The timeout applies to each individual - request. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. - :type if_generation_match: long :param if_generation_match: (Optional) Makes the operation conditional on whether the destination @@ -2113,6 +2105,14 @@ def rename_blob( does not match the given value. Also used in the delete request. + :type timeout: float or tuple + :param timeout: (Optional) The amount of time, in seconds, to wait + for the server response. The timeout applies to each individual + request. + + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy :param retry: (Optional) How to retry the RPC. A None value will disable retries. A google.api_core.retry.Retry value will enable retries, and the object will @@ -3311,13 +3311,13 @@ def lock_retention_policy( query_params["userProject"] = self.user_project path = "/b/{}/lockRetentionPolicy".format(self.name) - api_response = client._connection.api_request( - method="POST", - path=path, + api_response = client._post_resource( + path, + None, query_params=query_params, - _target_object=self, timeout=timeout, retry=retry, + _target_object=self, ) self._set_properties(api_response) diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py index effcddddd..57c5b4103 100644 --- a/google/cloud/storage/client.py +++ b/google/cloud/storage/client.py @@ -528,6 +528,77 @@ def _put_resource( _target_object=_target_object, ) + def _post_resource( + self, + path, + data, + query_params=None, + headers=None, + timeout=_DEFAULT_TIMEOUT, + retry=None, + _target_object=None, + ): + """Helper for bucket / blob methods making API 'POST' calls. + + Args: + path str: + The path of the resource to which to post. + + data dict: + The data to be posted. + + query_params Optional[dict]: + HTTP query parameters to be passed + + headers Optional[dict]: + HTTP headers to be passed + + timeout (Optional[Union[float, Tuple[float, float]]]): + The amount of time, in seconds, to wait for the server response. + + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. + + retry (Optional[Union[google.api_core.retry.Retry, google.cloud.storage.retry.ConditionalRetryPolicy]]): + How to retry the RPC. A None value will disable retries. 
+                A google.api_core.retry.Retry value will enable retries, and the object will
+                define retriable response codes and errors and configure backoff and timeout options.
+
+                A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and
+                activates it only if certain conditions are met. This class exists to provide safe defaults
+                for RPC calls that are not technically safe to retry normally (due to potential data
+                duplication or other side-effects) but become safe to retry if a condition such as
+                if_metageneration_match is set.
+
+                See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for
+                information on retry types and how to configure them.
+
+            _target_object (Union[ \
+                :class:`~google.cloud.storage.bucket.Bucket`, \
+                :class:`~google.cloud.storage.blob.Blob`, \
+            ]):
+                Object to which future data is to be applied -- only relevant
+                in the context of a batch.
+
+        Returns:
+            dict
+                The JSON resource returned from the post.
+
+        Raises:
+            google.cloud.exceptions.NotFound
+                If the bucket is not found.
+        """
+        return self._connection.api_request(
+            method="POST",
+            path=path,
+            data=data,
+            query_params=query_params,
+            headers=headers,
+            timeout=timeout,
+            retry=retry,
+            _target_object=_target_object,
+        )
+
     def _delete_resource(
         self,
         path,
@@ -875,14 +946,13 @@ def create_bucket(
         if location is not None:
             properties["location"] = location
 
-        api_response = self._connection.api_request(
-            method="POST",
-            path="/b",
+        api_response = self._post_resource(
+            "/b",
+            properties,
             query_params=query_params,
-            data=properties,
-            _target_object=bucket,
             timeout=timeout,
             retry=retry,
+            _target_object=bucket,
         )
 
         bucket._set_properties(api_response)
@@ -1278,6 +1348,7 @@ def create_hmac_key(
         project_id=None,
         user_project=None,
         timeout=_DEFAULT_TIMEOUT,
+        retry=None,
     ):
         """Create an HMAC key for a service account.
 
@@ -1298,6 +1369,20 @@ def create_hmac_key(
             Can also be passed as a tuple (connect_timeout, read_timeout).
             See :meth:`requests.Session.request` documentation for details.
 
+        :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
+        :param retry: (Optional) How to retry the RPC. A None value will disable retries.
+            A google.api_core.retry.Retry value will enable retries, and the object will
+            define retriable response codes and errors and configure backoff and timeout options.
+
+            A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and
+            activates it only if certain conditions are met. This class exists to provide safe defaults
+            for RPC calls that are not technically safe to retry normally (due to potential data
+            duplication or other side-effects) but become safe to retry if a condition such as
+            if_metageneration_match is set.
+
+            See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for
+            information on retry types and how to configure them.
+
         :rtype: Tuple[:class:`~google.cloud.storage.hmac_key.HMACKeyMetadata`, str]
         :returns: metadata for the created key, plus the bytes of the key's
                   secret, which is a 40-character base64-encoded string.
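Since 'create_hmac_key' now accepts 'retry' (defaulting to None, in line with the package's convention of not retrying requests with side effects unless the caller opts in), here is a minimal caller-side sketch of the new argument; the service-account email is a placeholder:

    from google.api_core.retry import Retry
    from google.cloud import storage

    client = storage.Client()
    metadata, secret = client.create_hmac_key(
        "sa@example-project.iam.gserviceaccount.com",  # placeholder
        retry=Retry(deadline=60.0),  # opt in; the default remains no retry
    )
    print(metadata.access_id, len(secret))  # secret is a 40-character string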
@@ -1311,12 +1396,8 @@ def create_hmac_key( if user_project is not None: qs_params["userProject"] = user_project - api_response = self._connection.api_request( - method="POST", - path=path, - query_params=qs_params, - timeout=timeout, - retry=None, + api_response = self._post_resource( + path, None, query_params=qs_params, timeout=timeout, retry=retry, ) metadata = HMACKeyMetadata(self) metadata._properties = api_response["metadata"] diff --git a/google/cloud/storage/notification.py b/google/cloud/storage/notification.py index 2f5661fce..e86859466 100644 --- a/google/cloud/storage/notification.py +++ b/google/cloud/storage/notification.py @@ -233,7 +233,7 @@ def _set_properties(self, response): self._properties.clear() self._properties.update(response) - def create(self, client=None, timeout=_DEFAULT_TIMEOUT): + def create(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=None): """API wrapper: create the notification. See: @@ -251,6 +251,20 @@ def create(self, client=None, timeout=_DEFAULT_TIMEOUT): Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. + + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable retries. + A google.api_core.retry.Retry value will enable retries, and the object will + define retriable response codes and errors and configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and + activates it only if certain conditions are met. This class exists to provide safe defaults + for RPC calls that are not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a condition such as + if_metageneration_match is set. + + See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for + information on retry types and how to configure them. 
""" if self.notification_id is not None: raise ValueError( @@ -266,13 +280,8 @@ def create(self, client=None, timeout=_DEFAULT_TIMEOUT): path = "/b/{}/notificationConfigs".format(self.bucket.name) properties = self._properties.copy() properties["topic"] = _TOPIC_REF_FMT.format(self.topic_project, self.topic_name) - self._properties = client._connection.api_request( - method="POST", - path=path, - query_params=query_params, - data=properties, - timeout=timeout, - retry=None, + self._properties = client._post_resource( + path, properties, query_params=query_params, timeout=timeout, retry=retry, ) def exists(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index 548ab8619..071033a45 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -3540,744 +3540,805 @@ def test_make_private_w_timeout_w_retry(self): ) def test_compose_wo_content_type_set(self): - SOURCE_1 = "source-1" - SOURCE_2 = "source-2" - DESTINATION = "destination" - RESOURCE = {} - after = ({"status": http_client.OK}, RESOURCE) - connection = _Connection(after) - client = _Client(connection) + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + api_response = {} + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response bucket = _Bucket(client=client) - source_1 = self._make_one(SOURCE_1, bucket=bucket) - source_2 = self._make_one(SOURCE_2, bucket=bucket) - destination = self._make_one(DESTINATION, bucket=bucket) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, bucket=bucket) + destination = self._make_one(destination_name, bucket=bucket) # no destination.content_type set destination.compose(sources=[source_1, source_2]) self.assertIsNone(destination.content_type) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "POST", - "path": "/b/name/o/%s/compose" % DESTINATION, - "query_params": {}, - "data": { - "sourceObjects": [{"name": source_1.name}, {"name": source_2.name}], - "destination": {}, - }, - "_target_object": destination, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY_IF_GENERATION_SPECIFIED, - }, + expected_path = "/b/name/o/%s/compose" % destination_name + expected_data = { + "sourceObjects": [{"name": source_1_name}, {"name": source_2_name}], + "destination": {}, + } + expected_query_params = {} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=destination, ) - def test_compose_minimal_w_user_project(self): - SOURCE_1 = "source-1" - SOURCE_2 = "source-2" - DESTINATION = "destination" - RESOURCE = {"etag": "DEADBEEF"} - USER_PROJECT = "user-project-123" - after = ({"status": http_client.OK}, RESOURCE) - connection = _Connection(after) - client = _Client(connection) - bucket = _Bucket(client=client, user_project=USER_PROJECT) - source_1 = self._make_one(SOURCE_1, bucket=bucket) - source_2 = self._make_one(SOURCE_2, bucket=bucket) - destination = self._make_one(DESTINATION, bucket=bucket) + def test_compose_minimal_w_user_project_w_timeout(self): + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + api_response = {"etag": "DEADBEEF"} + user_project = "user-project-123" + client = mock.Mock(spec=["_post_resource"]) 
+ client._post_resource.return_value = api_response + bucket = _Bucket(client=client, user_project=user_project) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, bucket=bucket) + destination = self._make_one(destination_name, bucket=bucket) destination.content_type = "text/plain" + timeout = 42 - destination.compose(sources=[source_1, source_2], timeout=42) + destination.compose(sources=[source_1, source_2], timeout=timeout) self.assertEqual(destination.etag, "DEADBEEF") - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "POST", - "path": "/b/name/o/%s/compose" % DESTINATION, - "query_params": {"userProject": USER_PROJECT}, - "data": { - "sourceObjects": [{"name": source_1.name}, {"name": source_2.name}], - "destination": {"contentType": "text/plain"}, - }, - "_target_object": destination, - "timeout": 42, - "retry": DEFAULT_RETRY_IF_GENERATION_SPECIFIED, - }, + expected_path = "/b/name/o/%s/compose" % destination_name + expected_data = { + "sourceObjects": [{"name": source_1_name}, {"name": source_2_name}], + "destination": {"contentType": "text/plain"}, + } + expected_query_params = {"userProject": user_project} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=timeout, + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=destination, ) - def test_compose_w_additional_property_changes(self): - SOURCE_1 = "source-1" - SOURCE_2 = "source-2" - DESTINATION = "destination" - RESOURCE = {"etag": "DEADBEEF"} - after = ({"status": http_client.OK}, RESOURCE) - connection = _Connection(after) - client = _Client(connection) + def test_compose_w_additional_property_changes_w_retry(self): + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + api_response = {"etag": "DEADBEEF"} + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response bucket = _Bucket(client=client) - source_1 = self._make_one(SOURCE_1, bucket=bucket) - source_2 = self._make_one(SOURCE_2, bucket=bucket) - destination = self._make_one(DESTINATION, bucket=bucket) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, bucket=bucket) + destination = self._make_one(destination_name, bucket=bucket) destination.content_type = "text/plain" destination.content_language = "en-US" destination.metadata = {"my-key": "my-value"} + retry = mock.Mock(spec=[]) - destination.compose(sources=[source_1, source_2]) + destination.compose(sources=[source_1, source_2], retry=retry) self.assertEqual(destination.etag, "DEADBEEF") - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "POST", - "path": "/b/name/o/%s/compose" % DESTINATION, - "query_params": {}, - "data": { - "sourceObjects": [{"name": source_1.name}, {"name": source_2.name}], - "destination": { - "contentType": "text/plain", - "contentLanguage": "en-US", - "metadata": {"my-key": "my-value"}, - }, - }, - "_target_object": destination, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + expected_path = "/b/name/o/%s/compose" % destination_name + expected_data = { + "sourceObjects": [{"name": source_1_name}, {"name": source_2_name}], + "destination": { + "contentType": "text/plain", + "contentLanguage": "en-US", + "metadata": {"my-key": "my-value"}, }, + } + expected_query_params = {} + 
client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=retry, + _target_object=destination, ) def test_compose_w_generation_match(self): - SOURCE_1 = "source-1" - SOURCE_2 = "source-2" - DESTINATION = "destination" - RESOURCE = {} - GENERATION_NUMBERS = [6, 9] - METAGENERATION_NUMBERS = [7, 1] - - after = ({"status": http_client.OK}, RESOURCE) - connection = _Connection(after) - client = _Client(connection) + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + api_response = {} + generation_numbers = [6, 9] + metageneration_numbers = [7, 1] + + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response bucket = _Bucket(client=client) - source_1 = self._make_one(SOURCE_1, bucket=bucket) - source_2 = self._make_one(SOURCE_2, bucket=bucket) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, bucket=bucket) - destination = self._make_one(DESTINATION, bucket=bucket) + destination = self._make_one(destination_name, bucket=bucket) destination.compose( sources=[source_1, source_2], - if_generation_match=GENERATION_NUMBERS, - if_metageneration_match=METAGENERATION_NUMBERS, + if_generation_match=generation_numbers, + if_metageneration_match=metageneration_numbers, ) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "POST", - "path": "/b/name/o/%s/compose" % DESTINATION, - "query_params": {}, - "data": { - "sourceObjects": [ - { - "name": source_1.name, - "objectPreconditions": { - "ifGenerationMatch": GENERATION_NUMBERS[0], - "ifMetagenerationMatch": METAGENERATION_NUMBERS[0], - }, - }, - { - "name": source_2.name, - "objectPreconditions": { - "ifGenerationMatch": GENERATION_NUMBERS[1], - "ifMetagenerationMatch": METAGENERATION_NUMBERS[1], - }, - }, - ], - "destination": {}, + expected_path = "/b/name/o/%s/compose" % destination_name + expected_data = { + "sourceObjects": [ + { + "name": source_1_name, + "objectPreconditions": { + "ifGenerationMatch": generation_numbers[0], + "ifMetagenerationMatch": metageneration_numbers[0], + }, }, - "_target_object": destination, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY_IF_GENERATION_SPECIFIED, - }, + { + "name": source_2_name, + "objectPreconditions": { + "ifGenerationMatch": generation_numbers[1], + "ifMetagenerationMatch": metageneration_numbers[1], + }, + }, + ], + "destination": {}, + } + expected_query_params = {} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=destination, ) def test_compose_w_generation_match_bad_length(self): - SOURCE_1 = "source-1" - SOURCE_2 = "source-2" - DESTINATION = "destination" - GENERATION_NUMBERS = [6] - METAGENERATION_NUMBERS = [7] - - after = ({"status": http_client.OK}, {}) - connection = _Connection(after) - client = _Client(connection) + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + generation_numbers = [6] + client = mock.Mock(spec=["_post_resource"]) bucket = _Bucket(client=client) - source_1 = self._make_one(SOURCE_1, bucket=bucket) - source_2 = self._make_one(SOURCE_2, bucket=bucket) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, 
bucket=bucket) - destination = self._make_one(DESTINATION, bucket=bucket) + destination = self._make_one(destination_name, bucket=bucket) with self.assertRaises(ValueError): destination.compose( - sources=[source_1, source_2], if_generation_match=GENERATION_NUMBERS + sources=[source_1, source_2], if_generation_match=generation_numbers ) + + client._post_resource.assert_not_called() + + def test_compose_w_metageneration_match_bad_length(self): + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + metageneration_numbers = [7] + client = mock.Mock(spec=["_post_resource"]) + bucket = _Bucket(client=client) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, bucket=bucket) + destination = self._make_one(destination_name, bucket=bucket) + with self.assertRaises(ValueError): destination.compose( sources=[source_1, source_2], - if_metageneration_match=METAGENERATION_NUMBERS, + if_metageneration_match=metageneration_numbers, ) - def test_compose_w_generation_match_nones(self): - SOURCE_1 = "source-1" - SOURCE_2 = "source-2" - DESTINATION = "destination" - GENERATION_NUMBERS = [6, None] + client._post_resource.assert_not_called() - after = ({"status": http_client.OK}, {}) - connection = _Connection(after) - client = _Client(connection) + def test_compose_w_generation_match_nones(self): + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + generation_numbers = [6, None] + api_response = {} + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response bucket = _Bucket(client=client) - source_1 = self._make_one(SOURCE_1, bucket=bucket) - source_2 = self._make_one(SOURCE_2, bucket=bucket) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, bucket=bucket) + destination = self._make_one(destination_name, bucket=bucket) - destination = self._make_one(DESTINATION, bucket=bucket) destination.compose( - sources=[source_1, source_2], if_generation_match=GENERATION_NUMBERS + sources=[source_1, source_2], if_generation_match=generation_numbers ) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "POST", - "path": "/b/name/o/%s/compose" % DESTINATION, - "query_params": {}, - "data": { - "sourceObjects": [ - { - "name": source_1.name, - "objectPreconditions": { - "ifGenerationMatch": GENERATION_NUMBERS[0] - }, - }, - {"name": source_2.name}, - ], - "destination": {}, + expected_path = "/b/name/o/%s/compose" % destination_name + expected_data = { + "sourceObjects": [ + { + "name": source_1_name, + "objectPreconditions": { + "ifGenerationMatch": generation_numbers[0], + }, }, - "_target_object": destination, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY_IF_GENERATION_SPECIFIED, - }, + {"name": source_2_name}, + ], + "destination": {}, + } + expected_query_params = {} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=destination, ) - def test_rewrite_response_without_resource(self): - SOURCE_BLOB = "source" - DEST_BLOB = "dest" - DEST_BUCKET = "other-bucket" - TOKEN = "TOKEN" - RESPONSE = { - "totalBytesRewritten": 33, - "objectSize": 42, + def test_rewrite_w_response_wo_resource(self): + source_name = "source" + dest_name = "dest" + other_bucket_name = 
"other-bucket" + bytes_rewritten = 33 + object_size = 52 + rewrite_token = "TOKEN" + api_response = { + "totalBytesRewritten": bytes_rewritten, + "objectSize": object_size, "done": False, - "rewriteToken": TOKEN, + "rewriteToken": rewrite_token, } - response = ({"status": http_client.OK}, RESPONSE) - connection = _Connection(response) - client = _Client(connection) + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response source_bucket = _Bucket(client=client) - source_blob = self._make_one(SOURCE_BLOB, bucket=source_bucket) - dest_bucket = _Bucket(client=client, name=DEST_BUCKET) - dest_blob = self._make_one(DEST_BLOB, bucket=dest_bucket) + source_blob = self._make_one(source_name, bucket=source_bucket) + dest_bucket = _Bucket(client=client, name=other_bucket_name) + dest_blob = self._make_one(dest_name, bucket=dest_bucket) token, rewritten, size = dest_blob.rewrite(source_blob) - self.assertEqual(token, TOKEN) - self.assertEqual(rewritten, 33) - self.assertEqual(size, 42) - - def test_rewrite_w_generations(self): - SOURCE_BLOB = "source" - SOURCE_GENERATION = 42 - DEST_BLOB = "dest" - DEST_BUCKET = "other-bucket" - DEST_GENERATION = 43 - TOKEN = "TOKEN" - RESPONSE = { - "totalBytesRewritten": 33, - "objectSize": 42, + self.assertEqual(token, rewrite_token) + self.assertEqual(rewritten, bytes_rewritten) + self.assertEqual(size, object_size) + + expected_path = "/b/%s/o/%s/rewriteTo/b/%s/o/%s" % ( + source_bucket.name, + source_name, + other_bucket_name, + dest_name, + ) + expected_data = {} + expected_query_params = {} + expected_headers = {} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=dest_blob, + ) + + def test_rewrite_w_generations_w_timeout(self): + source_name = "source" + source_generation = 22 + dest_name = "dest" + other_bucket_name = "other-bucket" + dest_generation = 23 + bytes_rewritten = 33 + object_size = 52 + rewrite_token = "TOKEN" + api_response = { + "totalBytesRewritten": bytes_rewritten, + "objectSize": object_size, "done": False, - "rewriteToken": TOKEN, + "rewriteToken": rewrite_token, } - response = ({"status": http_client.OK}, RESPONSE) - connection = _Connection(response) - client = _Client(connection) + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response source_bucket = _Bucket(client=client) source_blob = self._make_one( - SOURCE_BLOB, bucket=source_bucket, generation=SOURCE_GENERATION + source_name, bucket=source_bucket, generation=source_generation ) - dest_bucket = _Bucket(client=client, name=DEST_BUCKET) + dest_bucket = _Bucket(client=client, name=other_bucket_name) dest_blob = self._make_one( - DEST_BLOB, bucket=dest_bucket, generation=DEST_GENERATION + dest_name, bucket=dest_bucket, generation=dest_generation ) + timeout = 42 - token, rewritten, size = dest_blob.rewrite(source_blob, timeout=42) + token, rewritten, size = dest_blob.rewrite(source_blob, timeout=timeout) - self.assertEqual(token, TOKEN) - self.assertEqual(rewritten, 33) - self.assertEqual(size, 42) + self.assertEqual(token, rewrite_token) + self.assertEqual(rewritten, bytes_rewritten) + self.assertEqual(size, object_size) - (kw,) = connection._requested - self.assertEqual(kw["method"], "POST") - self.assertEqual( - kw["path"], - "/b/%s/o/%s/rewriteTo/b/%s/o/%s" - % ( - (source_bucket.name, source_blob.name, 
dest_bucket.name, dest_blob.name) - ), - ) - self.assertEqual(kw["query_params"], {"sourceGeneration": SOURCE_GENERATION}) - self.assertEqual(kw["timeout"], 42) - - def test_rewrite_w_generation_match(self): - SOURCE_BLOB = "source" - SOURCE_GENERATION_NUMBER = 42 - DEST_BLOB = "dest" - DEST_BUCKET = "other-bucket" - DEST_GENERATION_NUMBER = 16 - TOKEN = "TOKEN" - RESPONSE = { - "totalBytesRewritten": 33, - "objectSize": 42, + expected_path = "/b/%s/o/%s/rewriteTo/b/%s/o/%s" % ( + source_bucket.name, + source_name, + other_bucket_name, + dest_name, + ) + expected_data = {"generation": dest_generation} + expected_query_params = {"sourceGeneration": source_generation} + expected_headers = {} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + headers=expected_headers, + timeout=timeout, + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=dest_blob, + ) + + def test_rewrite_w_generation_match_w_retry(self): + source_name = "source" + source_generation = 42 + dest_name = "dest" + other_bucket_name = "other-bucket" + dest_generation = 16 + bytes_rewritten = 33 + object_size = 52 + rewrite_token = "TOKEN" + api_response = { + "totalBytesRewritten": bytes_rewritten, + "objectSize": object_size, "done": False, - "rewriteToken": TOKEN, + "rewriteToken": rewrite_token, } - response = ({"status": http_client.OK}, RESPONSE) - connection = _Connection(response) - client = _Client(connection) + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response source_bucket = _Bucket(client=client) source_blob = self._make_one( - SOURCE_BLOB, bucket=source_bucket, generation=SOURCE_GENERATION_NUMBER + source_name, bucket=source_bucket, generation=source_generation ) - dest_bucket = _Bucket(client=client, name=DEST_BUCKET) + dest_bucket = _Bucket(client=client, name=other_bucket_name) dest_blob = self._make_one( - DEST_BLOB, bucket=dest_bucket, generation=DEST_GENERATION_NUMBER + dest_name, bucket=dest_bucket, generation=dest_generation ) + retry = mock.Mock(spec=[]) + token, rewritten, size = dest_blob.rewrite( source_blob, - timeout=42, if_generation_match=dest_blob.generation, if_source_generation_match=source_blob.generation, + retry=retry, ) - (kw,) = connection._requested - self.assertEqual(kw["method"], "POST") - self.assertEqual( - kw["path"], - "/b/%s/o/%s/rewriteTo/b/%s/o/%s" - % ( - (source_bucket.name, source_blob.name, dest_bucket.name, dest_blob.name) - ), + + self.assertEqual(token, rewrite_token) + self.assertEqual(rewritten, bytes_rewritten) + self.assertEqual(size, object_size) + + expected_path = "/b/%s/o/%s/rewriteTo/b/%s/o/%s" % ( + source_bucket.name, + source_name, + other_bucket_name, + dest_name, ) - self.assertEqual( - kw["query_params"], - { - "ifSourceGenerationMatch": SOURCE_GENERATION_NUMBER, - "ifGenerationMatch": DEST_GENERATION_NUMBER, - "sourceGeneration": SOURCE_GENERATION_NUMBER, - }, + expected_data = {"generation": dest_generation} + expected_query_params = { + "ifSourceGenerationMatch": source_generation, + "ifGenerationMatch": dest_generation, + "sourceGeneration": source_generation, + } + expected_headers = {} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=retry, + _target_object=dest_blob, ) - self.assertEqual(kw["timeout"], 42) def test_rewrite_other_bucket_other_name_no_encryption_partial(self): - 
SOURCE_BLOB = "source" - DEST_BLOB = "dest" - DEST_BUCKET = "other-bucket" - TOKEN = "TOKEN" - RESPONSE = { - "totalBytesRewritten": 33, - "objectSize": 42, + source_name = "source" + dest_name = "dest" + other_bucket_name = "other-bucket" + bytes_rewritten = 33 + object_size = 52 + rewrite_token = "TOKEN" + api_response = { + "totalBytesRewritten": bytes_rewritten, + "objectSize": object_size, "done": False, - "rewriteToken": TOKEN, - "resource": {"etag": "DEADBEEF"}, + "rewriteToken": rewrite_token, } - response = ({"status": http_client.OK}, RESPONSE) - connection = _Connection(response) - client = _Client(connection) + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response source_bucket = _Bucket(client=client) - source_blob = self._make_one(SOURCE_BLOB, bucket=source_bucket) - dest_bucket = _Bucket(client=client, name=DEST_BUCKET) - dest_blob = self._make_one(DEST_BLOB, bucket=dest_bucket) + source_blob = self._make_one(source_name, bucket=source_bucket) + dest_bucket = _Bucket(client=client, name=other_bucket_name) + dest_blob = self._make_one(dest_name, bucket=dest_bucket) token, rewritten, size = dest_blob.rewrite(source_blob) - self.assertEqual(token, TOKEN) - self.assertEqual(rewritten, 33) - self.assertEqual(size, 42) - - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "POST") - PATH = "/b/name/o/%s/rewriteTo/b/%s/o/%s" % ( - SOURCE_BLOB, - DEST_BUCKET, - DEST_BLOB, - ) - self.assertEqual(kw[0]["path"], PATH) - self.assertEqual(kw[0]["query_params"], {}) - SENT = {} - self.assertEqual(kw[0]["data"], SENT) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) - - headers = {key.title(): str(value) for key, value in kw[0]["headers"].items()} - self.assertNotIn("X-Goog-Copy-Source-Encryption-Algorithm", headers) - self.assertNotIn("X-Goog-Copy-Source-Encryption-Key", headers) - self.assertNotIn("X-Goog-Copy-Source-Encryption-Key-Sha256", headers) - self.assertNotIn("X-Goog-Encryption-Algorithm", headers) - self.assertNotIn("X-Goog-Encryption-Key", headers) - self.assertNotIn("X-Goog-Encryption-Key-Sha256", headers) + self.assertEqual(token, rewrite_token) + self.assertEqual(rewritten, bytes_rewritten) + self.assertEqual(size, object_size) + + expected_path = "/b/name/o/%s/rewriteTo/b/%s/o/%s" % ( + source_name, + other_bucket_name, + dest_name, + ) + expected_query_params = {} + expected_data = {} + expected_headers = {} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=dest_blob, + ) def test_rewrite_same_name_no_old_key_new_key_done_w_user_project(self): - KEY = b"01234567890123456789012345678901" # 32 bytes - KEY_B64 = base64.b64encode(KEY).rstrip().decode("ascii") - KEY_HASH = hashlib.sha256(KEY).digest() - KEY_HASH_B64 = base64.b64encode(KEY_HASH).rstrip().decode("ascii") - BLOB_NAME = "blob" - USER_PROJECT = "user-project-123" - RESPONSE = { - "totalBytesRewritten": 42, - "objectSize": 42, + blob_name = "blob" + user_project = "user-project-123" + key = b"01234567890123456789012345678901" # 32 bytes + key_b64 = base64.b64encode(key).rstrip().decode("ascii") + key_hash = hashlib.sha256(key).digest() + key_hash_b64 = base64.b64encode(key_hash).rstrip().decode("ascii") + bytes_rewritten = object_size = 52 + api_response = { + "totalBytesRewritten": bytes_rewritten, + "objectSize": 
object_size, "done": True, "resource": {"etag": "DEADBEEF"}, } - response = ({"status": http_client.OK}, RESPONSE) - connection = _Connection(response) - client = _Client(connection) - bucket = _Bucket(client=client, user_project=USER_PROJECT) - plain = self._make_one(BLOB_NAME, bucket=bucket) - encrypted = self._make_one(BLOB_NAME, bucket=bucket, encryption_key=KEY) + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response + bucket = _Bucket(client=client, user_project=user_project) + plain = self._make_one(blob_name, bucket=bucket) + encrypted = self._make_one(blob_name, bucket=bucket, encryption_key=key) token, rewritten, size = encrypted.rewrite(plain) self.assertIsNone(token) - self.assertEqual(rewritten, 42) - self.assertEqual(size, 42) - - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "POST") - PATH = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (BLOB_NAME, BLOB_NAME) - self.assertEqual(kw[0]["path"], PATH) - self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT}) - SENT = {} - self.assertEqual(kw[0]["data"], SENT) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) - - headers = {key.title(): str(value) for key, value in kw[0]["headers"].items()} - self.assertNotIn("X-Goog-Copy-Source-Encryption-Algorithm", headers) - self.assertNotIn("X-Goog-Copy-Source-Encryption-Key", headers) - self.assertNotIn("X-Goog-Copy-Source-Encryption-Key-Sha256", headers) - self.assertEqual(headers["X-Goog-Encryption-Algorithm"], "AES256") - self.assertEqual(headers["X-Goog-Encryption-Key"], KEY_B64) - self.assertEqual(headers["X-Goog-Encryption-Key-Sha256"], KEY_HASH_B64) + self.assertEqual(rewritten, bytes_rewritten) + self.assertEqual(size, object_size) + + expected_path = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (blob_name, blob_name) + expected_query_params = {"userProject": user_project} + expected_data = {} + expected_headers = { + "X-Goog-Encryption-Algorithm": "AES256", + "X-Goog-Encryption-Key": key_b64, + "X-Goog-Encryption-Key-Sha256": key_hash_b64, + } + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=encrypted, + ) def test_rewrite_same_name_no_key_new_key_w_token(self): - SOURCE_KEY = b"01234567890123456789012345678901" # 32 bytes - SOURCE_KEY_B64 = base64.b64encode(SOURCE_KEY).rstrip().decode("ascii") - SOURCE_KEY_HASH = hashlib.sha256(SOURCE_KEY).digest() - SOURCE_KEY_HASH_B64 = base64.b64encode(SOURCE_KEY_HASH).rstrip().decode("ascii") - DEST_KEY = b"90123456789012345678901234567890" # 32 bytes - DEST_KEY_B64 = base64.b64encode(DEST_KEY).rstrip().decode("ascii") - DEST_KEY_HASH = hashlib.sha256(DEST_KEY).digest() - DEST_KEY_HASH_B64 = base64.b64encode(DEST_KEY_HASH).rstrip().decode("ascii") - BLOB_NAME = "blob" - TOKEN = "TOKEN" - RESPONSE = { - "totalBytesRewritten": 42, - "objectSize": 42, + blob_name = "blob" + source_key = b"01234567890123456789012345678901" # 32 bytes + source_key_b64 = base64.b64encode(source_key).rstrip().decode("ascii") + source_key_hash = hashlib.sha256(source_key).digest() + source_key_hash_b64 = base64.b64encode(source_key_hash).rstrip().decode("ascii") + dest_key = b"90123456789012345678901234567890" # 32 bytes + dest_key_b64 = base64.b64encode(dest_key).rstrip().decode("ascii") + dest_key_hash = hashlib.sha256(dest_key).digest() + dest_key_hash_b64 = 
base64.b64encode(dest_key_hash).rstrip().decode("ascii") + previous_token = "TOKEN" + bytes_rewritten = object_size = 52 + api_response = { + "totalBytesRewritten": bytes_rewritten, + "objectSize": object_size, "done": True, "resource": {"etag": "DEADBEEF"}, } - response = ({"status": http_client.OK}, RESPONSE) - connection = _Connection(response) - client = _Client(connection) + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response bucket = _Bucket(client=client) - source = self._make_one(BLOB_NAME, bucket=bucket, encryption_key=SOURCE_KEY) - dest = self._make_one(BLOB_NAME, bucket=bucket, encryption_key=DEST_KEY) + source = self._make_one(blob_name, bucket=bucket, encryption_key=source_key) + dest = self._make_one(blob_name, bucket=bucket, encryption_key=dest_key) - token, rewritten, size = dest.rewrite(source, token=TOKEN) + token, rewritten, size = dest.rewrite(source, token=previous_token) self.assertIsNone(token) - self.assertEqual(rewritten, 42) - self.assertEqual(size, 42) - - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "POST") - PATH = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (BLOB_NAME, BLOB_NAME) - self.assertEqual(kw[0]["path"], PATH) - self.assertEqual(kw[0]["query_params"], {"rewriteToken": TOKEN}) - SENT = {} - self.assertEqual(kw[0]["data"], SENT) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) - - headers = {key.title(): str(value) for key, value in kw[0]["headers"].items()} - self.assertEqual(headers["X-Goog-Copy-Source-Encryption-Algorithm"], "AES256") - self.assertEqual(headers["X-Goog-Copy-Source-Encryption-Key"], SOURCE_KEY_B64) - self.assertEqual( - headers["X-Goog-Copy-Source-Encryption-Key-Sha256"], SOURCE_KEY_HASH_B64 + self.assertEqual(rewritten, bytes_rewritten) + self.assertEqual(size, object_size) + + expected_path = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (blob_name, blob_name) + expected_data = {} + expected_query_params = {"rewriteToken": previous_token} + expected_headers = { + "X-Goog-Copy-Source-Encryption-Algorithm": "AES256", + "X-Goog-Copy-Source-Encryption-Key": source_key_b64, + "X-Goog-Copy-Source-Encryption-Key-Sha256": source_key_hash_b64, + "X-Goog-Encryption-Algorithm": "AES256", + "X-Goog-Encryption-Key": dest_key_b64, + "X-Goog-Encryption-Key-Sha256": dest_key_hash_b64, + } + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=dest, ) - self.assertEqual(headers["X-Goog-Encryption-Algorithm"], "AES256") - self.assertEqual(headers["X-Goog-Encryption-Key"], DEST_KEY_B64) - self.assertEqual(headers["X-Goog-Encryption-Key-Sha256"], DEST_KEY_HASH_B64) def test_rewrite_same_name_w_old_key_new_kms_key(self): - SOURCE_KEY = b"01234567890123456789012345678901" # 32 bytes - SOURCE_KEY_B64 = base64.b64encode(SOURCE_KEY).rstrip().decode("ascii") - SOURCE_KEY_HASH = hashlib.sha256(SOURCE_KEY).digest() - SOURCE_KEY_HASH_B64 = base64.b64encode(SOURCE_KEY_HASH).rstrip().decode("ascii") - DEST_KMS_RESOURCE = ( + blob_name = "blob" + source_key = b"01234567890123456789012345678901" # 32 bytes + source_key_b64 = base64.b64encode(source_key).rstrip().decode("ascii") + source_key_hash = hashlib.sha256(source_key).digest() + source_key_hash_b64 = base64.b64encode(source_key_hash).rstrip().decode("ascii") + dest_kms_resource = ( "projects/test-project-123/" 
"locations/us/" "keyRings/test-ring/" "cryptoKeys/test-key" ) - BLOB_NAME = "blob" - RESPONSE = { - "totalBytesRewritten": 42, - "objectSize": 42, + bytes_rewritten = object_size = 42 + api_response = { + "totalBytesRewritten": bytes_rewritten, + "objectSize": object_size, "done": True, "resource": {"etag": "DEADBEEF"}, } - response = ({"status": http_client.OK}, RESPONSE) - connection = _Connection(response) - client = _Client(connection) + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response bucket = _Bucket(client=client) - source = self._make_one(BLOB_NAME, bucket=bucket, encryption_key=SOURCE_KEY) - dest = self._make_one(BLOB_NAME, bucket=bucket, kms_key_name=DEST_KMS_RESOURCE) + source = self._make_one(blob_name, bucket=bucket, encryption_key=source_key) + dest = self._make_one(blob_name, bucket=bucket, kms_key_name=dest_kms_resource) token, rewritten, size = dest.rewrite(source) self.assertIsNone(token) - self.assertEqual(rewritten, 42) - self.assertEqual(size, 42) - - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "POST") - PATH = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (BLOB_NAME, BLOB_NAME) - self.assertEqual(kw[0]["path"], PATH) - self.assertEqual( - kw[0]["query_params"], {"destinationKmsKeyName": DEST_KMS_RESOURCE} - ) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) - SENT = {"kmsKeyName": DEST_KMS_RESOURCE} - self.assertEqual(kw[0]["data"], SENT) + self.assertEqual(rewritten, bytes_rewritten) + self.assertEqual(size, object_size) - headers = {key.title(): str(value) for key, value in kw[0]["headers"].items()} - self.assertEqual(headers["X-Goog-Copy-Source-Encryption-Algorithm"], "AES256") - self.assertEqual(headers["X-Goog-Copy-Source-Encryption-Key"], SOURCE_KEY_B64) - self.assertEqual( - headers["X-Goog-Copy-Source-Encryption-Key-Sha256"], SOURCE_KEY_HASH_B64 + expected_path = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (blob_name, blob_name) + expected_data = {"kmsKeyName": dest_kms_resource} + expected_query_params = {"destinationKmsKeyName": dest_kms_resource} + expected_headers = { + "X-Goog-Copy-Source-Encryption-Algorithm": "AES256", + "X-Goog-Copy-Source-Encryption-Key": source_key_b64, + "X-Goog-Copy-Source-Encryption-Key-Sha256": source_key_hash_b64, + } + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=dest, ) def test_update_storage_class_invalid(self): - BLOB_NAME = "blob-name" + blob_name = "blob-name" bucket = _Bucket() - blob = self._make_one(BLOB_NAME, bucket=bucket) + blob = self._make_one(blob_name, bucket=bucket) + blob.rewrite = mock.Mock(spec=[]) + with self.assertRaises(ValueError): blob.update_storage_class(u"BOGUS") - def test_update_storage_class_large_file(self): - BLOB_NAME = "blob-name" - STORAGE_CLASS = u"NEARLINE" - TOKEN = "TOKEN" - INCOMPLETE_RESPONSE = { - "totalBytesRewritten": 42, - "objectSize": 84, - "done": False, - "rewriteToken": TOKEN, - "resource": {"storageClass": STORAGE_CLASS}, - } - COMPLETE_RESPONSE = { - "totalBytesRewritten": 84, - "objectSize": 84, - "done": True, - "resource": {"storageClass": STORAGE_CLASS}, - } - response_1 = ({"status": http_client.OK}, INCOMPLETE_RESPONSE) - response_2 = ({"status": http_client.OK}, COMPLETE_RESPONSE) - connection = _Connection(response_1, response_2) - client = _Client(connection) + 
blob.rewrite.assert_not_called() + + def _update_storage_class_multi_pass_helper(self, **kw): + blob_name = "blob-name" + storage_class = u"NEARLINE" + rewrite_token = "TOKEN" + bytes_rewritten = 42 + object_size = 84 + client = mock.Mock(spec=[]) bucket = _Bucket(client=client) - blob = self._make_one(BLOB_NAME, bucket=bucket) + blob = self._make_one(blob_name, bucket=bucket) + blob.rewrite = mock.Mock(spec=[]) + blob.rewrite.side_effect = [ + (rewrite_token, bytes_rewritten, object_size), + (None, object_size, object_size), + ] - blob.update_storage_class("NEARLINE") + expected_i_g_m = kw.get("if_generation_match") + expected_i_g_n_m = kw.get("if_generation_not_match") + expected_i_m_m = kw.get("if_metageneration_match") + expected_i_m_n_m = kw.get("if_metageneration_not_match") + expected_i_s_g_m = kw.get("if_source_generation_match") + expected_i_s_g_n_m = kw.get("if_source_generation_not_match") + expected_i_s_m_m = kw.get("if_source_metageneration_match") + expected_i_s_m_n_m = kw.get("if_source_metageneration_not_match") + expected_timeout = kw.get("timeout", self._get_default_timeout()) + expected_retry = kw.get("retry", DEFAULT_RETRY_IF_GENERATION_SPECIFIED) - self.assertEqual(blob.storage_class, "NEARLINE") + blob.update_storage_class(storage_class, **kw) - def test_update_storage_class_with_custom_timeout(self): - BLOB_NAME = "blob-name" - STORAGE_CLASS = u"NEARLINE" - TOKEN = "TOKEN" - INCOMPLETE_RESPONSE = { - "totalBytesRewritten": 42, - "objectSize": 84, - "done": False, - "rewriteToken": TOKEN, - "resource": {"storageClass": STORAGE_CLASS}, - } - COMPLETE_RESPONSE = { - "totalBytesRewritten": 84, - "objectSize": 84, - "done": True, - "resource": {"storageClass": STORAGE_CLASS}, - } - response_1 = ({"status": http_client.OK}, INCOMPLETE_RESPONSE) - response_2 = ({"status": http_client.OK}, COMPLETE_RESPONSE) - connection = _Connection(response_1, response_2) - client = _Client(connection) - bucket = _Bucket(client=client) - blob = self._make_one(BLOB_NAME, bucket=bucket) + self.assertEqual(blob.storage_class, storage_class) - blob.update_storage_class("NEARLINE", timeout=9.58) + call1 = mock.call( + blob, + if_generation_match=expected_i_g_m, + if_generation_not_match=expected_i_g_n_m, + if_metageneration_match=expected_i_m_m, + if_metageneration_not_match=expected_i_m_n_m, + if_source_generation_match=expected_i_s_g_m, + if_source_generation_not_match=expected_i_s_g_n_m, + if_source_metageneration_match=expected_i_s_m_m, + if_source_metageneration_not_match=expected_i_s_m_n_m, + timeout=expected_timeout, + retry=expected_retry, + ) + call2 = mock.call( + blob, + token=rewrite_token, + if_generation_match=expected_i_g_m, + if_generation_not_match=expected_i_g_n_m, + if_metageneration_match=expected_i_m_m, + if_metageneration_not_match=expected_i_m_n_m, + if_source_generation_match=expected_i_s_g_m, + if_source_generation_not_match=expected_i_s_g_n_m, + if_source_metageneration_match=expected_i_s_m_m, + if_source_metageneration_not_match=expected_i_s_m_n_m, + timeout=expected_timeout, + retry=expected_retry, + ) + blob.rewrite.assert_has_calls([call1, call2]) - self.assertEqual(blob.storage_class, "NEARLINE") + def test_update_storage_class_multi_pass_w_defaults(self): + self._update_storage_class_multi_pass_helper() - kw = connection._requested - self.assertEqual(len(kw), 2) + def test_update_storage_class_multi_pass_w_i_g_m(self): + generation = 16 + self._update_storage_class_multi_pass_helper(if_generation_match=generation) - for kw_item in kw: - self.assertIn("timeout", 
kw_item) - self.assertEqual(kw_item["timeout"], 9.58) + def test_update_storage_class_multi_pass_w_i_g_n_m(self): + generation = 16 + self._update_storage_class_multi_pass_helper(if_generation_not_match=generation) - def test_update_storage_class_wo_encryption_key(self): - BLOB_NAME = "blob-name" - STORAGE_CLASS = u"NEARLINE" - RESPONSE = { - "totalBytesRewritten": 42, - "objectSize": 42, - "done": True, - "resource": {"storageClass": STORAGE_CLASS}, - } - response = ({"status": http_client.OK}, RESPONSE) - connection = _Connection(response) - client = _Client(connection) + def test_update_storage_class_multi_pass_w_i_m_m(self): + metageneration = 16 + self._update_storage_class_multi_pass_helper( + if_metageneration_match=metageneration, + ) + + def test_update_storage_class_multi_pass_w_i_m_n_m(self): + metageneration = 16 + self._update_storage_class_multi_pass_helper( + if_metageneration_not_match=metageneration, + ) + + def test_update_storage_class_multi_pass_w_i_s_g_m(self): + generation = 16 + self._update_storage_class_multi_pass_helper( + if_source_generation_match=generation + ) + + def test_update_storage_class_multi_pass_w_i_s_g_n_m(self): + generation = 16 + self._update_storage_class_multi_pass_helper( + if_source_generation_not_match=generation + ) + + def test_update_storage_class_multi_pass_w_i_s_m_m(self): + metageneration = 16 + self._update_storage_class_multi_pass_helper( + if_source_metageneration_match=metageneration, + ) + + def test_update_storage_class_multi_pass_w_i_s_m_n_m(self): + metageneration = 16 + self._update_storage_class_multi_pass_helper( + if_source_metageneration_not_match=metageneration, + ) + + def test_update_storage_class_multi_pass_w_timeout(self): + timeout = 42 + self._update_storage_class_multi_pass_helper(timeout=timeout) + + def test_update_storage_class_multi_pass_w_retry(self): + retry = mock.Mock(spec=[]) + self._update_storage_class_multi_pass_helper(retry=retry) + + def _update_storage_class_single_pass_helper(self, **kw): + blob_name = "blob-name" + storage_class = u"NEARLINE" + object_size = 84 + client = mock.Mock(spec=[]) bucket = _Bucket(client=client) - blob = self._make_one(BLOB_NAME, bucket=bucket) + blob = self._make_one(blob_name, bucket=bucket) + blob.rewrite = mock.Mock(spec=[]) + blob.rewrite.return_value = (None, object_size, object_size) + + expected_i_g_m = kw.get("if_generation_match") + expected_i_g_n_m = kw.get("if_generation_not_match") + expected_i_m_m = kw.get("if_metageneration_match") + expected_i_m_n_m = kw.get("if_metageneration_not_match") + expected_i_s_g_m = kw.get("if_source_generation_match") + expected_i_s_g_n_m = kw.get("if_source_generation_not_match") + expected_i_s_m_m = kw.get("if_source_metageneration_match") + expected_i_s_m_n_m = kw.get("if_source_metageneration_not_match") + expected_timeout = kw.get("timeout", self._get_default_timeout()) + expected_retry = kw.get("retry", DEFAULT_RETRY_IF_GENERATION_SPECIFIED) + + blob.update_storage_class(storage_class, **kw) - blob.update_storage_class("NEARLINE") + self.assertEqual(blob.storage_class, storage_class) - self.assertEqual(blob.storage_class, "NEARLINE") + blob.rewrite.assert_called_once_with( + blob, + if_generation_match=expected_i_g_m, + if_generation_not_match=expected_i_g_n_m, + if_metageneration_match=expected_i_m_m, + if_metageneration_not_match=expected_i_m_n_m, + if_source_generation_match=expected_i_s_g_m, + if_source_generation_not_match=expected_i_s_g_n_m, + if_source_metageneration_match=expected_i_s_m_m, + 
if_source_metageneration_not_match=expected_i_s_m_n_m, + timeout=expected_timeout, + retry=expected_retry, + ) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "POST") - PATH = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (BLOB_NAME, BLOB_NAME) - self.assertEqual(kw[0]["path"], PATH) - self.assertEqual(kw[0]["query_params"], {}) - SENT = {"storageClass": STORAGE_CLASS} - self.assertEqual(kw[0]["data"], SENT) - - headers = {key.title(): str(value) for key, value in kw[0]["headers"].items()} - # Blob has no key, and therefore the relevant headers are not sent. - self.assertNotIn("X-Goog-Copy-Source-Encryption-Algorithm", headers) - self.assertNotIn("X-Goog-Copy-Source-Encryption-Key", headers) - self.assertNotIn("X-Goog-Copy-Source-Encryption-Key-Sha256", headers) - self.assertNotIn("X-Goog-Encryption-Algorithm", headers) - self.assertNotIn("X-Goog-Encryption-Key", headers) - self.assertNotIn("X-Goog-Encryption-Key-Sha256", headers) - - def test_update_storage_class_w_encryption_key_w_user_project(self): - BLOB_NAME = "blob-name" - BLOB_KEY = b"01234567890123456789012345678901" # 32 bytes - BLOB_KEY_B64 = base64.b64encode(BLOB_KEY).rstrip().decode("ascii") - BLOB_KEY_HASH = hashlib.sha256(BLOB_KEY).digest() - BLOB_KEY_HASH_B64 = base64.b64encode(BLOB_KEY_HASH).rstrip().decode("ascii") - STORAGE_CLASS = u"NEARLINE" - USER_PROJECT = "user-project-123" - RESPONSE = { - "totalBytesRewritten": 42, - "objectSize": 42, - "done": True, - "resource": {"storageClass": STORAGE_CLASS}, - } - response = ({"status": http_client.OK}, RESPONSE) - connection = _Connection(response) - client = _Client(connection) - bucket = _Bucket(client=client, user_project=USER_PROJECT) - blob = self._make_one(BLOB_NAME, bucket=bucket, encryption_key=BLOB_KEY) + def test_update_storage_class_single_pass_w_defaults(self): + self._update_storage_class_single_pass_helper() - blob.update_storage_class("NEARLINE") + def test_update_storage_class_single_pass_w_i_g_m(self): + generation = 16 + self._update_storage_class_single_pass_helper(if_generation_match=generation) - self.assertEqual(blob.storage_class, "NEARLINE") + def test_update_storage_class_single_pass_w_i_g_n_m(self): + generation = 16 + self._update_storage_class_single_pass_helper( + if_generation_not_match=generation + ) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "POST") - PATH = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (BLOB_NAME, BLOB_NAME) - self.assertEqual(kw[0]["path"], PATH) - self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT}) - SENT = {"storageClass": STORAGE_CLASS} - self.assertEqual(kw[0]["data"], SENT) - - headers = {key.title(): str(value) for key, value in kw[0]["headers"].items()} - # Blob has key, and therefore the relevant headers are sent. 
- self.assertEqual(headers["X-Goog-Copy-Source-Encryption-Algorithm"], "AES256") - self.assertEqual(headers["X-Goog-Copy-Source-Encryption-Key"], BLOB_KEY_B64) - self.assertEqual( - headers["X-Goog-Copy-Source-Encryption-Key-Sha256"], BLOB_KEY_HASH_B64 + def test_update_storage_class_single_pass_w_i_m_m(self): + metageneration = 16 + self._update_storage_class_single_pass_helper( + if_metageneration_match=metageneration, ) - self.assertEqual(headers["X-Goog-Encryption-Algorithm"], "AES256") - self.assertEqual(headers["X-Goog-Encryption-Key"], BLOB_KEY_B64) - self.assertEqual(headers["X-Goog-Encryption-Key-Sha256"], BLOB_KEY_HASH_B64) - def test_update_storage_class_w_generation_match(self): - BLOB_NAME = "blob-name" - STORAGE_CLASS = u"NEARLINE" - GENERATION_NUMBER = 6 - SOURCE_GENERATION_NUMBER = 9 - RESPONSE = { - "totalBytesRewritten": 42, - "objectSize": 42, - "done": True, - "resource": {"storageClass": STORAGE_CLASS}, - } - response = ({"status": http_client.OK}, RESPONSE) - connection = _Connection(response) - client = _Client(connection) - bucket = _Bucket(client=client) - blob = self._make_one(BLOB_NAME, bucket=bucket) + def test_update_storage_class_single_pass_w_i_m_n_m(self): + metageneration = 16 + self._update_storage_class_single_pass_helper( + if_metageneration_not_match=metageneration, + ) - blob.update_storage_class( - "NEARLINE", - if_generation_match=GENERATION_NUMBER, - if_source_generation_match=SOURCE_GENERATION_NUMBER, + def test_update_storage_class_single_pass_w_i_s_g_m(self): + generation = 16 + self._update_storage_class_single_pass_helper( + if_source_generation_match=generation ) - self.assertEqual(blob.storage_class, "NEARLINE") + def test_update_storage_class_single_pass_w_i_s_g_n_m(self): + generation = 16 + self._update_storage_class_single_pass_helper( + if_source_generation_not_match=generation + ) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "POST") - PATH = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (BLOB_NAME, BLOB_NAME) - self.assertEqual(kw[0]["path"], PATH) - self.assertEqual( - kw[0]["query_params"], - { - "ifGenerationMatch": GENERATION_NUMBER, - "ifSourceGenerationMatch": SOURCE_GENERATION_NUMBER, - }, + def test_update_storage_class_single_pass_w_i_s_m_m(self): + metageneration = 16 + self._update_storage_class_single_pass_helper( + if_source_metageneration_match=metageneration, ) - SENT = {"storageClass": STORAGE_CLASS} - self.assertEqual(kw[0]["data"], SENT) + + def test_update_storage_class_single_pass_w_i_s_m_n_m(self): + metageneration = 16 + self._update_storage_class_single_pass_helper( + if_source_metageneration_not_match=metageneration, + ) + + def test_update_storage_class_single_pass_w_timeout(self): + timeout = 42 + self._update_storage_class_single_pass_helper(timeout=timeout) + + def test_update_storage_class_single_pass_w_retry(self): + retry = mock.Mock(spec=[]) + self._update_storage_class_single_pass_helper(retry=retry) def test_cache_control_getter(self): BLOB_NAME = "blob-name" @@ -4956,19 +5017,8 @@ class _Connection(object): USER_AGENT = "testing 1.2.3" credentials = object() - def __init__(self, *responses): - self._responses = responses[:] - self._requested = [] - self._signed = [] - - def _respond(self, **kw): - self._requested.append(kw) - response, self._responses = self._responses[0], self._responses[1:] - return response - - def api_request(self, **kw): - info, content = self._respond(**kw) - return content + def __init__(self): + pass class _Bucket(object): @@ -5016,10 
+5066,6 @@ class _Client(object): def __init__(self, connection): self._base_connection = connection - @property - def _connection(self): - return self._base_connection - @property def _credentials(self): return self._base_connection.credentials diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py index 410c9d9b6..50fe02c0e 100644 --- a/tests/unit/test_bucket.py +++ b/tests/unit/test_bucket.py @@ -24,15 +24,6 @@ from google.cloud.storage.retry import DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED -def _make_connection(*responses): - import google.cloud.storage._http - - mock_connection = mock.create_autospec(google.cloud.storage._http.Connection) - mock_connection.user_agent = "testing 1.2.3" - mock_connection.api_request.side_effect = list(responses) - return mock_connection - - def _create_signing_credentials(): import google.auth.credentials @@ -1651,111 +1642,124 @@ def _make_blob(bucket_name, blob_name): return blob def test_copy_blobs_wo_name(self): - SOURCE = "source" - DEST = "dest" - BLOB_NAME = "blob-name" - connection = _Connection({}) - client = _Client(connection) - source = self._make_one(client=client, name=SOURCE) - dest = self._make_one(client=client, name=DEST) - blob = self._make_blob(SOURCE, BLOB_NAME) + source_name = "source" + dest_name = "dest" + blob_name = "blob-name" + api_response = {} + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response + source = self._make_one(client=client, name=source_name) + dest = self._make_one(client=client, name=dest_name) + blob = self._make_blob(source_name, blob_name) - new_blob = source.copy_blob(blob, dest, timeout=42) + new_blob = source.copy_blob(blob, dest) self.assertIs(new_blob.bucket, dest) - self.assertEqual(new_blob.name, BLOB_NAME) + self.assertEqual(new_blob.name, blob_name) - (kw,) = connection._requested - COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format( - SOURCE, BLOB_NAME, DEST, BLOB_NAME + expected_path = "/b/{}/o/{}/copyTo/b/{}/o/{}".format( + source_name, blob_name, dest_name, blob_name + ) + expected_data = None + expected_query_params = {} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=new_blob, ) - self.assertEqual(kw["method"], "POST") - self.assertEqual(kw["path"], COPY_PATH) - self.assertEqual(kw["query_params"], {}) - self.assertEqual(kw["timeout"], 42) - self.assertEqual(kw["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) - - def test_copy_blobs_source_generation(self): - SOURCE = "source" - DEST = "dest" - BLOB_NAME = "blob-name" - GENERATION = 1512565576797178 - connection = _Connection({}) - client = _Client(connection) - source = self._make_one(client=client, name=SOURCE) - dest = self._make_one(client=client, name=DEST) - blob = self._make_blob(SOURCE, BLOB_NAME) + def test_copy_blob_w_source_generation_w_timeout(self): + source_name = "source" + dest_name = "dest" + blob_name = "blob-name" + generation = 1512565576797178 + api_response = {} + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response + source = self._make_one(client=client, name=source_name) + dest = self._make_one(client=client, name=dest_name) + blob = self._make_blob(source_name, blob_name) + timeout = 42 - new_blob = source.copy_blob(blob, dest, source_generation=GENERATION) + new_blob = source.copy_blob( + blob, dest, source_generation=generation, timeout=timeout, + 
) self.assertIs(new_blob.bucket, dest) - self.assertEqual(new_blob.name, BLOB_NAME) + self.assertEqual(new_blob.name, blob_name) - (kw,) = connection._requested - COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format( - SOURCE, BLOB_NAME, DEST, BLOB_NAME + expected_path = "/b/{}/o/{}/copyTo/b/{}/o/{}".format( + source_name, blob_name, dest_name, blob_name + ) + expected_data = None + expected_query_params = {"sourceGeneration": generation} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=timeout, + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=new_blob, ) - self.assertEqual(kw["method"], "POST") - self.assertEqual(kw["path"], COPY_PATH) - self.assertEqual(kw["query_params"], {"sourceGeneration": GENERATION}) - self.assertEqual(kw["timeout"], self._get_default_timeout()) - self.assertEqual(kw["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) - - def test_copy_blobs_w_generation_match(self): - SOURCE = "source" - DEST = "dest" - BLOB_NAME = "blob-name" - GENERATION_NUMBER = 6 - SOURCE_GENERATION_NUMBER = 9 - connection = _Connection({}) - client = _Client(connection) - source = self._make_one(client=client, name=SOURCE) - dest = self._make_one(client=client, name=DEST) - blob = self._make_blob(SOURCE, BLOB_NAME) + def test_copy_blob_w_generation_match_w_retry(self): + source_name = "source" + dest_name = "dest" + blob_name = "blob-name" + generation_number = 6 + source_generation_number = 9 + api_response = {} + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response + source = self._make_one(client=client, name=source_name) + dest = self._make_one(client=client, name=dest_name) + blob = self._make_blob(source_name, blob_name) + retry = mock.Mock(spec=[]) new_blob = source.copy_blob( blob, dest, - if_generation_match=GENERATION_NUMBER, - if_source_generation_match=SOURCE_GENERATION_NUMBER, + if_generation_match=generation_number, + if_source_generation_match=source_generation_number, + retry=retry, ) self.assertIs(new_blob.bucket, dest) - self.assertEqual(new_blob.name, BLOB_NAME) + self.assertEqual(new_blob.name, blob_name) - (kw,) = connection._requested - COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format( - SOURCE, BLOB_NAME, DEST, BLOB_NAME + expected_path = "/b/{}/o/{}/copyTo/b/{}/o/{}".format( + source_name, blob_name, dest_name, blob_name ) - self.assertEqual(kw["method"], "POST") - self.assertEqual(kw["path"], COPY_PATH) - self.assertEqual( - kw["query_params"], - { - "ifGenerationMatch": GENERATION_NUMBER, - "ifSourceGenerationMatch": SOURCE_GENERATION_NUMBER, - }, + expected_data = None + expected_query_params = { + "ifGenerationMatch": generation_number, + "ifSourceGenerationMatch": source_generation_number, + } + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=retry, + _target_object=new_blob, ) - self.assertEqual(kw["timeout"], self._get_default_timeout()) - self.assertEqual(kw["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) - def test_copy_blobs_preserve_acl(self): + def test_copy_blob_w_preserve_acl_false_w_explicit_client(self): from google.cloud.storage.acl import ObjectACL source_name = "source" dest_name = "dest" blob_name = "blob-name" new_name = "new_name" - - connection = _Connection({}, {}) - client = _Client(connection) - - # Temporary, until we get a real client in place. 
- client._patch_resource = mock.Mock(return_value={}) - - source = self._make_one(client=client, name=source_name) - dest = self._make_one(client=client, name=dest_name) + post_api_response = {} + patch_api_response = {} + client = mock.Mock(spec=["_post_resource", "_patch_resource"]) + client._post_resource.return_value = post_api_response + client._patch_resource.return_value = patch_api_response + source = self._make_one(client=None, name=source_name) + dest = self._make_one(client=None, name=dest_name) blob = self._make_blob(source_name, blob_name) new_blob = source.copy_blob( @@ -1766,170 +1770,159 @@ def test_copy_blobs_preserve_acl(self): self.assertEqual(new_blob.name, new_name) self.assertIsInstance(new_blob.acl, ObjectACL) - (kw1,) = connection._requested - copy_path = "/b/{}/o/{}/copyTo/b/{}/o/{}".format( + expected_copy_path = "/b/{}/o/{}/copyTo/b/{}/o/{}".format( source_name, blob_name, dest_name, new_name ) - self.assertEqual(kw1["method"], "POST") - self.assertEqual(kw1["path"], copy_path) - self.assertEqual(kw1["query_params"], {}) - self.assertEqual(kw1["timeout"], self._get_default_timeout()) - self.assertEqual(kw1["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) + expected_copy_data = None + expected_copy_query_params = {} + client._post_resource.assert_called_once_with( + expected_copy_path, + expected_copy_data, + query_params=expected_copy_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=new_blob, + ) expected_patch_path = "/b/{}/o/{}".format(dest_name, new_name) - expected_data = {"acl": []} - expected_query_params = {"projection": "full"} + expected_patch_data = {"acl": []} + expected_patch_query_params = {"projection": "full"} client._patch_resource.assert_called_once_with( expected_patch_path, - expected_data, - query_params=expected_query_params, + expected_patch_data, + query_params=expected_patch_query_params, timeout=self._get_default_timeout(), retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, ) - def test_copy_blobs_w_name_and_user_project(self): - SOURCE = "source" - DEST = "dest" - BLOB_NAME = "blob-name" - NEW_NAME = "new_name" - USER_PROJECT = "user-project-123" - connection = _Connection({}) - client = _Client(connection) - source = self._make_one(client=client, name=SOURCE, user_project=USER_PROJECT) - dest = self._make_one(client=client, name=DEST) - blob = self._make_blob(SOURCE, BLOB_NAME) + def test_copy_blob_w_name_and_user_project(self): + source_name = "source" + dest_name = "dest" + blob_name = "blob-name" + new_name = "new_name" + user_project = "user-project-123" + api_response = {} + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response + source = self._make_one( + client=client, name=source_name, user_project=user_project + ) + dest = self._make_one(client=client, name=dest_name) + blob = self._make_blob(source_name, blob_name) - new_blob = source.copy_blob(blob, dest, NEW_NAME) + new_blob = source.copy_blob(blob, dest, new_name) self.assertIs(new_blob.bucket, dest) - self.assertEqual(new_blob.name, NEW_NAME) + self.assertEqual(new_blob.name, new_name) - COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format( - SOURCE, BLOB_NAME, DEST, NEW_NAME + expected_path = "/b/{}/o/{}/copyTo/b/{}/o/{}".format( + source_name, blob_name, dest_name, new_name ) - (kw,) = connection._requested - self.assertEqual(kw["method"], "POST") - self.assertEqual(kw["path"], COPY_PATH) - self.assertEqual(kw["query_params"], {"userProject": USER_PROJECT}) - 
self.assertEqual(kw["timeout"], self._get_default_timeout()) - self.assertEqual(kw["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) - - def test_rename_blob(self): - BUCKET_NAME = "BUCKET_NAME" - BLOB_NAME = "blob-name" - NEW_BLOB_NAME = "new-blob-name" - DATA = {"name": NEW_BLOB_NAME} - connection = _Connection(DATA) - client = _Client(connection) - bucket = self._make_one(client=client, name=BUCKET_NAME) - blob = self._make_blob(BUCKET_NAME, BLOB_NAME) - - renamed_blob = bucket.rename_blob( - blob, NEW_BLOB_NAME, client=client, timeout=42 + expected_data = None + expected_query_params = {"userProject": user_project} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=new_blob, ) - self.assertIs(renamed_blob.bucket, bucket) - self.assertEqual(renamed_blob.name, NEW_BLOB_NAME) + def _rename_blob_helper(self, explicit_client=False, same_name=False, **kw): + bucket_name = "BUCKET_NAME" + blob_name = "blob-name" - COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format( - BUCKET_NAME, BLOB_NAME, BUCKET_NAME, NEW_BLOB_NAME - ) - (kw,) = connection._requested - self.assertEqual(kw["method"], "POST") - self.assertEqual(kw["path"], COPY_PATH) - self.assertEqual(kw["query_params"], {}) - self.assertEqual(kw["timeout"], 42) - self.assertEqual(kw["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) + if same_name: + new_blob_name = blob_name + else: + new_blob_name = "new-blob-name" - blob.delete.assert_called_once_with( - client=client, - timeout=42, - if_generation_match=None, - if_generation_not_match=None, - if_metageneration_match=None, - if_metageneration_not_match=None, - retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, - ) + client = mock.Mock(spec=[]) + kw = kw.copy() - def test_rename_blob_with_generation_match(self): - BUCKET_NAME = "BUCKET_NAME" - BLOB_NAME = "blob-name" - NEW_BLOB_NAME = "new-blob-name" - DATA = {"name": NEW_BLOB_NAME} - GENERATION_NUMBER = 6 - SOURCE_GENERATION_NUMBER = 7 - SOURCE_METAGENERATION_NUMBER = 9 - - connection = _Connection(DATA) - client = _Client(connection) - bucket = self._make_one(client=client, name=BUCKET_NAME) - blob = self._make_blob(BUCKET_NAME, BLOB_NAME) + if explicit_client: + bucket = self._make_one(client=None, name=bucket_name) + expected_client = kw["client"] = client + else: + bucket = self._make_one(client=client, name=bucket_name) + expected_client = None - renamed_blob = bucket.rename_blob( - blob, - NEW_BLOB_NAME, - client=client, - timeout=42, - if_generation_match=GENERATION_NUMBER, - if_source_generation_match=SOURCE_GENERATION_NUMBER, - if_source_metageneration_not_match=SOURCE_METAGENERATION_NUMBER, - ) + expected_i_g_m = kw.get("if_generation_match") + expected_i_g_n_m = kw.get("if_generation_not_match") + expected_i_m_m = kw.get("if_metageneration_match") + expected_i_m_n_m = kw.get("if_metageneration_not_match") + expected_i_s_g_m = kw.get("if_source_generation_match") + expected_i_s_g_n_m = kw.get("if_source_generation_not_match") + expected_i_s_m_m = kw.get("if_source_metageneration_match") + expected_i_s_m_n_m = kw.get("if_source_metageneration_not_match") + expected_timeout = kw.get("timeout", self._get_default_timeout()) + expected_retry = kw.get("retry", DEFAULT_RETRY_IF_GENERATION_SPECIFIED) - self.assertIs(renamed_blob.bucket, bucket) - self.assertEqual(renamed_blob.name, NEW_BLOB_NAME) + bucket.copy_blob = mock.Mock(spec=[]) + blob = self._make_blob(bucket_name, 
blob_name) - COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format( - BUCKET_NAME, BLOB_NAME, BUCKET_NAME, NEW_BLOB_NAME - ) - (kw,) = connection._requested - self.assertEqual(kw["method"], "POST") - self.assertEqual(kw["path"], COPY_PATH) - self.assertEqual( - kw["query_params"], - { - "ifGenerationMatch": GENERATION_NUMBER, - "ifSourceGenerationMatch": SOURCE_GENERATION_NUMBER, - "ifSourceMetagenerationNotMatch": SOURCE_METAGENERATION_NUMBER, - }, - ) - self.assertEqual(kw["timeout"], 42) - self.assertEqual(kw["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) + renamed_blob = bucket.rename_blob(blob, new_blob_name, **kw) - blob.delete.assert_called_once_with( - client=client, - timeout=42, - if_generation_match=SOURCE_GENERATION_NUMBER, - if_generation_not_match=None, - if_metageneration_match=None, - if_metageneration_not_match=SOURCE_METAGENERATION_NUMBER, - retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, - ) + self.assertIs(renamed_blob, bucket.copy_blob.return_value) - def test_rename_blob_to_itself(self): - BUCKET_NAME = "BUCKET_NAME" - BLOB_NAME = "blob-name" - DATA = {"name": BLOB_NAME} - connection = _Connection(DATA) - client = _Client(connection) - bucket = self._make_one(client=client, name=BUCKET_NAME) - blob = self._make_blob(BUCKET_NAME, BLOB_NAME) + bucket.copy_blob.assert_called_once_with( + blob, + bucket, + new_blob_name, + client=expected_client, + if_generation_match=expected_i_g_m, + if_generation_not_match=expected_i_g_n_m, + if_metageneration_match=expected_i_m_m, + if_metageneration_not_match=expected_i_m_n_m, + if_source_generation_match=expected_i_s_g_m, + if_source_generation_not_match=expected_i_s_g_n_m, + if_source_metageneration_match=expected_i_s_m_m, + if_source_metageneration_not_match=expected_i_s_m_n_m, + timeout=expected_timeout, + retry=expected_retry, + ) + + if same_name: + blob.delete.assert_not_called() + else: + blob.delete.assert_called_once_with( + client=expected_client, + if_generation_match=expected_i_s_g_m, + if_generation_not_match=expected_i_s_g_n_m, + if_metageneration_match=expected_i_s_m_m, + if_metageneration_not_match=expected_i_s_m_n_m, + timeout=expected_timeout, + retry=expected_retry, + ) - renamed_blob = bucket.rename_blob(blob, BLOB_NAME) + def test_rename_blob_w_defaults(self): + self._rename_blob_helper() - self.assertIs(renamed_blob.bucket, bucket) - self.assertEqual(renamed_blob.name, BLOB_NAME) + def test_rename_blob_w_explicit_client(self): + self._rename_blob_helper(explicit_client=True) + + def test_rename_blob_w_generation_match(self): + generation_number = 6 + source_generation_number = 7 + source_metageneration_number = 9 - COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format( - BUCKET_NAME, BLOB_NAME, BUCKET_NAME, BLOB_NAME + self._rename_blob_helper( + if_generation_match=generation_number, + if_source_generation_match=source_generation_number, + if_source_metageneration_not_match=source_metageneration_number, ) - (kw,) = connection._requested - self.assertEqual(kw["method"], "POST") - self.assertEqual(kw["path"], COPY_PATH) - self.assertEqual(kw["query_params"], {}) - self.assertEqual(kw["timeout"], self._get_default_timeout()) - self.assertEqual(kw["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) - blob.delete.assert_not_called() + def test_rename_blob_w_timeout(self): + timeout = 42 + self._rename_blob_helper(timeout=timeout) + + def test_rename_blob_w_retry(self): + retry = mock.Mock(spec={}) + self._rename_blob_helper(retry=retry) + + def test_rename_blob_to_itself(self): + self._rename_blob_helper(same_name=True) def 
test_etag(self): ETAG = "ETAG" @@ -2543,23 +2536,22 @@ def test_versioning_enabled_getter(self): self.assertEqual(bucket.versioning_enabled, True) @mock.patch("warnings.warn") - def test_create_deprecated(self, mock_warn): - PROJECT = "PROJECT" - BUCKET_NAME = "bucket-name" - DATA = {"name": BUCKET_NAME} - connection = _make_connection(DATA) - client = self._make_client(project=PROJECT) - client._base_connection = connection + def test_create_w_defaults_deprecated(self, mock_warn): + bucket_name = "bucket-name" + api_response = {"name": bucket_name} + client = mock.Mock(spec=["create_bucket"]) + client.create_bucket.return_value = api_response + bucket = self._make_one(client=client, name=bucket_name) - bucket = self._make_one(client=client, name=BUCKET_NAME) bucket.create() - connection.api_request.assert_called_once_with( - method="POST", - path="/b", - query_params={"project": PROJECT}, - data=DATA, - _target_object=bucket, + client.create_bucket.assert_called_once_with( + bucket_or_name=bucket, + project=None, + user_project=None, + location=None, + predefined_acl=None, + predefined_default_object_acl=None, timeout=self._get_default_timeout(), retry=DEFAULT_RETRY, ) @@ -2572,26 +2564,40 @@ def test_create_deprecated(self, mock_warn): ) @mock.patch("warnings.warn") - def test_create_w_user_project(self, mock_warn): - PROJECT = "PROJECT" - BUCKET_NAME = "bucket-name" - DATA = {"name": BUCKET_NAME} - connection = _make_connection(DATA) - client = self._make_client(project=PROJECT) - client._base_connection = connection + def test_create_w_explicit_deprecated(self, mock_warn): + project = "PROJECT" + location = "eu" + user_project = "USER_PROJECT" + bucket_name = "bucket-name" + predefined_acl = "authenticatedRead" + predefined_default_object_acl = "bucketOwnerFullControl" + api_response = {"name": bucket_name} + client = mock.Mock(spec=["create_bucket"]) + client.create_bucket.return_value = api_response + bucket = self._make_one(client=None, name=bucket_name) + bucket._user_project = user_project + timeout = 42 + retry = mock.Mock(spec=[]) - bucket = self._make_one(client=client, name=BUCKET_NAME) - bucket._user_project = "USER_PROJECT" - bucket.create() + bucket.create( + client=client, + project=project, + location=location, + predefined_acl=predefined_acl, + predefined_default_object_acl=predefined_default_object_acl, + timeout=timeout, + retry=retry, + ) - connection.api_request.assert_called_once_with( - method="POST", - path="/b", - query_params={"project": PROJECT, "userProject": "USER_PROJECT"}, - data=DATA, - _target_object=bucket, - timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, + client.create_bucket.assert_called_once_with( + bucket_or_name=bucket, + project=project, + user_project=user_project, + location=location, + predefined_acl=predefined_acl, + predefined_default_object_acl=predefined_default_object_acl, + timeout=timeout, + retry=retry, ) mock_warn.assert_called_with( @@ -3483,10 +3489,7 @@ def test_generate_upload_policy_bad_credentials(self): bucket.generate_upload_policy([]) def test_lock_retention_policy_no_policy_set(self): - credentials = object() - connection = _Connection() - connection.credentials = credentials - client = _Client(connection) + client = mock.Mock(spec=["_post_resource"]) name = "name" bucket = self._make_one(client=client, name=name) bucket._properties["metageneration"] = 1234 @@ -3494,11 +3497,10 @@ def test_lock_retention_policy_no_policy_set(self): with self.assertRaises(ValueError): bucket.lock_retention_policy() + 
client._post_resource.assert_not_called() + def test_lock_retention_policy_no_metageneration(self): - credentials = object() - connection = _Connection() - connection.credentials = credentials - client = _Client(connection) + client = mock.Mock(spec=["_post_resource"]) name = "name" bucket = self._make_one(client=client, name=name) bucket._properties["retentionPolicy"] = { @@ -3509,11 +3511,10 @@ def test_lock_retention_policy_no_metageneration(self): with self.assertRaises(ValueError): bucket.lock_retention_policy() + client._post_resource.assert_not_called() + def test_lock_retention_policy_already_locked(self): - credentials = object() - connection = _Connection() - connection.credentials = credentials - client = _Client(connection) + client = mock.Mock(spec=["_post_resource"]) name = "name" bucket = self._make_one(client=client, name=name) bucket._properties["metageneration"] = 1234 @@ -3526,69 +3527,88 @@ def test_lock_retention_policy_already_locked(self): with self.assertRaises(ValueError): bucket.lock_retention_policy() - def test_lock_retention_policy_ok(self): + client._post_resource.assert_not_called() + + def test_lock_retention_policy_ok_w_timeout_w_retry(self): name = "name" - response = { + effective_time = "2018-03-01T16:46:27.123456Z" + one_hundred_days = 86400 * 100 # seconds in 100 days + metageneration = 1234 + api_response = { "name": name, - "metageneration": 1235, + "metageneration": metageneration + 1, "retentionPolicy": { - "effectiveTime": "2018-03-01T16:46:27.123456Z", + "effectiveTime": effective_time, "isLocked": True, - "retentionPeriod": 86400 * 100, # 100 days + "retentionPeriod": one_hundred_days, }, } - credentials = object() - connection = _Connection(response) - connection.credentials = credentials - client = _Client(connection) + metageneration = 1234 + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response bucket = self._make_one(client=client, name=name) - bucket._properties["metageneration"] = 1234 + bucket._properties["metageneration"] = metageneration bucket._properties["retentionPolicy"] = { - "effectiveTime": "2018-03-01T16:46:27.123456Z", - "retentionPeriod": 86400 * 100, # 100 days + "effectiveTime": effective_time, + "retentionPeriod": one_hundred_days, } + timeout = 42 + retry = mock.Mock(spec=[]) - bucket.lock_retention_policy(timeout=42) + bucket.lock_retention_policy(timeout=timeout, retry=retry) - (kw,) = connection._requested - self.assertEqual(kw["method"], "POST") - self.assertEqual(kw["path"], "/b/{}/lockRetentionPolicy".format(name)) - self.assertEqual(kw["query_params"], {"ifMetagenerationMatch": 1234}) - self.assertEqual(kw["timeout"], 42) + expected_path = "/b/{}/lockRetentionPolicy".format(name) + expected_data = None + expected_query_params = {"ifMetagenerationMatch": metageneration} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + _target_object=bucket, + ) def test_lock_retention_policy_w_user_project(self): name = "name" user_project = "user-project-123" - response = { + metageneration = 1234 + effective_time = "2018-03-01T16:46:27.123456Z" + one_hundred_days = 86400 * 100 # seconds in 100 days + api_response = { "name": name, - "metageneration": 1235, + "metageneration": metageneration + 1, "retentionPolicy": { - "effectiveTime": "2018-03-01T16:46:27.123456Z", + "effectiveTime": effective_time, "isLocked": True, - "retentionPeriod": 86400 * 100, # 100 days + "retentionPeriod": 
one_hundred_days, }, } - credentials = object() - connection = _Connection(response) - connection.credentials = credentials - client = _Client(connection) + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response bucket = self._make_one(client=client, name=name, user_project=user_project) bucket._properties["metageneration"] = 1234 bucket._properties["retentionPolicy"] = { - "effectiveTime": "2018-03-01T16:46:27.123456Z", - "retentionPeriod": 86400 * 100, # 100 days + "effectiveTime": effective_time, + "retentionPeriod": one_hundred_days, } bucket.lock_retention_policy() - (kw,) = connection._requested - self.assertEqual(kw["method"], "POST") - self.assertEqual(kw["path"], "/b/{}/lockRetentionPolicy".format(name)) - self.assertEqual( - kw["query_params"], - {"ifMetagenerationMatch": 1234, "userProject": user_project}, + expected_path = "/b/{}/lockRetentionPolicy".format(name) + expected_data = None + expected_query_params = { + "ifMetagenerationMatch": metageneration, + "userProject": user_project, + } + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=bucket, ) - self.assertEqual(kw["timeout"], self._get_default_timeout()) def test_generate_signed_url_w_invalid_version(self): expiration = "2014-10-16T20:34:37.000Z" diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 6d34d935a..7a9c0e880 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -593,6 +593,69 @@ def test__put_resource_hit_w_explicit(self): _target_object=target, ) + def test__post_resource_miss_w_defaults(self): + from google.cloud.exceptions import NotFound + + project = "PROJECT" + path = "/path/to/something" + credentials = _make_credentials() + data = {"baz": "Baz"} + + client = self._make_one(project=project, credentials=credentials) + connection = client._base_connection = _make_connection() + + with self.assertRaises(NotFound): + client._post_resource(path, data) + + connection.api_request.assert_called_once_with( + method="POST", + path=path, + data=data, + query_params=None, + headers=None, + timeout=self._get_default_timeout(), + retry=None, + _target_object=None, + ) + + def test__post_resource_hit_w_explicit(self): + project = "PROJECT" + path = "/path/to/something" + data = {"baz": "Baz"} + query_params = {"foo": "Foo"} + headers = {"bar": "Bar"} + timeout = 100 + retry = mock.Mock(spec=[]) + credentials = _make_credentials() + + client = self._make_one(project=project, credentials=credentials) + expected = mock.Mock(spec={}) + connection = client._base_connection = _make_connection(expected) + target = mock.Mock(spec={}) + + found = client._post_resource( + path, + data, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=target, + ) + + self.assertIs(found, expected) + + connection.api_request.assert_called_once_with( + method="POST", + path=path, + data=data, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=target, + ) + def test__delete_resource_miss_w_defaults(self): from google.cloud.exceptions import NotFound @@ -955,7 +1018,7 @@ def test_create_bucket_w_missing_client_project(self): with self.assertRaises(ValueError): client.create_bucket("bucket") - def test_create_bucket_w_conflict(self): + def test_create_bucket_w_conflict_w_user_project(self): from google.cloud.exceptions import 
Conflict

         project = "PROJECT"
@@ -963,62 +1026,60 @@
         other_project = "OTHER_PROJECT"
         credentials = _make_credentials()
         client = self._make_one(project=project, credentials=credentials)
-        connection = _make_connection()
-        client._base_connection = connection
-        connection.api_request.side_effect = Conflict("testing")
+        client._post_resource = mock.Mock()
+        client._post_resource.side_effect = Conflict("testing")
         bucket_name = "bucket-name"
-        data = {"name": bucket_name}

         with self.assertRaises(Conflict):
             client.create_bucket(
                 bucket_name, project=other_project, user_project=user_project
             )

-        connection.api_request.assert_called_once_with(
-            method="POST",
-            path="/b",
-            query_params={"project": other_project, "userProject": user_project},
-            data=data,
-            _target_object=mock.ANY,
+        expected_path = "/b"
+        expected_data = {"name": bucket_name}
+        expected_query_params = {
+            "project": other_project,
+            "userProject": user_project,
+        }
+        client._post_resource.assert_called_once_with(
+            expected_path,
+            expected_data,
+            query_params=expected_query_params,
             timeout=self._get_default_timeout(),
             retry=DEFAULT_RETRY,
+            _target_object=mock.ANY,
         )

     @mock.patch("warnings.warn")
-    def test_create_requester_pays_deprecated(self, mock_warn):
+    def test_create_bucket_w_requester_pays_deprecated(self, mock_warn):
         from google.cloud.storage.bucket import Bucket

+        bucket_name = "bucket-name"
         project = "PROJECT"
         credentials = _make_credentials()
+        api_response = {"name": bucket_name, "billing": {"requesterPays": True}}
         client = self._make_one(project=project, credentials=credentials)
-        bucket_name = "bucket-name"
-        json_expected = {"name": bucket_name, "billing": {"requesterPays": True}}
-        http = _make_requests_session([_make_json_response(json_expected)])
-        client._http_internal = http
+        client._post_resource = mock.Mock()
+        client._post_resource.return_value = api_response

         bucket = client.create_bucket(bucket_name, requester_pays=True)

         self.assertIsInstance(bucket, Bucket)
         self.assertEqual(bucket.name, bucket_name)
         self.assertTrue(bucket.requester_pays)
-        http.request.assert_called_once_with(
-            method="POST",
-            url=mock.ANY,
-            data=mock.ANY,
-            headers=mock.ANY,
-            timeout=mock.ANY,
-        )
-        _, kwargs = http.request.call_args
-        scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url"))
-        self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL)
-        self.assertEqual(
-            path, "/".join(["", "storage", client._connection.API_VERSION, "b"])
+
+        expected_path = "/b"
+        expected_data = api_response
+        expected_query_params = {"project": project}
+        client._post_resource.assert_called_once_with(
+            expected_path,
+            expected_data,
+            query_params=expected_query_params,
+            timeout=self._get_default_timeout(),
+            retry=DEFAULT_RETRY,
+            _target_object=mock.ANY,
         )
-        parms = dict(urlparse.parse_qsl(qs))
-        self.assertEqual(parms["project"], project)
-        json_sent = http.request.call_args_list[0][1]["data"]
-        self.assertEqual(json_expected, json.loads(json_sent))

         mock_warn.assert_called_with(
             "requester_pays arg is deprecated. 
Use Bucket().requester_pays instead.", @@ -1031,31 +1092,40 @@ def test_create_bucket_w_predefined_acl_invalid(self): bucket_name = "bucket-name" credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) + client._post_resource = mock.Mock() with self.assertRaises(ValueError): client.create_bucket(bucket_name, predefined_acl="bogus") - def test_create_bucket_w_predefined_acl_valid(self): + client._post_resource.assert_not_called() + + def test_create_bucket_w_predefined_acl_valid_w_timeout(self): project = "PROJECT" bucket_name = "bucket-name" - data = {"name": bucket_name} - + api_response = {"name": bucket_name} credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) - connection = _make_connection(data) - client._base_connection = connection + client._post_resource = mock.Mock() + client._post_resource.return_value = api_response + timeout = 42 + bucket = client.create_bucket( - bucket_name, predefined_acl="publicRead", timeout=42 + bucket_name, predefined_acl="publicRead", timeout=timeout, ) - connection.api_request.assert_called_once_with( - method="POST", - path="/b", - query_params={"project": project, "predefinedAcl": "publicRead"}, - data=data, - _target_object=bucket, - timeout=42, + expected_path = "/b" + expected_data = api_response + expected_query_params = { + "project": project, + "predefinedAcl": "publicRead", + } + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=timeout, retry=DEFAULT_RETRY, + _target_object=bucket, ) def test_create_bucket_w_predefined_default_object_acl_invalid(self): @@ -1064,93 +1134,98 @@ def test_create_bucket_w_predefined_default_object_acl_invalid(self): credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) + client._post_resource = mock.Mock() with self.assertRaises(ValueError): client.create_bucket(bucket_name, predefined_default_object_acl="bogus") - def test_create_bucket_w_predefined_default_object_acl_valid(self): + client._post_resource.assert_not_called() + + def test_create_bucket_w_predefined_default_object_acl_valid_w_retry(self): project = "PROJECT" bucket_name = "bucket-name" - data = {"name": bucket_name} - + api_response = {"name": bucket_name} credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) - connection = _make_connection(data) - client._base_connection = connection + client._post_resource = mock.Mock() + client._post_resource.return_value = api_response + retry = mock.Mock(spec=[]) + bucket = client.create_bucket( - bucket_name, predefined_default_object_acl="publicRead" + bucket_name, predefined_default_object_acl="publicRead", retry=retry, ) - connection.api_request.assert_called_once_with( - method="POST", - path="/b", - query_params={ - "project": project, - "predefinedDefaultObjectAcl": "publicRead", - }, - data=data, - _target_object=bucket, + expected_path = "/b" + expected_data = api_response + expected_query_params = { + "project": project, + "predefinedDefaultObjectAcl": "publicRead", + } + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, + retry=retry, + _target_object=bucket, ) def test_create_bucket_w_explicit_location(self): project = "PROJECT" bucket_name = "bucket-name" location = "us-central1" - data = {"location": location, 
"name": bucket_name} - - connection = _make_connection( - data, "{'location': 'us-central1', 'name': 'bucket-name'}" - ) - + api_response = {"location": location, "name": bucket_name} credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) - client._base_connection = connection + client._post_resource = mock.Mock() + client._post_resource.return_value = api_response bucket = client.create_bucket(bucket_name, location=location) - connection.api_request.assert_called_once_with( - method="POST", - path="/b", - data=data, - _target_object=bucket, - query_params={"project": project}, + self.assertEqual(bucket.location, location) + + expected_path = "/b" + expected_data = {"location": location, "name": bucket_name} + expected_query_params = {"project": project} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, timeout=self._get_default_timeout(), retry=DEFAULT_RETRY, + _target_object=bucket, ) - self.assertEqual(bucket.location, location) def test_create_bucket_w_explicit_project(self): - from google.cloud.storage.client import Client - - PROJECT = "PROJECT" - OTHER_PROJECT = "other-project-123" - BUCKET_NAME = "bucket-name" - DATA = {"name": BUCKET_NAME} - connection = _make_connection(DATA) + project = "PROJECT" + other_project = "other-project-123" + bucket_name = "bucket-name" + api_response = {"name": bucket_name} + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._post_resource = mock.Mock() + client._post_resource.return_value = api_response - client = Client(project=PROJECT) - client._base_connection = connection + bucket = client.create_bucket(bucket_name, project=other_project) - bucket = client.create_bucket(BUCKET_NAME, project=OTHER_PROJECT) - connection.api_request.assert_called_once_with( - method="POST", - path="/b", - query_params={"project": OTHER_PROJECT}, - data=DATA, - _target_object=bucket, + expected_path = "/b" + expected_data = api_response + expected_query_params = {"project": other_project} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, timeout=self._get_default_timeout(), retry=DEFAULT_RETRY, + _target_object=bucket, ) - def test_create_w_extra_properties(self): - from google.cloud.storage.client import Client + def test_create_bucket_w_extra_properties(self): from google.cloud.storage.bucket import Bucket - BUCKET_NAME = "bucket-name" - PROJECT = "PROJECT" - CORS = [ + bucket_name = "bucket-name" + project = "PROJECT" + cors = [ { "maxAgeSeconds": 60, "methods": ["*"], @@ -1158,144 +1233,69 @@ def test_create_w_extra_properties(self): "responseHeader": ["X-Custom-Header"], } ] - LIFECYCLE_RULES = [{"action": {"type": "Delete"}, "condition": {"age": 365}}] - LOCATION = "eu" - LABELS = {"color": "red", "flavor": "cherry"} - STORAGE_CLASS = "NEARLINE" - DATA = { - "name": BUCKET_NAME, - "cors": CORS, - "lifecycle": {"rule": LIFECYCLE_RULES}, - "location": LOCATION, - "storageClass": STORAGE_CLASS, + lifecycle_rules = [{"action": {"type": "Delete"}, "condition": {"age": 365}}] + location = "eu" + labels = {"color": "red", "flavor": "cherry"} + storage_class = "NEARLINE" + api_response = { + "name": bucket_name, + "cors": cors, + "lifecycle": {"rule": lifecycle_rules}, + "location": location, + "storageClass": storage_class, "versioning": {"enabled": True}, "billing": {"requesterPays": True}, - "labels": LABELS, + "labels": 
labels, } + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._post_resource = mock.Mock() + client._post_resource.return_value = api_response - connection = _make_connection(DATA) - client = Client(project=PROJECT) - client._base_connection = connection - - bucket = Bucket(client=client, name=BUCKET_NAME) - bucket.cors = CORS - bucket.lifecycle_rules = LIFECYCLE_RULES - bucket.storage_class = STORAGE_CLASS + bucket = Bucket(client=client, name=bucket_name) + bucket.cors = cors + bucket.lifecycle_rules = lifecycle_rules + bucket.storage_class = storage_class bucket.versioning_enabled = True bucket.requester_pays = True - bucket.labels = LABELS - client.create_bucket(bucket, location=LOCATION) - - connection.api_request.assert_called_once_with( - method="POST", - path="/b", - query_params={"project": PROJECT}, - data=DATA, - _target_object=bucket, - timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, - ) - - def test_create_hit(self): - from google.cloud.storage.client import Client - - PROJECT = "PROJECT" - BUCKET_NAME = "bucket-name" - DATA = {"name": BUCKET_NAME} - connection = _make_connection(DATA) - client = Client(project=PROJECT) - client._base_connection = connection + bucket.labels = labels - bucket = client.create_bucket(BUCKET_NAME) + client.create_bucket(bucket, location=location) - connection.api_request.assert_called_once_with( - method="POST", - path="/b", - query_params={"project": PROJECT}, - data=DATA, - _target_object=bucket, + expected_path = "/b" + expected_data = api_response + expected_query_params = {"project": project} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, timeout=self._get_default_timeout(), retry=DEFAULT_RETRY, + _target_object=bucket, ) - def test_create_bucket_w_string_success(self): - from google.cloud.storage.bucket import Bucket - + def test_create_bucket_w_name_only(self): project = "PROJECT" - credentials = _make_credentials() - client = self._make_one(project=project, credentials=credentials) - bucket_name = "bucket-name" - json_expected = {"name": bucket_name} - data = json_expected - http = _make_requests_session([_make_json_response(data)]) - client._http_internal = http - - bucket = client.create_bucket(bucket_name) - - self.assertIsInstance(bucket, Bucket) - self.assertEqual(bucket.name, bucket_name) - http.request.assert_called_once_with( - method="POST", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, - timeout=mock.ANY, - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, "/".join(["", "storage", client._connection.API_VERSION, "b"]), - ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["project"], project) - json_sent = http.request.call_args_list[0][1]["data"] - self.assertEqual(json_expected, json.loads(json_sent)) - - def test_create_bucket_w_object_success(self): - from google.cloud.storage.bucket import Bucket - - project = "PROJECT" + api_response = {"name": bucket_name} credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) + client._post_resource = mock.Mock() + client._post_resource.return_value = api_response - bucket_name = "bucket-name" - bucket_obj = Bucket(client, bucket_name) - bucket_obj.storage_class = "COLDLINE" - bucket_obj.requester_pays = True - - 
json_expected = { - "name": bucket_name, - "billing": {"requesterPays": True}, - "storageClass": "COLDLINE", - } - data = json_expected - http = _make_requests_session([_make_json_response(data)]) - client._http_internal = http - - bucket = client.create_bucket(bucket_obj) + bucket = client.create_bucket(bucket_name) - self.assertIsInstance(bucket, Bucket) - self.assertEqual(bucket.name, bucket_name) - self.assertTrue(bucket.requester_pays) - http.request.assert_called_once_with( - method="POST", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, - timeout=mock.ANY, - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, "/".join(["", "storage", client._connection.API_VERSION, "b"]), + expected_path = "/b" + expected_data = api_response + expected_query_params = {"project": project} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=bucket, ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["project"], project) - json_sent = http.request.call_args_list[0][1]["data"] - self.assertEqual(json_expected, json.loads(json_sent)) def test_download_blob_to_file_with_failure(self): from google.resumable_media import InvalidResponse @@ -1679,43 +1679,43 @@ def fake_response(): self.assertEqual(bucket.name, blob_name) def _create_hmac_key_helper( - self, explicit_project=None, user_project=None, timeout=None + self, explicit_project=None, user_project=None, timeout=None, retry=None, ): import datetime from pytz import UTC from google.cloud.storage.hmac_key import HMACKeyMetadata - PROJECT = "PROJECT" - ACCESS_ID = "ACCESS-ID" - CREDENTIALS = _make_credentials() - EMAIL = "storage-user-123@example.com" - SECRET = "a" * 40 + project = "PROJECT" + access_id = "ACCESS-ID" + credentials = _make_credentials() + email = "storage-user-123@example.com" + secret = "a" * 40 now = datetime.datetime.utcnow().replace(tzinfo=UTC) now_stamp = "{}Z".format(now.isoformat()) if explicit_project is not None: expected_project = explicit_project else: - expected_project = PROJECT + expected_project = project - RESOURCE = { + api_response = { "kind": "storage#hmacKey", "metadata": { - "accessId": ACCESS_ID, + "accessId": access_id, "etag": "ETAG", - "id": "projects/{}/hmacKeys/{}".format(PROJECT, ACCESS_ID), + "id": "projects/{}/hmacKeys/{}".format(project, access_id), "project": expected_project, "state": "ACTIVE", - "serviceAccountEmail": EMAIL, + "serviceAccountEmail": email, "timeCreated": now_stamp, "updated": now_stamp, }, - "secret": SECRET, + "secret": secret, } - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - http = _make_requests_session([_make_json_response(RESOURCE)]) - client._http_internal = http + client = self._make_one(project=project, credentials=credentials) + client._post_resource = mock.Mock() + client._post_resource.return_value = api_response kwargs = {} if explicit_project is not None: @@ -1725,43 +1725,37 @@ def _create_hmac_key_helper( kwargs["user_project"] = user_project if timeout is None: - timeout = self._get_default_timeout() - kwargs["timeout"] = timeout + expected_timeout = self._get_default_timeout() + else: + expected_timeout = kwargs["timeout"] = timeout + + if retry is None: + expected_retry = None + else: + expected_retry = kwargs["retry"] = 
retry - metadata, secret = client.create_hmac_key(service_account_email=EMAIL, **kwargs) + metadata, secret = client.create_hmac_key(service_account_email=email, **kwargs) self.assertIsInstance(metadata, HMACKeyMetadata) + self.assertIs(metadata._client, client) - self.assertEqual(metadata._properties, RESOURCE["metadata"]) - self.assertEqual(secret, RESOURCE["secret"]) + self.assertEqual(metadata._properties, api_response["metadata"]) + self.assertEqual(secret, api_response["secret"]) - qs_params = {"serviceAccountEmail": EMAIL} + expected_path = "/projects/{}/hmacKeys".format(expected_project) + expected_data = None + expected_query_params = {"serviceAccountEmail": email} if user_project is not None: - qs_params["userProject"] = user_project + expected_query_params["userProject"] = user_project - http.request.assert_called_once_with( - method="POST", url=mock.ANY, data=None, headers=mock.ANY, timeout=timeout - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, - "/".join( - [ - "", - "storage", - client._connection.API_VERSION, - "projects", - expected_project, - "hmacKeys", - ] - ), + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=expected_timeout, + retry=expected_retry, ) - parms = dict(urlparse.parse_qsl(qs)) - for param, expected in qs_params.items(): - self.assertEqual(parms[param], expected) def test_create_hmac_key_defaults(self): self._create_hmac_key_helper() @@ -1769,8 +1763,14 @@ def test_create_hmac_key_defaults(self): def test_create_hmac_key_explicit_project(self): self._create_hmac_key_helper(explicit_project="other-project-456") - def test_create_hmac_key_user_project(self): - self._create_hmac_key_helper(user_project="billed-project", timeout=42) + def test_create_hmac_key_w_user_project(self): + self._create_hmac_key_helper(user_project="billed-project") + + def test_create_hmac_key_w_timeout(self): + self._create_hmac_key_helper(timeout=42) + + def test_create_hmac_key_w_retry(self): + self._create_hmac_key_helper(retry=mock.Mock(spec=[])) def test_list_hmac_keys_defaults_empty(self): PROJECT = "PROJECT" diff --git a/tests/unit/test_notification.py b/tests/unit/test_notification.py index ae8924b08..04ffd68a1 100644 --- a/tests/unit/test_notification.py +++ b/tests/unit/test_notification.py @@ -231,7 +231,8 @@ def test_self_link(self): self.assertEqual(notification.self_link, self.SELF_LINK) def test_create_w_existing_notification_id(self): - client = self._make_client() + client = mock.Mock(spec=["_post_resource", "project"]) + client.project = self.BUCKET_PROJECT bucket = self._make_bucket(client) notification = self._make_one(bucket, self.TOPIC_NAME) notification._properties["id"] = self.NOTIFICATION_ID @@ -239,20 +240,23 @@ def test_create_w_existing_notification_id(self): with self.assertRaises(ValueError): notification.create() + client._post_resource.assert_not_called() + def test_create_w_defaults(self): from google.cloud.storage.notification import NONE_PAYLOAD_FORMAT - client = self._make_client() - bucket = self._make_bucket(client) - notification = self._make_one(bucket, self.TOPIC_NAME) - api_request = client._connection.api_request - api_request.return_value = { + api_response = { "topic": self.TOPIC_REF, "id": self.NOTIFICATION_ID, "etag": self.ETAG, "selfLink": self.SELF_LINK, "payload_format": 
NONE_PAYLOAD_FORMAT, } + client = mock.Mock(spec=["_post_resource", "project"]) + client.project = self.BUCKET_PROJECT + client._post_resource.return_value = api_response + bucket = self._make_bucket(client) + notification = self._make_one(bucket, self.TOPIC_NAME) notification.create() @@ -264,32 +268,22 @@ def test_create_w_defaults(self): self.assertIsNone(notification.blob_name_prefix) self.assertEqual(notification.payload_format, NONE_PAYLOAD_FORMAT) - data = {"topic": self.TOPIC_REF, "payload_format": NONE_PAYLOAD_FORMAT} - api_request.assert_called_once_with( - method="POST", - path=self.CREATE_PATH, - query_params={}, - data=data, + expected_data = { + "topic": self.TOPIC_REF, + "payload_format": NONE_PAYLOAD_FORMAT, + } + expected_query_params = {} + client._post_resource.assert_called_once_with( + self.CREATE_PATH, + expected_data, + query_params=expected_query_params, timeout=self._get_default_timeout(), retry=None, ) - def test_create_w_explicit_client(self): - USER_PROJECT = "user-project-123" - client = self._make_client() - alt_client = self._make_client() - bucket = self._make_bucket(client, user_project=USER_PROJECT) - notification = self._make_one( - bucket, - self.TOPIC_NAME, - topic_project=self.TOPIC_ALT_PROJECT, - custom_attributes=self.CUSTOM_ATTRIBUTES, - event_types=self.event_types(), - blob_name_prefix=self.BLOB_NAME_PREFIX, - payload_format=self.payload_format(), - ) - api_request = alt_client._connection.api_request - api_request.return_value = { + def test_create_w_explicit_client_w_timeout_w_retry(self): + user_project = "user-project-123" + api_response = { "topic": self.TOPIC_ALT_REF, "custom_attributes": self.CUSTOM_ATTRIBUTES, "event_types": self.event_types(), @@ -299,8 +293,23 @@ def test_create_w_explicit_client(self): "etag": self.ETAG, "selfLink": self.SELF_LINK, } + bucket = self._make_bucket(client=None, user_project=user_project) + notification = self._make_one( + bucket, + self.TOPIC_NAME, + topic_project=self.TOPIC_ALT_PROJECT, + custom_attributes=self.CUSTOM_ATTRIBUTES, + event_types=self.event_types(), + blob_name_prefix=self.BLOB_NAME_PREFIX, + payload_format=self.payload_format(), + ) + client = mock.Mock(spec=["_post_resource", "project"]) + client.project = self.BUCKET_PROJECT + client._post_resource.return_value = api_response + timeout = 42 + retry = mock.Mock(spec=[]) - notification.create(client=alt_client, timeout=42) + notification.create(client=client, timeout=timeout, retry=retry) self.assertEqual(notification.custom_attributes, self.CUSTOM_ATTRIBUTES) self.assertEqual(notification.event_types, self.event_types()) @@ -310,20 +319,20 @@ def test_create_w_explicit_client(self): self.assertEqual(notification.etag, self.ETAG) self.assertEqual(notification.self_link, self.SELF_LINK) - data = { + expected_data = { "topic": self.TOPIC_ALT_REF, "custom_attributes": self.CUSTOM_ATTRIBUTES, "event_types": self.event_types(), "object_name_prefix": self.BLOB_NAME_PREFIX, "payload_format": self.payload_format(), } - api_request.assert_called_once_with( - method="POST", - path=self.CREATE_PATH, - query_params={"userProject": USER_PROJECT}, - data=data, - timeout=42, - retry=None, + expected_query_params = {"userProject": user_project} + client._post_resource.assert_called_once_with( + self.CREATE_PATH, + expected_data, + query_params=expected_query_params, + timeout=timeout, + retry=retry, ) def test_exists_wo_notification_id(self): From 2232f38933dbdfeb4f6585291794d332771ffdf2 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 8 Jun 2021 
13:46:52 -0400 Subject: [PATCH 19/30] fix: apply idempotency policies for ACLs (#458) * fix: use 'retry=None' as default for 'Client._put_resource' * fix: use 'retry=None' as default for 'Client._patch_resource' * fix: force 'retry=None' for 'ACL.{save,save_predefined,clear}' * fix: remove 'retry' arg from 'Bucket.make_public' * fix: remove 'retry' arg from 'Bucket.make_private' * fix: remove 'retry' arg from 'Blob.make_public' * fix: remove 'retry' arg from 'Blob.make_private' Per idempotency policy. --- google/cloud/storage/acl.py | 49 +++++------------------------- google/cloud/storage/blob.py | 40 +++---------------------- google/cloud/storage/bucket.py | 54 +++++----------------------------- google/cloud/storage/client.py | 4 +-- tests/unit/test_acl.py | 31 +++++++++---------- tests/unit/test_blob.py | 18 +++++------- tests/unit/test_bucket.py | 36 +++++++++++------------ tests/unit/test_client.py | 4 +-- 8 files changed, 62 insertions(+), 174 deletions(-) diff --git a/google/cloud/storage/acl.py b/google/cloud/storage/acl.py index ec6c5bed9..bdb17bfc9 100644 --- a/google/cloud/storage/acl.py +++ b/google/cloud/storage/acl.py @@ -474,9 +474,7 @@ def reload(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): for entry in found.get("items", ()): self.add_entity(self.entity_from_dict(entry)) - def _save( - self, acl, predefined, client, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, - ): + def _save(self, acl, predefined, client, timeout=_DEFAULT_TIMEOUT): """Helper for :meth:`save` and :meth:`save_predefined`. :type acl: :class:`google.cloud.storage.acl.ACL`, or a compatible list. @@ -524,7 +522,7 @@ def _save( {self._URL_PATH_ELEM: list(acl)}, query_params=query_params, timeout=timeout, - retry=retry, + retry=None, ) self.entities.clear() @@ -534,9 +532,7 @@ def _save( self.loaded = True - def save( - self, acl=None, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY - ): + def save(self, acl=None, client=None, timeout=_DEFAULT_TIMEOUT): """Save this ACL for the current bucket. If :attr:`user_project` is set, bills the API request to that project. @@ -555,15 +551,6 @@ def save( Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. - - :type retry: :class:`~google.api_core.retry.Retry` - :param retry: (Optional) How to retry the RPC. - - A None value will disable retries. - - A google.api_core.retry.Retry value will enable retries, - and the object will define retriable response codes and errors - and configure backoff and timeout options. """ if acl is None: acl = self @@ -572,11 +559,9 @@ def save( save_to_backend = True if save_to_backend: - self._save(acl, None, client, timeout=timeout, retry=retry) + self._save(acl, None, client, timeout=timeout) - def save_predefined( - self, predefined, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, - ): + def save_predefined(self, predefined, client=None, timeout=_DEFAULT_TIMEOUT): """Save this ACL for the current bucket using a predefined ACL. If :attr:`user_project` is set, bills the API request to that project. @@ -598,20 +583,11 @@ def save_predefined( Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. - - :type retry: :class:`~google.api_core.retry.Retry` - :param retry: (Optional) How to retry the RPC. - - A None value will disable retries. 
- - A google.api_core.retry.Retry value will enable retries, - and the object will define retriable response codes and errors - and configure backoff and timeout options. """ predefined = self.validate_predefined(predefined) - self._save(None, predefined, client, timeout=timeout, retry=retry) + self._save(None, predefined, client, timeout=timeout) - def clear(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): + def clear(self, client=None, timeout=_DEFAULT_TIMEOUT): """Remove all ACL entries. If :attr:`user_project` is set, bills the API request to that project. @@ -631,17 +607,8 @@ def clear(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. - - :type retry: :class:`~google.api_core.retry.Retry` - :param retry: (Optional) How to retry the RPC. - - A None value will disable retries. - - A google.api_core.retry.Retry value will enable retries, - and the object will define retriable response codes and errors - and configure backoff and timeout options. """ - self.save([], client=client, timeout=timeout, retry=retry) + self.save([], client=client, timeout=timeout) class BucketACL(ACL): diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index 73851ea02..fa3f5c7ac 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -2978,9 +2978,7 @@ def test_iam_permissions( return resp.get("permissions", []) - def make_public( - self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, - ): + def make_public(self, client=None, timeout=_DEFAULT_TIMEOUT): """Update blob's ACL, granting read access to anonymous users. :type client: :class:`~google.cloud.storage.client.Client` or @@ -2995,27 +2993,11 @@ def make_public( Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. - - :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. """ self.acl.all().grant_read() - self.acl.save(client=client, timeout=timeout, retry=retry) + self.acl.save(client=client, timeout=timeout) - def make_private( - self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, - ): + def make_private(self, client=None, timeout=_DEFAULT_TIMEOUT): """Update blob's ACL, revoking read access for anonymous users. :type client: :class:`~google.cloud.storage.client.Client` or @@ -3030,23 +3012,9 @@ def make_private( Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. 
- - :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. """ self.acl.all().revoke_read() - self.acl.save(client=client, timeout=timeout, retry=retry) + self.acl.save(client=client, timeout=timeout) def compose( self, diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py index 7703dc234..9b5746f20 100644 --- a/google/cloud/storage/bucket.py +++ b/google/cloud/storage/bucket.py @@ -1996,7 +1996,7 @@ def copy_blob( ) if not preserve_acl: - new_blob.acl.save(acl={}, client=client, timeout=timeout, retry=retry) + new_blob.acl.save(acl={}, client=client, timeout=timeout) new_blob._set_properties(copy_result) return new_blob @@ -3021,12 +3021,7 @@ def test_iam_permissions( return resp.get("permissions", []) def make_public( - self, - recursive=False, - future=False, - client=None, - timeout=_DEFAULT_TIMEOUT, - retry=DEFAULT_RETRY, + self, recursive=False, future=False, client=None, timeout=_DEFAULT_TIMEOUT, ): """Update bucket's ACL, granting read access to anonymous users. @@ -3050,20 +3045,6 @@ def make_public( Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. - :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. - :raises ValueError: If ``recursive`` is True, and the bucket contains more than 256 blobs. This is to prevent extremely long runtime of this @@ -3073,7 +3054,7 @@ def make_public( for each blob. 
""" self.acl.all().grant_read() - self.acl.save(client=client, timeout=timeout, retry=retry) + self.acl.save(client=client, timeout=timeout) if future: doa = self.default_object_acl @@ -3089,7 +3070,6 @@ def make_public( max_results=self._MAX_OBJECTS_FOR_ITERATION + 1, client=client, timeout=timeout, - retry=retry, ) ) if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION: @@ -3104,15 +3084,10 @@ def make_public( for blob in blobs: blob.acl.all().grant_read() - blob.acl.save(client=client, timeout=timeout, retry=retry) + blob.acl.save(client=client, timeout=timeout) def make_private( - self, - recursive=False, - future=False, - client=None, - timeout=_DEFAULT_TIMEOUT, - retry=DEFAULT_RETRY, + self, recursive=False, future=False, client=None, timeout=_DEFAULT_TIMEOUT, ): """Update bucket's ACL, revoking read access for anonymous users. @@ -3137,20 +3112,6 @@ def make_private( Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. - :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. - :raises ValueError: If ``recursive`` is True, and the bucket contains more than 256 blobs. This is to prevent extremely long runtime of this @@ -3160,7 +3121,7 @@ def make_private( for each blob. """ self.acl.all().revoke_read() - self.acl.save(client=client, timeout=timeout, retry=retry) + self.acl.save(client=client, timeout=timeout) if future: doa = self.default_object_acl @@ -3176,7 +3137,6 @@ def make_private( max_results=self._MAX_OBJECTS_FOR_ITERATION + 1, client=client, timeout=timeout, - retry=retry, ) ) if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION: @@ -3191,7 +3151,7 @@ def make_private( for blob in blobs: blob.acl.all().revoke_read() - blob.acl.save(client=client, timeout=timeout, retry=retry) + blob.acl.save(client=client, timeout=timeout) def generate_upload_policy(self, conditions, expiration=None, client=None): """Create a signed upload policy for uploading objects. diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py index 57c5b4103..41580dbdd 100644 --- a/google/cloud/storage/client.py +++ b/google/cloud/storage/client.py @@ -393,7 +393,7 @@ def _patch_resource( query_params=None, headers=None, timeout=_DEFAULT_TIMEOUT, - retry=DEFAULT_RETRY, + retry=None, _target_object=None, ): """Helper for bucket / blob methods making API 'PATCH' calls. @@ -464,7 +464,7 @@ def _put_resource( query_params=None, headers=None, timeout=_DEFAULT_TIMEOUT, - retry=DEFAULT_RETRY, + retry=None, _target_object=None, ): """Helper for bucket / blob methods making API 'PUT' calls. 
diff --git a/tests/unit/test_acl.py b/tests/unit/test_acl.py index a31b90840..aad44809e 100644 --- a/tests/unit/test_acl.py +++ b/tests/unit/test_acl.py @@ -646,7 +646,7 @@ class Derived(self._get_target_class()): expected_data, query_params=expected_query_params, timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, + retry=None, ) def test_save_no_acl_w_timeout(self): @@ -673,10 +673,10 @@ def test_save_no_acl_w_timeout(self): expected_data, query_params=expected_query_params, timeout=timeout, - retry=DEFAULT_RETRY, + retry=None, ) - def test_save_w_acl_w_user_project_w_retry(self): + def test_save_w_acl_w_user_project(self): save_path = "/testing" user_project = "user-project-123" role1 = "role1" @@ -690,9 +690,8 @@ def test_save_w_acl_w_user_project_w_retry(self): acl.save_path = save_path acl.loaded = True acl.user_project = user_project - retry = mock.Mock(spec=[]) - acl.save(new_acl, client=client, retry=retry) + acl.save(new_acl, client=client) entries = list(acl) self.assertEqual(len(entries), 2) @@ -706,7 +705,7 @@ def test_save_w_acl_w_user_project_w_retry(self): expected_data, query_params=expected_query_params, timeout=self._get_default_timeout(), - retry=retry, + retry=None, ) def test_save_prefefined_invalid(self): @@ -750,7 +749,7 @@ class Derived(self._get_target_class()): expected_data, query_params=expected_query_params, timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, + retry=None, ) def test_save_predefined_w_XML_alias_w_timeout(self): @@ -780,10 +779,10 @@ def test_save_predefined_w_XML_alias_w_timeout(self): expected_data, query_params=expected_query_params, timeout=timeout, - retry=DEFAULT_RETRY, + retry=None, ) - def test_save_predefined_w_alternate_query_param_w_retry(self): + def test_save_predefined_w_alternate_query_param(self): # Cover case where subclass overrides _PREDEFINED_QUERY_PARAM save_path = "/testing" predefined = "publicRead" @@ -794,9 +793,8 @@ def test_save_predefined_w_alternate_query_param_w_retry(self): acl.save_path = save_path acl.loaded = True acl._PREDEFINED_QUERY_PARAM = "alternate" - retry = mock.Mock(spec=[]) - acl.save_predefined(predefined, client=client, retry=retry) + acl.save_predefined(predefined, client=client) entries = list(acl) self.assertEqual(len(entries), 0) @@ -811,7 +809,7 @@ def test_save_predefined_w_alternate_query_param_w_retry(self): expected_data, query_params=expected_query_params, timeout=self._get_default_timeout(), - retry=retry, + retry=None, ) def test_clear_w_defaults(self): @@ -844,10 +842,10 @@ class Derived(self._get_target_class()): expected_data, query_params=expected_query_params, timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, + retry=None, ) - def test_clear_w_explicit_client_w_timeout_w_retry(self): + def test_clear_w_explicit_client_w_timeout(self): save_path = "/testing" role1 = "role1" role2 = "role2" @@ -860,9 +858,8 @@ def test_clear_w_explicit_client_w_timeout_w_retry(self): acl.loaded = True acl.entity("allUsers", role1) timeout = 42 - retry = mock.Mock(spec=[]) - acl.clear(client=client, timeout=timeout, retry=retry) + acl.clear(client=client, timeout=timeout) self.assertEqual(list(acl), [sticky]) @@ -875,7 +872,7 @@ def test_clear_w_explicit_client_w_timeout_w_retry(self): expected_data, query_params=expected_query_params, timeout=timeout, - retry=retry, + retry=None, ) diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index 071033a45..44959f54f 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -3458,10 +3458,10 @@ def 
test_make_public_w_defaults(self): expected_patch_data, query_params=expected_query_params, timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, + retry=None, ) - def test_make_public_w_timeout_w_retry(self): + def test_make_public_w_timeout(self): from google.cloud.storage.acl import _ACLEntity blob_name = "blob-name" @@ -3473,9 +3473,8 @@ def test_make_public_w_timeout_w_retry(self): blob = self._make_one(blob_name, bucket=bucket) blob.acl.loaded = True timeout = 42 - retry = mock.Mock(spec=[]) - blob.make_public(timeout=timeout, retry=retry) + blob.make_public(timeout=timeout) self.assertEqual(list(blob.acl), permissive) @@ -3486,7 +3485,7 @@ def test_make_public_w_timeout_w_retry(self): expected_patch_data, query_params=expected_query_params, timeout=timeout, - retry=retry, + retry=None, ) def test_make_private_w_defaults(self): @@ -3510,10 +3509,10 @@ def test_make_private_w_defaults(self): expected_patch_data, query_params=expected_query_params, timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, + retry=None, ) - def test_make_private_w_timeout_w_retry(self): + def test_make_private_w_timeout(self): blob_name = "blob-name" no_permissions = [] api_response = {"acl": no_permissions} @@ -3523,9 +3522,8 @@ def test_make_private_w_timeout_w_retry(self): blob = self._make_one(blob_name, bucket=bucket) blob.acl.loaded = True timeout = 42 - retry = mock.Mock(spec=[]) - blob.make_private(timeout=timeout, retry=retry) + blob.make_private(timeout=timeout) self.assertEqual(list(blob.acl), no_permissions) @@ -3536,7 +3534,7 @@ def test_make_private_w_timeout_w_retry(self): expected_patch_data, query_params=expected_query_params, timeout=timeout, - retry=retry, + retry=None, ) def test_compose_wo_content_type_set(self): diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py index 50fe02c0e..018d753f7 100644 --- a/tests/unit/test_bucket.py +++ b/tests/unit/test_bucket.py @@ -1792,7 +1792,7 @@ def test_copy_blob_w_preserve_acl_false_w_explicit_client(self): expected_patch_data, query_params=expected_patch_query_params, timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + retry=None, ) def test_copy_blob_w_name_and_user_project(self): @@ -2996,7 +2996,7 @@ def test_make_public_defaults(self): expected_data, query_params=expected_query_params, timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, + retry=None, ) def _make_public_w_future_helper(self, default_object_acl_loaded=True): @@ -3029,7 +3029,7 @@ def _make_public_w_future_helper(self, default_object_acl_loaded=True): expected_kw = { "query_params": {"projection": "full"}, "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, + "retry": None, } client._patch_resource.assert_has_calls( [ @@ -3079,9 +3079,9 @@ def all(self): def grant_read(self): self._granted = True - def save(self, client=None, timeout=None, retry=None): + def save(self, client=None, timeout=None): _saved.append( - (self._bucket, self._name, self._granted, client, timeout, retry) + (self._bucket, self._name, self._granted, client, timeout) ) name = "name" @@ -3100,13 +3100,12 @@ def save(self, client=None, timeout=None, retry=None): client.list_blobs.return_value = list_blobs_response timeout = 42 - retry = mock.Mock(spec=[]) - bucket.make_public(recursive=True, timeout=timeout, retry=retry) + bucket.make_public(recursive=True, timeout=timeout) self.assertEqual(list(bucket.acl), permissive) self.assertEqual(list(bucket.default_object_acl), []) - self.assertEqual(_saved, [(bucket, blob_name, True, 
None, timeout, retry)]) + self.assertEqual(_saved, [(bucket, blob_name, True, None, timeout)]) expected_patch_data = {"acl": permissive} expected_patch_query_params = {"projection": "full"} @@ -3115,7 +3114,7 @@ def save(self, client=None, timeout=None, retry=None): expected_patch_data, query_params=expected_patch_query_params, timeout=timeout, - retry=retry, + retry=None, ) client.list_blobs.assert_called_once() @@ -3150,7 +3149,7 @@ def test_make_public_recursive_too_many(self): expected_data, query_params=expected_query_params, timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, + retry=None, ) client.list_blobs.assert_called_once() @@ -3178,7 +3177,7 @@ def test_make_private_defaults(self): expected_data, query_params=expected_query_params, timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, + retry=None, ) def _make_private_w_future_helper(self, default_object_acl_loaded=True): @@ -3212,7 +3211,7 @@ def _make_private_w_future_helper(self, default_object_acl_loaded=True): expected_kw = { "query_params": {"projection": "full"}, "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, + "retry": None, } client._patch_resource.assert_has_calls( [ @@ -3260,9 +3259,9 @@ def all(self): def revoke_read(self): self._granted = False - def save(self, client=None, timeout=None, retry=None): + def save(self, client=None, timeout=None): _saved.append( - (self._bucket, self._name, self._granted, client, timeout, retry) + (self._bucket, self._name, self._granted, client, timeout) ) name = "name" @@ -3281,13 +3280,12 @@ def save(self, client=None, timeout=None, retry=None): client.list_blobs.return_value = list_blobs_response timeout = 42 - retry = mock.Mock(spec=[]) - bucket.make_private(recursive=True, timeout=42, retry=retry) + bucket.make_private(recursive=True, timeout=42) self.assertEqual(list(bucket.acl), no_permissions) self.assertEqual(list(bucket.default_object_acl), []) - self.assertEqual(_saved, [(bucket, blob_name, False, None, timeout, retry)]) + self.assertEqual(_saved, [(bucket, blob_name, False, None, timeout)]) expected_patch_data = {"acl": no_permissions} expected_patch_query_params = {"projection": "full"} @@ -3296,7 +3294,7 @@ def save(self, client=None, timeout=None, retry=None): expected_patch_data, query_params=expected_patch_query_params, timeout=timeout, - retry=retry, + retry=None, ) client.list_blobs.assert_called_once() @@ -3330,7 +3328,7 @@ def test_make_private_recursive_too_many(self): expected_data, query_params=expected_query_params, timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, + retry=None, ) client.list_blobs.assert_called_once() diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 7a9c0e880..6b0bb1fb5 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -488,7 +488,7 @@ def test__patch_resource_miss_w_defaults(self): query_params=None, headers=None, timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, + retry=None, _target_object=None, ) @@ -551,7 +551,7 @@ def test__put_resource_miss_w_defaults(self): query_params=None, headers=None, timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, + retry=None, _target_object=None, ) From 6bd8a20080db0c889c075a6a93c5a69648bcb699 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 8 Jun 2021 14:24:59 -0400 Subject: [PATCH 20/30] refactor: add / use 'Client._list_resource' method (#446) * Adjust tests for 'Bucket.list_blobs' not to depend on anything but calling 'Client.list_blobs'. 
* Provide explicit coverage for 'client._item_to_bucket' and 'client._item_to_hmack_key_metadata' helpers. * Provide explicit coverage for 'bucket._item_to_notification'. Toward #38 --- google/cloud/storage/bucket.py | 12 +- google/cloud/storage/client.py | 64 ++-- tests/unit/test_bucket.py | 292 ++++++++------ tests/unit/test_client.py | 679 ++++++++++++++++----------------- 4 files changed, 541 insertions(+), 506 deletions(-) diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py index 9b5746f20..685d3b532 100644 --- a/google/cloud/storage/bucket.py +++ b/google/cloud/storage/bucket.py @@ -17,14 +17,12 @@ import base64 import copy import datetime -import functools import json import warnings import six from six.moves.urllib.parse import urlsplit -from google.api_core import page_iterator from google.api_core import datetime_helpers from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud._helpers import _NOW @@ -1392,14 +1390,8 @@ def list_notifications( """ client = self._require_client(client) path = self.path + "/notificationConfigs" - api_request = functools.partial( - client._connection.api_request, timeout=timeout, retry=retry - ) - iterator = page_iterator.HTTPIterator( - client=client, - api_request=api_request, - path=path, - item_to_value=_item_to_notification, + iterator = client._list_resource( + path, _item_to_notification, timeout=timeout, retry=retry, ) iterator.bucket = self return iterator diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py index 41580dbdd..8fcc12b69 100644 --- a/google/cloud/storage/client.py +++ b/google/cloud/storage/client.py @@ -386,6 +386,31 @@ def _get_resource( _target_object=_target_object, ) + def _list_resource( + self, + path, + item_to_value, + page_token=None, + max_results=None, + extra_params=None, + page_start=page_iterator._do_nothing_page_start, + timeout=_DEFAULT_TIMEOUT, + retry=DEFAULT_RETRY, + ): + api_request = functools.partial( + self._connection.api_request, timeout=timeout, retry=retry + ) + return page_iterator.HTTPIterator( + client=self, + api_request=api_request, + path=path, + item_to_value=item_to_value, + page_token=page_token, + max_results=max_results, + extra_params=extra_params, + page_start=page_start, + ) + def _patch_resource( self, path, @@ -1214,14 +1239,9 @@ def list_blobs( extra_params["userProject"] = bucket.user_project path = bucket.path + "/o" - api_request = functools.partial( - self._connection.api_request, timeout=timeout, retry=retry - ) - iterator = page_iterator.HTTPIterator( - client=self, - api_request=api_request, - path=path, - item_to_value=_item_to_blob, + iterator = self._list_resource( + path, + _item_to_blob, page_token=page_token, max_results=max_results, extra_params=extra_params, @@ -1328,18 +1348,14 @@ def list_buckets( if fields is not None: extra_params["fields"] = fields - api_request = functools.partial( - self._connection.api_request, retry=retry, timeout=timeout - ) - - return page_iterator.HTTPIterator( - client=self, - api_request=api_request, - path="/b", - item_to_value=_item_to_bucket, + return self._list_resource( + "/b", + _item_to_bucket, page_token=page_token, max_results=max_results, extra_params=extra_params, + timeout=timeout, + retry=retry, ) def create_hmac_key( @@ -1476,17 +1492,13 @@ def list_hmac_keys( if user_project is not None: extra_params["userProject"] = user_project - api_request = functools.partial( - self._connection.api_request, timeout=timeout, retry=retry - ) - - return 
page_iterator.HTTPIterator( - client=self, - api_request=api_request, - path=path, - item_to_value=_item_to_hmac_key_metadata, + return self._list_resource( + path, + _item_to_hmac_key_metadata, max_results=max_results, extra_params=extra_params, + timeout=timeout, + retry=retry, ) def get_hmac_key_metadata( diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py index 018d753f7..63c1e4de3 100644 --- a/tests/unit/test_bucket.py +++ b/tests/unit/test_bucket.py @@ -902,130 +902,148 @@ def test_get_blob_hit_with_kwargs_w_explicit_client(self): _target_object=blob, ) - def test_list_blobs_defaults(self): - NAME = "name" - connection = _Connection({"items": []}) + def test_list_blobs_w_defaults(self): + name = "name" client = self._make_client() - client._base_connection = connection - bucket = self._make_one(client=client, name=NAME) + client.list_blobs = mock.Mock(spec=[]) + bucket = self._make_one(client=client, name=name) + iterator = bucket.list_blobs() - blobs = list(iterator) - self.assertEqual(blobs, []) - (kw,) = connection._requested - self.assertEqual(kw["method"], "GET") - self.assertEqual(kw["path"], "/b/%s/o" % NAME) - self.assertEqual(kw["query_params"], {"projection": "noAcl"}) - self.assertEqual(kw["timeout"], self._get_default_timeout()) - def test_list_blobs_w_all_arguments_and_user_project(self): - NAME = "name" - USER_PROJECT = "user-project-123" - MAX_RESULTS = 10 - PAGE_TOKEN = "ABCD" - PREFIX = "subfolder" - DELIMITER = "/" - START_OFFSET = "c" - END_OFFSET = "g" - INCLUDE_TRAILING_DELIMITER = True - VERSIONS = True - PROJECTION = "full" - FIELDS = "items/contentLanguage,nextPageToken" - EXPECTED = { - "maxResults": 10, - "pageToken": PAGE_TOKEN, - "prefix": PREFIX, - "delimiter": DELIMITER, - "startOffset": START_OFFSET, - "endOffset": END_OFFSET, - "includeTrailingDelimiter": INCLUDE_TRAILING_DELIMITER, - "versions": VERSIONS, - "projection": PROJECTION, - "fields": FIELDS, - "userProject": USER_PROJECT, - } - connection = _Connection({"items": []}) - client = self._make_client() - client._base_connection = connection - bucket = self._make_one(name=NAME, user_project=USER_PROJECT) + self.assertIs(iterator, client.list_blobs.return_value) + + expected_page_token = None + expected_max_results = None + expected_prefix = None + expected_delimiter = None + expected_start_offset = None + expected_end_offset = None + expected_include_trailing_delimiter = None + expected_versions = None + expected_projection = "noAcl" + expected_fields = None + client.list_blobs.assert_called_once_with( + bucket, + max_results=expected_max_results, + page_token=expected_page_token, + prefix=expected_prefix, + delimiter=expected_delimiter, + start_offset=expected_start_offset, + end_offset=expected_end_offset, + include_trailing_delimiter=expected_include_trailing_delimiter, + versions=expected_versions, + projection=expected_projection, + fields=expected_fields, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) + + def test_list_blobs_w_explicit(self): + name = "name" + max_results = 10 + page_token = "ABCD" + prefix = "subfolder" + delimiter = "/" + start_offset = "c" + end_offset = "g" + include_trailing_delimiter = True + versions = True + projection = "full" + fields = "items/contentLanguage,nextPageToken" + bucket = self._make_one(client=None, name=name) + other_client = self._make_client() + other_client.list_blobs = mock.Mock(spec=[]) + timeout = 42 + retry = mock.Mock(spec=[]) + iterator = bucket.list_blobs( - max_results=MAX_RESULTS, - page_token=PAGE_TOKEN, 
- prefix=PREFIX, - delimiter=DELIMITER, - start_offset=START_OFFSET, - end_offset=END_OFFSET, - include_trailing_delimiter=INCLUDE_TRAILING_DELIMITER, - versions=VERSIONS, - projection=PROJECTION, - fields=FIELDS, - client=client, - timeout=42, + max_results=max_results, + page_token=page_token, + prefix=prefix, + delimiter=delimiter, + start_offset=start_offset, + end_offset=end_offset, + include_trailing_delimiter=include_trailing_delimiter, + versions=versions, + projection=projection, + fields=fields, + client=other_client, + timeout=timeout, + retry=retry, ) - blobs = list(iterator) - self.assertEqual(blobs, []) - (kw,) = connection._requested - self.assertEqual(kw["method"], "GET") - self.assertEqual(kw["path"], "/b/%s/o" % NAME) - self.assertEqual(kw["query_params"], EXPECTED) - self.assertEqual(kw["timeout"], 42) - def test_list_notifications(self): - from google.cloud.storage.notification import BucketNotification - from google.cloud.storage.notification import _TOPIC_REF_FMT - from google.cloud.storage.notification import ( - JSON_API_V1_PAYLOAD_FORMAT, - NONE_PAYLOAD_FORMAT, + self.assertIs(iterator, other_client.list_blobs.return_value) + + expected_page_token = page_token + expected_max_results = max_results + expected_prefix = prefix + expected_delimiter = delimiter + expected_start_offset = start_offset + expected_end_offset = end_offset + expected_include_trailing_delimiter = include_trailing_delimiter + expected_versions = versions + expected_projection = projection + expected_fields = fields + other_client.list_blobs.assert_called_once_with( + bucket, + max_results=expected_max_results, + page_token=expected_page_token, + prefix=expected_prefix, + delimiter=expected_delimiter, + start_offset=expected_start_offset, + end_offset=expected_end_offset, + include_trailing_delimiter=expected_include_trailing_delimiter, + versions=expected_versions, + projection=expected_projection, + fields=expected_fields, + timeout=timeout, + retry=retry, ) - NAME = "name" + def test_list_notifications_w_defaults(self): + from google.cloud.storage.bucket import _item_to_notification - topic_refs = [("my-project-123", "topic-1"), ("other-project-456", "topic-2")] + bucket_name = "name" + client = self._make_client() + client._list_resource = mock.Mock(spec=[]) + bucket = self._make_one(client=client, name=bucket_name) - resources = [ - { - "topic": _TOPIC_REF_FMT.format(*topic_refs[0]), - "id": "1", - "etag": "DEADBEEF", - "selfLink": "https://example.com/notification/1", - "payload_format": NONE_PAYLOAD_FORMAT, - }, - { - "topic": _TOPIC_REF_FMT.format(*topic_refs[1]), - "id": "2", - "etag": "FACECABB", - "selfLink": "https://example.com/notification/2", - "payload_format": JSON_API_V1_PAYLOAD_FORMAT, - }, - ] - connection = _Connection({"items": resources}) - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) + iterator = bucket.list_notifications() - notifications = list(bucket.list_notifications(timeout=42)) + self.assertIs(iterator, client._list_resource.return_value) + self.assertIs(iterator.bucket, bucket) - req_args = client._connection._requested[0] - self.assertEqual(req_args.get("timeout"), 42) + expected_path = "/b/{}/notificationConfigs".format(bucket_name) + expected_item_to_value = _item_to_notification + client._list_resource.assert_called_once_with( + expected_path, + expected_item_to_value, + timeout=self._get_default_timeout(), + 
retry=DEFAULT_RETRY, + ) - self.assertEqual(len(notifications), len(resources)) - for notification, resource, topic_ref in zip( - notifications, resources, topic_refs - ): - self.assertIsInstance(notification, BucketNotification) - self.assertEqual(notification.topic_project, topic_ref[0]) - self.assertEqual(notification.topic_name, topic_ref[1]) - self.assertEqual(notification.notification_id, resource["id"]) - self.assertEqual(notification.etag, resource["etag"]) - self.assertEqual(notification.self_link, resource["selfLink"]) - self.assertEqual( - notification.custom_attributes, resource.get("custom_attributes") - ) - self.assertEqual(notification.event_types, resource.get("event_types")) - self.assertEqual( - notification.blob_name_prefix, resource.get("blob_name_prefix") - ) - self.assertEqual( - notification.payload_format, resource.get("payload_format") - ) + def test_list_notifications_w_explicit(self): + from google.cloud.storage.bucket import _item_to_notification + + bucket_name = "name" + other_client = self._make_client() + other_client._list_resource = mock.Mock(spec=[]) + bucket = self._make_one(client=None, name=bucket_name) + timeout = 42 + retry = mock.Mock(spec=[]) + + iterator = bucket.list_notifications( + client=other_client, timeout=timeout, retry=retry, + ) + + self.assertIs(iterator, other_client._list_resource.return_value) + self.assertIs(iterator.bucket, bucket) + + expected_path = "/b/{}/notificationConfigs".format(bucket_name) + expected_item_to_value = _item_to_notification + other_client._list_resource.assert_called_once_with( + expected_path, expected_item_to_value, timeout=timeout, retry=retry, + ) def test_get_notification_miss_w_defaults(self): from google.cloud.exceptions import NotFound @@ -1591,7 +1609,7 @@ def test_reload_w_metageneration_match(self): ) def test_reload_w_generation_match(self): - connection = _Connection({}) + connection = _Connection() client = _Client(connection) bucket = self._make_one(client=client, name="name") @@ -3839,19 +3857,45 @@ def test_generate_signed_url_v4_w_bucket_bound_hostname_w_bare_hostname(self): self._generate_signed_url_v4_helper(bucket_bound_hostname="cdn.example.com") +class Test__item_to_notification(unittest.TestCase): + def _call_fut(self, iterator, item): + from google.cloud.storage.bucket import _item_to_notification + + return _item_to_notification(iterator, item) + + def test_it(self): + from google.cloud.storage.notification import BucketNotification + from google.cloud.storage.notification import _TOPIC_REF_FMT + from google.cloud.storage.notification import NONE_PAYLOAD_FORMAT + + iterator = mock.Mock(spec=["bucket"]) + project = "my-project-123" + topic = "topic-1" + item = { + "topic": _TOPIC_REF_FMT.format(project, topic), + "id": "1", + "etag": "DEADBEEF", + "selfLink": "https://example.com/notification/1", + "payload_format": NONE_PAYLOAD_FORMAT, + } + + notification = self._call_fut(iterator, item) + + self.assertIsInstance(notification, BucketNotification) + self.assertIs(notification._bucket, iterator.bucket) + self.assertEqual(notification._topic_name, topic) + self.assertEqual(notification._topic_project, project) + self.assertEqual(notification._properties, item) + + class _Connection(object): - _delete_bucket = False + credentials = None - def __init__(self, *responses): - self._responses = responses - self._requested = [] - self._deleted_buckets = [] - self.credentials = None + def __init__(self): + pass - def api_request(self, 
**kw): - self._requested.append(kw) - response, self._responses = self._responses[0], self._responses[1:] - return response + def api_request(self, **kw): # pragma: NO COVER + pass class _Client(object): @@ -3859,10 +3903,6 @@ def __init__(self, connection, project=None): self._base_connection = connection self.project = project - @property - def _connection(self): - return self._base_connection - @property def _credentials(self): return self._base_connection.credentials diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 6b0bb1fb5..50ce3c159 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -467,6 +467,74 @@ def test__get_resource_hit_w_explicit(self): _target_object=target, ) + def test__list_resource_w_defaults(self): + import functools + from google.api_core.page_iterator import HTTPIterator + from google.api_core.page_iterator import _do_nothing_page_start + + project = "PROJECT" + path = "/path/to/list/resource" + item_to_value = mock.Mock(spec=[]) + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + connection = client._base_connection = _make_connection() + + iterator = client._list_resource(path=path, item_to_value=item_to_value,) + + self.assertIsInstance(iterator, HTTPIterator) + self.assertIs(iterator.client, client) + self.assertIsInstance(iterator.api_request, functools.partial) + self.assertIs(iterator.api_request.func, connection.api_request) + self.assertEqual(iterator.api_request.args, ()) + expected_keywords = { + "timeout": self._get_default_timeout(), + "retry": DEFAULT_RETRY, + } + self.assertEqual(iterator.api_request.keywords, expected_keywords) + self.assertEqual(iterator.path, path) + self.assertEqual(iterator.next_page_token, None) + self.assertEqual(iterator.max_results, None) + self.assertIs(iterator._page_start, _do_nothing_page_start) + + def test__list_resource_w_explicit(self): + import functools + from google.api_core.page_iterator import HTTPIterator + + project = "PROJECT" + path = "/path/to/list/resource" + item_to_value = mock.Mock(spec=[]) + page_token = "PAGE-TOKEN" + max_results = 47 + extra_params = {"foo": "Foo"} + page_start = mock.Mock(spec=[]) + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + connection = client._base_connection = _make_connection() + + iterator = client._list_resource( + path=path, + item_to_value=item_to_value, + page_token=page_token, + max_results=max_results, + extra_params=extra_params, + page_start=page_start, + ) + + self.assertIsInstance(iterator, HTTPIterator) + self.assertIs(iterator.client, client) + self.assertIsInstance(iterator.api_request, functools.partial) + self.assertIs(iterator.api_request.func, connection.api_request) + self.assertEqual(iterator.api_request.args, ()) + expected_keywords = { + "timeout": self._get_default_timeout(), + "retry": DEFAULT_RETRY, + } + self.assertEqual(iterator.api_request.keywords, expected_keywords) + self.assertEqual(iterator.path, path) + self.assertEqual(iterator.next_page_token, page_token) + self.assertEqual(iterator.max_results, max_results) + self.assertIs(iterator._page_start, page_start) + def test__patch_resource_miss_w_defaults(self): from google.cloud.exceptions import NotFound @@ -1419,264 +1487,198 @@ def test_download_blob_to_file_wo_chunks_w_raw(self): def test_download_blob_to_file_w_chunks_w_raw(self): self._download_blob_to_file_helper(use_chunks=True, raw_download=True) - def test_list_blobs(self): + def 
test_list_blobs_w_defaults_w_bucket_obj(self): from google.cloud.storage.bucket import Bucket + from google.cloud.storage.bucket import _blobs_page_start + from google.cloud.storage.bucket import _item_to_blob - BUCKET_NAME = "bucket-name" - + project = "PROJECT" + bucket_name = "bucket-name" credentials = _make_credentials() - client = self._make_one(project="PROJECT", credentials=credentials) - connection = _make_connection({"items": []}) - - with mock.patch( - "google.cloud.storage.client.Client._connection", - new_callable=mock.PropertyMock, - ) as client_mock: - client_mock.return_value = connection - - bucket_obj = Bucket(client, BUCKET_NAME) - iterator = client.list_blobs(bucket_obj) - blobs = list(iterator) - - self.assertEqual(blobs, []) - connection.api_request.assert_called_once_with( - method="GET", - path="/b/%s/o" % BUCKET_NAME, - query_params={"projection": "noAcl"}, - timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, - ) - - def test_list_blobs_w_all_arguments_and_user_project(self): - from google.cloud.storage.bucket import Bucket + client = self._make_one(project=project, credentials=credentials) + client._list_resource = mock.Mock(spec=[]) + bucket = Bucket(client, bucket_name) + + iterator = client.list_blobs(bucket) + + self.assertIs(iterator, client._list_resource.return_value) + self.assertIs(iterator.bucket, bucket) + self.assertEqual(iterator.prefixes, set()) + + expected_path = "/b/{}/o".format(bucket_name) + expected_item_to_value = _item_to_blob + expected_page_token = None + expected_max_results = None + expected_extra_params = {"projection": "noAcl"} + expected_page_start = _blobs_page_start + client._list_resource.assert_called_once_with( + expected_path, + expected_item_to_value, + page_token=expected_page_token, + max_results=expected_max_results, + extra_params=expected_extra_params, + page_start=expected_page_start, + ) - BUCKET_NAME = "name" - USER_PROJECT = "user-project-123" - MAX_RESULTS = 10 - PAGE_TOKEN = "ABCD" - PREFIX = "subfolder" - DELIMITER = "/" - START_OFFSET = "c" - END_OFFSET = "g" - INCLUDE_TRAILING_DELIMITER = True - VERSIONS = True - PROJECTION = "full" - FIELDS = "items/contentLanguage,nextPageToken" - EXPECTED = { - "maxResults": 10, - "pageToken": PAGE_TOKEN, - "prefix": PREFIX, - "delimiter": DELIMITER, - "startOffset": START_OFFSET, - "endOffset": END_OFFSET, - "includeTrailingDelimiter": INCLUDE_TRAILING_DELIMITER, - "versions": VERSIONS, - "projection": PROJECTION, - "fields": FIELDS, - "userProject": USER_PROJECT, - } + def test_list_blobs_w_explicit_w_user_project(self): + from google.cloud.storage.bucket import _blobs_page_start + from google.cloud.storage.bucket import _item_to_blob + project = "PROJECT" + user_project = "user-project-123" + bucket_name = "name" + max_results = 10 + page_token = "ABCD" + prefix = "subfolder" + delimiter = "/" + start_offset = "c" + end_offset = "g" + include_trailing_delimiter = True + versions = True + projection = "full" + fields = "items/contentLanguage,nextPageToken" credentials = _make_credentials() - client = self._make_one(project=USER_PROJECT, credentials=credentials) - connection = _make_connection({"items": []}) - - with mock.patch( - "google.cloud.storage.client.Client._connection", - new_callable=mock.PropertyMock, - ) as client_mock: - client_mock.return_value = connection - - bucket = Bucket(client, BUCKET_NAME, user_project=USER_PROJECT) - iterator = client.list_blobs( - bucket_or_name=bucket, - max_results=MAX_RESULTS, - page_token=PAGE_TOKEN, - prefix=PREFIX, - 
delimiter=DELIMITER, - start_offset=START_OFFSET, - end_offset=END_OFFSET, - include_trailing_delimiter=INCLUDE_TRAILING_DELIMITER, - versions=VERSIONS, - projection=PROJECTION, - fields=FIELDS, - timeout=42, - ) - blobs = list(iterator) - - self.assertEqual(blobs, []) - connection.api_request.assert_called_once_with( - method="GET", - path="/b/%s/o" % BUCKET_NAME, - query_params=EXPECTED, - timeout=42, - retry=DEFAULT_RETRY, - ) - - def test_list_buckets_wo_project(self): - CREDENTIALS = _make_credentials() - client = self._make_one(project=None, credentials=CREDENTIALS) - - with self.assertRaises(ValueError): - client.list_buckets() - - def test_list_buckets_empty(self): - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - - http = _make_requests_session([_make_json_response({})]) - client._http_internal = http - - buckets = list(client.list_buckets()) - - self.assertEqual(len(buckets), 0) - - http.request.assert_called_once_with( - method="GET", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, - timeout=mock.ANY, - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, "/".join(["", "storage", client._connection.API_VERSION, "b"]) + client = self._make_one(project=project, credentials=credentials) + client._list_resource = mock.Mock(spec=[]) + client._bucket_arg_to_bucket = mock.Mock(spec=[]) + bucket = client._bucket_arg_to_bucket.return_value = mock.Mock( + spec=["path", "user_project"], ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["project"], PROJECT) - self.assertEqual(parms["projection"], "noAcl") - - def test_list_buckets_explicit_project(self): - PROJECT = "PROJECT" - OTHER_PROJECT = "OTHER_PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - - http = _make_requests_session([_make_json_response({})]) - client._http_internal = http - - buckets = list(client.list_buckets(project=OTHER_PROJECT)) - - self.assertEqual(len(buckets), 0) + bucket.path = "/b/{}".format(bucket_name) + bucket.user_project = user_project + timeout = 42 + retry = mock.Mock(spec=[]) - http.request.assert_called_once_with( - method="GET", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, - timeout=mock.ANY, - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, "/".join(["", "storage", client._connection.API_VERSION, "b"]) + iterator = client.list_blobs( + bucket_or_name=bucket_name, + max_results=max_results, + page_token=page_token, + prefix=prefix, + delimiter=delimiter, + start_offset=start_offset, + end_offset=end_offset, + include_trailing_delimiter=include_trailing_delimiter, + versions=versions, + projection=projection, + fields=fields, + timeout=timeout, + retry=retry, ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["project"], str(OTHER_PROJECT)) - self.assertEqual(parms["projection"], "noAcl") - - def test_list_buckets_non_empty(self): - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - - BUCKET_NAME = "bucket-name" - - data = {"items": [{"name": BUCKET_NAME}]} - http = _make_requests_session([_make_json_response(data)]) - 
client._http_internal = http - - buckets = list(client.list_buckets()) - self.assertEqual(len(buckets), 1) - self.assertEqual(buckets[0].name, BUCKET_NAME) - - http.request.assert_called_once_with( - method="GET", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, - timeout=self._get_default_timeout(), + self.assertIs(iterator, client._list_resource.return_value) + self.assertIs(iterator.bucket, bucket) + self.assertEqual(iterator.prefixes, set()) + + expected_path = "/b/{}/o".format(bucket_name) + expected_item_to_value = _item_to_blob + expected_page_token = page_token + expected_max_results = max_results + expected_extra_params = { + "projection": projection, + "prefix": prefix, + "delimiter": delimiter, + "startOffset": start_offset, + "endOffset": end_offset, + "includeTrailingDelimiter": include_trailing_delimiter, + "versions": versions, + "fields": fields, + "userProject": user_project, + } + expected_page_start = _blobs_page_start + client._list_resource.assert_called_once_with( + expected_path, + expected_item_to_value, + page_token=expected_page_token, + max_results=expected_max_results, + extra_params=expected_extra_params, + page_start=expected_page_start, ) - def test_list_buckets_all_arguments(self): - PROJECT = "foo-bar" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - - MAX_RESULTS = 10 - PAGE_TOKEN = "ABCD" - PREFIX = "subfolder" - PROJECTION = "full" - FIELDS = "items/id,nextPageToken" + def test_list_buckets_wo_project(self): + credentials = _make_credentials() + client = self._make_one(project=None, credentials=credentials) - data = {"items": []} - http = _make_requests_session([_make_json_response(data)]) - client._http_internal = http - iterator = client.list_buckets( - max_results=MAX_RESULTS, - page_token=PAGE_TOKEN, - prefix=PREFIX, - projection=PROJECTION, - fields=FIELDS, - timeout=42, - ) - buckets = list(iterator) - self.assertEqual(buckets, []) - http.request.assert_called_once_with( - method="GET", url=mock.ANY, data=mock.ANY, headers=mock.ANY, timeout=42 - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, "/".join(["", "storage", client._connection.API_VERSION, "b"]) - ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["project"], PROJECT) - self.assertEqual(parms["maxResults"], str(MAX_RESULTS)) - self.assertEqual(parms["pageToken"], PAGE_TOKEN) - self.assertEqual(parms["prefix"], PREFIX) - self.assertEqual(parms["projection"], PROJECTION) - self.assertEqual(parms["fields"], FIELDS) + with self.assertRaises(ValueError): + client.list_buckets() - def test_list_buckets_page_empty_response(self): - from google.api_core import page_iterator + def test_list_buckets_w_defaults(self): + from google.cloud.storage.client import _item_to_bucket project = "PROJECT" credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) + client._list_resource = mock.Mock(spec=[]) + iterator = client.list_buckets() - page = page_iterator.Page(iterator, (), None) - iterator._page = page - self.assertEqual(list(page), []) - def test_list_buckets_page_non_empty_response(self): - import six - from google.cloud.storage.bucket import Bucket + self.assertIs(iterator, client._list_resource.return_value) - project = "PROJECT" + expected_path = "/b" + expected_item_to_value = _item_to_bucket + expected_page_token 
= None + expected_max_results = None + expected_extra_params = { + "project": project, + "projection": "noAcl", + } + client._list_resource.assert_called_once_with( + expected_path, + expected_item_to_value, + page_token=expected_page_token, + max_results=expected_max_results, + extra_params=expected_extra_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) + + def test_list_buckets_w_explicit(self): + from google.cloud.storage.client import _item_to_bucket + + project = "foo-bar" + other_project = "OTHER_PROJECT" + max_results = 10 + page_token = "ABCD" + prefix = "subfolder" + projection = "full" + fields = "items/id,nextPageToken" credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) + client._list_resource = mock.Mock(spec=[]) + timeout = 42 + retry = mock.Mock(spec=[]) - blob_name = "bucket-name" - response = {"items": [{"name": blob_name}]} - - def fake_response(): - return response + iterator = client.list_buckets( + project=other_project, + max_results=max_results, + page_token=page_token, + prefix=prefix, + projection=projection, + fields=fields, + timeout=timeout, + retry=retry, + ) - iterator = client.list_buckets() - iterator._get_next_page_response = fake_response + self.assertIs(iterator, client._list_resource.return_value) - page = six.next(iterator.pages) - self.assertEqual(page.num_items, 1) - bucket = six.next(page) - self.assertEqual(page.remaining, 0) - self.assertIsInstance(bucket, Bucket) - self.assertEqual(bucket.name, blob_name) + expected_path = "/b" + expected_item_to_value = _item_to_bucket + expected_page_token = page_token + expected_max_results = max_results + expected_extra_params = { + "project": other_project, + "prefix": prefix, + "projection": projection, + "fields": fields, + } + client._list_resource.assert_called_once_with( + expected_path, + expected_item_to_value, + page_token=expected_page_token, + max_results=expected_max_results, + extra_params=expected_extra_params, + timeout=timeout, + retry=retry, + ) def _create_hmac_key_helper( self, explicit_project=None, user_project=None, timeout=None, retry=None, @@ -1772,110 +1774,74 @@ def test_create_hmac_key_w_timeout(self): def test_create_hmac_key_w_retry(self): self._create_hmac_key_helper(retry=mock.Mock(spec=[])) - def test_list_hmac_keys_defaults_empty(self): - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + def test_list_hmac_keys_w_defaults(self): + from google.cloud.storage.client import _item_to_hmac_key_metadata - http = _make_requests_session([_make_json_response({})]) - client._http_internal = http + project = "PROJECT" + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._list_resource = mock.Mock(spec=[]) - metadatas = list(client.list_hmac_keys()) + iterator = client.list_hmac_keys() - self.assertEqual(len(metadatas), 0) + self.assertIs(iterator, client._list_resource.return_value) - http.request.assert_called_once_with( - method="GET", - url=mock.ANY, - data=None, - headers=mock.ANY, + expected_path = "/projects/{}/hmacKeys".format(project) + expected_item_to_value = _item_to_hmac_key_metadata + expected_max_results = None + expected_extra_params = {} + client._list_resource.assert_called_once_with( + expected_path, + expected_item_to_value, + max_results=expected_max_results, + extra_params=expected_extra_params, timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, ) - _, 
kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, - "/".join( - [ - "", - "storage", - client._connection.API_VERSION, - "projects", - PROJECT, - "hmacKeys", - ] - ), - ) - - def test_list_hmac_keys_explicit_non_empty(self): - from google.cloud.storage.hmac_key import HMACKeyMetadata - - PROJECT = "PROJECT" - OTHER_PROJECT = "other-project-456" - MAX_RESULTS = 3 - EMAIL = "storage-user-123@example.com" - ACCESS_ID = "ACCESS-ID" - USER_PROJECT = "billed-project" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - response = { - "kind": "storage#hmacKeysMetadata", - "items": [ - { - "kind": "storage#hmacKeyMetadata", - "accessId": ACCESS_ID, - "serviceAccountEmail": EMAIL, - } - ], - } + def test_list_hmac_keys_w_explicit(self): + from google.cloud.storage.client import _item_to_hmac_key_metadata - http = _make_requests_session([_make_json_response(response)]) - client._http_internal = http + project = "PROJECT" + other_project = "other-project-456" + max_results = 3 + show_deleted_keys = True + service_account_email = "storage-user-123@example.com" + user_project = "billed-project" + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._list_resource = mock.Mock(spec=[]) + timeout = 42 + retry = mock.Mock(spec=[]) - metadatas = list( - client.list_hmac_keys( - max_results=MAX_RESULTS, - service_account_email=EMAIL, - show_deleted_keys=True, - project_id=OTHER_PROJECT, - user_project=USER_PROJECT, - timeout=42, - ) + iterator = client.list_hmac_keys( + max_results=max_results, + service_account_email=service_account_email, + show_deleted_keys=show_deleted_keys, + project_id=other_project, + user_project=user_project, + timeout=timeout, + retry=retry, ) - self.assertEqual(len(metadatas), len(response["items"])) - - for metadata, resource in zip(metadatas, response["items"]): - self.assertIsInstance(metadata, HMACKeyMetadata) - self.assertIs(metadata._client, client) - self.assertEqual(metadata._properties, resource) + self.assertIs(iterator, client._list_resource.return_value) - http.request.assert_called_once_with( - method="GET", url=mock.ANY, data=None, headers=mock.ANY, timeout=42 - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, - "/".join( - [ - "", - "storage", - client._connection.API_VERSION, - "projects", - OTHER_PROJECT, - "hmacKeys", - ] - ), + expected_path = "/projects/{}/hmacKeys".format(other_project) + expected_item_to_value = _item_to_hmac_key_metadata + expected_max_results = max_results + expected_extra_params = { + "serviceAccountEmail": service_account_email, + "showDeletedKeys": show_deleted_keys, + "userProject": user_project, + } + client._list_resource.assert_called_once_with( + expected_path, + expected_item_to_value, + max_results=expected_max_results, + extra_params=expected_extra_params, + timeout=timeout, + retry=retry, ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["maxResults"], str(MAX_RESULTS)) - self.assertEqual(parms["serviceAccountEmail"], EMAIL) - self.assertEqual(parms["showDeletedKeys"], "True") - self.assertEqual(parms["userProject"], USER_PROJECT) def test_get_hmac_key_metadata_wo_project(self): 
from google.cloud.storage.hmac_key import HMACKeyMetadata @@ -2216,32 +2182,57 @@ def test_get_signed_policy_v4_with_access_token(self): self.assertEqual(fields["x-goog-signature"], EXPECTED_SIGN) self.assertEqual(fields["policy"], EXPECTED_POLICY) - def test_list_buckets_retries_error(self): - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - BUCKET_NAME = "bucket-name" +class Test__item_to_bucket(unittest.TestCase): + def _call_fut(self, iterator, item): + from google.cloud.storage.client import _item_to_bucket - data = {"items": [{"name": BUCKET_NAME}]} - http = _make_requests_session( - [exceptions.InternalServerError("mock error"), _make_json_response(data)] - ) - client._http_internal = http + return _item_to_bucket(iterator, item) - buckets = list(client.list_buckets()) + def test_w_empty_item(self): + from google.cloud.storage.bucket import Bucket - self.assertEqual(len(buckets), 1) - self.assertEqual(buckets[0].name, BUCKET_NAME) + iterator = mock.Mock(spec=["client"]) + item = {} - call = mock.call( - method="GET", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, - timeout=self._get_default_timeout(), - ) - http.request.assert_has_calls([call, call]) + bucket = self._call_fut(iterator, item) + + self.assertIsInstance(bucket, Bucket) + self.assertIs(bucket.client, iterator.client) + self.assertIsNone(bucket.name) + + def test_w_name(self): + from google.cloud.storage.bucket import Bucket + + name = "name" + iterator = mock.Mock(spec=["client"]) + item = {"name": name} + + bucket = self._call_fut(iterator, item) + + self.assertIsInstance(bucket, Bucket) + self.assertIs(bucket.client, iterator.client) + self.assertEqual(bucket.name, name) + + +class Test__item_to_hmac_key_metadata(unittest.TestCase): + def _call_fut(self, iterator, item): + from google.cloud.storage.client import _item_to_hmac_key_metadata + + return _item_to_hmac_key_metadata(iterator, item) + + def test_it(self): + from google.cloud.storage.hmac_key import HMACKeyMetadata + + access_id = "ABCDE" + iterator = mock.Mock(spec=["client"]) + item = {"id": access_id} + + metadata = self._call_fut(iterator, item) + + self.assertIsInstance(metadata, HMACKeyMetadata) + self.assertIs(metadata._client, iterator.client) + self.assertEqual(metadata._properties, item) @pytest.mark.parametrize("test_data", _POST_POLICY_TESTS) From 2c87f2f1438150bd93c7d544d2ccd4fdf95998db Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 9 Jun 2021 14:43:06 -0400 Subject: [PATCH 21/30] tests: avoid requiring real credentials in unit tests (#459) Adjust 'Bucket.get_upload_policy' to use credentials from the client, rather than its connection. Closes #416. --- google/cloud/storage/bucket.py | 2 +- tests/unit/test_blob.py | 540 ++++++++++++++------------------- tests/unit/test_bucket.py | 223 ++++++-------- tests/unit/test_client.py | 42 ++- 4 files changed, 355 insertions(+), 452 deletions(-) diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py index 685d3b532..b79fc12e5 100644 --- a/google/cloud/storage/bucket.py +++ b/google/cloud/storage/bucket.py @@ -3181,7 +3181,7 @@ def generate_upload_policy(self, conditions, expiration=None, client=None): to attach the signature. 
""" client = self._require_client(client) - credentials = client._base_connection.credentials + credentials = client._credentials _signing.ensure_signed_credentials(credentials) if expiration is None: diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index 44959f54f..3ec0db716 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -57,7 +57,7 @@ def _get_default_timeout(): def _make_client(*args, **kw): from google.cloud.storage.client import Client - return Client(*args, **kw) + return mock.create_autospec(Client, instance=True, **kw) def test_ctor_wo_encryption_key(self): BLOB_NAME = "blob-name" @@ -422,10 +422,10 @@ def test_public_url_with_non_ascii(self): def test_generate_signed_url_w_invalid_version(self): BLOB_NAME = "blob-name" EXPIRATION = "2014-10-16T20:34:37.000Z" - connection = _Connection() - client = _Client(connection) + client = self._make_client() bucket = _Bucket(client) blob = self._make_one(BLOB_NAME, bucket=bucket) + with self.assertRaises(ValueError): blob.generate_signed_url(EXPIRATION, version="nonesuch") @@ -464,8 +464,13 @@ def _generate_signed_url_helper( if expiration is None: expiration = datetime.datetime.utcnow().replace(tzinfo=UTC) + delta - connection = _Connection() - client = _Client(connection) + if credentials is None: + expected_creds = _make_credentials() + client = self._make_client(_credentials=expected_creds) + else: + expected_creds = credentials + client = self._make_client(_credentials=object()) + bucket = _Bucket(client) blob = self._make_one(blob_name, bucket=bucket, encryption_key=encryption_key) @@ -500,11 +505,6 @@ def _generate_signed_url_helper( self.assertEqual(signed_uri, signer.return_value) - if credentials is None: - expected_creds = _Connection.credentials - else: - expected_creds = credentials - encoded_name = blob_name.encode("utf-8") quoted_name = parse.quote(encoded_name, safe=b"/~") @@ -784,8 +784,7 @@ def test_exists_w_generation_match(self): def test_delete_wo_generation(self): BLOB_NAME = "blob-name" - connection = _Connection() # no requests will be made - client = _Client(connection) + client = self._make_client() bucket = _Bucket(client) blob = self._make_one(BLOB_NAME, bucket=bucket) bucket._blobs[BLOB_NAME] = 1 @@ -812,8 +811,7 @@ def test_delete_wo_generation(self): def test_delete_w_generation(self): BLOB_NAME = "blob-name" GENERATION = 123456 - connection = _Connection() # no requests will be made - client = _Client(connection) + client = self._make_client() bucket = _Bucket(client) blob = self._make_one(BLOB_NAME, bucket=bucket, generation=GENERATION) bucket._blobs[BLOB_NAME] = 1 @@ -840,8 +838,7 @@ def test_delete_w_generation(self): def test_delete_w_generation_match(self): BLOB_NAME = "blob-name" GENERATION = 123456 - connection = _Connection() # no requests will be made - client = _Client(connection) + client = self._make_client() bucket = _Bucket(client) blob = self._make_one(BLOB_NAME, bucket=bucket, generation=GENERATION) bucket._blobs[BLOB_NAME] = 1 @@ -1024,6 +1021,99 @@ def _mock_requests_response(status_code, headers, content=b""): response.request = requests.Request("POST", "http://example.com").prepare() return response + def test__extract_headers_from_download_gzipped(self): + blob_name = "blob-name" + client = mock.Mock(spec=["_http"]) + bucket = _Bucket(client) + blob = self._make_one(blob_name, bucket=bucket) + + response = self._mock_requests_response( + http_client.OK, + headers={ + "Content-Type": 
"application/json", + "Content-Language": "ko-kr", + "Cache-Control": "max-age=1337;public", + "Content-Encoding": "gzip", + "X-Goog-Storage-Class": "STANDARD", + "X-Goog-Hash": "crc32c=4gcgLQ==,md5=CS9tHYTtyFntzj7B9nkkJQ==", + }, + # { "x": 5 } gzipped + content=b"\x1f\x8b\x08\x00\xcfo\x17_\x02\xff\xabVP\xaaP\xb2R0U\xa8\x05\x00\xa1\xcaQ\x93\n\x00\x00\x00", + ) + blob._extract_headers_from_download(response) + + self.assertEqual(blob.content_type, "application/json") + self.assertEqual(blob.content_language, "ko-kr") + self.assertEqual(blob.content_encoding, "gzip") + self.assertEqual(blob.cache_control, "max-age=1337;public") + self.assertEqual(blob.storage_class, "STANDARD") + self.assertEqual(blob.md5_hash, "CS9tHYTtyFntzj7B9nkkJQ==") + self.assertEqual(blob.crc32c, "4gcgLQ==") + + def test__extract_headers_from_download_empty(self): + blob_name = "blob-name" + client = mock.Mock(spec=["_http"]) + bucket = _Bucket(client) + blob = self._make_one(blob_name, bucket=bucket) + + response = self._mock_requests_response( + http_client.OK, + headers={ + "Content-Type": "application/octet-stream", + "Content-Language": "en-US", + "Cache-Control": "max-age=1337;public", + "Content-Encoding": "gzip", + "X-Goog-Storage-Class": "STANDARD", + "X-Goog-Hash": "crc32c=4/c+LQ==,md5=CS9tHYTt/+ntzj7B9nkkJQ==", + }, + content=b"", + ) + blob._extract_headers_from_download(response) + self.assertEqual(blob.content_type, "application/octet-stream") + self.assertEqual(blob.content_language, "en-US") + self.assertEqual(blob.md5_hash, "CS9tHYTt/+ntzj7B9nkkJQ==") + self.assertEqual(blob.crc32c, "4/c+LQ==") + + def test__extract_headers_from_download_w_hash_response_header_none(self): + blob_name = "blob-name" + md5_hash = "CS9tHYTtyFntzj7B9nkkJQ==" + crc32c = "4gcgLQ==" + client = mock.Mock(spec=["_http"]) + bucket = _Bucket(client) + properties = { + "md5Hash": md5_hash, + "crc32c": crc32c, + } + blob = self._make_one(blob_name, bucket=bucket, properties=properties) + + response = self._mock_requests_response( + http_client.OK, + headers={"X-Goog-Hash": ""}, + # { "x": 5 } gzipped + content=b"\x1f\x8b\x08\x00\xcfo\x17_\x02\xff\xabVP\xaaP\xb2R0U\xa8\x05\x00\xa1\xcaQ\x93\n\x00\x00\x00", + ) + blob._extract_headers_from_download(response) + + self.assertEqual(blob.md5_hash, md5_hash) + self.assertEqual(blob.crc32c, crc32c) + + def test__extract_headers_from_download_w_response_headers_not_match(self): + blob_name = "blob-name" + client = mock.Mock(spec=["_http"]) + bucket = _Bucket(client) + blob = self._make_one(blob_name, bucket=bucket) + + response = self._mock_requests_response( + http_client.OK, + headers={"X-Goog-Hash": "bogus=4gcgLQ==,"}, + # { "x": 5 } gzipped + content=b"", + ) + blob._extract_headers_from_download(response) + + self.assertIsNone(blob.md5_hash) + self.assertIsNone(blob.crc32c) + def _do_download_helper_wo_chunks(self, w_range, raw_download, timeout=None): blob_name = "blob-name" client = mock.Mock() @@ -1114,7 +1204,7 @@ def _do_download_helper_w_chunks( self, w_range, raw_download, timeout=None, checksum="md5" ): blob_name = "blob-name" - client = mock.Mock(_credentials=_make_credentials(), spec=["_credentials"]) + client = self._make_client() bucket = _Bucket(client) blob = self._make_one(blob_name, bucket=bucket) blob._CHUNK_SIZE_MULTIPLE = 1 @@ -1217,41 +1307,32 @@ def test__do_download_w_chunks_wo_checksum(self): patch.assert_not_called() def test_download_to_file_with_failure(self): - import requests - from google.resumable_media import InvalidResponse - from google.cloud import 
exceptions - - raw_response = requests.Response() - raw_response.status_code = http_client.NOT_FOUND - raw_request = requests.Request("GET", "http://example.com") - raw_response.request = raw_request.prepare() - grmp_response = InvalidResponse(raw_response) + from google.cloud.exceptions import NotFound blob_name = "blob-name" - media_link = "http://test.invalid" client = self._make_client() + client.download_blob_to_file.side_effect = NotFound("testing") bucket = _Bucket(client) blob = self._make_one(blob_name, bucket=bucket) - blob._properties["mediaLink"] = media_link - blob._do_download = mock.Mock() - blob._do_download.side_effect = grmp_response - file_obj = io.BytesIO() - with self.assertRaises(exceptions.NotFound): + + with self.assertRaises(NotFound): blob.download_to_file(file_obj) self.assertEqual(file_obj.tell(), 0) - headers = {"accept-encoding": "gzip"} - blob._do_download.assert_called_once_with( - client._http, + expected_timeout = self._get_default_timeout() + client.download_blob_to_file.assert_called_once_with( + blob, file_obj, - media_link, - headers, - None, - None, - False, - timeout=self._get_default_timeout(), + start=None, + end=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + raw_download=False, + timeout=expected_timeout, checksum="md5", ) @@ -1260,7 +1341,6 @@ def test_download_to_file_wo_media_link(self): client = self._make_client() bucket = _Bucket(client) blob = self._make_one(blob_name, bucket=bucket) - blob._do_download = mock.Mock() file_obj = io.BytesIO() blob.download_to_file(file_obj) @@ -1268,49 +1348,41 @@ def test_download_to_file_wo_media_link(self): # Make sure the media link is still unknown. 
self.assertIsNone(blob.media_link) - expected_url = ( - "https://storage.googleapis.com/download/storage/v1/b/" - "name/o/blob-name?alt=media" - ) - headers = {"accept-encoding": "gzip"} - blob._do_download.assert_called_once_with( - client._http, + expected_timeout = self._get_default_timeout() + client.download_blob_to_file.assert_called_once_with( + blob, file_obj, - expected_url, - headers, - None, - None, - False, - timeout=self._get_default_timeout(), + start=None, + end=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + raw_download=False, + timeout=expected_timeout, checksum="md5", ) def test_download_to_file_w_generation_match(self): - GENERATION_NUMBER = 6 - HEADERS = {"accept-encoding": "gzip"} - EXPECTED_URL = ( - "https://storage.googleapis.com/download/storage/v1/b/" - "name/o/blob-name?alt=media&ifGenerationNotMatch={}".format( - GENERATION_NUMBER - ) - ) - + generation_number = 6 client = self._make_client() blob = self._make_one("blob-name", bucket=_Bucket(client)) - blob._do_download = mock.Mock() file_obj = io.BytesIO() - blob.download_to_file(file_obj, if_generation_not_match=GENERATION_NUMBER) + blob.download_to_file(file_obj, if_generation_not_match=generation_number) - blob._do_download.assert_called_once_with( - client._http, + expected_timeout = self._get_default_timeout() + client.download_blob_to_file.assert_called_once_with( + blob, file_obj, - EXPECTED_URL, - HEADERS, - None, - None, - False, - timeout=self._get_default_timeout(), + start=None, + end=None, + if_generation_match=None, + if_generation_not_match=generation_number, + if_metageneration_match=None, + if_metageneration_not_match=None, + raw_download=False, + timeout=expected_timeout, checksum="md5", ) @@ -1324,7 +1396,6 @@ def _download_to_file_helper(self, use_chunks, raw_download, timeout=None): if use_chunks: blob._CHUNK_SIZE_MULTIPLE = 1 blob.chunk_size = 3 - blob._do_download = mock.Mock() if timeout is None: expected_timeout = self._get_default_timeout() @@ -1339,15 +1410,16 @@ def _download_to_file_helper(self, use_chunks, raw_download, timeout=None): else: blob.download_to_file(file_obj, **timeout_kwarg) - headers = {"accept-encoding": "gzip"} - blob._do_download.assert_called_once_with( - client._http, + client.download_blob_to_file.assert_called_once_with( + blob, file_obj, - media_link, - headers, - None, - None, - raw_download, + start=None, + end=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + raw_download=raw_download, timeout=expected_timeout, checksum="md5", ) @@ -1377,13 +1449,11 @@ def _download_to_filename_helper(self, updated, raw_download, timeout=None): blob_name = "blob-name" client = self._make_client() bucket = _Bucket(client) - media_link = "http://example.com/media/" - properties = {"mediaLink": media_link} + properties = {} if updated is not None: properties["updated"] = updated blob = self._make_one(blob_name, bucket=bucket, properties=properties) - blob._do_download = mock.Mock() with _NamedTemporaryFile() as temp: if timeout is None: @@ -1405,51 +1475,22 @@ def _download_to_filename_helper(self, updated, raw_download, timeout=None): expected_timeout = self._get_default_timeout() if timeout is None else timeout - headers = {"accept-encoding": 
"gzip"} - blob._do_download.assert_called_once_with( - client._http, + client.download_blob_to_file.assert_called_once_with( + blob, mock.ANY, - media_link, - headers, - None, - None, - raw_download, + start=None, + end=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + raw_download=raw_download, timeout=expected_timeout, checksum="md5", ) - stream = blob._do_download.mock_calls[0].args[1] + stream = client.download_blob_to_file.mock_calls[0].args[1] self.assertEqual(stream.name, temp.name) - def test_download_to_filename_w_generation_match(self): - from google.cloud._testing import _NamedTemporaryFile - - GENERATION_NUMBER = 6 - MEDIA_LINK = "http://example.com/media/" - EXPECTED_LINK = MEDIA_LINK + "?ifGenerationMatch={}".format(GENERATION_NUMBER) - HEADERS = {"accept-encoding": "gzip"} - - client = self._make_client() - - blob = self._make_one( - "blob-name", bucket=_Bucket(client), properties={"mediaLink": MEDIA_LINK} - ) - blob._do_download = mock.Mock() - - with _NamedTemporaryFile() as temp: - blob.download_to_filename(temp.name, if_generation_match=GENERATION_NUMBER) - - blob._do_download.assert_called_once_with( - client._http, - mock.ANY, - EXPECTED_LINK, - HEADERS, - None, - None, - False, - timeout=self._get_default_timeout(), - checksum="md5", - ) - def test_download_to_filename_w_updated_wo_raw(self): updated = "2014-12-06T13:13:50.690Z" self._download_to_filename_helper(updated=updated, raw_download=False) @@ -1469,18 +1510,41 @@ def test_download_to_filename_w_custom_timeout(self): updated=None, raw_download=False, timeout=9.58 ) + def test_download_to_filename_w_generation_match(self): + from google.cloud._testing import _NamedTemporaryFile + + generation_number = 6 + client = self._make_client() + blob = self._make_one("blob-name", bucket=_Bucket(client)) + + with _NamedTemporaryFile() as temp: + blob.download_to_filename(temp.name, if_generation_match=generation_number) + + expected_timeout = self._get_default_timeout() + client.download_blob_to_file.assert_called_once_with( + blob, + mock.ANY, + start=None, + end=None, + if_generation_match=generation_number, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + raw_download=False, + timeout=expected_timeout, + checksum="md5", + ) + stream = client.download_blob_to_file.mock_calls[0].args[1] + self.assertEqual(stream.name, temp.name) + def test_download_to_filename_corrupted(self): from google.resumable_media import DataCorruption blob_name = "blob-name" client = self._make_client() bucket = _Bucket(client) - media_link = "http://example.com/media/" - properties = {"mediaLink": media_link} - - blob = self._make_one(blob_name, bucket=bucket, properties=properties) - blob._do_download = mock.Mock() - blob._do_download.side_effect = DataCorruption("testing") + blob = self._make_one(blob_name, bucket=bucket) + client.download_blob_to_file.side_effect = DataCorruption("testing") # Try to download into a temporary file (don't use # `_NamedTemporaryFile` it will try to remove after the file is @@ -1495,64 +1559,28 @@ def test_download_to_filename_corrupted(self): # Make sure the file was cleaned up. 
self.assertFalse(os.path.exists(filename)) - headers = {"accept-encoding": "gzip"} - blob._do_download.assert_called_once_with( - client._http, + expected_timeout = self._get_default_timeout() + client.download_blob_to_file.assert_called_once_with( + blob, mock.ANY, - media_link, - headers, - None, - None, - False, - timeout=self._get_default_timeout(), + start=None, + end=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + raw_download=False, + timeout=expected_timeout, checksum="md5", ) - stream = blob._do_download.mock_calls[0].args[1] + stream = client.download_blob_to_file.mock_calls[0].args[1] self.assertEqual(stream.name, filename) - def test_download_to_filename_w_key(self): - from google.cloud._testing import _NamedTemporaryFile - from google.cloud.storage.blob import _get_encryption_headers - - blob_name = "blob-name" - # Create a fake client/bucket and use them in the Blob() constructor. - client = self._make_client() - bucket = _Bucket(client) - media_link = "http://example.com/media/" - properties = {"mediaLink": media_link} - key = b"aa426195405adee2c8081bb9e7e74b19" - blob = self._make_one( - blob_name, bucket=bucket, properties=properties, encryption_key=key - ) - blob._do_download = mock.Mock() - - with _NamedTemporaryFile() as temp: - blob.download_to_filename(temp.name) - - headers = {"accept-encoding": "gzip"} - headers.update(_get_encryption_headers(key)) - blob._do_download.assert_called_once_with( - client._http, - mock.ANY, - media_link, - headers, - None, - None, - False, - timeout=self._get_default_timeout(), - checksum="md5", - ) - stream = blob._do_download.mock_calls[0].args[1] - self.assertEqual(stream.name, temp.name) - def _download_as_bytes_helper(self, raw_download, timeout=None): blob_name = "blob-name" client = self._make_client() bucket = _Bucket(client) - media_link = "http://example.com/media/" - properties = {"mediaLink": media_link} - blob = self._make_one(blob_name, bucket=bucket, properties=properties) - blob._do_download = mock.Mock() + blob = self._make_one(blob_name, bucket=bucket) if timeout is None: expected_timeout = self._get_default_timeout() @@ -1562,113 +1590,30 @@ def _download_as_bytes_helper(self, raw_download, timeout=None): fetched = blob.download_as_bytes(raw_download=raw_download, timeout=timeout) self.assertEqual(fetched, b"") - headers = {"accept-encoding": "gzip"} - blob._do_download.assert_called_once_with( - client._http, + client.download_blob_to_file.assert_called_once_with( + blob, mock.ANY, - media_link, - headers, - None, - None, - raw_download, + start=None, + end=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + raw_download=raw_download, timeout=expected_timeout, checksum="md5", ) - stream = blob._do_download.mock_calls[0].args[1] + stream = client.download_blob_to_file.mock_calls[0].args[1] self.assertIsInstance(stream, io.BytesIO) - def test_download_as_string_w_response_headers(self): - blob_name = "blob-name" - client = mock.Mock(spec=["_http"]) - bucket = _Bucket(client) - media_link = "http://example.com/media/" - properties = {"mediaLink": media_link} - blob = self._make_one(blob_name, bucket=bucket, properties=properties) - - response = self._mock_requests_response( - http_client.OK, 
- headers={ - "Content-Type": "application/json", - "Content-Language": "ko-kr", - "Cache-Control": "max-age=1337;public", - "Content-Encoding": "gzip", - "X-Goog-Storage-Class": "STANDARD", - "X-Goog-Hash": "crc32c=4gcgLQ==,md5=CS9tHYTtyFntzj7B9nkkJQ==", - }, - # { "x": 5 } gzipped - content=b"\x1f\x8b\x08\x00\xcfo\x17_\x02\xff\xabVP\xaaP\xb2R0U\xa8\x05\x00\xa1\xcaQ\x93\n\x00\x00\x00", - ) - blob._extract_headers_from_download(response) - - self.assertEqual(blob.content_type, "application/json") - self.assertEqual(blob.content_language, "ko-kr") - self.assertEqual(blob.content_encoding, "gzip") - self.assertEqual(blob.cache_control, "max-age=1337;public") - self.assertEqual(blob.storage_class, "STANDARD") - self.assertEqual(blob.md5_hash, "CS9tHYTtyFntzj7B9nkkJQ==") - self.assertEqual(blob.crc32c, "4gcgLQ==") - - response = self._mock_requests_response( - http_client.OK, - headers={ - "Content-Type": "application/octet-stream", - "Content-Language": "en-US", - "Cache-Control": "max-age=1337;public", - "Content-Encoding": "gzip", - "X-Goog-Storage-Class": "STANDARD", - "X-Goog-Hash": "crc32c=4/c+LQ==,md5=CS9tHYTt/+ntzj7B9nkkJQ==", - }, - content=b"", - ) - blob._extract_headers_from_download(response) - self.assertEqual(blob.content_type, "application/octet-stream") - self.assertEqual(blob.content_language, "en-US") - self.assertEqual(blob.md5_hash, "CS9tHYTt/+ntzj7B9nkkJQ==") - self.assertEqual(blob.crc32c, "4/c+LQ==") - - def test_download_as_string_w_hash_response_header_none(self): - blob_name = "blob-name" - md5_hash = "CS9tHYTtyFntzj7B9nkkJQ==" - crc32c = "4gcgLQ==" - client = mock.Mock(spec=["_http"]) - bucket = _Bucket(client) - media_link = "http://example.com/media/" - properties = { - "mediaLink": media_link, - "md5Hash": md5_hash, - "crc32c": crc32c, - } - blob = self._make_one(blob_name, bucket=bucket, properties=properties) - - response = self._mock_requests_response( - http_client.OK, - headers={"X-Goog-Hash": ""}, - # { "x": 5 } gzipped - content=b"\x1f\x8b\x08\x00\xcfo\x17_\x02\xff\xabVP\xaaP\xb2R0U\xa8\x05\x00\xa1\xcaQ\x93\n\x00\x00\x00", - ) - blob._extract_headers_from_download(response) - - self.assertEqual(blob.md5_hash, md5_hash) - self.assertEqual(blob.crc32c, crc32c) - - def test_download_as_string_w_response_headers_not_match(self): - blob_name = "blob-name" - client = mock.Mock(spec=["_http"]) - bucket = _Bucket(client) - media_link = "http://example.com/media/" - properties = {"mediaLink": media_link} - blob = self._make_one(blob_name, bucket=bucket, properties=properties) + def test_download_as_bytes_wo_raw(self): + self._download_as_bytes_helper(raw_download=False) - response = self._mock_requests_response( - http_client.OK, - headers={"X-Goog-Hash": "bogus=4gcgLQ==,"}, - # { "x": 5 } gzipped - content=b"", - ) - blob._extract_headers_from_download(response) + def test_download_as_bytes_w_raw(self): + self._download_as_bytes_helper(raw_download=True) - self.assertIsNone(blob.md5_hash) - self.assertIsNone(blob.crc32c) + def test_download_as_bytes_w_custom_timeout(self): + self._download_as_bytes_helper(raw_download=False, timeout=9.58) def test_download_as_bytes_w_generation_match(self): GENERATION_NUMBER = 6 @@ -1697,15 +1642,6 @@ def test_download_as_bytes_w_generation_match(self): checksum="md5", ) - def test_download_as_bytes_wo_raw(self): - self._download_as_bytes_helper(raw_download=False) - - def test_download_as_bytes_w_raw(self): - 
self._download_as_bytes_helper(raw_download=True) - - def test_download_as_byte_w_custom_timeout(self): - self._download_as_bytes_helper(raw_download=False, timeout=9.58) - def _download_as_text_helper( self, raw_download, @@ -1730,7 +1666,8 @@ def _download_as_text_helper( payload = expected_value.encode() blob_name = "blob-name" - bucket = _Bucket() + bucket_client = self._make_client() + bucket = _Bucket(bucket_client) properties = {} if charset is not None: @@ -4804,8 +4741,7 @@ def test_custom_time_unset(self): def test_from_string_w_valid_uri(self): from google.cloud.storage.blob import Blob - connection = _Connection() - client = _Client(connection) + client = self._make_client() uri = "gs://BUCKET_NAME/b" blob = Blob.from_string(uri, client) @@ -4817,8 +4753,7 @@ def test_from_string_w_valid_uri(self): def test_from_string_w_invalid_uri(self): from google.cloud.storage.blob import Blob - connection = _Connection() - client = _Client(connection) + client = self._make_client() with pytest.raises(ValueError, match="URI scheme must be gs"): Blob.from_string("http://bucket_name/b", client) @@ -4826,8 +4761,7 @@ def test_from_string_w_invalid_uri(self): def test_from_string_w_domain_name_bucket(self): from google.cloud.storage.blob import Blob - connection = _Connection() - client = _Client(connection) + client = self._make_client() uri = "gs://buckets.example.com/b" blob = Blob.from_string(uri, client) @@ -5015,15 +4949,12 @@ class _Connection(object): USER_AGENT = "testing 1.2.3" credentials = object() - def __init__(self): - pass - class _Bucket(object): def __init__(self, client=None, name="name", user_project=None): if client is None: - connection = _Connection() - client = _Client(connection) + client = Test_Blob._make_client() + self.client = client self._blobs = {} self._copied = [] @@ -5058,12 +4989,3 @@ def delete_blob( retry, ) ) - - -class _Client(object): - def __init__(self, connection): - self._base_connection = connection - - @property - def _credentials(self): - return self._base_connection.credentials diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py index 63c1e4de3..244c26b2a 100644 --- a/tests/unit/test_bucket.py +++ b/tests/unit/test_bucket.py @@ -37,6 +37,79 @@ class _SigningCredentials( return credentials +class Test__blobs_page_start(unittest.TestCase): + @staticmethod + def _call_fut(iterator, page, response): + from google.cloud.storage.bucket import _blobs_page_start + + return _blobs_page_start(iterator, page, response) + + def test_wo_any_prefixes(self): + iterator = mock.Mock(spec=["prefixes"], prefixes=set()) + page = mock.Mock(spec=["prefixes"]) + response = {} + + self._call_fut(iterator, page, response) + + self.assertEqual(page.prefixes, ()) + self.assertEqual(iterator.prefixes, set()) + + def test_w_prefixes(self): + iterator_prefixes = set(["foo/", "qux/"]) + iterator = mock.Mock(spec=["prefixes"], prefixes=iterator_prefixes) + page = mock.Mock(spec=["prefixes"]) + page_prefixes = ["foo/", "bar/", "baz/"] + response = {"prefixes": page_prefixes} + + self._call_fut(iterator, page, response) + + self.assertEqual(page.prefixes, tuple(page_prefixes)) + self.assertEqual(iterator.prefixes, iterator_prefixes.union(page_prefixes)) + + +class Test__item_to_blob(unittest.TestCase): + @staticmethod + def _call_fut(iterator, item): + from google.cloud.storage.bucket import _item_to_blob + + return _item_to_blob(iterator, item) + + def test_wo_extra_properties(self): + from 
google.cloud.storage.blob import Blob + + blob_name = "blob-name" + bucket = mock.Mock(spec=[]) + iterator = mock.Mock(spec=["bucket"], bucket=bucket) + item = {"name": blob_name} + + blob = self._call_fut(iterator, item) + + self.assertIsInstance(blob, Blob) + self.assertIs(blob.bucket, bucket) + self.assertEqual(blob.name, blob_name) + self.assertEqual(blob._properties, item) + + def test_w_extra_properties(self): + from google.cloud.storage.blob import Blob + + blob_name = "blob-name" + bucket = mock.Mock(spec=[]) + iterator = mock.Mock(spec=["bucket"], bucket=bucket) + item = { + "name": blob_name, + "generation": 123, + "contentType": "text/plain", + "contentLanguage": "en-US", + } + + blob = self._call_fut(iterator, item) + + self.assertIsInstance(blob, Blob) + self.assertIs(blob.bucket, bucket) + self.assertEqual(blob.name, blob_name) + self.assertEqual(blob._properties, item) + + class Test_LifecycleRuleConditions(unittest.TestCase): @staticmethod def _get_target_class(): @@ -442,15 +515,14 @@ def _get_default_timeout(): return _DEFAULT_TIMEOUT @staticmethod - def _make_client(*args, **kw): + def _make_client(**kw): from google.cloud.storage.client import Client - return Client(*args, **kw) + return mock.create_autospec(Client, instance=True, **kw) def _make_one(self, client=None, name=None, properties=None, user_project=None): if client is None: - connection = _Connection() - client = _Client(connection) + client = self._make_client() if user_project is None: bucket = self._get_target_class()(client, name=name) else: @@ -482,8 +554,7 @@ def test_ctor(self): def test_ctor_w_user_project(self): NAME = "name" USER_PROJECT = "user-project-123" - connection = _Connection() - client = _Client(connection) + client = self._make_client() bucket = self._make_one(client, name=NAME, user_project=USER_PROJECT) self.assertEqual(bucket.name, NAME) self.assertEqual(bucket._properties, {}) @@ -575,7 +646,7 @@ def test_notification_defaults(self): PROJECT = "PROJECT" BUCKET_NAME = "BUCKET_NAME" TOPIC_NAME = "TOPIC_NAME" - client = _Client(_Connection(), project=PROJECT) + client = self._make_client(project=PROJECT) bucket = self._make_one(client, name=BUCKET_NAME) notification = bucket.notification(TOPIC_NAME) @@ -603,7 +674,7 @@ def test_notification_explicit(self): CUSTOM_ATTRIBUTES = {"attr1": "value1", "attr2": "value2"} EVENT_TYPES = [OBJECT_FINALIZE_EVENT_TYPE, OBJECT_DELETE_EVENT_TYPE] BLOB_NAME_PREFIX = "blob-name-prefix/" - client = _Client(_Connection(), project=PROJECT) + client = self._make_client(project=PROJECT) bucket = self._make_one(client, name=BUCKET_NAME) notification = bucket.notification( @@ -1609,8 +1680,7 @@ def test_reload_w_metageneration_match(self): ) def test_reload_w_generation_match(self): - connection = _Connection() - client = _Client(connection) + client = self._make_client() bucket = self._make_one(client=client, name="name") with self.assertRaises(TypeError): @@ -3351,96 +3421,14 @@ def test_make_private_recursive_too_many(self): client.list_blobs.assert_called_once() - def test_page_empty_response(self): - from google.api_core import page_iterator - - connection = _Connection() - client = self._make_client() - client._base_connection = connection - name = "name" - bucket = self._make_one(client=client, name=name) - iterator = bucket.list_blobs() - page = page_iterator.Page(iterator, (), None) - iterator._page = page - blobs = list(page) - self.assertEqual(blobs, []) - self.assertEqual(iterator.prefixes, set()) - - def test_page_non_empty_response(self): - import 
six - from google.cloud.storage.blob import Blob - - blob_name = "blob-name" - response = {"items": [{"name": blob_name}], "prefixes": ["foo"]} - connection = _Connection() - client = self._make_client() - client._base_connection = connection - name = "name" - bucket = self._make_one(client=client, name=name) - - def fake_response(): - return response - - iterator = bucket.list_blobs() - iterator._get_next_page_response = fake_response - - page = six.next(iterator.pages) - self.assertEqual(page.prefixes, ("foo",)) - self.assertEqual(page.num_items, 1) - blob = six.next(page) - self.assertEqual(page.remaining, 0) - self.assertIsInstance(blob, Blob) - self.assertEqual(blob.name, blob_name) - self.assertEqual(iterator.prefixes, set(["foo"])) - - def test_cumulative_prefixes(self): - import six - from google.cloud.storage.blob import Blob - - BLOB_NAME = "blob-name1" - response1 = { - "items": [{"name": BLOB_NAME}], - "prefixes": ["foo"], - "nextPageToken": "s39rmf9", - } - response2 = {"items": [], "prefixes": ["bar"]} - client = self._make_client() - name = "name" - bucket = self._make_one(client=client, name=name) - responses = [response1, response2] - - def fake_response(): - return responses.pop(0) - - iterator = bucket.list_blobs() - iterator._get_next_page_response = fake_response - - # Parse first response. - pages_iter = iterator.pages - page1 = six.next(pages_iter) - self.assertEqual(page1.prefixes, ("foo",)) - self.assertEqual(page1.num_items, 1) - blob = six.next(page1) - self.assertEqual(page1.remaining, 0) - self.assertIsInstance(blob, Blob) - self.assertEqual(blob.name, BLOB_NAME) - self.assertEqual(iterator.prefixes, set(["foo"])) - # Parse second response. - page2 = six.next(pages_iter) - self.assertEqual(page2.prefixes, ("bar",)) - self.assertEqual(page2.num_items, 0) - self.assertEqual(iterator.prefixes, set(["foo", "bar"])) - - def _test_generate_upload_policy_helper(self, **kwargs): + def _generate_upload_policy_helper(self, **kwargs): import base64 import json credentials = _create_signing_credentials() credentials.signer_email = mock.sentinel.signer_email credentials.sign_bytes.return_value = b"DEADBEEF" - connection = _Connection() - connection.credentials = credentials - client = _Client(connection) + client = self._make_client(_credentials=credentials) name = "name" bucket = self._make_one(client=client, name=name) @@ -3477,7 +3465,7 @@ def _test_generate_upload_policy_helper(self, **kwargs): def test_generate_upload_policy(self, now): from google.cloud._helpers import _datetime_to_rfc3339 - _, policy = self._test_generate_upload_policy_helper() + _, policy = self._generate_upload_policy_helper() self.assertEqual( policy["expiration"], @@ -3489,15 +3477,13 @@ def test_generate_upload_policy_args(self): expiration = datetime.datetime(1990, 5, 29) - _, policy = self._test_generate_upload_policy_helper(expiration=expiration) + _, policy = self._generate_upload_policy_helper(expiration=expiration) self.assertEqual(policy["expiration"], _datetime_to_rfc3339(expiration)) def test_generate_upload_policy_bad_credentials(self): credentials = object() - connection = _Connection() - connection.credentials = credentials - client = _Client(connection) + client = self._make_client(_credentials=credentials) name = "name" bucket = self._make_one(client=client, name=name) @@ -3628,8 +3614,7 @@ def test_lock_retention_policy_w_user_project(self): def test_generate_signed_url_w_invalid_version(self): expiration = "2014-10-16T20:34:37.000Z" - connection = _Connection() - client = 
_Client(connection) + client = self._make_client() bucket = self._make_one(name="bucket_name", client=client) with self.assertRaises(ValueError): bucket.generate_signed_url(expiration, version="nonesuch") @@ -3665,8 +3650,7 @@ def _generate_signed_url_helper( if expiration is None: expiration = datetime.datetime.utcnow().replace(tzinfo=UTC) + delta - connection = _Connection() - client = _Client(connection) + client = self._make_client(_credentials=credentials) bucket = self._make_one(name=bucket_name, client=client) if version is None: @@ -3726,11 +3710,12 @@ def _generate_signed_url_helper( def test_get_bucket_from_string_w_valid_uri(self): from google.cloud.storage.bucket import Bucket - connection = _Connection() - client = _Client(connection) + client = self._make_client() BUCKET_NAME = "BUCKET_NAME" uri = "gs://" + BUCKET_NAME + bucket = Bucket.from_string(uri, client) + self.assertIsInstance(bucket, Bucket) self.assertIs(bucket.client, client) self.assertEqual(bucket.name, BUCKET_NAME) @@ -3738,8 +3723,7 @@ def test_get_bucket_from_string_w_valid_uri(self): def test_get_bucket_from_string_w_invalid_uri(self): from google.cloud.storage.bucket import Bucket - connection = _Connection() - client = _Client(connection) + client = self._make_client() with pytest.raises(ValueError, match="URI scheme must be gs"): Bucket.from_string("http://bucket_name", client) @@ -3747,11 +3731,12 @@ def test_get_bucket_from_string_w_invalid_uri(self): def test_get_bucket_from_string_w_domain_name_bucket(self): from google.cloud.storage.bucket import Bucket - connection = _Connection() - client = _Client(connection) + client = self._make_client() BUCKET_NAME = "buckets.example.com" uri = "gs://" + BUCKET_NAME + bucket = Bucket.from_string(uri, client) + self.assertIsInstance(bucket, Bucket) self.assertIs(bucket.client, client) self.assertEqual(bucket.name, BUCKET_NAME) @@ -3886,23 +3871,3 @@ def test_it(self): self.assertEqual(notification._topic_name, topic) self.assertEqual(notification._topic_project, project) self.assertEqual(notification._properties, item) - - -class _Connection(object): - credentials = None - - def __init__(self): - pass - - def api_request(self, **kw): # pragma: NO COVER - pass - - -class _Client(object): - def __init__(self, connection, project=None): - self._base_connection = connection - self.project = project - - @property - def _credentials(self): - return self._base_connection.credentials diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 50ce3c159..4c99a3860 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -219,7 +219,8 @@ def test_ctor_w_client_info(self): self.assertIs(client._connection._client_info, client_info) def test_ctor_mtls(self): - credentials = _make_credentials() + PROJECT = "PROJECT" + credentials = _make_credentials(project=PROJECT) client = self._make_one(credentials=credentials) self.assertEqual(client._connection.ALLOW_AUTO_SWITCH_TO_MTLS_URL, True) @@ -1370,13 +1371,13 @@ def test_download_blob_to_file_with_failure(self): from google.cloud.storage.blob import Blob from google.cloud.storage.constants import _DEFAULT_TIMEOUT + project = "PROJECT" raw_response = requests.Response() raw_response.status_code = http_client.NOT_FOUND raw_request = requests.Request("GET", "http://example.com") raw_response.request = raw_request.prepare() grmp_response = InvalidResponse(raw_response) - - credentials = 
_make_credentials() + credentials = _make_credentials(project=project) client = self._make_one(credentials=credentials) blob = mock.create_autospec(Blob) blob._encryption_key = None @@ -1407,7 +1408,7 @@ def test_download_blob_to_file_with_uri(self): from google.cloud.storage.constants import _DEFAULT_TIMEOUT project = "PROJECT" - credentials = _make_credentials() + credentials = _make_credentials(project=project) client = self._make_one(project=project, credentials=credentials) blob = mock.Mock() file_obj = io.BytesIO() @@ -1435,7 +1436,7 @@ def test_download_blob_to_file_with_uri(self): def test_download_blob_to_file_with_invalid_uri(self): project = "PROJECT" - credentials = _make_credentials() + credentials = _make_credentials(project=project) client = self._make_one(project=project, credentials=credentials) file_obj = io.BytesIO() @@ -1446,7 +1447,8 @@ def _download_blob_to_file_helper(self, use_chunks, raw_download): from google.cloud.storage.blob import Blob from google.cloud.storage.constants import _DEFAULT_TIMEOUT - credentials = _make_credentials() + project = "PROJECT" + credentials = _make_credentials(project=project) client = self._make_one(credentials=credentials) blob = mock.create_autospec(Blob) blob._encryption_key = None @@ -1955,7 +1957,9 @@ def test_get_signed_policy_v4(self): EXPECTED_SIGN = "5369676e61747572655f6279746573" EXPECTED_POLICY = "eyJjb25kaXRpb25zIjpbeyJidWNrZXQiOiJidWNrZXQtbmFtZSJ9LHsiYWNsIjoicHJpdmF0ZSJ9LFsic3RhcnRzLXdpdGgiLCIkQ29udGVudC1UeXBlIiwidGV4dC9wbGFpbiJdLHsiYnVja2V0IjoiYnVja2V0LW5hbWUifSx7ImtleSI6Im9iamVjdC1uYW1lIn0seyJ4LWdvb2ctZGF0ZSI6IjIwMjAwMzEyVDExNDcxNloifSx7IngtZ29vZy1jcmVkZW50aWFsIjoidGVzdEBtYWlsLmNvbS8yMDIwMDMxMi9hdXRvL3N0b3JhZ2UvZ29vZzRfcmVxdWVzdCJ9LHsieC1nb29nLWFsZ29yaXRobSI6IkdPT0c0LVJTQS1TSEEyNTYifV0sImV4cGlyYXRpb24iOiIyMDIwLTAzLTI2VDAwOjAwOjEwWiJ9" - client = self._make_one(project="PROJECT") + project = "PROJECT" + credentials = _make_credentials(project=project) + client = self._make_one(credentials=credentials) dtstamps_patch, now_patch, expire_secs_patch = _time_functions_patches() with dtstamps_patch, now_patch, expire_secs_patch: @@ -2033,7 +2037,9 @@ def test_get_signed_policy_v4_with_fields(self): EXPECTED_SIGN = "5369676e61747572655f6279746573" EXPECTED_POLICY = "eyJjb25kaXRpb25zIjpbeyJidWNrZXQiOiJidWNrZXQtbmFtZSJ9LHsiYWNsIjoicHJpdmF0ZSJ9LFsic3RhcnRzLXdpdGgiLCIkQ29udGVudC1UeXBlIiwidGV4dC9wbGFpbiJdLHsiZmllbGQxIjoiVmFsdWUxIn0seyJidWNrZXQiOiJidWNrZXQtbmFtZSJ9LHsia2V5Ijoib2JqZWN0LW5hbWUifSx7IngtZ29vZy1kYXRlIjoiMjAyMDAzMTJUMTE0NzE2WiJ9LHsieC1nb29nLWNyZWRlbnRpYWwiOiJ0ZXN0QG1haWwuY29tLzIwMjAwMzEyL2F1dG8vc3RvcmFnZS9nb29nNF9yZXF1ZXN0In0seyJ4LWdvb2ctYWxnb3JpdGhtIjoiR09PRzQtUlNBLVNIQTI1NiJ9XSwiZXhwaXJhdGlvbiI6IjIwMjAtMDMtMjZUMDA6MDA6MTBaIn0=" - client = self._make_one(project="PROJECT") + project = "PROJECT" + credentials = _make_credentials(project=project) + client = self._make_one(credentials=credentials) dtstamps_patch, now_patch, expire_secs_patch = _time_functions_patches() with dtstamps_patch, now_patch, expire_secs_patch: @@ -2071,7 +2077,9 @@ def test_get_signed_policy_v4_virtual_hosted_style(self): BUCKET_NAME = "bucket-name" - client = self._make_one(project="PROJECT") + project = "PROJECT" + credentials = _make_credentials(project=project) + client = self._make_one(credentials=credentials) dtstamps_patch, _, _ = _time_functions_patches() with dtstamps_patch: @@ -2089,7 +2097,9 @@ def test_get_signed_policy_v4_virtual_hosted_style(self): def test_get_signed_policy_v4_bucket_bound_hostname(self): import datetime - client = 
self._make_one(project="PROJECT") + project = "PROJECT" + credentials = _make_credentials(project=project) + client = self._make_one(credentials=credentials) dtstamps_patch, _, _ = _time_functions_patches() with dtstamps_patch: @@ -2105,7 +2115,9 @@ def test_get_signed_policy_v4_bucket_bound_hostname(self): def test_get_signed_policy_v4_bucket_bound_hostname_with_scheme(self): import datetime - client = self._make_one(project="PROJECT") + project = "PROJECT" + credentials = _make_credentials(project=project) + client = self._make_one(credentials=credentials) dtstamps_patch, _, _ = _time_functions_patches() with dtstamps_patch: @@ -2123,7 +2135,9 @@ def test_get_signed_policy_v4_no_expiration(self): BUCKET_NAME = "bucket-name" EXPECTED_POLICY = "eyJjb25kaXRpb25zIjpbeyJidWNrZXQiOiJidWNrZXQtbmFtZSJ9LHsia2V5Ijoib2JqZWN0LW5hbWUifSx7IngtZ29vZy1kYXRlIjoiMjAyMDAzMTJUMTE0NzE2WiJ9LHsieC1nb29nLWNyZWRlbnRpYWwiOiJ0ZXN0QG1haWwuY29tLzIwMjAwMzEyL2F1dG8vc3RvcmFnZS9nb29nNF9yZXF1ZXN0In0seyJ4LWdvb2ctYWxnb3JpdGhtIjoiR09PRzQtUlNBLVNIQTI1NiJ9XSwiZXhwaXJhdGlvbiI6IjIwMjAtMDMtMjZUMDA6MDA6MTBaIn0=" - client = self._make_one(project="PROJECT") + project = "PROJECT" + credentials = _make_credentials(project=project) + client = self._make_one(credentials=credentials) dtstamps_patch, now_patch, expire_secs_patch = _time_functions_patches() with dtstamps_patch, now_patch, expire_secs_patch: @@ -2147,7 +2161,9 @@ def test_get_signed_policy_v4_with_access_token(self): EXPECTED_SIGN = "0c4003044105" EXPECTED_POLICY = "eyJjb25kaXRpb25zIjpbeyJidWNrZXQiOiJidWNrZXQtbmFtZSJ9LHsiYWNsIjoicHJpdmF0ZSJ9LFsic3RhcnRzLXdpdGgiLCIkQ29udGVudC1UeXBlIiwidGV4dC9wbGFpbiJdLHsiYnVja2V0IjoiYnVja2V0LW5hbWUifSx7ImtleSI6Im9iamVjdC1uYW1lIn0seyJ4LWdvb2ctZGF0ZSI6IjIwMjAwMzEyVDExNDcxNloifSx7IngtZ29vZy1jcmVkZW50aWFsIjoidGVzdEBtYWlsLmNvbS8yMDIwMDMxMi9hdXRvL3N0b3JhZ2UvZ29vZzRfcmVxdWVzdCJ9LHsieC1nb29nLWFsZ29yaXRobSI6IkdPT0c0LVJTQS1TSEEyNTYifV0sImV4cGlyYXRpb24iOiIyMDIwLTAzLTI2VDAwOjAwOjEwWiJ9" - client = self._make_one(project="PROJECT") + project = "PROJECT" + credentials = _make_credentials(project=project) + client = self._make_one(credentials=credentials) dtstamps_patch, now_patch, expire_secs_patch = _time_functions_patches() with dtstamps_patch, now_patch, expire_secs_patch: From 78b2eba81003b437cd24f2b8d269ea2455682507 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 10 Jun 2021 12:34:06 -0400 Subject: [PATCH 22/30] docs: streamline 'timeout' / 'retry' docs in docstrings (#461) * Add 'requests' intersphinx refs * Add narrative docs for timeouts and retries * Include API docs for the 'retry' module, as well, to unbreak links. * Replace boilerplate docstring entries for 'timeout' / 'retry' with links to new narrative docs. * Add docstrings for default conditional policies. Closes #455. 
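As a usage sketch of how the two settings compose after this change
(illustrative only: assumes an already-constructed 'client'; BUCKET_NAME
is a placeholder, and the exception choices are just examples):

    from google.api_core import exceptions
    from google.api_core.retry import Retry

    def is_retryable(exc):
        # Treat only rate-limit / service-unavailable responses as retryable.
        return isinstance(
            exc, (exceptions.TooManyRequests, exceptions.ServiceUnavailable)
        )

    # The (connect, read) timeout applies to each attempt, so a retried
    # call can take longer than (3 + 10) seconds end-to-end.
    bucket = client.get_bucket(
        BUCKET_NAME, timeout=(3, 10), retry=Retry(predicate=is_retryable)
    )
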
--- docs/conf.py | 1 + docs/index.rst | 1 + docs/retry_timeout.rst | 152 ++++++++++ google/cloud/storage/_helpers.py | 66 +---- google/cloud/storage/acl.py | 70 ++--- google/cloud/storage/batch.py | 8 +- google/cloud/storage/blob.py | 284 +++++------------- google/cloud/storage/bucket.py | 416 +++++++-------------------- google/cloud/storage/client.py | 104 ++----- google/cloud/storage/hmac_key.py | 88 ++---- google/cloud/storage/notification.py | 88 ++---- google/cloud/storage/retry.py | 20 ++ owlbot.py | 3 + 13 files changed, 450 insertions(+), 851 deletions(-) create mode 100644 docs/retry_timeout.rst diff --git a/docs/conf.py b/docs/conf.py index c61d72fcc..1691c5c04 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -364,6 +364,7 @@ "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), + "requests": ("https://docs.python-requests.org/en/master/", None), } diff --git a/docs/index.rst b/docs/index.rst index 7a74f12cd..051bac888 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -21,6 +21,7 @@ API Reference constants hmac_key notification + retry_timeout Changelog --------- diff --git a/docs/retry_timeout.rst b/docs/retry_timeout.rst new file mode 100644 index 000000000..b7fc4ff41 --- /dev/null +++ b/docs/retry_timeout.rst @@ -0,0 +1,152 @@ +Configuring Timeouts and Retries +================================ + +When using object methods which invoke Google Cloud Storage API methods, +you have several options for how the library handles timeouts and +how it retries transient errors. + + +.. _configuring_timeouts: + +Configuring Timeouts +-------------------- + +For a number of reasons, methods which invoke API methods may take +longer than expected or desired. By default, such methods all time out +after a default interval, 60.0 seconds. Rather than blocking your application +code for that interval, you may choose to configure explicit timeouts +in your code, using one of three forms: + +- You can pass a single integer or float which functions as the timeout for the + entire request. E.g.: + +.. code-block:: python + + bucket = client.get_bucket(BUCKET_NAME, timeout=300.0) # five minutes + +- You can also be passed as a two-tuple, ``(connect_timeout, read_timeout)``, + where the ``connect_timeout`` sets the maximum time required to establish + the connection to the server, and the ``read_timeout`` sets the maximum + time to wait for a completed response. E.g.: + +.. code-block:: python + + bucket = client.get_bucket(BUCKET_NAME, timeout=(3, 10)) + + +- You can also pass ``None`` as the timeout value: in this case, the library + will block indefinitely for a response. E.g.: + +.. code-block:: python + + bucket = client.get_bucket(BUCKET_NAME, timeout=None) + +.. note:: + Depending on the retry strategy, a request may be + repeated several times using the same timeout each time. + +See also: + + :ref:`Timeouts in requests ` + + +.. _configuring_retries: + +Configuring Retries +-------------------- + +.. 
+ + +.. _configuring_retries: + +Configuring Retries +-------------------- + +.. note:: + + For more background on retries, see also the + `GCS Retry Strategies Document `_ + +Methods which invoke API methods may fail for a number of reasons, some of +which represent "transient" conditions, and thus can be retried +automatically. The library tries to provide a sensible default retry policy +for each method, based on its semantics: + +- For API requests which are always idempotent, the library uses its + :data:`~google.cloud.storage.retry.DEFAULT_RETRY` policy, which + retries any API request which returns a "transient" error. + +- For API requests which are idempotent only if the blob has + the same "generation", the library uses its + :data:`~google.cloud.storage.retry.DEFAULT_RETRY_IF_GENERATION_SPECIFIED` + policy, which retries API requests which return a "transient" error, + but only if the original request includes an ``ifGenerationMatch`` query parameter. + +- For API requests which are idempotent only if the bucket or blob has + the same "metageneration", the library uses its + :data:`~google.cloud.storage.retry.DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED` + policy, which retries API requests which return a "transient" error, + but only if the original request includes an ``ifMetagenerationMatch`` query parameter. + +- For API requests which are idempotent only if the bucket or blob has + the same "etag", the library uses its + :data:`~google.cloud.storage.retry.DEFAULT_RETRY_IF_ETAG_IN_JSON` + policy, which retries API requests which return a "transient" error, + but only if the original request includes an ``etag`` in its JSON payload. + +- For those API requests which are never idempotent, the library passes + ``retry=None`` by default, suppressing any retries. + +Rather than using one of the default policies, you may choose to configure an +explicit policy in your code. + +- You can pass ``None`` as a retry policy to disable retries. E.g.: + +.. code-block:: python + + bucket = client.get_bucket(BUCKET_NAME, retry=None) + +- You can pass an instance of :class:`google.api_core.retry.Retry` to enable + retries; the passed object will define retriable response codes and errors, + as well as configuring backoff and retry interval options. E.g.: + +.. code-block:: python + + from google.api_core import exceptions + from google.api_core.retry import Retry + + _MY_RETRIABLE_TYPES = ( + exceptions.TooManyRequests, # 429 + exceptions.InternalServerError, # 500 + exceptions.BadGateway, # 502 + exceptions.ServiceUnavailable, # 503 + ) + + def is_retryable(exc): + return isinstance(exc, _MY_RETRIABLE_TYPES) + + my_retry_policy = Retry(predicate=is_retryable) + bucket = client.get_bucket(BUCKET_NAME, retry=my_retry_policy) + +- You can pass an instance of + :class:`google.cloud.storage.retry.ConditionalRetryPolicy`, which wraps a + :class:`~google.api_core.retry.Retry`, activating it only if + certain conditions are met. This class exists to provide safe defaults + for RPC calls that are not technically safe to retry normally (due to + potential data duplication or other side-effects) but become safe to retry + if a condition such as if_metageneration_match is set. E.g.: + +.. code-block:: python + + from google.api_core.retry import Retry + from google.cloud.storage.retry import ConditionalRetryPolicy + from google.cloud.storage.retry import is_etag_in_json + + def is_retryable(exc): + ... # as above + + my_retry_policy = Retry(predicate=is_retryable) + my_cond_policy = ConditionalRetryPolicy( + my_retry_policy, conditional_predicate=is_etag_in_json, + required_kwargs=["data"]) + bucket = client.get_bucket(BUCKET_NAME, retry=my_cond_policy)
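+
+The conditional defaults listed above can also be passed explicitly. A
+minimal sketch, assuming an existing ``bucket`` (this simply makes the
+default policy of :meth:`~google.cloud.storage.bucket.Bucket.patch`
+explicit):
+
+.. code-block:: python
+
+    from google.cloud.storage.retry import (
+        DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
+    )
+
+    bucket.labels = {"env": "test"}
+    # The patch is retried only because if_metageneration_match makes
+    # it safe to repeat.
+    bucket.patch(
+        if_metageneration_match=bucket.metageneration,
+        retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
+    )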
+ + +Retry Module API +---------------- + +.. automodule:: google.cloud.storage.retry + :members: + :show-inheritance: diff --git a/google/cloud/storage/_helpers.py b/google/cloud/storage/_helpers.py index 83ed10b87..9e09fc9f2 100644 --- a/google/cloud/storage/_helpers.py +++ b/google/cloud/storage/_helpers.py @@ -162,11 +162,9 @@ def reload( properties to return. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type if_generation_match: long :param if_generation_match: (Optional) Make the operation conditional on whether @@ -190,18 +188,8 @@ blob's current metageneration does not match the given value. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met.
This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` """ client = self._require_client(client) query_params = self._query_params @@ -363,11 +339,9 @@ def update( ``client`` stored on the current object. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type if_generation_match: long :param if_generation_match: (Optional) Make the operation conditional on whether @@ -391,18 +365,8 @@ def update( blob's current metageneration does not match the given value. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` """ client = self._require_client(client) diff --git a/google/cloud/storage/acl.py b/google/cloud/storage/acl.py index bdb17bfc9..a17e4f09e 100644 --- a/google/cloud/storage/acl.py +++ b/google/cloud/storage/acl.py @@ -219,11 +219,9 @@ def _ensure_loaded(self, timeout=_DEFAULT_TIMEOUT): """Load if not already loaded. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` """ if not self.loaded: self.reload(timeout=timeout) @@ -442,20 +440,13 @@ def reload(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the ACL's parent. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. 
See: :ref:`configuring_timeouts` :type retry: :class:`~google.api_core.retry.Retry` - :param retry: (Optional) How to retry the RPC. - - A None value will disable retries. - - A google.api_core.retry.Retry value will enable retries, - and the object will define retriable response codes and errors - and configure backoff and timeout options. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` """ path = self.reload_path client = self._require_client(client) @@ -489,21 +480,15 @@ def _save(self, acl, predefined, client, timeout=_DEFAULT_TIMEOUT): ``NoneType`` :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the ACL's parent. - :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: :class:`~google.api_core.retry.Retry` - :param retry: (Optional) How to retry the RPC. - - A None value will disable retries. - - A google.api_core.retry.Retry value will enable retries, - and the object will define retriable response codes and errors - and configure backoff and timeout options. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` """ client = self._require_client(client) query_params = {"projection": "full"} @@ -545,12 +530,11 @@ def save(self, acl=None, client=None, timeout=_DEFAULT_TIMEOUT): ``NoneType`` :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the ACL's parent. - :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` """ if acl is None: acl = self @@ -577,12 +561,11 @@ def save_predefined(self, predefined, client=None, timeout=_DEFAULT_TIMEOUT): ``NoneType`` :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the ACL's parent. - :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` """ predefined = self.validate_predefined(predefined) self._save(None, predefined, client, timeout=timeout) @@ -601,12 +584,11 @@ def clear(self, client=None, timeout=_DEFAULT_TIMEOUT): ``NoneType`` :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the ACL's parent. - :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. 
+ :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` """ self.save([], client=client, timeout=timeout) diff --git a/google/cloud/storage/batch.py b/google/cloud/storage/batch.py index d40fdc6f5..732439f14 100644 --- a/google/cloud/storage/batch.py +++ b/google/cloud/storage/batch.py @@ -181,11 +181,9 @@ def _do_request( initialization of the object at a later time. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :rtype: tuple of ``response`` (a dictionary of sorts) and ``content`` (a string). diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index fa3f5c7ac..3fb8a59b9 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -642,11 +642,8 @@ def exists( :type timeout: float or tuple :param timeout: - (Optional) The amount of time, in seconds, to wait for the server - response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type if_generation_match: long :param if_generation_match: @@ -672,18 +669,8 @@ def exists( current metageneration does not match the given value. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: bool :returns: True if the blob exists in Cloud Storage. @@ -740,11 +727,8 @@ def delete( :type timeout: float or tuple :param timeout: - (Optional) The amount of time, in seconds, to wait for the server - response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type if_generation_match: long :param if_generation_match: @@ -770,18 +754,8 @@ def delete( current metageneration does not match the given value. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. 
- A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :raises: :class:`google.cloud.exceptions.NotFound` (propagated from @@ -951,11 +925,8 @@ def _do_download( :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -1092,11 +1063,8 @@ def download_to_file( :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -1188,11 +1156,8 @@ def download_to_filename( :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -1293,11 +1258,8 @@ def download_as_bytes( :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -1391,11 +1353,8 @@ def download_as_string( :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. 
+ (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :rtype: bytes :returns: The data stored in this blob. @@ -1483,11 +1442,8 @@ def download_as_text( :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :rtype: text :returns: The data stored in this blob, decoded to text. @@ -1678,11 +1634,8 @@ def _do_multipart_upload( :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -1857,11 +1810,8 @@ def _initiate_resumable_upload( :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -2035,11 +1985,8 @@ def _do_resumable_upload( :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -2162,11 +2109,8 @@ def _do_upload( :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -2341,11 +2285,8 @@ def upload_from_file( :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. 
+ (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -2478,11 +2419,8 @@ def upload_from_filename( :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -2598,11 +2536,8 @@ def upload_from_string( :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -2704,11 +2639,8 @@ def create_resumable_upload_session( :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -2794,25 +2726,12 @@ def get_iam_policy( :type timeout: float or tuple :param timeout: - (Optional) The amount of time, in seconds, to wait for the server - response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: :class:`google.api_core.iam.Policy` :returns: the policy instance, based on the resource returned from @@ -2867,25 +2786,12 @@ def set_iam_policy( :type timeout: float or tuple :param timeout: - (Optional) The amount of time, in seconds, to wait for the server - response. 
- - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: :class:`google.api_core.iam.Policy` :returns: the policy instance, based on the resource returned from @@ -2937,25 +2843,12 @@ def test_iam_permissions( :type timeout: float or tuple :param timeout: - (Optional) The amount of time, in seconds, to wait for the server - response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: list of string :returns: the permissions returned by the ``testIamPermissions`` API @@ -2987,12 +2880,10 @@ def make_public(self, client=None, timeout=_DEFAULT_TIMEOUT): to the ``client`` stored on the blob's bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. The timeout applies to each underlying - request. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. 
""" self.acl.all().grant_read() self.acl.save(client=client, timeout=timeout) @@ -3006,12 +2897,9 @@ def make_private(self, client=None, timeout=_DEFAULT_TIMEOUT): to the ``client`` stored on the blob's bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. The timeout applies to each underlying - request. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` """ self.acl.all().revoke_read() self.acl.save(client=client, timeout=timeout) @@ -3039,11 +2927,9 @@ def compose( ``client`` stored on the blob's bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type if_generation_match: list of long :param if_generation_match: @@ -3059,18 +2945,8 @@ def compose( ``sources`` item-to-item. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` Example: Compose blobs using generation match preconditions. @@ -3179,11 +3055,8 @@ def rewrite( :type timeout: float or tuple :param timeout: - (Optional) The amount of time, in seconds, to wait for the server - response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type if_generation_match: long :param if_generation_match: @@ -3233,18 +3106,8 @@ def rewrite( object's current metageneration does not match the given value. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. 
This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: tuple :returns: ``(token, bytes_rewritten, total_bytes)``, where ``token`` @@ -3394,25 +3257,12 @@ def update_storage_class( :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` """ if new_class not in self.STORAGE_CLASSES: raise ValueError("Invalid storage class: %s" % (new_class,)) diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py index b79fc12e5..a4a27d7aa 100644 --- a/google/cloud/storage/bucket.py +++ b/google/cloud/storage/bucket.py @@ -737,11 +737,9 @@ def exists( to the ``client`` stored on the current bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type if_metageneration_match: long :param if_metageneration_match: (Optional) Make the operation conditional on whether the @@ -752,18 +750,8 @@ def exists( blob's current metageneration does not match the given value. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. 
This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: bool :returns: True if the bucket exists in Cloud Storage. @@ -848,25 +836,13 @@ def create( https://cloud.google.com/storage/docs/access-control/lists#predefined-acl :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` """ warnings.warn( "Bucket.create() is deprecated and will be removed in future." @@ -907,11 +883,9 @@ def update( ``client`` stored on the current object. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type if_metageneration_match: long :param if_metageneration_match: (Optional) Make the operation conditional on whether the @@ -922,18 +896,8 @@ def update( blob's current metageneration does not match the given value. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. 
This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` """ super(Bucket, self).update( client=client, @@ -967,11 +931,9 @@ def reload( properties to return. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type if_metageneration_match: long :param if_metageneration_match: (Optional) Make the operation conditional on whether the @@ -982,18 +944,8 @@ def reload( blob's current metageneration does not match the given value. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` """ super(Bucket, self).reload( client=client, @@ -1024,11 +976,9 @@ def patch( ``client`` stored on the current object. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type if_metageneration_match: long :param if_metageneration_match: (Optional) Make the operation conditional on whether the @@ -1039,18 +989,8 @@ def patch( blob's current metageneration does not match the given value. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. 
This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` """ # Special case: For buckets, it is possible that labels are being # removed; this requires special handling. @@ -1143,11 +1083,9 @@ def get_blob( this object. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type if_generation_match: long :param if_generation_match: (Optional) Make the operation conditional on whether @@ -1171,18 +1109,8 @@ def get_blob( blob's current metageneration does not match the given value. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :param kwargs: Keyword arguments to pass to the :class:`~google.cloud.storage.blob.Blob` constructor. @@ -1300,25 +1228,13 @@ def list_blobs( to the ``client`` stored on the current bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. 
- - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of all :class:`~google.cloud.storage.blob.Blob` @@ -1365,25 +1281,13 @@ def list_notifications( :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: list of :class:`.BucketNotification` :returns: notification instances @@ -1418,25 +1322,13 @@ def get_notification( :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. 
See: :ref:`configuring_retries` :rtype: :class:`.BucketNotification` :returns: notification instance. @@ -1497,25 +1389,13 @@ def delete( blob's current metageneration does not match the given value. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response on each request. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :raises: :class:`ValueError` if ``force`` is ``True`` and the bucket contains more than 256 objects / blobs. @@ -1608,11 +1488,9 @@ def delete_blob( revision of this object. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type if_generation_match: long :param if_generation_match: (Optional) Make the operation conditional on whether @@ -1636,18 +1514,8 @@ def delete_blob( blob's current metageneration does not match the given value. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :raises: :class:`google.cloud.exceptions.NotFound` (to suppress the exception, call ``delete_blobs``, passing a no-op @@ -1714,12 +1582,9 @@ def delete_blobs( to the ``client`` stored on the current bucket. 
:type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. The timeout applies to each individual - blob delete request. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type if_generation_match: list of long :param if_generation_match: (Optional) Make the operation conditional on whether @@ -1747,18 +1612,8 @@ def delete_blobs( The list must match ``blobs`` item-to-item. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :raises: :class:`~google.cloud.exceptions.NotFound` (if `on_error` is not passed). @@ -1858,11 +1713,9 @@ def copy_blob( copied. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type if_generation_match: long :param if_generation_match: (Optional) Makes the operation @@ -1922,18 +1775,8 @@ def copy_blob( does not match the given value. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: :class:`google.cloud.storage.blob.Blob` :returns: The new Blob. @@ -2098,26 +1941,13 @@ def rename_blob( Also used in the delete request. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. 
The timeout applies to each individual - request. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: :class:`Blob` :returns: The newly-renamed blob. @@ -2819,25 +2649,13 @@ def get_iam_policy( feature syntax in the policy fetched. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: :class:`google.api_core.iam.Policy` :returns: the policy instance, based on the resource returned from @@ -2908,25 +2726,13 @@ def set_iam_policy( to the ``client`` stored on the current bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. 
- A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: :class:`google.api_core.iam.Policy` :returns: the policy instance, based on the resource returned from @@ -2972,25 +2778,13 @@ def test_iam_permissions( to the ``client`` stored on the current bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: list of string :returns: the permissions returned by the ``testIamPermissions`` API @@ -3030,12 +2824,9 @@ def make_public( :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. The timeout applies to each underlying - request. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :raises ValueError: If ``recursive`` is True, and the bucket contains more than 256 @@ -3097,12 +2888,9 @@ def make_private( to the ``client`` stored on the current bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. The timeout applies to each underlying - request. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. 
+ :param timeout:
+ (Optional) The amount of time, in seconds, to wait
+ for the server response. See: :ref:`configuring_timeouts`

 :raises ValueError:
 If ``recursive`` is True, and the bucket contains more than 256
@@ -3219,25 +3007,13 @@ def lock_retention_policy(
 to the ``client`` stored on the blob's bucket.

 :type timeout: float or tuple
- :param timeout: (Optional) The amount of time, in seconds, to wait
- for the server response.
-
- Can also be passed as a tuple (connect_timeout, read_timeout).
- See :meth:`requests.Session.request` documentation for details.
+ :param timeout:
+ (Optional) The amount of time, in seconds, to wait
+ for the server response. See: :ref:`configuring_timeouts`

 :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
- :param retry: (Optional) How to retry the RPC. A None value will disable retries.
- A google.api_core.retry.Retry value will enable retries, and the object will
- define retriable response codes and errors and configure backoff and timeout options.
-
- A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and
- activates it only if certain conditions are met. This class exists to provide safe defaults
- for RPC calls that are not technically safe to retry normally (due to potential data
- duplication or other side-effects) but become safe to retry if a condition such as
- if_metageneration_match is set.
-
- See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for
- information on retry types and how to configure them.
+ :param retry:
+ (Optional) How to retry the RPC. See: :ref:`configuring_retries`

 :raises ValueError:
 if the bucket has no metageneration (i.e., new or never reloaded);
diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py
index 8fcc12b69..a9a06746a 100644
--- a/google/cloud/storage/client.py
+++ b/google/cloud/storage/client.py
@@ -259,25 +259,13 @@ def get_service_account_email(
 (Optional) Project ID to use for retrieving GCS service account
 email address. Defaults to the client's project.
 :type timeout: float or tuple
- :param timeout: (Optional) The amount of time, in seconds, to wait
- for the server response.
-
- Can also be passed as a tuple (connect_timeout, read_timeout).
- See :meth:`requests.Session.request` documentation for details.
+ :param timeout:
+ (Optional) The amount of time, in seconds, to wait
+ for the server response. See: :ref:`configuring_timeouts`

 :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
- :param retry: (Optional) How to retry the RPC. A None value will disable retries.
- A google.api_core.retry.Retry value will enable retries, and the object will
- define retriable response codes and errors and configure backoff and timeout options.
-
- A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and
- activates it only if certain conditions are met. This class exists to provide safe defaults
- for RPC calls that are not technically safe to retry normally (due to potential data
- duplication or other side-effects) but become safe to retry if a condition such as
- if_metageneration_match is set.
-
- See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for
- information on retry types and how to configure them.
+ :param retry:
+ (Optional) How to retry the RPC.
See: :ref:`configuring_retries` :rtype: str :returns: service account email address @@ -799,11 +787,9 @@ def lookup_bucket( :param bucket_name: The name of the bucket to get. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type if_metageneration_match: long :param if_metageneration_match: (Optional) Make the operation conditional on whether the @@ -814,18 +800,8 @@ def lookup_bucket( blob's current metageneration does not match the given value. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: :class:`google.cloud.storage.bucket.Bucket` :returns: The bucket matching the name provided or None if not found. @@ -1306,25 +1282,13 @@ def list_buckets( If not passed, uses the project set on the client. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. 
See: :ref:`configuring_retries` :rtype: :class:`~google.api_core.page_iterator.Iterator` :raises ValueError: if both ``project`` is ``None`` and the client's @@ -1379,11 +1343,9 @@ def create_hmac_key( :param user_project: (Optional) This parameter is currently ignored. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy :param retry: (Optional) How to retry the RPC. A None value will disable retries. @@ -1453,25 +1415,13 @@ def list_hmac_keys( :param user_project: (Optional) This parameter is currently ignored. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: Tuple[:class:`~google.cloud.storage.hmac_key.HMACKeyMetadata`, str] @@ -1514,11 +1464,9 @@ def get_hmac_key_metadata( Defaults to client's project. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type user_project: str :param user_project: (Optional) This parameter is currently ignored. diff --git a/google/cloud/storage/hmac_key.py b/google/cloud/storage/hmac_key.py index e59960a1c..5cec51fa7 100644 --- a/google/cloud/storage/hmac_key.py +++ b/google/cloud/storage/hmac_key.py @@ -193,25 +193,13 @@ def exists(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): """Determine whether or not the key for this metadata exists. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. 
+ :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: bool :returns: True if the key exists in Cloud Storage. @@ -234,25 +222,13 @@ def reload(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): """Reload properties from Cloud Storage. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :raises :class:`~google.api_core.exceptions.NotFound`: if the key does not exist on the back-end. @@ -270,25 +246,13 @@ def update(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY_IF_ETAG_IN_JSON): """Save writable properties to Cloud Storage. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. 
- A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :raises :class:`~google.api_core.exceptions.NotFound`: if the key does not exist on the back-end. @@ -306,25 +270,13 @@ def delete(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): """Delete the key from Cloud Storage. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :raises :class:`~google.api_core.exceptions.NotFound`: if the key does not exist on the back-end. diff --git a/google/cloud/storage/notification.py b/google/cloud/storage/notification.py index e86859466..d23343100 100644 --- a/google/cloud/storage/notification.py +++ b/google/cloud/storage/notification.py @@ -246,25 +246,13 @@ def create(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=None): :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the notification's bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. 
- A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` """ if self.notification_id is not None: raise ValueError( @@ -298,25 +286,13 @@ def exists(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: bool :returns: True, if the notification exists, else False. @@ -354,25 +330,13 @@ def reload(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. 
-
- A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and
- activates it only if certain conditions are met. This class exists to provide safe defaults
- for RPC calls that are not technically safe to retry normally (due to potential data
- duplication or other side-effects) but become safe to retry if a condition such as
- if_metageneration_match is set.
-
- See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for
- information on retry types and how to configure them.
+ :param retry:
+ (Optional) How to retry the RPC. See: :ref:`configuring_retries`

 :raises ValueError:
 if the notification has no ID.
@@ -405,25 +369,13 @@ def delete(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY):
 :param client: (Optional) The client to use. If not passed, falls back
 to the ``client`` stored on the current bucket.
 :type timeout: float or tuple
- :param timeout: (Optional) The amount of time, in seconds, to wait
- for the server response.
-
- Can also be passed as a tuple (connect_timeout, read_timeout).
- See :meth:`requests.Session.request` documentation for details.
+ :param timeout:
+ (Optional) The amount of time, in seconds, to wait
+ for the server response. See: :ref:`configuring_timeouts`

 :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
- :param retry: (Optional) How to retry the RPC. A None value will disable retries.
- A google.api_core.retry.Retry value will enable retries, and the object will
- define retriable response codes and errors and configure backoff and timeout options.
-
- A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and
- activates it only if certain conditions are met. This class exists to provide safe defaults
- for RPC calls that are not technically safe to retry normally (due to potential data
- duplication or other side-effects) but become safe to retry if a condition such as
- if_metageneration_match is set.
-
- See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for
- information on retry types and how to configure them.
+ :param retry:
+ (Optional) How to retry the RPC. See: :ref:`configuring_retries`

 :raises: :class:`google.api_core.exceptions.NotFound`:
 if the notification does not exist.
diff --git a/google/cloud/storage/retry.py b/google/cloud/storage/retry.py
index 7b9626ed5..a9cdc9c0d 100644
--- a/google/cloud/storage/retry.py
+++ b/google/cloud/storage/retry.py
@@ -139,9 +139,29 @@ def is_etag_in_json(data):
 DEFAULT_RETRY_IF_GENERATION_SPECIFIED = ConditionalRetryPolicy(
 DEFAULT_RETRY, is_generation_specified, ["query_params"]
 )
+"""Conditional wrapper for the default retry object.
+
+This retry setting will retry all _RETRYABLE_TYPES and any status codes from
+_ADDITIONAL_RETRYABLE_STATUS_CODES, but only if the request included an
+``ifGenerationMatch`` query parameter.
+"""
+
 DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED = ConditionalRetryPolicy(
 DEFAULT_RETRY, is_metageneration_specified, ["query_params"]
 )
+"""Conditional wrapper for the default retry object.
+
+This retry setting will retry all _RETRYABLE_TYPES and any status codes from
+_ADDITIONAL_RETRYABLE_STATUS_CODES, but only if the request included an
+``ifMetagenerationMatch`` query parameter.
+"""
+
 DEFAULT_RETRY_IF_ETAG_IN_JSON = ConditionalRetryPolicy(
 DEFAULT_RETRY, is_etag_in_json, ["data"]
 )
+"""Conditional wrapper for the default retry object.
+
+This retry setting will retry all _RETRYABLE_TYPES and any status codes from
+_ADDITIONAL_RETRYABLE_STATUS_CODES, but only if the request included an
+``ETAG`` entry in its payload.
+"""
diff --git a/owlbot.py b/owlbot.py
index 945dfa91a..0e23239ec 100644
--- a/owlbot.py
+++ b/owlbot.py
@@ -32,6 +32,9 @@
 # See: https://github.com/googleapis/python-storage/issues/226
 "google-cloud-kms < 2.0dev",
 ],
+ intersphinx_dependencies = {
+ "requests": "https://docs.python-requests.org/en/master/"
+ },
)

s.move(

From 0dbbb8ac17a4b632707485ee6c7cc15e4670efaa Mon Sep 17 00:00:00 2001
From: Andrew Gorcester
Date: Fri, 11 Jun 2021 15:26:18 -0700
Subject: [PATCH 23/30] feat: media operation retries can be configured using
 the same interface as with non-media operations (#447)

All media operation calls (downloads and uploads) can be configured with
Retry objects and ConditionalRetryPolicy objects, nearly identically to
non-media operations. This is accomplished by converting the Retry object
to a google-resumable-media-python library RetryStrategy object at the
point of entry to that library.

Custom predicates of Retry objects (for instance set with
Retry(predicate=...)) are not supported for media operations; they will be
replaced with a media-operation-specific predicate.

This change is backwards-compatible for users of public methods that use
num_retries arguments to configure uploads; num_retries continues to be
supported, but the deprecation warning remains in effect, and it will be
fully removed and replaced with Retry objects in the future.

With this change, the default parameters for media operation retries
change to be uniform with non-media operation retries. Specifically, the
retry deadline for media operations becomes 120 seconds unless otherwise
configured.
---
 google/cloud/storage/_helpers.py |  41 ++++
 google/cloud/storage/blob.py     | 350 ++++++++++++++++++++++++++++---
 google/cloud/storage/client.py   |  39 ++++
 google/cloud/storage/fileio.py   | 103 +++++++--
 setup.py                         |   2 +-
 tests/unit/test__helpers.py      |  39 ++++
 tests/unit/test_blob.py          | 335 +++++++++++++++++++++++++----
 tests/unit/test_client.py        |  50 ++++-
 tests/unit/test_fileio.py        | 268 +++++++++++++++++++++--
 9 files changed, 1127 insertions(+), 100 deletions(-)

diff --git a/google/cloud/storage/_helpers.py b/google/cloud/storage/_helpers.py
index 9e09fc9f2..04671035b 100644
--- a/google/cloud/storage/_helpers.py
+++ b/google/cloud/storage/_helpers.py
@@ -23,6 +23,7 @@
 import os

 from six.moves.urllib.parse import urlsplit
+from google import resumable_media
 from google.cloud.storage.constants import _DEFAULT_TIMEOUT
 from google.cloud.storage.retry import DEFAULT_RETRY
 from google.cloud.storage.retry import DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED
@@ -45,6 +46,12 @@
 ("if_source_metageneration_not_match", "ifSourceMetagenerationNotMatch"),
 )

+_NUM_RETRIES_MESSAGE = (
+ "`num_retries` has been deprecated and will be removed in a future "
+ "release. Use the `retry` argument with a Retry or ConditionalRetryPolicy "
+ "object, or None, instead."
+)
+

 def _get_storage_host():
 return os.environ.get(STORAGE_EMULATOR_ENV_VAR, _DEFAULT_STORAGE_HOST)
@@ -524,3 +531,37 @@ def _bucket_bound_hostname_url(host, scheme=None):
 return host

 return "{scheme}://{host}/".format(scheme=scheme, host=host)
+
+
+def _api_core_retry_to_resumable_media_retry(retry, num_retries=None):
+ """Convert google.api_core.retry.Retry to google.resumable_media.RetryStrategy.
+
+ Custom predicates are not translated.
+
+ :type retry: google.api_core.retry.Retry
+ :param retry: (Optional) The google.api_core.retry.Retry object to translate.
+
+ :type num_retries: int
+ :param num_retries: (Optional) The number of retries desired. This is
+ supported for backwards compatibility and is mutually exclusive with
+ `retry`.
+
+ :rtype: google.resumable_media.RetryStrategy
+ :returns: A RetryStrategy with all applicable attributes copied from input,
+ or a RetryStrategy with max_retries set to 0 if None was input.
+ """
+
+ if retry is not None and num_retries is not None:
+ raise ValueError("num_retries and retry arguments are mutually exclusive")
+
+ elif retry is not None:
+ return resumable_media.RetryStrategy(
+ max_sleep=retry._maximum,
+ max_cumulative_retry=retry._deadline,
+ initial_delay=retry._initial,
+ multiplier=retry._multiplier,
+ )
+ elif num_retries is not None:
+ return resumable_media.RetryStrategy(max_retries=num_retries)
+ else:
+ return resumable_media.RetryStrategy(max_retries=0)
diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py
index 3fb8a59b9..597e63ca4 100644
--- a/google/cloud/storage/blob.py
+++ b/google/cloud/storage/blob.py
@@ -65,8 +65,10 @@
 from google.cloud.storage._helpers import _bucket_bound_hostname_url
 from google.cloud.storage._helpers import _convert_to_timestamp
 from google.cloud.storage._helpers import _raise_if_more_than_one_set
+from google.cloud.storage._helpers import _api_core_retry_to_resumable_media_retry
 from google.cloud.storage._signing import generate_signed_url_v2
 from google.cloud.storage._signing import generate_signed_url_v4
+from google.cloud.storage._helpers import _NUM_RETRIES_MESSAGE
 from google.cloud.storage.acl import ACL
 from google.cloud.storage.acl import ObjectACL
 from google.cloud.storage.constants import _DEFAULT_TIMEOUT
@@ -76,9 +78,11 @@
 from google.cloud.storage.constants import NEARLINE_STORAGE_CLASS
 from google.cloud.storage.constants import REGIONAL_LEGACY_STORAGE_CLASS
 from google.cloud.storage.constants import STANDARD_STORAGE_CLASS
+from google.cloud.storage.retry import ConditionalRetryPolicy
 from google.cloud.storage.retry import DEFAULT_RETRY
 from google.cloud.storage.retry import DEFAULT_RETRY_IF_ETAG_IN_JSON
 from google.cloud.storage.retry import DEFAULT_RETRY_IF_GENERATION_SPECIFIED
+from google.cloud.storage.retry import DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED
 from google.cloud.storage.fileio import BlobReader
 from google.cloud.storage.fileio import BlobWriter
@@ -105,17 +109,6 @@
 "name",
 "storageClass",
 )
-_NUM_RETRIES_MESSAGE = (
- "`num_retries` has been deprecated and will be removed in a future "
- "release. The default behavior (when `num_retries` is not specified) when "
- "a transient error (e.g. 429 Too Many Requests or 500 Internal Server "
- "Error) occurs will be as follows: upload requests will be automatically "
- "retried if and only if `if_metageneration_match` is specified (thus "
- "making the upload idempotent). Subsequent retries will be sent after "
- "waiting 1, 2, 4, 8, etc. seconds (exponential backoff) until 10 minutes "
- "of wait time have elapsed. At that point, there will be no more attempts "
- "to retry."
-)
 _READ_LESS_THAN_SIZE = (
 "Size {:d} was specified but the file-like object only had " "{:d} bytes remaining."
 )
@@ -892,6 +885,7 @@ def _do_download(
 raw_download=False,
 timeout=_DEFAULT_TIMEOUT,
 checksum="md5",
+ retry=None,
 ):
 """Perform a download without any error handling.
@@ -938,7 +932,25 @@ def _do_download( downloads where chunk_size is set) an INFO-level log will be emitted. Supported values are "md5", "crc32c" and None. The default is "md5". + + :type retry: google.api_core.retry.Retry + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will configure backoff and timeout options. Custom + predicates (customizable error codes) are not supported for media + operations such as this one. + + This private method does not accept ConditionalRetryPolicy values + because the information necessary to evaluate the policy is instead + evaluated in client.download_blob_to_file(). + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. """ + + retry_strategy = _api_core_retry_to_resumable_media_retry(retry) + if self.chunk_size is None: if raw_download: klass = RawDownload @@ -953,6 +965,7 @@ def _do_download( end=end, checksum=checksum, ) + download._retry_strategy = retry_strategy response = download.consume(transport, timeout=timeout) self._extract_headers_from_download(response) else: @@ -975,6 +988,7 @@ def _do_download( end=end, ) + download._retry_strategy = retry_strategy while not download.finished: download.consume_next_chunk(transport, timeout=timeout) @@ -991,6 +1005,7 @@ def download_to_file( if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, checksum="md5", + retry=DEFAULT_RETRY, ): """DEPRECATED. Download the contents of this blob into a file-like object. @@ -1077,6 +1092,28 @@ def download_to_file( emitted. Supported values are "md5", "crc32c" and None. The default is "md5". + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. + :raises: :class:`google.cloud.exceptions.NotFound` """ client = self._require_client(client) @@ -1093,6 +1130,7 @@ def download_to_file( if_metageneration_not_match=if_metageneration_not_match, timeout=timeout, checksum=checksum, + retry=retry, ) def download_to_filename( @@ -1108,6 +1146,7 @@ def download_to_filename( if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, checksum="md5", + retry=DEFAULT_RETRY, ): """Download the contents of this blob into a named file. @@ -1170,6 +1209,28 @@ def download_to_filename( emitted. Supported values are "md5", "crc32c" and None. The default is "md5". 
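A rough sketch of passing a customized ``Retry`` to a download, per the docstring above (the bucket, object, and path names are hypothetical, and the backoff values are illustrative; a custom predicate would be ignored for media operations, but the delay and deadline settings are honored):

    from google.api_core.retry import Retry
    from google.cloud import storage

    client = storage.Client()
    blob = client.bucket("my-bucket").blob("report.csv")  # hypothetical names

    # Tighter backoff and a shorter deadline for a latency-sensitive download.
    quick_retry = Retry(initial=0.5, maximum=8.0, multiplier=2.0, deadline=30.0)
    blob.download_to_filename("/tmp/report.csv", retry=quick_retry)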
+ :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. + :raises: :class:`google.cloud.exceptions.NotFound` """ client = self._require_client(client) @@ -1187,6 +1248,7 @@ def download_to_filename( if_metageneration_not_match=if_metageneration_not_match, timeout=timeout, checksum=checksum, + retry=retry, ) except resumable_media.DataCorruption: # Delete the corrupt downloaded file. @@ -1213,6 +1275,7 @@ def download_as_bytes( if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, checksum="md5", + retry=DEFAULT_RETRY, ): """Download the contents of this blob as a bytes object. @@ -1272,6 +1335,28 @@ def download_as_bytes( emitted. Supported values are "md5", "crc32c" and None. The default is "md5". + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. + :rtype: bytes :returns: The data stored in this blob. @@ -1291,6 +1376,7 @@ def download_as_bytes( if_metageneration_not_match=if_metageneration_not_match, timeout=timeout, checksum=checksum, + retry=retry, ) return string_buffer.getvalue() @@ -1305,6 +1391,7 @@ def download_as_string( if_metageneration_match=None, if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, + retry=DEFAULT_RETRY, ): """(Deprecated) Download the contents of this blob as a bytes object. @@ -1356,6 +1443,28 @@ def download_as_string( (Optional) The amount of time, in seconds, to wait for the server response. 
See: :ref:`configuring_timeouts` + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. + :rtype: bytes :returns: The data stored in this blob. @@ -1377,6 +1486,7 @@ def download_as_string( if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, timeout=timeout, + retry=retry, ) def download_as_text( @@ -1391,6 +1501,7 @@ def download_as_text( if_metageneration_match=None, if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, + retry=DEFAULT_RETRY, ): """Download the contents of this blob as text (*not* bytes). @@ -1445,6 +1556,28 @@ def download_as_text( (Optional) The amount of time, in seconds, to wait for the server response. See: :ref:`configuring_timeouts` + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. + :rtype: text :returns: The data stored in this blob, decoded to text. """ @@ -1458,6 +1591,7 @@ def download_as_text( if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, timeout=timeout, + retry=retry, ) if encoding is not None: @@ -1570,6 +1704,7 @@ def _do_multipart_upload( if_metageneration_not_match, timeout=_DEFAULT_TIMEOUT, checksum=None, + retry=None, ): """Perform a multipart upload. @@ -1645,6 +1780,21 @@ def _do_multipart_upload( manually-set checksum value. Supported values are "md5", "crc32c" and None. The default is None. 
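For reference, the translation performed by ``_api_core_retry_to_resumable_media_retry`` (added in ``_helpers.py`` above and applied to the upload objects below) maps ``Retry`` attributes onto a media ``RetryStrategy`` roughly as follows; the values here are illustrative, not library defaults:

    from google.api_core.retry import Retry
    from google import resumable_media

    retry = Retry(initial=1.0, maximum=60.0, multiplier=2.0, deadline=120.0)

    # Roughly what the helper builds from the Retry above; a bare
    # num_retries would instead become RetryStrategy(max_retries=num_retries).
    strategy = resumable_media.RetryStrategy(
        max_sleep=60.0,              # from retry._maximum
        max_cumulative_retry=120.0,  # from retry._deadline
        initial_delay=1.0,           # from retry._initial
        multiplier=2.0,              # from retry._multiplier
    )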
+ :type retry: google.api_core.retry.Retry
+ :param retry: (Optional) How to retry the RPC. A None value will disable
+ retries. A google.api_core.retry.Retry value will enable retries,
+ and the object will configure backoff and timeout options. Custom
+ predicates (customizable error codes) are not supported for media
+ operations such as this one.
+
+ This private method does not accept ConditionalRetryPolicy values
+ because the information necessary to evaluate the policy is instead
+ evaluated in blob._do_upload().
+
+ See the retry.py source code and docstrings in this package
+ (google.cloud.storage.retry) for information on retry types and how
+ to configure them.
+
 :rtype: :class:`~requests.Response`
 :returns: The "200 OK" response object returned after the multipart
 upload request.
@@ -1706,10 +1856,9 @@ def _do_multipart_upload(
 upload_url = _add_query_parameters(base_url, name_value_pairs)
 upload = MultipartUpload(upload_url, headers=headers, checksum=checksum)

- if num_retries is not None:
- upload._retry_strategy = resumable_media.RetryStrategy(
- max_retries=num_retries
- )
+ upload._retry_strategy = _api_core_retry_to_resumable_media_retry(
+ retry, num_retries
+ )

 response = upload.transmit(
 transport, data, object_metadata, content_type, timeout=timeout
@@ -1733,6 +1882,7 @@ def _initiate_resumable_upload(
 if_metageneration_not_match=None,
 timeout=_DEFAULT_TIMEOUT,
 checksum=None,
+ retry=None,
 ):
 """Initiate a resumable upload.

@@ -1823,6 +1973,21 @@ def _initiate_resumable_upload(
 delete the uploaded object automatically. Supported values
 are "md5", "crc32c" and None. The default is None.

+ :type retry: google.api_core.retry.Retry
+ :param retry: (Optional) How to retry the RPC. A None value will disable
+ retries. A google.api_core.retry.Retry value will enable retries,
+ and the object will configure backoff and timeout options. Custom
+ predicates (customizable error codes) are not supported for media
+ operations such as this one.
+
+ This private method does not accept ConditionalRetryPolicy values
+ because the information necessary to evaluate the policy is instead
+ evaluated in blob._do_upload().
+
+ See the retry.py source code and docstrings in this package
+ (google.cloud.storage.retry) for information on retry types and how
+ to configure them.
+
 :rtype: tuple
 :returns: Pair of

@@ -1887,10 +2052,9 @@ def _initiate_resumable_upload(
 upload_url, chunk_size, headers=headers, checksum=checksum
 )

- if num_retries is not None:
- upload._retry_strategy = resumable_media.RetryStrategy(
- max_retries=num_retries
- )
+ upload._retry_strategy = _api_core_retry_to_resumable_media_retry(
+ retry, num_retries
+ )

 upload.initiate(
 transport,
@@ -1918,6 +2082,7 @@ def _do_resumable_upload(
 if_metageneration_not_match,
 timeout=_DEFAULT_TIMEOUT,
 checksum=None,
+ retry=None,
 ):
 """Perform a resumable upload.

@@ -1998,6 +2163,21 @@ def _do_resumable_upload(
 delete the uploaded object automatically. Supported values
 are "md5", "crc32c" and None. The default is None.

+ :type retry: google.api_core.retry.Retry
+ :param retry: (Optional) How to retry the RPC. A None value will disable
+ retries. A google.api_core.retry.Retry value will enable retries,
+ and the object will configure backoff and timeout options. Custom
+ predicates (customizable error codes) are not supported for media
+ operations such as this one.
+
+ This private method does not accept ConditionalRetryPolicy values
+ because the information necessary to evaluate the policy is instead
+ evaluated in blob._do_upload().
+
+ See the retry.py source code and docstrings in this package
+ (google.cloud.storage.retry) for information on retry types and how
+ to configure them.
+
 :rtype: :class:`~requests.Response`
 :returns: The "200 OK" response object returned after the final chunk
 is uploaded.
@@ -2015,6 +2195,7 @@ def _do_resumable_upload(
 if_metageneration_not_match=if_metageneration_not_match,
 timeout=timeout,
 checksum=checksum,
+ retry=retry,
 )

 while not upload.finished:
@@ -2041,6 +2222,7 @@ def _do_upload(
 if_metageneration_not_match,
 timeout=_DEFAULT_TIMEOUT,
 checksum=None,
+ retry=None,
 ):
 """Determine an upload strategy and then perform the upload.

@@ -2125,19 +2307,45 @@ def _do_upload(
 attempting to delete the corrupted file. Supported values are "md5",
 "crc32c" and None. The default is None.

+ :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
+ :param retry: (Optional) How to retry the RPC. A None value will disable
+ retries. A google.api_core.retry.Retry value will enable retries,
+ and the object will define retriable response codes and errors and
+ configure backoff and timeout options.
+
+ A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a
+ Retry object and activates it only if certain conditions are met.
+ This class exists to provide safe defaults for RPC calls that are
+ not technically safe to retry normally (due to potential data
+ duplication or other side-effects) but become safe to retry if a
+ condition such as if_metageneration_match is set.
+
+ See the retry.py source code and docstrings in this package
+ (google.cloud.storage.retry) for information on retry types and how
+ to configure them.
+
+ Media operations (downloads and uploads) do not support non-default
+ predicates in a Retry object. The default will always be used. Other
+ configuration changes for Retry objects such as delays and deadlines
+ are respected.
+
 :rtype: dict
 :returns: The parsed JSON from the "200 OK" response. This will be the
 **only** response in the multipart case and it will be the
 **final** response in the resumable case.
 """
- if if_metageneration_match is None and num_retries is None:
- # Uploads are only idempotent (safe to retry) if
- # if_metageneration_match is set. If it is not set, the default
- # num_retries should be 0. Note: Because retry logic for uploads is
- # provided by the google-resumable-media-python package, it doesn't
- # use the ConditionalRetryStrategy class used in other API calls in
- # this library to solve this problem.
- num_retries = 0
+
+ # Handle ConditionalRetryPolicy.
+ if isinstance(retry, ConditionalRetryPolicy):
+ # Conditional retries are designed for non-media calls, which change
+ # arguments into query_params dictionaries. Media operations work
+ # differently, so here we make a "fake" query_params to feed to the
+ # ConditionalRetryPolicy.
+ query_params = { + "ifGenerationMatch": if_generation_match, + "ifMetagenerationMatch": if_metageneration_match, + } + retry = retry.get_retry_policy_if_conditions_met(query_params=query_params) if size is not None and size <= _MAX_MULTIPART_SIZE: response = self._do_multipart_upload( @@ -2153,6 +2361,7 @@ def _do_upload( if_metageneration_not_match, timeout=timeout, checksum=checksum, + retry=retry, ) else: response = self._do_resumable_upload( @@ -2168,6 +2377,7 @@ def _do_upload( if_metageneration_not_match, timeout=timeout, checksum=checksum, + retry=retry, ) return response.json() @@ -2187,6 +2397,7 @@ def upload_from_file( if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, checksum=None, + retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, ): """Upload the contents of this blob from a file-like object. @@ -2301,6 +2512,28 @@ def upload_from_file( attempting to delete the corrupted file. Supported values are "md5", "crc32c" and None. The default is None. + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. + :raises: :class:`~google.cloud.exceptions.GoogleCloudError` if the upload response returns an error status. @@ -2310,6 +2543,11 @@ def upload_from_file( """ if num_retries is not None: warnings.warn(_NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2) + # num_retries and retry are mutually exclusive. If num_retries is + # set and retry is exactly the default, then nullify retry for + # backwards compatibility. + if retry is DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED: + retry = None _maybe_rewind(file_obj, rewind=rewind) predefined_acl = ACL.validate_predefined(predefined_acl) @@ -2328,6 +2566,7 @@ def upload_from_file( if_metageneration_not_match, timeout=timeout, checksum=checksum, + retry=retry, ) self._set_properties(created_json) except resumable_media.InvalidResponse as exc: @@ -2346,6 +2585,7 @@ def upload_from_filename( if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, checksum=None, + retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, ): """Upload this blob's contents from the content of a named file. @@ -2434,6 +2674,28 @@ def upload_from_filename( google.resumable_media.common.DataCorruption on a mismatch and attempting to delete the corrupted file. Supported values are "md5", "crc32c" and None. The default is None. + + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. 
A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. """ content_type = self._get_content_type(content_type, filename=filename) @@ -2452,6 +2714,7 @@ def upload_from_filename( if_metageneration_not_match=if_metageneration_not_match, timeout=timeout, checksum=checksum, + retry=retry, ) def upload_from_string( @@ -2467,6 +2730,7 @@ def upload_from_string( if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, checksum=None, + retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, ): """Upload contents of this blob from the provided string. @@ -2551,6 +2815,28 @@ def upload_from_string( google.resumable_media.common.DataCorruption on a mismatch and attempting to delete the corrupted file. Supported values are "md5", "crc32c" and None. The default is None. + + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. """ data = _to_bytes(data, encoding="utf-8") string_buffer = BytesIO(data) @@ -2567,6 +2853,7 @@ def upload_from_string( if_metageneration_not_match=if_metageneration_not_match, timeout=timeout, checksum=checksum, + retry=retry, ) def create_resumable_upload_session( @@ -3371,9 +3658,12 @@ def open( :param kwargs: Keyword arguments to pass to the underlying API calls. For both uploads and downloads, the following arguments are supported: "if_generation_match", "if_generation_not_match", - "if_metageneration_match", "if_metageneration_not_match", "timeout". - For uploads only, the following additional arguments are supported: - "content_type", "num_retries", "predefined_acl", "checksum". + "if_metageneration_match", "if_metageneration_not_match", "timeout", + "retry". 
For uploads only, the following additional arguments are + supported: "content_type", "num_retries", "predefined_acl", + "checksum". "num_retries" is supported for backwards-compatibility + reasons only; please use "retry" with a Retry object or + ConditionalRetryPolicy instead. :returns: A 'BlobReader' or 'BlobWriter' from 'google.cloud.storage.fileio', or an 'io.TextIOWrapper' around one diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py index a9a06746a..df42f0c11 100644 --- a/google/cloud/storage/client.py +++ b/google/cloud/storage/client.py @@ -53,6 +53,7 @@ from google.cloud.storage.acl import DefaultObjectACL from google.cloud.storage.constants import _DEFAULT_TIMEOUT from google.cloud.storage.retry import DEFAULT_RETRY +from google.cloud.storage.retry import ConditionalRetryPolicy _marker = object() @@ -972,6 +973,7 @@ def download_blob_to_file( if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, checksum="md5", + retry=DEFAULT_RETRY, ): """Download the contents of a blob object or blob URI into a file-like object. @@ -1021,6 +1023,27 @@ def download_blob_to_file( downloads where chunk_size is set) an INFO-level log will be emitted. Supported values are "md5", "crc32c" and None. The default is "md5". + retry (google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy) + (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. Examples: Download a blob using a blob resource. @@ -1046,6 +1069,19 @@ def download_blob_to_file( """ + + # Handle ConditionalRetryPolicy. + if isinstance(retry, ConditionalRetryPolicy): + # Conditional retries are designed for non-media calls, which change + # arguments into query_params dictionaries. Media operations work + # differently, so here we make a "fake" query_params to feed to the + # ConditionalRetryPolicy. 
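+            # Note: an unconditional google.api_core.retry.Retry (including
+            # the DEFAULT_RETRY default) skips this block and is passed on
+            # unchanged; only a ConditionalRetryPolicy is unwrapped here,
+            # into either its wrapped Retry or None, depending on which
+            # preconditions the caller supplied.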
+ query_params = { + "ifGenerationMatch": if_generation_match, + "ifMetagenerationMatch": if_metageneration_match, + } + retry = retry.get_retry_policy_if_conditions_met(query_params=query_params) + if not isinstance(blob_or_uri, Blob): blob_or_uri = Blob.from_string(blob_or_uri) download_url = blob_or_uri._get_download_url( @@ -1070,6 +1106,7 @@ def download_blob_to_file( raw_download, timeout=timeout, checksum=checksum, + retry=retry, ) except resumable_media.InvalidResponse as exc: _raise_from_invalid_response(exc) @@ -1222,6 +1259,8 @@ def list_blobs( max_results=max_results, extra_params=extra_params, page_start=_blobs_page_start, + timeout=timeout, + retry=retry, ) iterator.bucket = bucket iterator.prefixes = set() diff --git a/google/cloud/storage/fileio.py b/google/cloud/storage/fileio.py index 53d3d14ab..e74b9ed4a 100644 --- a/google/cloud/storage/fileio.py +++ b/google/cloud/storage/fileio.py @@ -13,8 +13,14 @@ # limitations under the License. import io +import warnings from google.api_core.exceptions import RequestRangeNotSatisfiable +from google.cloud.storage._helpers import _NUM_RETRIES_MESSAGE +from google.cloud.storage.retry import DEFAULT_RETRY +from google.cloud.storage.retry import DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED +from google.cloud.storage.retry import ConditionalRetryPolicy + # Resumable uploads require a chunk size of precisely a multiple of 256 KiB. CHUNK_SIZE_MULTIPLE = 256 * 1024 # 256 KiB @@ -28,20 +34,22 @@ "if_metageneration_match", "if_metageneration_not_match", "timeout", + "retry", } # Valid keyword arguments for upload methods. # Note: Changes here need to be reflected in the blob.open() docstring. VALID_UPLOAD_KWARGS = { "content_type", - "num_retries", "predefined_acl", + "num_retries", "if_generation_match", "if_generation_not_match", "if_metageneration_match", "if_metageneration_not_match", "timeout", "checksum", + "retry", } @@ -58,13 +66,35 @@ class BlobReader(io.BufferedIOBase): bytes than the chunk_size are requested, the remainder is buffered. The default is the chunk_size of the blob, or 40MiB. + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. + :param download_kwargs: Keyword arguments to pass to the underlying API calls. The following arguments are supported: "if_generation_match", "if_generation_not_match", "if_metageneration_match", "if_metageneration_not_match", "timeout". 
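+
+    Example:
+        Construct a reader with retries disabled (a sketch; the bucket and
+        blob names are illustrative).
+
+        >>> from google.cloud import storage
+        >>> from google.cloud.storage.fileio import BlobReader
+        >>> blob = storage.Client().bucket("bucket-name").blob("blob-name")
+        >>> reader = BlobReader(blob, retry=None)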
""" - def __init__(self, blob, chunk_size=None, **download_kwargs): + def __init__(self, blob, chunk_size=None, retry=DEFAULT_RETRY, **download_kwargs): """docstring note that download_kwargs also used for reload()""" for kwarg in download_kwargs: if kwarg not in VALID_DOWNLOAD_KWARGS: @@ -76,6 +106,7 @@ def __init__(self, blob, chunk_size=None, **download_kwargs): self._pos = 0 self._buffer = io.BytesIO() self._chunk_size = chunk_size or blob.chunk_size or DEFAULT_CHUNK_SIZE + self._retry = retry self._download_kwargs = download_kwargs def read(self, size=-1): @@ -102,6 +133,7 @@ def read(self, size=-1): start=fetch_start, end=fetch_end, checksum=None, + retry=self._retry, **self._download_kwargs ) except RequestRangeNotSatisfiable: @@ -197,6 +229,28 @@ class BlobWriter(io.BufferedIOBase): changes the behavior of flush() to conform to TextIOWrapper's expectations. + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. + :param upload_kwargs: Keyword arguments to pass to the underlying API calls. The following arguments are supported: "if_generation_match", "if_generation_not_match", "if_metageneration_match", @@ -204,7 +258,14 @@ class BlobWriter(io.BufferedIOBase): "num_retries", "predefined_acl", "checksum". """ - def __init__(self, blob, chunk_size=None, text_mode=False, **upload_kwargs): + def __init__( + self, + blob, + chunk_size=None, + text_mode=False, + retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, + **upload_kwargs + ): for kwarg in upload_kwargs: if kwarg not in VALID_UPLOAD_KWARGS: raise ValueError( @@ -219,6 +280,7 @@ def __init__(self, blob, chunk_size=None, text_mode=False, **upload_kwargs): # In text mode this class will be wrapped and TextIOWrapper requires a # different behavior of flush(). self._text_mode = text_mode + self._retry = retry self._upload_kwargs = upload_kwargs @property @@ -259,20 +321,32 @@ def write(self, b): return pos def _initiate_upload(self): + # num_retries is only supported for backwards-compatibility reasons. num_retries = self._upload_kwargs.pop("num_retries", None) + retry = self._retry content_type = self._upload_kwargs.pop("content_type", None) - if ( - self._upload_kwargs.get("if_metageneration_match") is None - and num_retries is None - ): - # Uploads are only idempotent (safe to retry) if - # if_metageneration_match is set. If it is not set, the default - # num_retries should be 0. 
Note: Because retry logic for uploads is - # provided by the google-resumable-media-python package, it doesn't - # use the ConditionalRetryStrategy class used in other API calls in - # this library to solve this problem. - num_retries = 0 + if num_retries is not None: + warnings.warn(_NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2) + # num_retries and retry are mutually exclusive. If num_retries is + # set and retry is exactly the default, then nullify retry for + # backwards compatibility. + if retry is DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED: + retry = None + + # Handle ConditionalRetryPolicy. + if isinstance(retry, ConditionalRetryPolicy): + # Conditional retries are designed for non-media calls, which change + # arguments into query_params dictionaries. Media operations work + # differently, so here we make a "fake" query_params to feed to the + # ConditionalRetryPolicy. + query_params = { + "ifGenerationMatch": self._upload_kwargs.get("if_generation_match"), + "ifMetagenerationMatch": self._upload_kwargs.get( + "if_metageneration_match" + ), + } + retry = retry.get_retry_policy_if_conditions_met(query_params=query_params) self._upload_and_transport = self._blob._initiate_resumable_upload( self._blob.bucket.client, @@ -281,6 +355,7 @@ def _initiate_upload(self): None, num_retries, chunk_size=self._chunk_size, + retry=retry, **self._upload_kwargs ) diff --git a/setup.py b/setup.py index 55863aabb..6f6fa1f3d 100644 --- a/setup.py +++ b/setup.py @@ -30,7 +30,7 @@ dependencies = [ "google-auth >= 1.11.0, < 2.0dev", "google-cloud-core >= 1.4.1, < 2.0dev", - "google-resumable-media >= 1.2.0, < 2.0dev", + "google-resumable-media >= 1.3.0, < 2.0dev", "requests >= 2.18.0, < 3.0.0dev", "googleapis-common-protos < 1.53.0; python_version<'3.0'", ] diff --git a/tests/unit/test__helpers.py b/tests/unit/test__helpers.py index 275d01c60..75a439cf1 100644 --- a/tests/unit/test__helpers.py +++ b/tests/unit/test__helpers.py @@ -593,6 +593,45 @@ def test_hostname_and_scheme(self): self.assertEqual(self._call_fut(host=HOST, scheme=SCHEME), EXPECTED_URL) +class Test__api_core_retry_to_resumable_media_retry(unittest.TestCase): + def test_conflict(self): + from google.cloud.storage._helpers import ( + _api_core_retry_to_resumable_media_retry, + ) + + with self.assertRaises(ValueError): + _api_core_retry_to_resumable_media_retry(retry=DEFAULT_RETRY, num_retries=2) + + def test_retry(self): + from google.cloud.storage._helpers import ( + _api_core_retry_to_resumable_media_retry, + ) + + retry_strategy = _api_core_retry_to_resumable_media_retry(retry=DEFAULT_RETRY) + self.assertEqual(retry_strategy.max_sleep, DEFAULT_RETRY._maximum) + self.assertEqual(retry_strategy.max_cumulative_retry, DEFAULT_RETRY._deadline) + self.assertEqual(retry_strategy.initial_delay, DEFAULT_RETRY._initial) + self.assertEqual(retry_strategy.multiplier, DEFAULT_RETRY._multiplier) + + def test_num_retries(self): + from google.cloud.storage._helpers import ( + _api_core_retry_to_resumable_media_retry, + ) + + retry_strategy = _api_core_retry_to_resumable_media_retry( + retry=None, num_retries=2 + ) + self.assertEqual(retry_strategy.max_retries, 2) + + def test_none(self): + from google.cloud.storage._helpers import ( + _api_core_retry_to_resumable_media_retry, + ) + + retry_strategy = _api_core_retry_to_resumable_media_retry(retry=None) + self.assertEqual(retry_strategy.max_retries, 0) + + class _MD5Hash(object): def __init__(self, digest_val): self.digest_val = digest_val diff --git a/tests/unit/test_blob.py 
b/tests/unit/test_blob.py index 3ec0db716..46a130dc8 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -29,6 +29,7 @@ from google.cloud.storage.retry import DEFAULT_RETRY from google.cloud.storage.retry import DEFAULT_RETRY_IF_ETAG_IN_JSON from google.cloud.storage.retry import DEFAULT_RETRY_IF_GENERATION_SPECIFIED +from google.cloud.storage.retry import DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED def _make_credentials(): @@ -1114,7 +1115,9 @@ def test__extract_headers_from_download_w_response_headers_not_match(self): self.assertIsNone(blob.md5_hash) self.assertIsNone(blob.crc32c) - def _do_download_helper_wo_chunks(self, w_range, raw_download, timeout=None): + def _do_download_helper_wo_chunks( + self, w_range, raw_download, timeout=None, **extra_kwargs + ): blob_name = "blob-name" client = mock.Mock() bucket = _Bucket(client) @@ -1138,6 +1141,8 @@ def _do_download_helper_wo_chunks(self, w_range, raw_download, timeout=None): expected_timeout = timeout timeout_kwarg = {"timeout": timeout} + extra_kwargs.update(timeout_kwarg) + with patch as patched: if w_range: blob._do_download( @@ -1148,7 +1153,7 @@ def _do_download_helper_wo_chunks(self, w_range, raw_download, timeout=None): start=1, end=3, raw_download=raw_download, - **timeout_kwarg + **extra_kwargs ) else: blob._do_download( @@ -1157,7 +1162,7 @@ def _do_download_helper_wo_chunks(self, w_range, raw_download, timeout=None): download_url, headers, raw_download=raw_download, - **timeout_kwarg + **extra_kwargs ) if w_range: @@ -1183,9 +1188,21 @@ def _do_download_helper_wo_chunks(self, w_range, raw_download, timeout=None): transport, timeout=expected_timeout ) + retry_strategy = patched.return_value._retry_strategy + retry = extra_kwargs.get("retry", None) + if retry is None: + self.assertEqual(retry_strategy.max_retries, 0) + else: + self.assertEqual(retry_strategy.max_sleep, retry._maximum) + def test__do_download_wo_chunks_wo_range_wo_raw(self): self._do_download_helper_wo_chunks(w_range=False, raw_download=False) + def test__do_download_wo_chunks_wo_range_wo_raw_w_retry(self): + self._do_download_helper_wo_chunks( + w_range=False, raw_download=False, retry=DEFAULT_RETRY + ) + def test__do_download_wo_chunks_w_range_wo_raw(self): self._do_download_helper_wo_chunks(w_range=True, raw_download=False) @@ -1334,6 +1351,7 @@ def test_download_to_file_with_failure(self): raw_download=False, timeout=expected_timeout, checksum="md5", + retry=DEFAULT_RETRY, ) def test_download_to_file_wo_media_link(self): @@ -1361,6 +1379,7 @@ def test_download_to_file_wo_media_link(self): raw_download=False, timeout=expected_timeout, checksum="md5", + retry=DEFAULT_RETRY, ) def test_download_to_file_w_generation_match(self): @@ -1384,9 +1403,12 @@ def test_download_to_file_w_generation_match(self): raw_download=False, timeout=expected_timeout, checksum="md5", + retry=DEFAULT_RETRY, ) - def _download_to_file_helper(self, use_chunks, raw_download, timeout=None): + def _download_to_file_helper( + self, use_chunks, raw_download, timeout=None, **extra_kwargs + ): blob_name = "blob-name" client = self._make_client() bucket = _Bucket(client) @@ -1404,12 +1426,15 @@ def _download_to_file_helper(self, use_chunks, raw_download, timeout=None): expected_timeout = timeout timeout_kwarg = {"timeout": timeout} + extra_kwargs.update(timeout_kwarg) + file_obj = io.BytesIO() if raw_download: - blob.download_to_file(file_obj, raw_download=True, **timeout_kwarg) + blob.download_to_file(file_obj, raw_download=True, **extra_kwargs) else: - 
blob.download_to_file(file_obj, **timeout_kwarg) + blob.download_to_file(file_obj, **extra_kwargs) + expected_retry = extra_kwargs.get("retry", DEFAULT_RETRY) client.download_blob_to_file.assert_called_once_with( blob, file_obj, @@ -1422,11 +1447,15 @@ def _download_to_file_helper(self, use_chunks, raw_download, timeout=None): raw_download=raw_download, timeout=expected_timeout, checksum="md5", + retry=expected_retry, ) def test_download_to_file_wo_chunks_wo_raw(self): self._download_to_file_helper(use_chunks=False, raw_download=False) + def test_download_to_file_wo_chunks_no_retry(self): + self._download_to_file_helper(use_chunks=False, raw_download=False, retry=None) + def test_download_to_file_w_chunks_wo_raw(self): self._download_to_file_helper(use_chunks=True, raw_download=False) @@ -1441,7 +1470,9 @@ def test_download_to_file_w_custom_timeout(self): use_chunks=False, raw_download=False, timeout=9.58 ) - def _download_to_filename_helper(self, updated, raw_download, timeout=None): + def _download_to_filename_helper( + self, updated, raw_download, timeout=None, **extra_kwargs + ): import os from google.cloud.storage._helpers import _convert_to_timestamp from google.cloud._testing import _NamedTemporaryFile @@ -1457,10 +1488,15 @@ def _download_to_filename_helper(self, updated, raw_download, timeout=None): with _NamedTemporaryFile() as temp: if timeout is None: - blob.download_to_filename(temp.name, raw_download=raw_download) + blob.download_to_filename( + temp.name, raw_download=raw_download, **extra_kwargs + ) else: blob.download_to_filename( - temp.name, raw_download=raw_download, timeout=timeout, + temp.name, + raw_download=raw_download, + timeout=timeout, + **extra_kwargs ) if updated is None: @@ -1475,6 +1511,8 @@ def _download_to_filename_helper(self, updated, raw_download, timeout=None): expected_timeout = self._get_default_timeout() if timeout is None else timeout + expected_retry = extra_kwargs.get("retry", DEFAULT_RETRY) + client.download_blob_to_file.assert_called_once_with( blob, mock.ANY, @@ -1487,6 +1525,7 @@ def _download_to_filename_helper(self, updated, raw_download, timeout=None): raw_download=raw_download, timeout=expected_timeout, checksum="md5", + retry=expected_retry, ) stream = client.download_blob_to_file.mock_calls[0].args[1] self.assertEqual(stream.name, temp.name) @@ -1495,6 +1534,12 @@ def test_download_to_filename_w_updated_wo_raw(self): updated = "2014-12-06T13:13:50.690Z" self._download_to_filename_helper(updated=updated, raw_download=False) + def test_download_to_filename_w_updated_no_retry(self): + updated = "2014-12-06T13:13:50.690Z" + self._download_to_filename_helper( + updated=updated, raw_download=False, retry=None + ) + def test_download_to_filename_wo_updated_wo_raw(self): self._download_to_filename_helper(updated=None, raw_download=False) @@ -1533,6 +1578,7 @@ def test_download_to_filename_w_generation_match(self): raw_download=False, timeout=expected_timeout, checksum="md5", + retry=DEFAULT_RETRY, ) stream = client.download_blob_to_file.mock_calls[0].args[1] self.assertEqual(stream.name, temp.name) @@ -1572,11 +1618,12 @@ def test_download_to_filename_corrupted(self): raw_download=False, timeout=expected_timeout, checksum="md5", + retry=DEFAULT_RETRY, ) stream = client.download_blob_to_file.mock_calls[0].args[1] self.assertEqual(stream.name, filename) - def _download_as_bytes_helper(self, raw_download, timeout=None): + def _download_as_bytes_helper(self, raw_download, timeout=None, **extra_kwargs): blob_name = "blob-name" client = 
self._make_client() bucket = _Bucket(client) @@ -1584,12 +1631,16 @@ def _download_as_bytes_helper(self, raw_download, timeout=None): if timeout is None: expected_timeout = self._get_default_timeout() - fetched = blob.download_as_bytes(raw_download=raw_download) + fetched = blob.download_as_bytes(raw_download=raw_download, **extra_kwargs) else: expected_timeout = timeout - fetched = blob.download_as_bytes(raw_download=raw_download, timeout=timeout) + fetched = blob.download_as_bytes( + raw_download=raw_download, timeout=timeout, **extra_kwargs + ) self.assertEqual(fetched, b"") + expected_retry = extra_kwargs.get("retry", DEFAULT_RETRY) + client.download_blob_to_file.assert_called_once_with( blob, mock.ANY, @@ -1602,16 +1653,11 @@ def _download_as_bytes_helper(self, raw_download, timeout=None): raw_download=raw_download, timeout=expected_timeout, checksum="md5", + retry=expected_retry, ) stream = client.download_blob_to_file.mock_calls[0].args[1] self.assertIsInstance(stream, io.BytesIO) - def test_download_as_bytes_wo_raw(self): - self._download_as_bytes_helper(raw_download=False) - - def test_download_as_bytes_w_raw(self): - self._download_as_bytes_helper(raw_download=True) - def test_download_as_bytes_w_custom_timeout(self): self._download_as_bytes_helper(raw_download=False, timeout=9.58) @@ -1640,8 +1686,21 @@ def test_download_as_bytes_w_generation_match(self): if_metageneration_not_match=None, timeout=self._get_default_timeout(), checksum="md5", + retry=DEFAULT_RETRY, ) + def test_download_as_bytes_wo_raw(self): + self._download_as_bytes_helper(raw_download=False) + + def test_download_as_bytes_no_retry(self): + self._download_as_bytes_helper(raw_download=False, retry=None) + + def test_download_as_bytes_w_raw(self): + self._download_as_bytes_helper(raw_download=True) + + def test_download_as_byte_w_custom_timeout(self): + self._download_as_bytes_helper(raw_download=False, timeout=9.58) + def _download_as_text_helper( self, raw_download, @@ -1658,6 +1717,7 @@ def _download_as_text_helper( no_charset=False, expected_value=u"DEADBEEF", payload=None, + **extra_kwargs ): if payload is None: if encoding is not None: @@ -1709,10 +1769,14 @@ def _download_as_text_helper( else: kwargs["timeout"] = expected_timeout = timeout + kwargs.update(extra_kwargs) + fetched = blob.download_as_text(**kwargs) self.assertEqual(fetched, expected_value) + expected_retry = extra_kwargs.get("retry", DEFAULT_RETRY) + blob.download_as_bytes.assert_called_once_with( client=client, start=start, @@ -1723,11 +1787,15 @@ def _download_as_text_helper( if_generation_not_match=if_generation_not_match, if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, + retry=expected_retry, ) def test_download_as_text_wo_raw(self): self._download_as_text_helper(raw_download=False) + def test_download_as_text_w_no_retry(self): + self._download_as_text_helper(raw_download=False, retry=None) + def test_download_as_text_w_raw(self): self._download_as_text_helper(raw_download=True) @@ -1815,6 +1883,7 @@ def test_download_as_string(self, mock_warn): if_metageneration_not_match=None, timeout=self._get_default_timeout(), checksum="md5", + retry=DEFAULT_RETRY, ) mock_warn.assert_called_with( @@ -1824,6 +1893,33 @@ def test_download_as_string(self, mock_warn): stacklevel=1, ) + def test_download_as_string_no_retry(self): + MEDIA_LINK = "http://example.com/media/" + + client = self._make_client() + blob = self._make_one( + "blob-name", 
bucket=_Bucket(client), properties={"mediaLink": MEDIA_LINK} + ) + client.download_blob_to_file = mock.Mock() + + fetched = blob.download_as_string(retry=None) + self.assertEqual(fetched, b"") + + client.download_blob_to_file.assert_called_once_with( + blob, + mock.ANY, + start=None, + end=None, + raw_download=False, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=self._get_default_timeout(), + checksum="md5", + retry=None, + ) + def test__get_content_type_explicit(self): blob = self._make_one(u"blob-name", bucket=None) @@ -1944,6 +2040,7 @@ def _do_multipart_success( timeout=None, metadata=None, mtls=False, + retry=None, ): from six.moves.urllib.parse import urlencode @@ -1992,6 +2089,7 @@ def _do_multipart_success( if_generation_not_match, if_metageneration_match, if_metageneration_not_match, + retry=retry, **timeout_kwarg ) @@ -2063,6 +2161,28 @@ def _do_multipart_success( def test__do_multipart_upload_no_size(self, mock_get_boundary): self._do_multipart_success(mock_get_boundary, predefined_acl="private") + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + def test__do_multipart_upload_no_size_retry(self, mock_get_boundary): + self._do_multipart_success( + mock_get_boundary, predefined_acl="private", retry=DEFAULT_RETRY + ) + + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + def test__do_multipart_upload_no_size_num_retries(self, mock_get_boundary): + self._do_multipart_success( + mock_get_boundary, predefined_acl="private", num_retries=2 + ) + + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + def test__do_multipart_upload_no_size_retry_conflict(self, mock_get_boundary): + with self.assertRaises(ValueError): + self._do_multipart_success( + mock_get_boundary, + predefined_acl="private", + num_retries=2, + retry=DEFAULT_RETRY, + ) + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_no_size_mtls(self, mock_get_boundary): self._do_multipart_success( @@ -2101,7 +2221,7 @@ def test__do_multipart_upload_with_kms_with_version(self, mock_get_boundary): @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_with_retry(self, mock_get_boundary): - self._do_multipart_success(mock_get_boundary, num_retries=8) + self._do_multipart_success(mock_get_boundary, retry=DEFAULT_RETRY) @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_with_generation_match(self, mock_get_boundary): @@ -2165,6 +2285,7 @@ def _initiate_resumable_helper( timeout=None, metadata=None, mtls=False, + retry=None, ): from six.moves.urllib.parse import urlencode from google.resumable_media.requests import ResumableUpload @@ -2235,6 +2356,7 @@ def _initiate_resumable_helper( if_generation_not_match=if_generation_not_match, if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, + retry=retry, **timeout_kwarg ) @@ -2300,13 +2422,15 @@ def _initiate_resumable_helper( self.assertEqual(upload._content_type, content_type) self.assertEqual(upload.resumable_url, resumable_url) retry_strategy = upload._retry_strategy - self.assertEqual(retry_strategy.max_sleep, 64.0) - if num_retries is None: - self.assertEqual(retry_strategy.max_cumulative_retry, 600.0) - self.assertIsNone(retry_strategy.max_retries) - else: - 
self.assertIsNone(retry_strategy.max_cumulative_retry) + self.assertFalse(num_retries is not None and retry is not None) + if num_retries is not None and retry is None: self.assertEqual(retry_strategy.max_retries, num_retries) + elif retry is None: + self.assertEqual(retry_strategy.max_retries, 0) + else: + self.assertEqual(retry_strategy.max_sleep, 60.0) + self.assertEqual(retry_strategy.max_cumulative_retry, 120.0) + self.assertIsNone(retry_strategy.max_retries) self.assertIs(client._http, transport) # Make sure we never read from the stream. self.assertEqual(stream.tell(), 0) @@ -2383,8 +2507,15 @@ def test__initiate_resumable_upload_with_extra_headers(self): self._initiate_resumable_helper(extra_headers=extra_headers) def test__initiate_resumable_upload_with_retry(self): + self._initiate_resumable_helper(retry=DEFAULT_RETRY) + + def test__initiate_resumable_upload_with_num_retries(self): self._initiate_resumable_helper(num_retries=11) + def test__initiate_resumable_upload_with_retry_conflict(self): + with self.assertRaises(ValueError): + self._initiate_resumable_helper(retry=DEFAULT_RETRY, num_retries=2) + def test__initiate_resumable_upload_with_generation_match(self): self._initiate_resumable_helper( if_generation_match=4, if_metageneration_match=4 @@ -2536,6 +2667,7 @@ def _do_resumable_helper( if_metageneration_not_match=None, timeout=None, data_corruption=False, + retry=None, ): bucket = _Bucket(name="yesterday") blob = self._make_one(u"blob-name", bucket=bucket) @@ -2582,6 +2714,7 @@ def _do_resumable_helper( if_generation_not_match, if_metageneration_match, if_metageneration_not_match, + retry=retry, **timeout_kwarg ) @@ -2639,7 +2772,14 @@ def test__do_resumable_upload_with_size(self): self._do_resumable_helper(use_size=True) def test__do_resumable_upload_with_retry(self): - self._do_resumable_helper(num_retries=6) + self._do_resumable_helper(retry=DEFAULT_RETRY) + + def test__do_resumable_upload_with_num_retries(self): + self._do_resumable_helper(num_retries=8) + + def test__do_resumable_upload_with_retry_conflict(self): + with self.assertRaises(ValueError): + self._do_resumable_helper(num_retries=9, retry=DEFAULT_RETRY) def test__do_resumable_upload_with_predefined_acl(self): self._do_resumable_helper(predefined_acl="private") @@ -2665,6 +2805,7 @@ def _do_upload_helper( if_metageneration_not_match=None, size=None, timeout=None, + retry=None, ): from google.cloud.storage.blob import _MAX_MULTIPART_SIZE @@ -2708,13 +2849,12 @@ def _do_upload_helper( if_generation_not_match, if_metageneration_match, if_metageneration_not_match, + retry=retry, **timeout_kwarg ) - # Adjust num_retries expectations to reflect the conditional default in - # _do_upload() - if num_retries is None and if_metageneration_match is None: - num_retries = 0 + if retry is DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED: + retry = DEFAULT_RETRY if if_metageneration_match else None self.assertIs(created_json, mock.sentinel.json) response.json.assert_called_once_with() @@ -2732,6 +2872,7 @@ def _do_upload_helper( if_metageneration_not_match, timeout=expected_timeout, checksum=None, + retry=retry, ) blob._do_resumable_upload.assert_not_called() else: @@ -2749,6 +2890,7 @@ def _do_upload_helper( if_metageneration_not_match, timeout=expected_timeout, checksum=None, + retry=retry, ) def test__do_upload_uses_multipart(self): @@ -2776,7 +2918,18 @@ def test__do_upload_uses_resumable_w_custom_timeout(self): ) def test__do_upload_with_retry(self): - self._do_upload_helper(num_retries=20) + 
self._do_upload_helper(retry=DEFAULT_RETRY) + + def test__do_upload_with_num_retries(self): + self._do_upload_helper(num_retries=2) + + def test__do_upload_with_conditional_retry_success(self): + self._do_upload_helper( + retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, if_metageneration_match=1 + ) + + def test__do_upload_with_conditional_retry_failure(self): + self._do_upload_helper(retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED) def _upload_from_file_helper(self, side_effect=None, **kwargs): from google.cloud._helpers import UTC @@ -2800,6 +2953,11 @@ def _upload_from_file_helper(self, side_effect=None, **kwargs): if_generation_not_match = kwargs.get("if_generation_not_match", None) if_metageneration_match = kwargs.get("if_metageneration_match", None) if_metageneration_not_match = kwargs.get("if_metageneration_not_match", None) + num_retries = kwargs.get("num_retries", None) + default_retry = ( + DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED if not num_retries else None + ) + retry = kwargs.get("retry", default_retry) ret_val = blob.upload_from_file( stream, size=len(data), content_type=content_type, client=client, **kwargs ) @@ -2811,8 +2969,6 @@ def _upload_from_file_helper(self, side_effect=None, **kwargs): expected_timeout = kwargs.get("timeout", self._get_default_timeout()) - # Check the mock. - num_retries = kwargs.get("num_retries") blob._do_upload.assert_called_once_with( client, stream, @@ -2826,6 +2982,7 @@ def _upload_from_file_helper(self, side_effect=None, **kwargs): if_metageneration_not_match, timeout=expected_timeout, checksum=None, + retry=retry, ) return stream @@ -2835,13 +2992,24 @@ def test_upload_from_file_success(self): @mock.patch("warnings.warn") def test_upload_from_file_with_retries(self, mock_warn): + self._upload_from_file_helper(retry=DEFAULT_RETRY) + + @mock.patch("warnings.warn") + def test_upload_from_file_with_num_retries(self, mock_warn): from google.cloud.storage import blob as blob_module - self._upload_from_file_helper(num_retries=20) + self._upload_from_file_helper(num_retries=2) mock_warn.assert_called_once_with( blob_module._NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2 ) + @mock.patch("warnings.warn") + def test_upload_from_file_with_retry_conflict(self, mock_warn): + # Special case here: in a conflict this method should NOT raise an error + # as that's handled further downstream. It should pass both options + # through. 
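+        # (The ValueError for an actual num_retries/retry conflict is raised
+        # further down, by _api_core_retry_to_resumable_media_retry; see the
+        # tests/unit/test__helpers.py changes above.)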
+ self._upload_from_file_helper(retry=DEFAULT_RETRY, num_retries=2) + def test_upload_from_file_with_rewind(self): stream = self._upload_from_file_helper(rewind=True) assert stream.tell() == 0 @@ -2868,7 +3036,14 @@ def test_upload_from_file_failure(self): self.assertEqual(exc_info.exception.errors, []) def _do_upload_mock_call_helper( - self, blob, client, content_type, size, timeout=None + self, + blob, + client, + content_type, + size, + timeout=None, + num_retries=None, + retry=None, ): self.assertEqual(blob._do_upload.call_count, 1) mock_call = blob._do_upload.mock_calls[0] @@ -2878,7 +3053,7 @@ def _do_upload_mock_call_helper( self.assertEqual(pos_args[0], client) self.assertEqual(pos_args[2], content_type) self.assertEqual(pos_args[3], size) - self.assertIsNone(pos_args[4]) # num_retries + self.assertEqual(pos_args[4], num_retries) # num_retries self.assertIsNone(pos_args[5]) # predefined_acl self.assertIsNone(pos_args[6]) # if_generation_match self.assertIsNone(pos_args[7]) # if_generation_not_match @@ -2886,7 +3061,13 @@ def _do_upload_mock_call_helper( self.assertIsNone(pos_args[9]) # if_metageneration_not_match expected_timeout = self._get_default_timeout() if timeout is None else timeout - self.assertEqual(kwargs, {"timeout": expected_timeout, "checksum": None}) + if not retry: + retry = ( + DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED if not num_retries else None + ) + self.assertEqual( + kwargs, {"timeout": expected_timeout, "checksum": None, "retry": retry} + ) return pos_args[1] @@ -2921,6 +3102,72 @@ def test_upload_from_filename(self): self.assertEqual(stream.mode, "rb") self.assertEqual(stream.name, temp.name) + def test_upload_from_filename_with_retry(self): + from google.cloud._testing import _NamedTemporaryFile + + blob = self._make_one("blob-name", bucket=None) + # Mock low-level upload helper on blob (it is tested elsewhere). + created_json = {"metadata": {"mint": "ice-cream"}} + blob._do_upload = mock.Mock(return_value=created_json, spec=[]) + # Make sure `metadata` is empty before the request. + self.assertIsNone(blob.metadata) + + data = b"soooo much data" + content_type = u"image/svg+xml" + client = mock.sentinel.client + with _NamedTemporaryFile() as temp: + with open(temp.name, "wb") as file_obj: + file_obj.write(data) + + ret_val = blob.upload_from_filename( + temp.name, content_type=content_type, client=client, retry=DEFAULT_RETRY + ) + + # Check the response and side-effects. + self.assertIsNone(ret_val) + self.assertEqual(blob.metadata, created_json["metadata"]) + + # Check the mock. + stream = self._do_upload_mock_call_helper( + blob, client, content_type, len(data), retry=DEFAULT_RETRY + ) + self.assertTrue(stream.closed) + self.assertEqual(stream.mode, "rb") + self.assertEqual(stream.name, temp.name) + + def test_upload_from_filename_with_num_retries(self): + from google.cloud._testing import _NamedTemporaryFile + + blob = self._make_one("blob-name", bucket=None) + # Mock low-level upload helper on blob (it is tested elsewhere). + created_json = {"metadata": {"mint": "ice-cream"}} + blob._do_upload = mock.Mock(return_value=created_json, spec=[]) + # Make sure `metadata` is empty before the request. 
+ self.assertIsNone(blob.metadata) + + data = b"soooo much data" + content_type = u"image/svg+xml" + client = mock.sentinel.client + with _NamedTemporaryFile() as temp: + with open(temp.name, "wb") as file_obj: + file_obj.write(data) + + ret_val = blob.upload_from_filename( + temp.name, content_type=content_type, client=client, num_retries=2 + ) + + # Check the response and side-effects. + self.assertIsNone(ret_val) + self.assertEqual(blob.metadata, created_json["metadata"]) + + # Check the mock. + stream = self._do_upload_mock_call_helper( + blob, client, content_type, len(data), num_retries=2 + ) + self.assertTrue(stream.closed) + self.assertEqual(stream.mode, "rb") + self.assertEqual(stream.name, temp.name) + def test_upload_from_filename_w_custom_timeout(self): from google.cloud._testing import _NamedTemporaryFile @@ -2965,6 +3212,11 @@ def _upload_from_string_helper(self, data, **kwargs): self.assertIsNone(ret_val) self.assertEqual(blob.component_count, 5) + extra_kwargs = {} + if "retry" in kwargs: + extra_kwargs["retry"] = kwargs["retry"] + if "num_retries" in kwargs: + extra_kwargs["num_retries"] = kwargs["num_retries"] # Check the mock. payload = _to_bytes(data, encoding="utf-8") stream = self._do_upload_mock_call_helper( @@ -2973,6 +3225,7 @@ def _upload_from_string_helper(self, data, **kwargs): "text/plain", len(payload), kwargs.get("timeout", self._get_default_timeout()), + **extra_kwargs ) self.assertIsInstance(stream, io.BytesIO) self.assertEqual(stream.getvalue(), payload) @@ -2989,6 +3242,14 @@ def test_upload_from_string_w_text(self): data = u"\N{snowman} \N{sailboat}" self._upload_from_string_helper(data) + def test_upload_from_string_w_text_w_retry(self): + data = u"\N{snowman} \N{sailboat}" + self._upload_from_string_helper(data, retry=DEFAULT_RETRY) + + def test_upload_from_string_w_text_w_num_retries(self): + data = u"\N{snowman} \N{sailboat}" + self._upload_from_string_helper(data, num_retries=2) + def _create_resumable_upload_session_helper( self, origin=None, side_effect=None, timeout=None ): diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 4c99a3860..33ec331d6 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -29,6 +29,7 @@ from . 
import _read_local_json from google.cloud.storage.retry import DEFAULT_RETRY +from google.cloud.storage.retry import DEFAULT_RETRY_IF_GENERATION_SPECIFIED _SERVICE_ACCOUNT_JSON = _read_local_json("url_signer_v4_test_account.json") @@ -1402,6 +1403,7 @@ def test_download_blob_to_file_with_failure(self): False, checksum="md5", timeout=_DEFAULT_TIMEOUT, + retry=DEFAULT_RETRY, ) def test_download_blob_to_file_with_uri(self): @@ -1432,18 +1434,42 @@ def test_download_blob_to_file_with_uri(self): False, checksum="md5", timeout=_DEFAULT_TIMEOUT, + retry=DEFAULT_RETRY, ) def test_download_blob_to_file_with_invalid_uri(self): project = "PROJECT" - credentials = _make_credentials(project=project) + credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) file_obj = io.BytesIO() with pytest.raises(ValueError, match="URI scheme must be gs"): client.download_blob_to_file("http://bucket_name/path/to/object", file_obj) - def _download_blob_to_file_helper(self, use_chunks, raw_download): + def test_download_blob_to_file_w_no_retry(self): + self._download_blob_to_file_helper( + use_chunks=True, raw_download=True, retry=None + ) + + def test_download_blob_to_file_w_conditional_retry_pass(self): + self._download_blob_to_file_helper( + use_chunks=True, + raw_download=True, + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + if_generation_match=1, + ) + + def test_download_blob_to_file_w_conditional_retry_fail(self): + self._download_blob_to_file_helper( + use_chunks=True, + raw_download=True, + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + expect_condition_fail=True, + ) + + def _download_blob_to_file_helper( + self, use_chunks, raw_download, expect_condition_fail=False, **extra_kwargs + ): from google.cloud.storage.blob import Blob from google.cloud.storage.constants import _DEFAULT_TIMEOUT @@ -1460,9 +1486,20 @@ def _download_blob_to_file_helper(self, use_chunks, raw_download): file_obj = io.BytesIO() if raw_download: - client.download_blob_to_file(blob, file_obj, raw_download=True) + client.download_blob_to_file( + blob, file_obj, raw_download=True, **extra_kwargs + ) else: - client.download_blob_to_file(blob, file_obj) + client.download_blob_to_file(blob, file_obj, **extra_kwargs) + + expected_retry = extra_kwargs.get("retry", DEFAULT_RETRY) + if ( + expected_retry is DEFAULT_RETRY_IF_GENERATION_SPECIFIED + and not expect_condition_fail + ): + expected_retry = DEFAULT_RETRY + elif expect_condition_fail: + expected_retry = None headers = {"accept-encoding": "gzip"} blob._do_download.assert_called_once_with( @@ -1475,6 +1512,7 @@ def _download_blob_to_file_helper(self, use_chunks, raw_download): raw_download, checksum="md5", timeout=_DEFAULT_TIMEOUT, + retry=expected_retry, ) def test_download_blob_to_file_wo_chunks_wo_raw(self): @@ -1520,6 +1558,8 @@ def test_list_blobs_w_defaults_w_bucket_obj(self): max_results=expected_max_results, extra_params=expected_extra_params, page_start=expected_page_start, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, ) def test_list_blobs_w_explicit_w_user_project(self): @@ -1594,6 +1634,8 @@ def test_list_blobs_w_explicit_w_user_project(self): max_results=expected_max_results, extra_params=expected_extra_params, page_start=expected_page_start, + timeout=timeout, + retry=retry, ) def test_list_buckets_wo_project(self): diff --git a/tests/unit/test_fileio.py b/tests/unit/test_fileio.py index 0ac16ab24..6ce9b4990 100644 --- a/tests/unit/test_fileio.py +++ 
b/tests/unit/test_fileio.py
@@ -19,8 +19,10 @@
 import io
 import string
 
+from google.cloud.storage._helpers import _NUM_RETRIES_MESSAGE
 from google.cloud.storage.fileio import BlobReader, BlobWriter, SlidingBuffer
 from google.api_core.exceptions import RequestRangeNotSatisfiable
+from google.cloud.storage.retry import DEFAULT_RETRY
 
 TEST_TEXT_DATA = string.ascii_lowercase + "\n" + string.ascii_uppercase + "\n"
 TEST_BINARY_DATA = TEST_TEXT_DATA.encode("utf-8")
@@ -37,7 +39,15 @@ def test_attributes(self):
         self.assertTrue(reader.seekable())
         self.assertTrue(reader.readable())
         self.assertFalse(reader.writable())
-        self.assertEqual(256, reader._chunk_size)
+        self.assertEqual(reader._chunk_size, 256)
+        self.assertEqual(reader._retry, DEFAULT_RETRY)
+
+    def test_attributes_explicit(self):
+        blob = mock.Mock()
+        blob.chunk_size = 256
+        reader = BlobReader(blob, chunk_size=1024, retry=None)
+        self.assertEqual(reader._chunk_size, 1024)
+        self.assertIsNone(reader._retry)
 
     def test_read(self):
         blob = mock.Mock()
@@ -52,7 +62,7 @@ def read_from_fake_data(start=0, end=None, **_):
         # Read and trigger the first download of chunk_size.
         self.assertEqual(reader.read(1), TEST_BINARY_DATA[0:1])
         blob.download_as_bytes.assert_called_once_with(
-            start=0, end=8, checksum=None, **download_kwargs
+            start=0, end=8, checksum=None, retry=DEFAULT_RETRY, **download_kwargs
         )
 
         # Read from buffered data only.
@@ -64,7 +74,7 @@ def read_from_fake_data(start=0, end=None, **_):
         self.assertEqual(reader._pos, 12)
         self.assertEqual(blob.download_as_bytes.call_count, 2)
         blob.download_as_bytes.assert_called_with(
-            start=8, end=16, checksum=None, **download_kwargs
+            start=8, end=16, checksum=None, retry=DEFAULT_RETRY, **download_kwargs
         )
 
         # Read a larger amount, requiring a download larger than chunk_size.
@@ -72,14 +82,32 @@ def read_from_fake_data(start=0, end=None, **_):
         self.assertEqual(reader._pos, 28)
         self.assertEqual(blob.download_as_bytes.call_count, 3)
         blob.download_as_bytes.assert_called_with(
-            start=16, end=28, checksum=None, **download_kwargs
+            start=16, end=28, checksum=None, retry=DEFAULT_RETRY, **download_kwargs
         )
 
         # Read all remaining data.
         self.assertEqual(reader.read(), TEST_BINARY_DATA[28:])
         self.assertEqual(blob.download_as_bytes.call_count, 4)
         blob.download_as_bytes.assert_called_with(
-            start=28, end=None, checksum=None, **download_kwargs
+            start=28, end=None, checksum=None, retry=DEFAULT_RETRY, **download_kwargs
+        )
+
+        reader.close()
+
+    def test_retry_passed_through(self):
+        blob = mock.Mock()
+
+        def read_from_fake_data(start=0, end=None, **_):
+            return TEST_BINARY_DATA[start:end]
+
+        blob.download_as_bytes = mock.Mock(side_effect=read_from_fake_data)
+        download_kwargs = {"if_metageneration_match": 1}
+        reader = BlobReader(blob, chunk_size=8, retry=None, **download_kwargs)
+
+        # Read and trigger the first download of chunk_size.
+        self.assertEqual(reader.read(1), TEST_BINARY_DATA[0:1])
+        blob.download_as_bytes.assert_called_once_with(
+            start=0, end=8, checksum=None, retry=None, **download_kwargs
         )
 
         reader.close()
@@ -104,12 +132,16 @@ def read_from_fake_data(start=0, end=None, **_):
 
         # Read a line. With chunk_size=10, expect three chunks downloaded.
         self.assertEqual(reader.readline(), TEST_BINARY_DATA[:27])
-        blob.download_as_bytes.assert_called_with(start=20, end=30, checksum=None)
+        blob.download_as_bytes.assert_called_with(
+            start=20, end=30, checksum=None, retry=DEFAULT_RETRY
+        )
         self.assertEqual(blob.download_as_bytes.call_count, 3)
 
         # Read another line.
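+        # (With chunk_size=10, the second line spans three more chunks, so
+        # the download call count below rises from 3 to 6.)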
self.assertEqual(reader.readline(), TEST_BINARY_DATA[27:]) - blob.download_as_bytes.assert_called_with(start=50, end=60, checksum=None) + blob.download_as_bytes.assert_called_with( + start=50, end=60, checksum=None, retry=DEFAULT_RETRY + ) self.assertEqual(blob.download_as_bytes.call_count, 6) blob.size = len(TEST_BINARY_DATA) @@ -118,7 +150,10 @@ def read_from_fake_data(start=0, end=None, **_): # Read all lines. The readlines algorithm will attempt to read past the end of the last line once to verify there is no more to read. self.assertEqual(b"".join(reader.readlines()), TEST_BINARY_DATA) blob.download_as_bytes.assert_called_with( - start=len(TEST_BINARY_DATA), end=len(TEST_BINARY_DATA) + 10, checksum=None + start=len(TEST_BINARY_DATA), + end=len(TEST_BINARY_DATA) + 10, + checksum=None, + retry=DEFAULT_RETRY, ) self.assertEqual(blob.download_as_bytes.call_count, 13) @@ -209,7 +244,14 @@ def test_attributes(self): self.assertFalse(writer.seekable()) self.assertFalse(writer.readable()) self.assertTrue(writer.writable()) - self.assertEqual(256 * 1024, writer._chunk_size) + self.assertEqual(writer._chunk_size, 256 * 1024) + + def test_attributes_explicit(self): + blob = mock.Mock() + blob.chunk_size = 256 * 1024 + writer = BlobWriter(blob, chunk_size=512 * 1024, retry=DEFAULT_RETRY) + self.assertEqual(writer._chunk_size, 512 * 1024) + self.assertEqual(writer._retry, DEFAULT_RETRY) def test_reject_wrong_chunk_size(self): blob = mock.Mock() @@ -261,6 +303,7 @@ def test_write(self): None, NUM_RETRIES, chunk_size=chunk_size, + retry=None, **upload_kwargs ) upload.transmit_next_chunk.assert_called_with(transport) @@ -286,7 +329,56 @@ def test_seek_fails(self): with self.assertRaises(io.UnsupportedOperation): writer.seek() - def test_conditional_retries(self): + def test_conditional_retry_failure(self): + blob = mock.Mock() + + upload = mock.Mock() + transport = mock.Mock() + + blob._initiate_resumable_upload.return_value = (upload, transport) + + with mock.patch("google.cloud.storage.fileio.CHUNK_SIZE_MULTIPLE", 1): + # Create a writer. + # It would be normal to use a context manager here, but not doing so + # gives us more control over close() for test purposes. + chunk_size = 8 # Note: Real upload requires a multiple of 256KiB. + writer = BlobWriter( + blob, chunk_size=chunk_size, content_type=PLAIN_CONTENT_TYPE, + ) + + # The transmit_next_chunk method must actually consume bytes from the + # sliding buffer for the flush() feature to work properly. + upload.transmit_next_chunk.side_effect = lambda _: writer._buffer.read( + chunk_size + ) + + # Write under chunk_size. This should be buffered and the upload not + # initiated. + writer.write(TEST_BINARY_DATA[0:4]) + blob.initiate_resumable_upload.assert_not_called() + + # Write over chunk_size. This should result in upload initialization + # and multiple chunks uploaded. + # Due to the condition not being fulfilled, retry should be None. + writer.write(TEST_BINARY_DATA[4:32]) + blob._initiate_resumable_upload.assert_called_once_with( + blob.bucket.client, + writer._buffer, + PLAIN_CONTENT_TYPE, + None, # size + None, # num_retries + chunk_size=chunk_size, + retry=None, + ) + upload.transmit_next_chunk.assert_called_with(transport) + self.assertEqual(upload.transmit_next_chunk.call_count, 4) + + # Write another byte, finalize and close. 
+ writer.write(TEST_BINARY_DATA[32:33]) + writer.close() + self.assertEqual(upload.transmit_next_chunk.call_count, 5) + + def test_conditional_retry_pass(self): blob = mock.Mock() upload = mock.Mock() @@ -302,8 +394,8 @@ def test_conditional_retries(self): writer = BlobWriter( blob, chunk_size=chunk_size, - num_retries=None, content_type=PLAIN_CONTENT_TYPE, + if_metageneration_match=1, ) # The transmit_next_chunk method must actually consume bytes from the @@ -319,15 +411,69 @@ def test_conditional_retries(self): # Write over chunk_size. This should result in upload initialization # and multiple chunks uploaded. - # Due to the condition not being fulfilled, num_retries should be 0. + # Due to the condition being fulfilled, retry should be DEFAULT_RETRY. writer.write(TEST_BINARY_DATA[4:32]) blob._initiate_resumable_upload.assert_called_once_with( blob.bucket.client, writer._buffer, PLAIN_CONTENT_TYPE, - None, - 0, + None, # size + None, # num_retries + chunk_size=chunk_size, + retry=DEFAULT_RETRY, + if_metageneration_match=1, + ) + upload.transmit_next_chunk.assert_called_with(transport) + self.assertEqual(upload.transmit_next_chunk.call_count, 4) + + # Write another byte, finalize and close. + writer.write(TEST_BINARY_DATA[32:33]) + writer.close() + self.assertEqual(upload.transmit_next_chunk.call_count, 5) + + @mock.patch("warnings.warn") + def test_forced_default_retry(self, mock_warn): + blob = mock.Mock() + + upload = mock.Mock() + transport = mock.Mock() + + blob._initiate_resumable_upload.return_value = (upload, transport) + + with mock.patch("google.cloud.storage.fileio.CHUNK_SIZE_MULTIPLE", 1): + # Create a writer. + # It would be normal to use a context manager here, but not doing so + # gives us more control over close() for test purposes. + chunk_size = 8 # Note: Real upload requires a multiple of 256KiB. + writer = BlobWriter( + blob, + chunk_size=chunk_size, + content_type=PLAIN_CONTENT_TYPE, + retry=DEFAULT_RETRY, + ) + + # The transmit_next_chunk method must actually consume bytes from the + # sliding buffer for the flush() feature to work properly. + upload.transmit_next_chunk.side_effect = lambda _: writer._buffer.read( + chunk_size + ) + + # Write under chunk_size. This should be buffered and the upload not + # initiated. + writer.write(TEST_BINARY_DATA[0:4]) + blob.initiate_resumable_upload.assert_not_called() + + # Write over chunk_size. This should result in upload initialization + # and multiple chunks uploaded. + writer.write(TEST_BINARY_DATA[4:32]) + blob._initiate_resumable_upload.assert_called_once_with( + blob.bucket.client, + writer._buffer, + PLAIN_CONTENT_TYPE, + None, # size + None, # num_retries chunk_size=chunk_size, + retry=DEFAULT_RETRY, ) upload.transmit_next_chunk.assert_called_with(transport) self.assertEqual(upload.transmit_next_chunk.call_count, 4) @@ -337,6 +483,99 @@ def test_conditional_retries(self): writer.close() self.assertEqual(upload.transmit_next_chunk.call_count, 5) + def test_num_retries_and_retry_conflict(self): + blob = mock.Mock() + + blob._initiate_resumable_upload.side_effect = ValueError + + with mock.patch("google.cloud.storage.fileio.CHUNK_SIZE_MULTIPLE", 1): + # Create a writer. + # It would be normal to use a context manager here, but not doing so + # gives us more control over close() for test purposes. + chunk_size = 8 # Note: Real upload requires a multiple of 256KiB. 
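+            # Constructing the writer with both num_retries and retry set is
+            # accepted here; the conflict surfaces only when the first write
+            # past chunk_size initiates the (mocked) resumable upload below.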
+ writer = BlobWriter( + blob, + chunk_size=chunk_size, + content_type=PLAIN_CONTENT_TYPE, + num_retries=2, + retry=DEFAULT_RETRY, + ) + + # Write under chunk_size. This should be buffered and the upload not + # initiated. + writer.write(TEST_BINARY_DATA[0:4]) + blob.initiate_resumable_upload.assert_not_called() + + # Write over chunk_size. The mock will raise a ValueError, simulating + # actual behavior when num_retries and retry are both specified. + with self.assertRaises(ValueError): + writer.write(TEST_BINARY_DATA[4:32]) + + blob._initiate_resumable_upload.assert_called_once_with( + blob.bucket.client, + writer._buffer, + PLAIN_CONTENT_TYPE, + None, # size + 2, # num_retries + chunk_size=chunk_size, + retry=DEFAULT_RETRY, + ) + + @mock.patch("warnings.warn") + def test_num_retries_only(self, mock_warn): + blob = mock.Mock() + + upload = mock.Mock() + transport = mock.Mock() + + blob._initiate_resumable_upload.return_value = (upload, transport) + + with mock.patch("google.cloud.storage.fileio.CHUNK_SIZE_MULTIPLE", 1): + # Create a writer. + # It would be normal to use a context manager here, but not doing so + # gives us more control over close() for test purposes. + chunk_size = 8 # Note: Real upload requires a multiple of 256KiB. + writer = BlobWriter( + blob, + chunk_size=chunk_size, + content_type=PLAIN_CONTENT_TYPE, + num_retries=2, + ) + + # The transmit_next_chunk method must actually consume bytes from the + # sliding buffer for the flush() feature to work properly. + upload.transmit_next_chunk.side_effect = lambda _: writer._buffer.read( + chunk_size + ) + + # Write under chunk_size. This should be buffered and the upload not + # initiated. + writer.write(TEST_BINARY_DATA[0:4]) + blob.initiate_resumable_upload.assert_not_called() + + # Write over chunk_size. This should result in upload initialization + # and multiple chunks uploaded. + writer.write(TEST_BINARY_DATA[4:32]) + blob._initiate_resumable_upload.assert_called_once_with( + blob.bucket.client, + writer._buffer, + PLAIN_CONTENT_TYPE, + None, # size + 2, # num_retries + chunk_size=chunk_size, + retry=None, + ) + upload.transmit_next_chunk.assert_called_with(transport) + self.assertEqual(upload.transmit_next_chunk.call_count, 4) + mock_warn.assert_called_once_with( + _NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2 + ) + + # Write another byte, finalize and close. 
+ writer.write(TEST_BINARY_DATA[32:33]) + writer.close() + self.assertEqual(upload.transmit_next_chunk.call_count, 5) + def test_rejects_invalid_kwargs(self): blob = mock.Mock() with self.assertRaises(ValueError): @@ -606,5 +845,6 @@ def test_write(self): None, NUM_RETRIES, chunk_size=chunk_size, + retry=None, ) upload.transmit_next_chunk.assert_called_with(transport) From 70d19e72831dee112bb07f38b50beef4890c1155 Mon Sep 17 00:00:00 2001 From: cojenco Date: Mon, 14 Jun 2021 15:31:15 -0700 Subject: [PATCH 24/30] fix: revise blob.compose query parameters `if_generation_match` (#454) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * revise blob.compose logic to match API usage * update tests * update system test * address comments * 🦉 Updates from OwlBot * revise logic for backwards compatibility * add tests * revise docstring * fix test * revise to DeprecationWarning * address comments and revise docstrings Co-authored-by: Tres Seaver Co-authored-by: Owl Bot --- google/cloud/storage/blob.py | 114 +++++++++++------ tests/system/test_system.py | 45 ++++++- tests/unit/test_blob.py | 239 ++++++++++++++++++++++++++++++----- 3 files changed, 329 insertions(+), 69 deletions(-) diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index 597e63ca4..c22e6699c 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -3198,6 +3198,7 @@ def compose( timeout=_DEFAULT_TIMEOUT, if_generation_match=None, if_metageneration_match=None, + if_source_generation_match=None, retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, ): """Concatenate source blobs into this one. @@ -3218,73 +3219,98 @@ (Optional) The amount of time, in seconds, to wait for the server response. See: :ref:`configuring_timeouts` - :type if_generation_match: list of long + :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. - The list must match ``sources`` item-to-item. + (Optional) Makes the operation conditional on whether the + destination object's current generation matches the given value. + Setting to 0 makes the operation succeed only if there are no live + versions of the object. + + Note: In a previous version, this argument worked identically to the + ``if_source_generation_match`` argument. For backwards-compatibility reasons, + if a list is passed in, this argument will behave like ``if_source_generation_match`` + and also issue a DeprecationWarning. - :type if_metageneration_match: list of long + :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. The list must match - ``sources`` item-to-item. + (Optional) Makes the operation conditional on whether the + destination object's current metageneration matches the given + value. + + If a list of long is passed in, no match operation will be performed. + (Deprecated: type list of long is supported for backwards-compatibility reasons only.) + + :type if_source_generation_match: list of long + :param if_source_generation_match: + (Optional) Makes the operation conditional on whether the current generation + of each source blob matches the corresponding generation. + The list must match ``sources`` item-to-item.
:type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries` Example: - Compose blobs using generation match preconditions. + Compose blobs using source generation match preconditions. >>> from google.cloud import storage >>> client = storage.Client() >>> bucket = client.bucket("bucket-name") >>> blobs = [bucket.blob("blob-name-1"), bucket.blob("blob-name-2")] - >>> if_generation_match = [None] * len(blobs) - >>> if_generation_match[0] = "123" # precondition for "blob-name-1" + >>> if_source_generation_match = [None] * len(blobs) + >>> if_source_generation_match[0] = "123" # precondition for "blob-name-1" >>> composed_blob = bucket.blob("composed-name") - >>> composed_blob.compose(blobs, if_generation_match) + >>> composed_blob.compose(blobs, if_source_generation_match=if_source_generation_match) """ sources_len = len(sources) - if if_generation_match is not None and len(if_generation_match) != sources_len: - raise ValueError( - "'if_generation_match' length must be the same as 'sources' length" + client = self._require_client(client) + query_params = {} + + if isinstance(if_generation_match, list): + warnings.warn( + "if_generation_match: type list is deprecated and supported for backwards-compatibility reasons only. " + "Use if_source_generation_match instead to match source objects' generations.", + DeprecationWarning, + stacklevel=2, ) - if ( - if_metageneration_match is not None - and len(if_metageneration_match) != sources_len - ): - raise ValueError( - "'if_metageneration_match' length must be the same as 'sources' length" + if if_source_generation_match is not None: + raise ValueError( + "Use if_generation_match to match the generation of the destination object by passing in a generation number, instead of a list. " + "Use if_source_generation_match to match source objects' generations." + ) + + # if_generation_match: type list is deprecated. Instead use if_source_generation_match. + if_source_generation_match = if_generation_match + if_generation_match = None + + if isinstance(if_metageneration_match, list): + warnings.warn( + "if_metageneration_match: type list is deprecated and supported for backwards-compatibility reasons only. " + "Note that the metageneration to be matched is that of the destination blob. "
+ "Please pass in a single value (type long).", + DeprecationWarning, + stacklevel=2, ) - client = self._require_client(client) - query_params = {} + if_metageneration_match = None - if self.user_project is not None: - query_params["userProject"] = self.user_project + if if_source_generation_match is None: + if_source_generation_match = [None] * sources_len + if len(if_source_generation_match) != sources_len: + raise ValueError( + "'if_source_generation_match' length must be the same as 'sources' length" + ) source_objects = [] - for index, source in enumerate(sources): - source_object = {"name": source.name} + for source, source_generation in zip(sources, if_source_generation_match): + source_object = {"name": source.name, "generation": source.generation} preconditions = {} - if ( - if_generation_match is not None - and if_generation_match[index] is not None - ): - preconditions["ifGenerationMatch"] = if_generation_match[index] - - if ( - if_metageneration_match is not None - and if_metageneration_match[index] is not None - ): - preconditions["ifMetagenerationMatch"] = if_metageneration_match[index] + if source_generation is not None: + preconditions["ifGenerationMatch"] = source_generation if preconditions: source_object["objectPreconditions"] = preconditions @@ -3295,6 +3321,16 @@ def compose( "sourceObjects": source_objects, "destination": self._properties.copy(), } + + if self.user_project is not None: + query_params["userProject"] = self.user_project + + _add_generation_match_parameters( + query_params, + if_generation_match=if_generation_match, + if_metageneration_match=if_metageneration_match, + ) + api_response = client._post_resource( "{}/compose".format(self.path), request, diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 6ec1c2a68..2a5cad487 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -1723,7 +1723,7 @@ def test_compose_replace_existing_blob(self): composed = original.download_as_bytes() self.assertEqual(composed, BEFORE + TO_APPEND) - def test_compose_with_generation_match(self): + def test_compose_with_generation_match_list(self): BEFORE = b"AAA\n" original = self.bucket.blob("original") original.content_type = "text/plain" @@ -1751,6 +1751,49 @@ def test_compose_with_generation_match(self): composed = original.download_as_bytes() self.assertEqual(composed, BEFORE + TO_APPEND) + def test_compose_with_generation_match_long(self): + BEFORE = b"AAA\n" + original = self.bucket.blob("original") + original.content_type = "text/plain" + original.upload_from_string(BEFORE) + self.case_blobs_to_delete.append(original) + + TO_APPEND = b"BBB\n" + to_append = self.bucket.blob("to_append") + to_append.upload_from_string(TO_APPEND) + self.case_blobs_to_delete.append(to_append) + + with self.assertRaises(google.api_core.exceptions.PreconditionFailed): + original.compose([original, to_append], if_generation_match=0) + + original.compose([original, to_append], if_generation_match=original.generation) + + composed = original.download_as_bytes() + self.assertEqual(composed, BEFORE + TO_APPEND) + + def test_compose_with_source_generation_match(self): + BEFORE = b"AAA\n" + original = self.bucket.blob("original") + original.content_type = "text/plain" + original.upload_from_string(BEFORE) + self.case_blobs_to_delete.append(original) + + TO_APPEND = b"BBB\n" + to_append = self.bucket.blob("to_append") + to_append.upload_from_string(TO_APPEND) + self.case_blobs_to_delete.append(to_append) + + with 
self.assertRaises(google.api_core.exceptions.PreconditionFailed): + original.compose([original, to_append], if_source_generation_match=[6, 7]) + + original.compose( + [original, to_append], + if_source_generation_match=[original.generation, to_append.generation], + ) + + composed = original.download_as_bytes() + self.assertEqual(composed, BEFORE + TO_APPEND) + @unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.") def test_compose_with_user_project(self): new_bucket_name = "compose-user-project" + unique_resource_id("-") diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index 46a130dc8..158109705 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -3754,7 +3754,10 @@ def test_compose_wo_content_type_set(self): expected_path = "/b/name/o/%s/compose" % destination_name expected_data = { - "sourceObjects": [{"name": source_1_name}, {"name": source_2_name}], + "sourceObjects": [ + {"name": source_1.name, "generation": source_1.generation}, + {"name": source_2.name, "generation": source_2.generation}, + ], "destination": {}, } expected_query_params = {} @@ -3788,7 +3791,10 @@ def test_compose_minimal_w_user_project_w_timeout(self): expected_path = "/b/name/o/%s/compose" % destination_name expected_data = { - "sourceObjects": [{"name": source_1_name}, {"name": source_2_name}], + "sourceObjects": [ + {"name": source_1.name, "generation": source_1.generation}, + {"name": source_2.name, "generation": source_2.generation}, + ], "destination": {"contentType": "text/plain"}, } expected_query_params = {"userProject": user_project} @@ -3823,7 +3829,10 @@ def test_compose_w_additional_property_changes_w_retry(self): expected_path = "/b/name/o/%s/compose" % destination_name expected_data = { - "sourceObjects": [{"name": source_1_name}, {"name": source_2_name}], + "sourceObjects": [ + {"name": source_1.name, "generation": source_1.generation}, + {"name": source_2.name, "generation": source_2.generation}, + ], "destination": { "contentType": "text/plain", "contentLanguage": "en-US", @@ -3840,13 +3849,12 @@ def test_compose_w_additional_property_changes_w_retry(self): _target_object=destination, ) - def test_compose_w_generation_match(self): + def test_compose_w_source_generation_match(self): source_1_name = "source-1" source_2_name = "source-2" destination_name = "destination" api_response = {} - generation_numbers = [6, 9] - metageneration_numbers = [7, 1] + source_generation_numbers = [6, 9] client = mock.Mock(spec=["_post_resource"]) client._post_resource.return_value = api_response @@ -3857,25 +3865,24 @@ def test_compose_w_generation_match(self): destination = self._make_one(destination_name, bucket=bucket) destination.compose( sources=[source_1, source_2], - if_generation_match=generation_numbers, - if_metageneration_match=metageneration_numbers, + if_source_generation_match=source_generation_numbers, ) expected_path = "/b/name/o/%s/compose" % destination_name expected_data = { "sourceObjects": [ { - "name": source_1_name, + "name": source_1.name, + "generation": source_1.generation, "objectPreconditions": { - "ifGenerationMatch": generation_numbers[0], - "ifMetagenerationMatch": metageneration_numbers[0], + "ifGenerationMatch": source_generation_numbers[0], }, }, { - "name": source_2_name, + "name": source_2.name, + "generation": source_2.generation, "objectPreconditions": { - "ifGenerationMatch": generation_numbers[1], - "ifMetagenerationMatch": metageneration_numbers[1], + "ifGenerationMatch": source_generation_numbers[1], }, }, ], @@ -3891,11 
+3898,11 @@ def test_compose_w_generation_match(self): _target_object=destination, ) - def test_compose_w_generation_match_bad_length(self): + def test_compose_w_source_generation_match_bad_length(self): source_1_name = "source-1" source_2_name = "source-2" destination_name = "destination" - generation_numbers = [6] + source_generation_numbers = [6] client = mock.Mock(spec=["_post_resource"]) bucket = _Bucket(client=client) source_1 = self._make_one(source_1_name, bucket=bucket) @@ -3905,35 +3912,59 @@ def test_compose_w_generation_match_bad_length(self): with self.assertRaises(ValueError): destination.compose( - sources=[source_1, source_2], if_generation_match=generation_numbers + sources=[source_1, source_2], + if_source_generation_match=source_generation_numbers, ) client._post_resource.assert_not_called() - def test_compose_w_metageneration_match_bad_length(self): + def test_compose_w_source_generation_match_nones(self): source_1_name = "source-1" source_2_name = "source-2" destination_name = "destination" - metageneration_numbers = [7] + source_generation_numbers = [6, None] + api_response = {} client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response bucket = _Bucket(client=client) source_1 = self._make_one(source_1_name, bucket=bucket) source_2 = self._make_one(source_2_name, bucket=bucket) destination = self._make_one(destination_name, bucket=bucket) - with self.assertRaises(ValueError): - destination.compose( - sources=[source_1, source_2], - if_metageneration_match=metageneration_numbers, - ) + destination.compose( + sources=[source_1, source_2], + if_source_generation_match=source_generation_numbers, + ) - client._post_resource.assert_not_called() + expected_path = "/b/name/o/%s/compose" % destination_name + expected_data = { + "sourceObjects": [ + { + "name": source_1.name, + "generation": source_1.generation, + "objectPreconditions": { + "ifGenerationMatch": source_generation_numbers[0], + }, + }, + {"name": source_2.name, "generation": source_2.generation}, + ], + "destination": {}, + } + expected_query_params = {} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=destination, + ) - def test_compose_w_generation_match_nones(self): + def test_compose_w_generation_match(self): source_1_name = "source-1" source_2_name = "source-2" destination_name = "destination" - generation_numbers = [6, None] + generation_number = 1 api_response = {} client = mock.Mock(spec=["_post_resource"]) client._post_resource.return_value = api_response @@ -3943,7 +3974,44 @@ def test_compose_w_generation_match_nones(self): destination = self._make_one(destination_name, bucket=bucket) destination.compose( - sources=[source_1, source_2], if_generation_match=generation_numbers + sources=[source_1, source_2], if_generation_match=generation_number, + ) + + expected_path = "/b/name/o/%s/compose" % destination_name + expected_data = { + "sourceObjects": [ + {"name": source_1.name, "generation": source_1.generation}, + {"name": source_2.name, "generation": source_2.generation}, + ], + "destination": {}, + } + expected_query_params = {"ifGenerationMatch": generation_number} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + 
_target_object=destination, + ) + + @mock.patch("warnings.warn") + def test_compose_w_generation_match_w_warning(self, mock_warn): + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + api_response = {} + generation_numbers = [6, 9] + + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response + bucket = _Bucket(client=client) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, bucket=bucket) + + destination = self._make_one(destination_name, bucket=bucket) + destination.compose( + sources=[source_1, source_2], if_generation_match=generation_numbers, ) expected_path = "/b/name/o/%s/compose" % destination_name @@ -3951,11 +4019,18 @@ def test_compose_w_generation_match_nones(self): "sourceObjects": [ { "name": source_1_name, + "generation": None, "objectPreconditions": { "ifGenerationMatch": generation_numbers[0], }, }, - {"name": source_2_name}, + { + "name": source_2_name, + "generation": None, + "objectPreconditions": { + "ifGenerationMatch": generation_numbers[1], + }, + }, ], "destination": {}, } @@ -3969,6 +4044,112 @@ _target_object=destination, ) + mock_warn.assert_called_with( + "if_generation_match: type list is deprecated and supported for backwards-compatibility reasons only. " + "Use if_source_generation_match instead to match source objects' generations.", + DeprecationWarning, + stacklevel=2, + ) + + def test_compose_invalid_generation_match(self): + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + source_generation_numbers = [6, 8] + client = mock.Mock(spec=["_post_resource"]) + bucket = _Bucket(client=client) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, bucket=bucket) + + destination = self._make_one(destination_name, bucket=bucket) + + with self.assertRaises(ValueError): + destination.compose( + sources=[source_1, source_2], + if_generation_match=source_generation_numbers, + if_source_generation_match=source_generation_numbers, + ) + + client._post_resource.assert_not_called() + + @mock.patch("warnings.warn") + def test_compose_w_metageneration_match_w_warning(self, mock_warn): + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + metageneration_number = [6] + client = mock.Mock(spec=["_post_resource"]) + bucket = _Bucket(client=client) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, bucket=bucket) + + destination = self._make_one(destination_name, bucket=bucket) + + destination.compose( + sources=[source_1, source_2], if_metageneration_match=metageneration_number, + ) + + expected_path = "/b/name/o/%s/compose" % destination_name + expected_data = { + "sourceObjects": [ + {"name": source_1_name, "generation": None}, + {"name": source_2_name, "generation": None}, + ], + "destination": {}, + } + expected_query_params = {} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=destination, + ) + + mock_warn.assert_called_with( + "if_metageneration_match: type list is deprecated and supported for backwards-compatibility reasons only. " + "Note that the metageneration to be matched is that of the destination blob. "
+ "Please pass in a single value (type long).", + DeprecationWarning, + stacklevel=2, + ) + + def test_compose_w_metageneration_match(self): + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + metageneration_number = 1 + api_response = {} + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response + bucket = _Bucket(client=client) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, bucket=bucket) + destination = self._make_one(destination_name, bucket=bucket) + + destination.compose( + sources=[source_1, source_2], if_metageneration_match=metageneration_number, + ) + + expected_path = "/b/name/o/%s/compose" % destination_name + expected_data = { + "sourceObjects": [ + {"name": source_1.name, "generation": source_1.generation}, + {"name": source_2.name, "generation": source_2.generation}, + ], + "destination": {}, + } + expected_query_params = {"ifMetagenerationMatch": metageneration_number} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=destination, + ) + def test_rewrite_w_response_wo_resource(self): source_name = "source" dest_name = "dest" From ae8b655a33aab7b0be9bad1390aa2c4393f64552 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 17 Jun 2021 10:52:22 +0000 Subject: [PATCH 25/30] chore: new owl bot post processor docker image (#470) Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:58c7342b0bccf85028100adaa3d856cb4a871c22ca9c01960d996e66c40548ce --- .github/.OwlBot.lock.yaml | 2 +- docs/conf.py | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index da616c91a..ea06d395e 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:c66ba3c8d7bc8566f47df841f98cd0097b28fff0b1864c86f5817f4c8c3e8600 + digest: sha256:58c7342b0bccf85028100adaa3d856cb4a871c22ca9c01960d996e66c40548ce diff --git a/docs/conf.py b/docs/conf.py index 1691c5c04..a25e7b866 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -80,9 +80,9 @@ master_doc = "index" # General information about the project. 
-project = u"google-cloud-storage" -copyright = u"2019, Google" -author = u"Google APIs" +project = "google-cloud-storage" +copyright = "2019, Google" +author = "Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -281,7 +281,7 @@ ( master_doc, "google-cloud-storage.tex", - u"google-cloud-storage Documentation", + "google-cloud-storage Documentation", author, "manual", ) @@ -316,7 +316,7 @@ ( master_doc, "google-cloud-storage", - u"google-cloud-storage Documentation", + "google-cloud-storage Documentation", [author], 1, ) @@ -335,7 +335,7 @@ ( master_doc, "google-cloud-storage", - u"google-cloud-storage Documentation", + "google-cloud-storage Documentation", author, "google-cloud-storage", "google-cloud-storage Library", From 73ee6e3ea4fcd1782d8cfe73019cfa2a14f957c6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 20 Jun 2021 01:06:04 +0000 Subject: [PATCH 26/30] chore: update precommit hook pre-commit/pre-commit-hooks to v4 (#1083) (#472) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [pre-commit/pre-commit-hooks](https://togithub.com/pre-commit/pre-commit-hooks) | repository | major | `v3.4.0` -> `v4.0.1` | --- ### Release Notes
pre-commit/pre-commit-hooks ### [`v4.0.1`](https://togithub.com/pre-commit/pre-commit-hooks/releases/v4.0.1) [Compare Source](https://togithub.com/pre-commit/pre-commit-hooks/compare/v4.0.0...v4.0.1) ##### Fixes - `check-shebang-scripts-are-executable` fix entry point. - [#602](https://togithub.com/pre-commit/pre-commit-hooks/issues/602) issue by [@Person-93](https://togithub.com/Person-93). - [#603](https://togithub.com/pre-commit/pre-commit-hooks/issues/603) PR by [@scop](https://togithub.com/scop). ### [`v4.0.0`](https://togithub.com/pre-commit/pre-commit-hooks/releases/v4.0.0) [Compare Source](https://togithub.com/pre-commit/pre-commit-hooks/compare/v3.4.0...v4.0.0) ##### Features - `check-json`: report duplicate keys. - [#558](https://togithub.com/pre-commit/pre-commit-hooks/issues/558) PR by [@AdityaKhursale](https://togithub.com/AdityaKhursale). - [#554](https://togithub.com/pre-commit/pre-commit-hooks/issues/554) issue by [@adamchainz](https://togithub.com/adamchainz). - `no-commit-to-branch`: add `main` to default blocked branches. - [#565](https://togithub.com/pre-commit/pre-commit-hooks/issues/565) PR by [@ndevenish](https://togithub.com/ndevenish). - `check-case-conflict`: check conflicts in directory names as well. - [#575](https://togithub.com/pre-commit/pre-commit-hooks/issues/575) PR by [@slsyy](https://togithub.com/slsyy). - [#70](https://togithub.com/pre-commit/pre-commit-hooks/issues/70) issue by [@andyjack](https://togithub.com/andyjack). - `check-vcs-permalinks`: forbid other branch names. - [#582](https://togithub.com/pre-commit/pre-commit-hooks/issues/582) PR by [@jack1142](https://togithub.com/jack1142). - [#581](https://togithub.com/pre-commit/pre-commit-hooks/issues/581) issue by [@jack1142](https://togithub.com/jack1142). - `check-shebang-scripts-are-executable`: new hook which ensures shebang'd scripts are executable. - [#545](https://togithub.com/pre-commit/pre-commit-hooks/issues/545) PR by [@scop](https://togithub.com/scop). ##### Fixes - `check-executables-have-shebangs`: Short circuit shebang lookup on windows.
- [#544](https://togithub.com/pre-commit/pre-commit-hooks/issues/544) PR by [@scop](https://togithub.com/scop). - `requirements-txt-fixer`: Fix comments which have indentation - [#549](https://togithub.com/pre-commit/pre-commit-hooks/issues/549) PR by [@greshilov](https://togithub.com/greshilov). - [#548](https://togithub.com/pre-commit/pre-commit-hooks/issues/548) issue by [@greshilov](https://togithub.com/greshilov). - `pretty-format-json`: write to stdout using UTF-8 encoding. - [#571](https://togithub.com/pre-commit/pre-commit-hooks/issues/571) PR by [@jack1142](https://togithub.com/jack1142). - [#570](https://togithub.com/pre-commit/pre-commit-hooks/issues/570) issue by [@jack1142](https://togithub.com/jack1142). - Use more inclusive language. - [#599](https://togithub.com/pre-commit/pre-commit-hooks/issues/599) PR by [@asottile](https://togithub.com/asottile). ##### Breaking changes - Remove deprecated hooks: `flake8`, `pyflakes`, `autopep8-wrapper`. - [#597](https://togithub.com/pre-commit/pre-commit-hooks/issues/597) PR by [@asottile](https://togithub.com/asottile).
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻️ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/synthtool). Source-Link: https://github.com/googleapis/synthtool/commit/333fd90856f1454380514bc59fc0936cdaf1c202 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:b8c131c558606d3cea6e18f8e87befbd448c1482319b0db3c5d5388fa6ea72e3 --- .github/.OwlBot.lock.yaml | 2 +- .pre-commit-config.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index ea06d395e..9602d5405 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:58c7342b0bccf85028100adaa3d856cb4a871c22ca9c01960d996e66c40548ce + digest: sha256:b8c131c558606d3cea6e18f8e87befbd448c1482319b0db3c5d5388fa6ea72e3 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4f00c7cff..62eb5a77d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -16,7 +16,7 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.4.0 + rev: v4.0.1 hooks: - id: trailing-whitespace - id: end-of-file-fixer From 3b06f9e8495ac99f59d082f8b5dde13ae7c5b374 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 21 Jun 2021 12:22:02 -0400 Subject: [PATCH 27/30] tests: harden 'test_bucket_w_default_event_based_hold' (#468) Reload blob w/ spurious EBH. Closes #435. --- tests/system/test_system.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 2a5cad487..ce89beb59 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -42,6 +42,7 @@ from google.api_core import path_template import google.oauth2 from test_utils.retry import RetryErrors +from test_utils.retry import RetryInstanceState from test_utils.system import unique_resource_id from test_utils.vpcsc_config import vpcsc_config @@ -57,12 +58,17 @@ def _bad_copy(bad_request): return err_msg.startswith("No file found in request.
(POST") and "copyTo" in err_msg +def _no_event_based_hold(blob): + return not blob.event_based_hold + + retry_429 = RetryErrors(exceptions.TooManyRequests, max_tries=6) retry_429_harder = RetryErrors(exceptions.TooManyRequests, max_tries=10) retry_429_503 = RetryErrors( [exceptions.TooManyRequests, exceptions.ServiceUnavailable], max_tries=10 ) retry_bad_copy = RetryErrors(exceptions.BadRequest, error_predicate=_bad_copy) +retry_no_event_based_hold = RetryInstanceState(_no_event_based_hold) def _empty_bucket(client, bucket): @@ -2482,6 +2488,11 @@ def test_bucket_w_default_event_based_hold(self): self.assertFalse(bucket.retention_policy_locked) blob.upload_from_string(payload) + + # https://github.com/googleapis/python-storage/issues/435 + if blob.event_based_hold: + retry_no_event_based_hold(blob.reload)() + self.assertFalse(blob.event_based_hold) self.assertFalse(blob.temporary_hold) self.assertIsNone(blob.retention_expiration_time) From cf22a11dbccb46ec64f22c3996d9d65946970320 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 21 Jun 2021 12:59:34 -0400 Subject: [PATCH 28/30] tests: pre-scrub old HMAC keys before testing creation (#467) Avoids hitting 5-key-per-service-account quota. Closes #334. --- tests/system/test_system.py | 31 ++++++++++++++++++++++++++----- 1 file changed, 26 insertions(+), 5 deletions(-) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index ce89beb59..6fbaa02c2 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -27,6 +27,7 @@ import mock import requests +import pytest import six from google.cloud import exceptions @@ -35,6 +36,7 @@ from google.cloud.storage._helpers import _base64_md5hash from google.cloud.storage.bucket import LifecycleRuleDelete from google.cloud.storage.bucket import LifecycleRuleSetStorageClass +from google.cloud import _helpers from google.cloud import kms from google import resumable_media import google.auth @@ -147,13 +149,33 @@ def test_get_service_account_email(self): self.assertTrue(any(match for match in matches if match is not None)) + @staticmethod + def _get_before_hmac_keys(client): + from google.cloud.storage.hmac_key import HMACKeyMetadata + + before_hmac_keys = set(client.list_hmac_keys()) + + now = datetime.datetime.utcnow().replace(tzinfo=_helpers.UTC) + yesterday = now - datetime.timedelta(days=1) + + # Delete any HMAC keys older than a day. 
+ for hmac_key in list(before_hmac_keys): + if hmac_key.time_created < yesterday: + if hmac_key.state != HMACKeyMetadata.INACTIVE_STATE: + hmac_key.state = HMACKeyMetadata.INACTIVE_STATE + hmac_key.update() + hmac_key.delete() + before_hmac_keys.remove(hmac_key) + + return before_hmac_keys + def test_hmac_key_crud(self): from google.cloud.storage.hmac_key import HMACKeyMetadata credentials = Config.CLIENT._credentials email = credentials.service_account_email - before_keys = set(Config.CLIENT.list_hmac_keys()) + before_hmac_keys = self._get_before_hmac_keys(Config.CLIENT) metadata, secret = Config.CLIENT.create_hmac_key(email) self.case_hmac_keys_to_delete.append(metadata) @@ -161,9 +183,9 @@ def test_hmac_key_crud(self): self.assertIsInstance(secret, six.text_type) self.assertEqual(len(secret), 40) - after_keys = set(Config.CLIENT.list_hmac_keys()) - self.assertFalse(metadata in before_keys) - self.assertTrue(metadata in after_keys) + after_hmac_keys = set(Config.CLIENT.list_hmac_keys()) + self.assertFalse(metadata in before_hmac_keys) + self.assertTrue(metadata in after_hmac_keys) another = HMACKeyMetadata(Config.CLIENT) @@ -309,7 +331,6 @@ def test_bucket_update_labels(self): self.assertEqual(bucket.labels, {}) def test_get_set_iam_policy(self): - import pytest from google.cloud.storage.iam import STORAGE_OBJECT_VIEWER_ROLE from google.api_core.exceptions import BadRequest, PreconditionFailed From 69993702390322df07cc2e818003186a47524c2b Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 21 Jun 2021 18:31:08 -0400 Subject: [PATCH 29/30] docs: streamline docstrings for conditional params (#464) Add narrative docs for conditional params: - 'if_generation_match' - 'if_generation_not_match' - 'if_metageneration_match' - 'if_metageneration_not_match' Streamline docstrings for conditional params, linking to new narrative docs. Also, move 'timeout' argument to end (just before 'retry', if present) for consistency. Closes #460. --- docs/generation_metageneration.rst | 127 +++++++++ docs/index.rst | 1 + google/cloud/storage/_helpers.py | 99 +++---- google/cloud/storage/blob.py | 405 ++++++++++++----------------- google/cloud/storage/bucket.py | 268 ++++++++----------- google/cloud/storage/client.py | 42 ++- google/cloud/storage/fileio.py | 37 ++- noxfile.py | 1 + tests/unit/test_blob.py | 19 +- 9 files changed, 505 insertions(+), 494 deletions(-) create mode 100644 docs/generation_metageneration.rst diff --git a/docs/generation_metageneration.rst b/docs/generation_metageneration.rst new file mode 100644 index 000000000..287e6573a --- /dev/null +++ b/docs/generation_metageneration.rst @@ -0,0 +1,127 @@ +Conditional Requests Via Generation / Metageneration Preconditions +================================================================== + +Preconditions tell Cloud Storage to only perform a request if the +:ref:`generation <concept-generation>` or +:ref:`metageneration <concept-metageneration>` number of the affected object +meets your precondition criteria. These checks of the generation and +metageneration numbers ensure that the object is in the expected state, +allowing you to perform safe read-modify-write updates and conditional +operations on objects. + +Concepts +-------- + +.. _concept-metageneration: + +Metageneration +:::::::::::::: + +When you create a :class:`~google.cloud.storage.bucket.Bucket`, +its :attr:`~google.cloud.storage.bucket.Bucket.metageneration` is initialized +to ``1``, representing the initial version of the bucket's metadata.
+ +When you first upload a +:class:`~google.cloud.storage.blob.Blob` ("Object" in the GCS back-end docs), +its :attr:`~google.cloud.storage.blob.Blob.metageneration` is likewise +initialized to ``1``, representing the initial version of the blob's metadata. + +The ``metageneration`` attribute is set by the GCS back-end, and is read-only +in the client library. + +Each time you patch or update the bucket's / blob's metadata, its +``metageneration`` is incremented. + + +.. _concept-generation: + +Generation +:::::::::: + +Each time you upload a new version of a file to a +:class:`~google.cloud.storage.blob.Blob` ("Object" in the GCS back-end docs), +the Blob's :attr:`~google.cloud.storage.blob.Blob.generation` is changed, and its +:attr:`~google.cloud.storage.blob.Blob.metageneration` is reset to ``1`` (the first +metadata version for that generation of the blob). + +The ``generation`` attribute is set by the GCS back-end, and is read-only +in the client library. + +See also +:::::::: + +- `Storage API Generation Precondition docs`_ + +.. _Storage API Generation Precondition docs: + https://cloud.google.com/storage/docs/generations-preconditions + + +Conditional Parameters +---------------------- + +.. _using-if-generation-match: + +Using ``if_generation_match`` +::::::::::::::::::::::::::::: + +Passing the ``if_generation_match`` parameter to a method which retrieves a +blob resource (e.g., +:meth:`Blob.reload <google.cloud.storage.blob.Blob.reload>`) or modifies +the blob (e.g., +:meth:`Blob.update <google.cloud.storage.blob.Blob.update>`) +makes the operation conditional on whether the blob's current ``generation`` +matches the given value. + +As a special case, passing ``0`` as the value for ``if_generation_match`` +makes the operation succeed only if there are no live versions of the blob. + + +.. _using-if-generation-not-match: + +Using ``if_generation_not_match`` ::::::::::::::::::::::::::::::::: + +Passing the ``if_generation_not_match`` parameter to a method which retrieves +a blob resource (e.g., +:meth:`Blob.reload <google.cloud.storage.blob.Blob.reload>`) or modifies +the blob (e.g., +:meth:`Blob.update <google.cloud.storage.blob.Blob.update>`) +makes the operation conditional on whether the blob's current ``generation`` +does **not** match the given value. + +If no live version of the blob exists, the precondition fails. + +As a special case, passing ``0`` as the value for ``if_generation_not_match`` +makes the operation succeed only if there **is** a live version of the blob. + + +.. _using-if-metageneration-match: + +Using ``if_metageneration_match`` ::::::::::::::::::::::::::::::::: + +Passing the ``if_metageneration_match`` parameter to a method which retrieves +a blob or bucket resource +(e.g., :meth:`Blob.reload <google.cloud.storage.blob.Blob.reload>`, +:meth:`Bucket.reload <google.cloud.storage.bucket.Bucket.reload>`) +or modifies the blob or bucket (e.g., +:meth:`Blob.update <google.cloud.storage.blob.Blob.update>`, +:meth:`Bucket.patch <google.cloud.storage.bucket.Bucket.patch>`) +makes the operation conditional on whether the resource's current +``metageneration`` matches the given value. + + +.. _using-if-metageneration-not-match: + +Using ``if_metageneration_not_match`` ::::::::::::::::::::::::::::::::::::: + +Passing the ``if_metageneration_not_match`` parameter to a method which +retrieves a blob or bucket resource +(e.g., :meth:`Blob.reload <google.cloud.storage.blob.Blob.reload>`, +:meth:`Bucket.reload <google.cloud.storage.bucket.Bucket.reload>`) +or modifies the blob or bucket (e.g., +:meth:`Blob.update <google.cloud.storage.blob.Blob.update>`, +:meth:`Bucket.patch <google.cloud.storage.bucket.Bucket.patch>`) +makes the operation conditional on whether the resource's current +``metageneration`` does **not** match the given value.
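+
+
+Example: guarded metadata update
+::::::::::::::::::::::::::::::::
+
+A minimal sketch of a read-modify-write guarded by
+``if_metageneration_match`` (the bucket and blob names below are
+placeholders, not values from this library):
+
+.. code-block:: python
+
+    from google.cloud import storage
+
+    client = storage.Client()
+    blob = client.bucket("bucket-name").get_blob("blob-name")
+
+    # Apply the change only if nobody else has modified the blob's
+    # metadata since we read it; otherwise the request fails with a
+    # 412 Precondition Failed error.
+    blob.metadata = {"reviewed": "true"}
+    blob.patch(if_metageneration_match=blob.metageneration)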
diff --git a/docs/index.rst b/docs/index.rst index 051bac888..9ece79741 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -22,6 +22,7 @@ API Reference hmac_key notification retry_timeout + generation_metageneration Changelog --------- diff --git a/google/cloud/storage/_helpers.py b/google/cloud/storage/_helpers.py index 04671035b..ff5767de7 100644 --- a/google/cloud/storage/_helpers.py +++ b/google/cloud/storage/_helpers.py @@ -147,11 +147,11 @@ def reload( self, client=None, projection="noAcl", - timeout=_DEFAULT_TIMEOUT, if_generation_match=None, if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, ): """Reload properties from Cloud Storage. @@ -168,31 +168,26 @@ def reload( Defaults to ``'noAcl'``. Specifies the set of properties to return. - :type timeout: float or tuple - :param timeout: - (Optional) The amount of time, in seconds, to wait - for the server response. See: :ref:`configuring_timeouts` - :type if_generation_match: long - :param if_generation_match: (Optional) Make the operation conditional on whether - the blob's current generation matches the given value. - Setting to 0 makes the operation succeed only if there - are no live versions of the blob. + :param if_generation_match: + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long - :param if_generation_not_match: (Optional) Make the operation conditional on whether - the blob's current generation does not match the given - value. If no live blob exists, the precondition fails. - Setting to 0 makes the operation succeed only if there - is a live version of the blob. + :param if_generation_not_match: + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long - :param if_metageneration_match: (Optional) Make the operation conditional on whether the - blob's current metageneration matches the given value. + :param if_metageneration_match: + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long - :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the - blob's current metageneration does not match the given value. + :param if_metageneration_not_match: + (Optional) See :ref:`using-if-metageneration-not-match` + + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy :param retry: @@ -251,11 +246,11 @@ def _set_properties(self, value): def patch( self, client=None, - timeout=_DEFAULT_TIMEOUT, if_generation_match=None, if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, ): """Sends all changed properties in a PATCH request. @@ -269,31 +264,26 @@ def patch( :param client: the client to use. If not passed, falls back to the ``client`` stored on the current object. - :type timeout: float or tuple - :param timeout: - (Optional) The amount of time, in seconds, to wait - for the server response. See: :ref:`configuring_timeouts` - :type if_generation_match: long - :param if_generation_match: (Optional) Make the operation conditional on whether - the blob's current generation matches the given value. - Setting to 0 makes the operation succeed only if there - are no live versions of the blob. 
+ :param if_generation_match: + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long - :param if_generation_not_match: (Optional) Make the operation conditional on whether - the blob's current generation does not match the given - value. If no live blob exists, the precondition fails. - Setting to 0 makes the operation succeed only if there - is a live version of the blob. + :param if_generation_not_match: + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long - :param if_metageneration_match: (Optional) Make the operation conditional on whether the - blob's current metageneration matches the given value. + :param if_metageneration_match: + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long - :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the - blob's current metageneration does not match the given value. + :param if_metageneration_not_match: + (Optional) See :ref:`using-if-metageneration-not-match` + + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy :param retry: @@ -327,11 +317,11 @@ def patch( def update( self, client=None, - timeout=_DEFAULT_TIMEOUT, if_generation_match=None, if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, ): """Sends all properties in a PUT request. @@ -345,31 +335,26 @@ def update( :param client: the client to use. If not passed, falls back to the ``client`` stored on the current object. - :type timeout: float or tuple - :param timeout: - (Optional) The amount of time, in seconds, to wait - for the server response. See: :ref:`configuring_timeouts` - :type if_generation_match: long - :param if_generation_match: (Optional) Make the operation conditional on whether - the blob's current generation matches the given value. - Setting to 0 makes the operation succeed only if there - are no live versions of the blob. + :param if_generation_match: + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long - :param if_generation_not_match: (Optional) Make the operation conditional on whether - the blob's current generation does not match the given - value. If no live blob exists, the precondition fails. - Setting to 0 makes the operation succeed only if there - is a live version of the blob. + :param if_generation_not_match: + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long - :param if_metageneration_match: (Optional) Make the operation conditional on whether the - blob's current metageneration matches the given value. + :param if_metageneration_match: + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long - :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the - blob's current metageneration does not match the given value. + :param if_metageneration_not_match: + (Optional) See :ref:`using-if-metageneration-not-match` + + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. 
See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy :param retry: diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index c22e6699c..60178aa2e 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -116,6 +116,25 @@ "A checksum of type `{}` was requested, but checksumming is not available " "for downloads when chunk_size is set." ) +_COMPOSE_IF_GENERATION_LIST_DEPRECATED = ( + "'if_generation_match: type list' is deprecated and supported for " + "backwards-compatibility reasons only. Use 'if_source_generation_match' " + "instead to match source objects' generations." +) +_COMPOSE_IF_GENERATION_LIST_AND_IF_SOURCE_GENERATION_ERROR = ( + "Use 'if_generation_match' to match the generation of the destination " + "object by passing in a generation number, instead of a list. " + "Use 'if_source_generation_match' to match source objects' generations." +) +_COMPOSE_IF_METAGENERATION_LIST_DEPRECATED = ( + "'if_metageneration_match: type list' is deprecated and supported for " + "backwards-compatibility reasons only. Note that the metageneration to " + "be matched is that of the destination blob. Please pass in a single " + "value (type long)." +) +_COMPOSE_IF_SOURCE_GENERATION_MISMATCH_ERROR = ( + "'if_source_generation_match' length must be the same as 'sources' length" +) _DEFAULT_CHUNKSIZE = 104857600 # 1024 * 1024 B * 100 = 100 MB @@ -616,11 +635,11 @@ def exists( self, client=None, - timeout=_DEFAULT_TIMEOUT, if_generation_match=None, if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, ): """Determines whether or not this blob exists. @@ -633,33 +652,26 @@ def exists( (Optional) The client to use. If not passed, falls back to the ``client`` stored on the blob's bucket. - :type timeout: float or tuple - :param timeout: - (Optional) The amount of time, in seconds, to wait - for the server response. See: :ref:`configuring_timeouts` - :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` + + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response.
See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy :param retry: @@ -701,11 +713,11 @@ def exists( def delete( self, client=None, - timeout=_DEFAULT_TIMEOUT, if_generation_match=None, if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, ): """Deletes a blob from Cloud Storage. @@ -718,33 +730,26 @@ def delete( (Optional) The client to use. If not passed, falls back to the ``client`` stored on the blob's bucket. - :type timeout: float or tuple - :param timeout: - (Optional) The amount of time, in seconds, to wait - for the server response. See: :ref:`configuring_timeouts` - :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` + + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy :param retry: @@ -801,25 +806,19 @@ def _get_download_url( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` + :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :rtype: str :returns: The download URL for the current blob. 
@@ -1056,25 +1055,19 @@ def download_to_file( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` + :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: @@ -1173,25 +1166,19 @@ def download_to_filename( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` + :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: @@ -1299,25 +1286,19 @@ def download_as_bytes( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` + :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. 
+ (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: @@ -1418,25 +1399,19 @@ def download_as_string( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` + :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: @@ -1530,26 +1505,19 @@ def download_as_text( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: @@ -1746,26 +1714,19 @@ def _do_multipart_upload( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. 
Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: @@ -1937,26 +1898,19 @@ def _initiate_resumable_upload( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: @@ -2127,26 +2081,19 @@ def _do_resumable_upload( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: @@ -2268,26 +2215,19 @@ def _do_upload( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. 
Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: @@ -2473,26 +2413,19 @@ def upload_from_file( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: @@ -2636,26 +2569,19 @@ def upload_from_filename( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. 
+ (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: @@ -2777,26 +2703,19 @@ def upload_from_string( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: @@ -3226,10 +3145,13 @@ def compose( Setting to 0 makes the operation succeed only if there are no live versions of the object. - Note: In a previous version, this argument worked identically to the - ``if_source_generation_match`` argument. For backwards-compatibility reasons, - if a list is passed in, this argument will behave like ``if_source_generation_match`` - and also issue a DeprecationWarning. + .. note:: + + In a previous version, this argument worked identically to the + ``if_source_generation_match`` argument. For + backwards-compatibility reasons, if a list is passed in, + this argument will behave like ``if_source_generation_match`` + and also issue a DeprecationWarning. :type if_metageneration_match: long :param if_metageneration_match: @@ -3237,13 +3159,14 @@ def compose( destination object's current metageneration matches the given value. - If a list of long is passed in, no match operation will be performed. - (Deprecated: type(list of long) is supported for backwards-compatability reasons only.) + If a list of long is passed in, no match operation will be + performed. (Deprecated: type(list of long) is supported for + backwards-compatibility reasons only.) :type if_source_generation_match: list of long :param if_source_generation_match: - (Optional) Makes the operation conditional on whether the current generation - of each source blob matches the corresponding generation. + (Optional) Makes the operation conditional on whether the current + generation of each source blob matches the corresponding generation. The list must match ``sources`` item-to-item. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy :param retry: @@ -3270,27 +3193,22 @@ def compose( if isinstance(if_generation_match, list): warnings.warn( - "if_generation_match: type list is deprecated and supported for backwards-compatability reasons only."
- "Use if_source_generation_match instead to match source objects generations.", + _COMPOSE_IF_GENERATION_LIST_DEPRECATED, DeprecationWarning, stacklevel=2, ) if if_source_generation_match is not None: raise ValueError( - "Use if_generation_match to match the generation of the destination object by passing in a generation number, instead of a list." - "Use if_source_generation_match to match source objects generations." + _COMPOSE_IF_GENERATION_LIST_AND_IF_SOURCE_GENERATION_ERROR ) - # if_generation_match: type list is deprecated. Instead use if_source_generation_match. if_source_generation_match = if_generation_match if_generation_match = None if isinstance(if_metageneration_match, list): warnings.warn( - "if_metageneration_match: type list is deprecated and supported for backwards-compatability reasons only." - "Note that the metageneration to be matched is that of the destination blob." - "Please pass in a single value (type long).", + _COMPOSE_IF_METAGENERATION_LIST_DEPRECATED, DeprecationWarning, stacklevel=2, ) @@ -3300,9 +3218,7 @@ def compose( if if_source_generation_match is None: if_source_generation_match = [None] * sources_len if len(if_source_generation_match) != sources_len: - raise ValueError( - "'if_source_generation_match' length must be the same as 'sources' length" - ) + raise ValueError(_COMPOSE_IF_SOURCE_GENERATION_MISMATCH_ERROR) source_objects = [] for source, source_generation in zip(sources, if_source_generation_match): @@ -3346,7 +3262,6 @@ def rewrite( source, token=None, client=None, - timeout=_DEFAULT_TIMEOUT, if_generation_match=None, if_generation_not_match=None, if_metageneration_match=None, @@ -3355,6 +3270,7 @@ def rewrite( if_source_generation_not_match=None, if_source_metageneration_match=None, if_source_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, ): """Rewrite source blob into this one. @@ -3376,37 +3292,29 @@ def rewrite( (Optional) The client to use. If not passed, falls back to the ``client`` stored on the blob's bucket. - :type timeout: float or tuple - :param timeout: - (Optional) The amount of time, in seconds, to wait - for the server response. See: :ref:`configuring_timeouts` - :type if_generation_match: long :param if_generation_match: - (Optional) Makes the operation conditional on whether the - destination object's current generation matches the given value. - Setting to 0 makes the operation succeed only if there are no live - versions of the object. + (Optional) See :ref:`using-if-generation-match` + Note that the generation to be matched is that of the + ``destination`` blob. :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Makes the operation conditional on whether the - destination object's current generation does not match the given - value. If no live object exists, the precondition fails. Setting to - 0 makes the operation succeed only if there is a live version of - the object. + (Optional) See :ref:`using-if-generation-not-match` + Note that the generation to be matched is that of the + ``destination`` blob. :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Makes the operation conditional on whether the - destination object's current metageneration matches the given - value. + (Optional) See :ref:`using-if-metageneration-match` + Note that the metageneration to be matched is that of the + ``destination`` blob. 
:type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Makes the operation conditional on whether the - destination object's current metageneration does not match the - given value. + (Optional) See :ref:`using-if-metageneration-not-match` + Note that the metageneration to be matched is that of the + ``destination`` blob. :type if_source_generation_match: long :param if_source_generation_match: @@ -3428,6 +3336,11 @@ def rewrite( (Optional) Makes the operation conditional on whether the source object's current metageneration does not match the given value. + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries` @@ -3533,30 +3446,27 @@ def update_storage_class( :type if_generation_match: long :param if_generation_match: - (Optional) Makes the operation conditional on whether the - destination object's current generation matches the given value. - Setting to 0 makes the operation succeed only if there are no live - versions of the object. + (Optional) See :ref:`using-if-generation-match` + Note that the generation to be matched is that of the + ``destination`` blob. :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Makes the operation conditional on whether the - destination object's current generation does not match the given - value. If no live object exists, the precondition fails. Setting to - 0 makes the operation succeed only if there is a live version of - the object. + (Optional) See :ref:`using-if-generation-not-match` + Note that the generation to be matched is that of the + ``destination`` blob. :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Makes the operation conditional on whether the - destination object's current metageneration matches the given - value. + (Optional) See :ref:`using-if-metageneration-match` + Note that the metageneration to be matched is that of the + ``destination`` blob. :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Makes the operation conditional on whether the - destination object's current metageneration does not match the - given value. + (Optional) See :ref:`using-if-metageneration-not-match` + Note that the metageneration to be matched is that of the + ``destination`` blob. :type if_source_generation_match: long :param if_source_generation_match: @@ -3691,15 +3601,30 @@ def open( newline mode" and writes use the system default. See the Python 'io' module documentation for 'io.TextIOWrapper' for details. - :param kwargs: Keyword arguments to pass to the underlying API calls. + :param kwargs: + Keyword arguments to pass to the underlying API calls. For both uploads and downloads, the following arguments are - supported: "if_generation_match", "if_generation_not_match", - "if_metageneration_match", "if_metageneration_not_match", "timeout", - "retry". For uploads only, the following additional arguments are - supported: "content_type", "num_retries", "predefined_acl", - "checksum". "num_retries" is supported for backwards-compatibility - reasons only; please use "retry" with a Retry object or - ConditionalRetryPolicy instead. 
+ supported: + + - ``if_generation_match`` + - ``if_generation_not_match`` + - ``if_metageneration_match`` + - ``if_metageneration_not_match`` + - ``timeout`` + - ``retry`` + + For uploads only, the following additional arguments are supported: + + - ``content_type`` + - ``num_retries`` + - ``predefined_acl`` + - ``checksum`` + + .. note:: + + ``num_retries`` is supported for backwards-compatibility + reasons only; please use ``retry`` with a Retry object or + ConditionalRetryPolicy instead. :returns: A 'BlobReader' or 'BlobWriter' from 'google.cloud.storage.fileio', or an 'io.TextIOWrapper' around one diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py index a4a27d7aa..0dc4ef76d 100644 --- a/google/cloud/storage/bucket.py +++ b/google/cloud/storage/bucket.py @@ -1045,11 +1045,11 @@ def get_blob( client=None, encryption_key=None, generation=None, - timeout=_DEFAULT_TIMEOUT, if_generation_match=None, if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, **kwargs ): @@ -1079,34 +1079,29 @@ def get_blob( https://cloud.google.com/storage/docs/encryption#customer-supplied. :type generation: long - :param generation: (Optional) If present, selects a specific revision of - this object. - - :type timeout: float or tuple - :param timeout: - (Optional) The amount of time, in seconds, to wait - for the server response. See: :ref:`configuring_timeouts` + :param generation: + (Optional) If present, selects a specific revision of this object. :type if_generation_match: long - :param if_generation_match: (Optional) Make the operation conditional on whether - the blob's current generation matches the given value. - Setting to 0 makes the operation succeed only if there - are no live versions of the blob. + :param if_generation_match: + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long - :param if_generation_not_match: (Optional) Make the operation conditional on whether - the blob's current generation does not match the given - value. If no live blob exists, the precondition fails. - Setting to 0 makes the operation succeed only if there - is a live version of the blob. + :param if_generation_not_match: + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long - :param if_metageneration_match: (Optional) Make the operation conditional on whether the - blob's current metageneration matches the given value. + :param if_metageneration_match: + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long - :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the - blob's current metageneration does not match the given value. + :param if_metageneration_not_match: + (Optional) See :ref:`using-if-metageneration-not-match` + + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. 
See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy :param retry: @@ -1454,11 +1449,11 @@ def delete_blob( blob_name, client=None, generation=None, - timeout=_DEFAULT_TIMEOUT, if_generation_match=None, if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, ): """Deletes a blob from the current bucket. @@ -1487,31 +1482,26 @@ def delete_blob( :param generation: (Optional) If present, permanently deletes a specific revision of this object. - :type timeout: float or tuple - :param timeout: - (Optional) The amount of time, in seconds, to wait - for the server response. See: :ref:`configuring_timeouts` - :type if_generation_match: long - :param if_generation_match: (Optional) Make the operation conditional on whether - the blob's current generation matches the given value. - Setting to 0 makes the operation succeed only if there - are no live versions of the blob. + :param if_generation_match: + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long - :param if_generation_not_match: (Optional) Make the operation conditional on whether - the blob's current generation does not match the given - value. If no live blob exists, the precondition fails. - Setting to 0 makes the operation succeed only if there - is a live version of the blob. + :param if_generation_not_match: + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long - :param if_metageneration_match: (Optional) Make the operation conditional on whether the - blob's current metageneration matches the given value. + :param if_metageneration_match: + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long - :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the - blob's current metageneration does not match the given value. + :param if_metageneration_not_match: + (Optional) See :ref:`using-if-metageneration-not-match` + + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy :param retry: @@ -1581,35 +1571,31 @@ def delete_blobs( :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the current bucket. - :type timeout: float or tuple - :param timeout: - (Optional) The amount of time, in seconds, to wait - for the server response. See: :ref:`configuring_timeouts` - :type if_generation_match: list of long - :param if_generation_match: (Optional) Make the operation conditional on whether - the blob's current generation matches the given value. - Setting to 0 makes the operation succeed only if there - are no live versions of the blob. The list must match - ``blobs`` item-to-item. + :param if_generation_match: + (Optional) See :ref:`using-if-generation-match` + The list must match ``blobs`` item-to-item. :type if_generation_not_match: list of long - :param if_generation_not_match: (Optional) Make the operation conditional on whether - the blob's current generation does not match the given - value. If no live blob exists, the precondition fails.
- Setting to 0 makes the operation succeed only if there - is a live version of the blob. The list must match - ``blobs`` item-to-item. + :param if_generation_not_match: + (Optional) See :ref:`using-if-generation-not-match` + The list must match ``blobs`` item-to-item. :type if_metageneration_match: list of long - :param if_metageneration_match: (Optional) Make the operation conditional on whether the - blob's current metageneration matches the given value. - The list must match ``blobs`` item-to-item. + :param if_metageneration_match: + (Optional) See :ref:`using-if-metageneration-match` + The list must match ``blobs`` item-to-item. :type if_metageneration_not_match: list of long - :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the - blob's current metageneration does not match the given value. - The list must match ``blobs`` item-to-item. + :param if_metageneration_not_match: + (Optional) See :ref:`using-if-metageneration-not-match` + The list must match ``blobs`` item-to-item. + + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy :param retry: @@ -1673,7 +1659,6 @@ def copy_blob( client=None, preserve_acl=True, source_generation=None, - timeout=_DEFAULT_TIMEOUT, if_generation_match=None, if_generation_not_match=None, if_metageneration_match=None, @@ -1682,6 +1667,7 @@ def copy_blob( if_source_generation_not_match=None, if_source_metageneration_match=None, if_source_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, ): """Copy the given blob to the given bucket, optionally with a new name. @@ -1712,67 +1698,54 @@ def copy_blob( :param source_generation: (Optional) The generation of the blob to be copied. - :type timeout: float or tuple - :param timeout: - (Optional) The amount of time, in seconds, to wait - for the server response. See: :ref:`configuring_timeouts` - :type if_generation_match: long - :param if_generation_match: (Optional) Makes the operation - conditional on whether the destination - object's current generation matches the - given value. Setting to 0 makes the - operation succeed only if there are no - live versions of the object. + :param if_generation_match: + (Optional) See :ref:`using-if-generation-match` + Note that the generation to be matched is that of the + ``destination`` blob. :type if_generation_not_match: long - :param if_generation_not_match: (Optional) Makes the operation - conditional on whether the - destination object's current - generation does not match the given - value. If no live object exists, - the precondition fails. Setting to - 0 makes the operation succeed only - if there is a live version - of the object. + :param if_generation_not_match: + (Optional) See :ref:`using-if-generation-not-match` + Note that the generation to be matched is that of the + ``destination`` blob. :type if_metageneration_match: long - :param if_metageneration_match: (Optional) Makes the operation - conditional on whether the - destination object's current - metageneration matches the given - value. + :param if_metageneration_match: + (Optional) See :ref:`using-if-metageneration-match` + Note that the metageneration to be matched is that of the + ``destination`` blob. 
:type if_metageneration_not_match: long - :param if_metageneration_not_match: (Optional) Makes the operation - conditional on whether the - destination object's current - metageneration does not match - the given value. + :param if_metageneration_not_match: + (Optional) See :ref:`using-if-metageneration-not-match` + Note that the metageneration to be matched is that of the + ``destination`` blob. :type if_source_generation_match: long - :param if_source_generation_match: (Optional) Makes the operation - conditional on whether the source - object's generation matches the - given value. + :param if_source_generation_match: + (Optional) Makes the operation conditional on whether the source + object's generation matches the given value. :type if_source_generation_not_match: long - :param if_source_generation_not_match: (Optional) Makes the operation - conditional on whether the source - object's generation does not match - the given value. + :param if_source_generation_not_match: + (Optional) Makes the operation conditional on whether the source + object's generation does not match the given value. :type if_source_metageneration_match: long - :param if_source_metageneration_match: (Optional) Makes the operation - conditional on whether the source - object's current metageneration - matches the given value. + :param if_source_metageneration_match: + (Optional) Makes the operation conditional on whether the source + object's current metageneration matches the given value. :type if_source_metageneration_not_match: long - :param if_source_metageneration_not_match: (Optional) Makes the operation - conditional on whether the source - object's current metageneration - does not match the given value. + :param if_source_metageneration_not_match: + (Optional) Makes the operation conditional on whether the source + object's current metageneration does not match the given value. + + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy :param retry: @@ -1880,65 +1853,52 @@ def rename_blob( to the ``client`` stored on the current bucket. :type if_generation_match: long - :param if_generation_match: (Optional) Makes the operation - conditional on whether the destination - object's current generation matches the - given value. Setting to 0 makes the - operation succeed only if there are no - live versions of the object. + :param if_generation_match: + (Optional) See :ref:`using-if-generation-match` + Note that the generation to be matched is that of the + ``destination`` blob. :type if_generation_not_match: long - :param if_generation_not_match: (Optional) Makes the operation - conditional on whether the - destination object's current - generation does not match the given - value. If no live object exists, - the precondition fails. Setting to - 0 makes the operation succeed only - if there is a live version - of the object. + :param if_generation_not_match: + (Optional) See :ref:`using-if-generation-not-match` + Note that the generation to be matched is that of the + ``destination`` blob. :type if_metageneration_match: long - :param if_metageneration_match: (Optional) Makes the operation - conditional on whether the - destination object's current - metageneration matches the given - value. 
+ :param if_metageneration_match: + (Optional) See :ref:`using-if-metageneration-match` + Note that the metageneration to be matched is that of the + ``destination`` blob. :type if_metageneration_not_match: long - :param if_metageneration_not_match: (Optional) Makes the operation - conditional on whether the - destination object's current - metageneration does not match - the given value. + :param if_metageneration_not_match: + (Optional) See :ref:`using-if-metageneration-not-match` + Note that the metageneration to be matched is that of the + ``destination`` blob. :type if_source_generation_match: long - :param if_source_generation_match: (Optional) Makes the operation - conditional on whether the source - object's generation matches the - given value. Also used in the - delete request. + :param if_source_generation_match: + (Optional) Makes the operation conditional on whether the source + object's generation matches the given value. Also used in the + (implied) delete request. :type if_source_generation_not_match: long - :param if_source_generation_not_match: (Optional) Makes the operation - conditional on whether the source - object's generation does not match - the given value. Also used in the - delete request. + :param if_source_generation_not_match: + (Optional) Makes the operation conditional on whether the source + object's generation does not match the given value. Also used in + the (implied) delete request. :type if_source_metageneration_match: long - :param if_source_metageneration_match: (Optional) Makes the operation - conditional on whether the source - object's current metageneration - matches the given value.Also used in the - delete request. + :param if_source_metageneration_match: + (Optional) Makes the operation conditional on whether the source + object's current metageneration matches the given value. Also used + in the (implied) delete request. :type if_source_metageneration_not_match: long - :param if_source_metageneration_not_match: (Optional) Makes the operation - conditional on whether the source - object's current metageneration - does not match the given value. - Also used in the delete request. + :param if_source_metageneration_not_match: + (Optional) Makes the operation conditional on whether the source + object's current metageneration does not match the given value. + Also used in the (implied) delete request. :type timeout: float or tuple :param timeout: diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py index df42f0c11..d6f688d92 100644 --- a/google/cloud/storage/client.py +++ b/google/cloud/storage/client.py @@ -983,37 +983,35 @@ def download_blob_to_file( str, \ ]): The blob resource to pass or URI to download. + file_obj (file): A file handle to which to write the blob's data. + start (int): (Optional) The first byte in a range to be downloaded. + end (int): (Optional) The last byte in a range to be downloaded. + raw_download (bool): (Optional) If true, download the object without any expansion. - if_generation_match (long): - (Optional) Make the operation conditional on whether - the blob's current generation matches the given value. - Setting to 0 makes the operation succeed only if there - are no live versions of the blob. - if_generation_not_match (long): - (Optional) Make the operation conditional on whether - the blob's current generation does not match the given - value. If no live blob exists, the precondition fails. - Setting to 0 makes the operation succeed only if there - is a live version of the blob. 
- if_metageneration_match (long): - (Optional) Make the operation conditional on whether the - blob's current metageneration matches the given value. - if_metageneration_not_match (long): - (Optional) Make the operation conditional on whether the - blob's current metageneration does not match the given value. + + if_generation_match: long + (Optional) See :ref:`using-if-generation-match` + + if_generation_not_match: long + (Optional) See :ref:`using-if-generation-not-match` + + if_metageneration_match: long + (Optional) See :ref:`using-if-metageneration-match` + + if_metageneration_not_match: long + (Optional) See :ref:`using-if-metageneration-not-match` + timeout ([Union[float, Tuple[float, float]]]): - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` + checksum (str): (Optional) The type of checksum to compute to verify the integrity of the object. The response headers must contain a checksum of the diff --git a/google/cloud/storage/fileio.py b/google/cloud/storage/fileio.py index e74b9ed4a..6ac8e057f 100644 --- a/google/cloud/storage/fileio.py +++ b/google/cloud/storage/fileio.py @@ -67,7 +67,8 @@ class BlobReader(io.BufferedIOBase): The default is the chunk_size of the blob, or 40MiB. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable + :param retry: + (Optional) How to retry the RPC. A None value will disable retries. A google.api_core.retry.Retry value will enable retries, and the object will define retriable response codes and errors and configure backoff and timeout options. @@ -88,10 +89,15 @@ class BlobReader(io.BufferedIOBase): configuration changes for Retry objects such as delays and deadlines are respected. - :param download_kwargs: Keyword arguments to pass to the underlying API - calls. The following arguments are supported: "if_generation_match", - "if_generation_not_match", "if_metageneration_match", - "if_metageneration_not_match", "timeout". + :param download_kwargs: + Keyword arguments to pass to the underlying API calls. + The following arguments are supported: + + - ``if_generation_match`` + - ``if_generation_not_match`` + - ``if_metageneration_match`` + - ``if_metageneration_not_match`` + - ``timeout`` """ def __init__(self, blob, chunk_size=None, retry=DEFAULT_RETRY, **download_kwargs): @@ -230,7 +236,8 @@ class BlobWriter(io.BufferedIOBase): expectations. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable + :param retry: + (Optional) How to retry the RPC. A None value will disable retries. A google.api_core.retry.Retry value will enable retries, and the object will define retriable response codes and errors and configure backoff and timeout options. @@ -251,11 +258,19 @@ class BlobWriter(io.BufferedIOBase): configuration changes for Retry objects such as delays and deadlines are respected. - :param upload_kwargs: Keyword arguments to pass to the underlying API - calls. 
The following arguments are supported: "if_generation_match", - "if_generation_not_match", "if_metageneration_match", - "if_metageneration_not_match", "timeout", "content_type", - "num_retries", "predefined_acl", "checksum". + :param upload_kwargs: + Keyword arguments to pass to the underlying API + calls. The following arguments are supported: + + - ``if_generation_match`` + - ``if_generation_not_match`` + - ``if_metageneration_match`` + - ``if_metageneration_not_match`` + - ``timeout`` + - ``content_type`` + - ``num_retries`` + - ``predefined_acl`` + - ``checksum`` """ def __init__( diff --git a/noxfile.py b/noxfile.py index 0b85dc8b0..c34e8b981 100644 --- a/noxfile.py +++ b/noxfile.py @@ -179,6 +179,7 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") + session.install("grpcio") session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index 158109705..a21385821 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -3996,7 +3996,9 @@ def test_compose_w_generation_match(self): ) @mock.patch("warnings.warn") - def test_compose_w_generation_match_w_warning(self, mock_warn): + def test_compose_w_if_generation_match_list_w_warning(self, mock_warn): + from google.cloud.storage.blob import _COMPOSE_IF_GENERATION_LIST_DEPRECATED + source_1_name = "source-1" source_2_name = "source-2" destination_name = "destination" @@ -4045,13 +4047,10 @@ def test_compose_w_generation_match_w_warning(self, mock_warn): ) mock_warn.assert_called_with( - "if_generation_match: type list is deprecated and supported for backwards-compatability reasons only." - "Use if_source_generation_match instead to match source objects generations.", - DeprecationWarning, - stacklevel=2, + _COMPOSE_IF_GENERATION_LIST_DEPRECATED, DeprecationWarning, stacklevel=2, ) - def test_compose_invalid_generation_match(self): + def test_compose_w_if_generation_match_and_if_s_generation_match(self): source_1_name = "source-1" source_2_name = "source-2" destination_name = "destination" @@ -4073,7 +4072,9 @@ def test_compose_invalid_generation_match(self): client._post_resource.assert_not_called() @mock.patch("warnings.warn") - def test_compose_w_metageneration_match_w_warning(self, mock_warn): + def test_compose_w_if_metageneration_match_list_w_warning(self, mock_warn): + from google.cloud.storage.blob import _COMPOSE_IF_METAGENERATION_LIST_DEPRECATED + source_1_name = "source-1" source_2_name = "source-2" destination_name = "destination" @@ -4108,9 +4109,7 @@ def test_compose_w_metageneration_match_w_warning(self, mock_warn): ) mock_warn.assert_called_with( - "if_metageneration_match: type list is deprecated and supported for backwards-compatability reasons only." - "Note that the metageneration to be matched is that of the destination blob." 
- "Please pass in a single value (type long).", + _COMPOSE_IF_METAGENERATION_LIST_DEPRECATED, DeprecationWarning, stacklevel=2, ) From edce85e7ddcb931ed3db2a9583dd6a7e791250a1 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 21 Jun 2021 22:31:22 -0700 Subject: [PATCH 30/30] chore: release 1.39.0 (#465) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 21 +++++++++++++++++++++ google/cloud/storage/version.py | 2 +- 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4491c5cad..066b75505 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,27 @@ [1]: https://pypi.org/project/google-cloud-storage/#history +## [1.39.0](https://www.github.com/googleapis/python-storage/compare/v1.38.0...v1.39.0) (2021-06-21) + + +### Features + +* media operation retries can be configured using the same interface as with non-media operation ([#447](https://www.github.com/googleapis/python-storage/issues/447)) ([0dbbb8a](https://www.github.com/googleapis/python-storage/commit/0dbbb8ac17a4b632707485ee6c7cc15e4670efaa)) + + +### Bug Fixes + +* add ConnectionError to default retry ([#445](https://www.github.com/googleapis/python-storage/issues/445)) ([8344253](https://www.github.com/googleapis/python-storage/commit/8344253a1969b9d04b81f87a6d7bddd3ddb55006)) +* apply idempotency policies for ACLs ([#458](https://www.github.com/googleapis/python-storage/issues/458)) ([2232f38](https://www.github.com/googleapis/python-storage/commit/2232f38933dbdfeb4f6585291794d332771ffdf2)) +* replace python lifecycle action parsing ValueError with warning ([#437](https://www.github.com/googleapis/python-storage/issues/437)) ([2532d50](https://www.github.com/googleapis/python-storage/commit/2532d506b44fc1ef0fa0a996822d29e7459c465a)) +* revise blob.compose query parameters `if_generation_match` ([#454](https://www.github.com/googleapis/python-storage/issues/454)) ([70d19e7](https://www.github.com/googleapis/python-storage/commit/70d19e72831dee112bb07f38b50beef4890c1155)) + + +### Documentation + +* streamline 'timeout' / 'retry' docs in docstrings ([#461](https://www.github.com/googleapis/python-storage/issues/461)) ([78b2eba](https://www.github.com/googleapis/python-storage/commit/78b2eba81003b437cd24f2b8d269ea2455682507)) +* streamline docstrings for conditional parmas ([#464](https://www.github.com/googleapis/python-storage/issues/464)) ([6999370](https://www.github.com/googleapis/python-storage/commit/69993702390322df07cc2e818003186a47524c2b)) + ## 
[1.38.0](https://www.github.com/googleapis/python-storage/compare/v1.37.1...v1.38.0) (2021-04-26) diff --git a/google/cloud/storage/version.py b/google/cloud/storage/version.py index 31e5a5cad..05c5a222e 100644 --- a/google/cloud/storage/version.py +++ b/google/cloud/storage/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "1.38.0" +__version__ = "1.39.0"
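For the 1.39.0 feature noted above (media operation retries configured via the same interface as non-media operations, PR #447), a minimal sketch of what that interface looks like for a download; the bucket and object names and the 60-second deadline are illustrative assumptions:

    from google.cloud import storage
    from google.cloud.storage.retry import DEFAULT_RETRY

    client = storage.Client()
    blob = client.bucket("my-bucket").blob("example.txt")  # hypothetical names

    # As of 1.39.0, media operations (downloads and uploads) accept the
    # same Retry / ConditionalRetryPolicy objects as non-media JSON API
    # calls; here the default policy is narrowed to a 60s overall deadline.
    blob.download_to_filename(
        "/tmp/example.txt",
        retry=DEFAULT_RETRY.with_deadline(60.0),
    )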