From 4ed5666d8eaff94f5d2b8ffa4ae21b786955dd7a Mon Sep 17 00:00:00 2001 From: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Date: Wed, 29 Jul 2020 08:23:22 +0530 Subject: [PATCH 01/19] tests:: add unit test to fix coverage (#217) --- tests/unit/test_blob.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index 54aeae671..cd4a129d1 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -1482,6 +1482,25 @@ def test_download_as_string_w_response_headers(self): self.assertEqual(blob.md5_hash, "CS9tHYTtyFntzj7B9nkkJQ") self.assertEqual(blob.crc32c, "4gcgLQ") + def test_download_as_string_w_hash_response_header_none(self): + blob_name = "blob-name" + client = mock.Mock(spec=["_http"]) + bucket = _Bucket(client) + media_link = "http://example.com/media/" + properties = {"mediaLink": media_link} + blob = self._make_one(blob_name, bucket=bucket, properties=properties) + + response = self._mock_requests_response( + http_client.OK, + headers={"X-Goog-Hash": ""}, + # { "x": 5 } gzipped + content=b"\x1f\x8b\x08\x00\xcfo\x17_\x02\xff\xabVP\xaaP\xb2R0U\xa8\x05\x00\xa1\xcaQ\x93\n\x00\x00\x00", + ) + blob._extract_headers_from_download(response) + + self.assertIsNone(blob.md5_hash) + self.assertIsNone(blob.crc32c) + def test_download_as_string_w_generation_match(self): GENERATION_NUMBER = 6 MEDIA_LINK = "http://example.com/media/" From e089b286fcf6bc60cbc6b8639ed20e07c5e7c528 Mon Sep 17 00:00:00 2001 From: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Date: Wed, 29 Jul 2020 12:58:00 +0530 Subject: [PATCH 02/19] tests: fix system tests (#219) Co-authored-by: Tres Seaver --- tests/system/test_system.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index e135538a8..992b0f81b 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -1280,12 +1280,16 @@ def test_create_signed_read_url_v4_w_csek(self): def test_create_signed_read_url_v2_w_access_token(self): client = iam_credentials_v1.IAMCredentialsClient() service_account_email = Config.CLIENT._credentials.service_account_email - name = client.service_account_path("-", service_account_email) + name = google.api_core.path_template.expand( + "projects/{project}/serviceAccounts/{service_account}", + project="-", + service_account=service_account_email, + ) scope = [ "https://www.googleapis.com/auth/devstorage.read_write", "https://www.googleapis.com/auth/iam", ] - response = client.generate_access_token(name, scope) + response = client.generate_access_token(name=name, scope=scope) self._create_signed_read_url_helper( service_account_email=service_account_email, access_token=response.access_token, @@ -1294,12 +1298,16 @@ def test_create_signed_read_url_v2_w_access_token(self): def test_create_signed_read_url_v4_w_access_token(self): client = iam_credentials_v1.IAMCredentialsClient() service_account_email = Config.CLIENT._credentials.service_account_email - name = client.service_account_path("-", service_account_email) + name = google.api_core.path_template.expand( + "projects/{project}/serviceAccounts/{service_account}", + project="-", + service_account=service_account_email, + ) scope = [ 
"https://www.googleapis.com/auth/devstorage.read_write", "https://www.googleapis.com/auth/iam", ] - response = client.generate_access_token(name, scope) + response = client.generate_access_token(name=name, scope=scope) self._create_signed_read_url_helper( version="v4", service_account_email=service_account_email, From 9895bdffa60f6455850299cb5819d8b55ec5fd4e Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 4 Aug 2020 21:19:23 -0400 Subject: [PATCH 03/19] testing: fix / skip systests broken by dependency releases (#227) See #226. This PR doesn't *fix* the issue, because the KMS breakage still exists: it just skips the KMS systests under Python3, until we have dropped support for Python2. --- tests/system/test_system.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 992b0f81b..84e1fd061 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -34,6 +34,7 @@ from google.cloud.storage.bucket import LifecycleRuleSetStorageClass from google.cloud import kms import google.api_core +from google.api_core import path_template import google.oauth2 from test_utils.retry import RetryErrors from test_utils.system import unique_resource_id @@ -1280,7 +1281,7 @@ def test_create_signed_read_url_v4_w_csek(self): def test_create_signed_read_url_v2_w_access_token(self): client = iam_credentials_v1.IAMCredentialsClient() service_account_email = Config.CLIENT._credentials.service_account_email - name = google.api_core.path_template.expand( + name = path_template.expand( "projects/{project}/serviceAccounts/{service_account}", project="-", service_account=service_account_email, @@ -1298,7 +1299,7 @@ def test_create_signed_read_url_v2_w_access_token(self): def test_create_signed_read_url_v4_w_access_token(self): client = iam_credentials_v1.IAMCredentialsClient() service_account_email = Config.CLIENT._credentials.service_account_email - name = google.api_core.path_template.expand( + name = path_template.expand( "projects/{project}/serviceAccounts/{service_account}", project="-", service_account=service_account_email, @@ -1828,6 +1829,14 @@ def test_access_to_public_bucket(self): retry_429_503(blob.download_to_file)(stream) +_KMS_2_0_BREAKAGE_MESSAGE = """\ +KMS 2.0.0 incompatible with our test setup. 
+ +See https://github.com/googleapis/python-storage/issues/226 +""" + + +@unittest.skipIf(six.PY3, reason=_KMS_2_0_BREAKAGE_MESSAGE) class TestKMSIntegration(TestStorageFiles): FILENAMES = ("file01.txt",) From 4c5adfa6e05bf018d72ee1a7e99679fd55f2c662 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 4 Aug 2020 19:23:12 -0700 Subject: [PATCH 04/19] docs: update docs build (via synth) (#222) * migrate to Trampoline V2 * add docs-presubmit job * create docfx yaml files and upload them to another bucket Source-Author: Takashi Matsuo Source-Date: Thu Jul 30 18:26:35 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: 39b527a39f5cd56d4882b3874fc08eed4756cebe Source-Link: https://github.com/googleapis/synthtool/commit/39b527a39f5cd56d4882b3874fc08eed4756cebe Source-Author: Takashi Matsuo Source-Date: Fri Jul 31 16:17:13 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: bfcdbe0da977b2de6c1c0471bb6dc2f1e13bf669 Source-Link: https://github.com/googleapis/synthtool/commit/bfcdbe0da977b2de6c1c0471bb6dc2f1e13bf669 Co-authored-by: Tres Seaver --- .gitignore | 3 +- .kokoro/build.sh | 8 +- .kokoro/docker/docs/Dockerfile | 98 ++++++ .kokoro/docker/docs/fetch_gpg_keys.sh | 45 +++ .kokoro/docs/common.cfg | 21 +- .kokoro/docs/docs-presubmit.cfg | 17 + .kokoro/publish-docs.sh | 39 ++- .kokoro/trampoline_v2.sh | 487 ++++++++++++++++++++++++++ .trampolinerc | 51 +++ docs/conf.py | 3 + noxfile.py | 37 ++ synth.metadata | 6 +- 12 files changed, 793 insertions(+), 22 deletions(-) create mode 100644 .kokoro/docker/docs/Dockerfile create mode 100755 .kokoro/docker/docs/fetch_gpg_keys.sh create mode 100644 .kokoro/docs/docs-presubmit.cfg create mode 100755 .kokoro/trampoline_v2.sh create mode 100644 .trampolinerc diff --git a/.gitignore b/.gitignore index b87e1ed58..b9daa52f1 100644 --- a/.gitignore +++ b/.gitignore @@ -46,6 +46,7 @@ pip-log.txt # Built documentation docs/_build bigquery/docs/generated +docs.metadata # Virtual environment env/ @@ -57,4 +58,4 @@ system_tests/local_test_setup # Make sure a generated file isn't accidentally committed. pylintrc -pylintrc.test \ No newline at end of file +pylintrc.test diff --git a/.kokoro/build.sh b/.kokoro/build.sh index e8b4e8bfa..9e7febd82 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -36,4 +36,10 @@ python3.6 -m pip uninstall --yes --quiet nox-automation python3.6 -m pip install --upgrade --quiet nox python3.6 -m nox --version -python3.6 -m nox +# If NOX_SESSION is set, it only runs the specified session, +# otherwise run all the sessions. +if [[ -n "${NOX_SESSION:-}" ]]; then + python3.6 -m nox -s "${NOX_SESSION:-}" +else + python3.6 -m nox +fi diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile new file mode 100644 index 000000000..412b0b56a --- /dev/null +++ b/.kokoro/docker/docs/Dockerfile @@ -0,0 +1,98 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from ubuntu:20.04 + +ENV DEBIAN_FRONTEND noninteractive + +# Ensure local Python is preferred over distribution Python. +ENV PATH /usr/local/bin:$PATH + +# Install dependencies. +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + apt-transport-https \ + build-essential \ + ca-certificates \ + curl \ + dirmngr \ + git \ + gpg-agent \ + graphviz \ + libbz2-dev \ + libdb5.3-dev \ + libexpat1-dev \ + libffi-dev \ + liblzma-dev \ + libreadline-dev \ + libsnappy-dev \ + libssl-dev \ + libsqlite3-dev \ + portaudio19-dev \ + redis-server \ + software-properties-common \ + ssh \ + sudo \ + tcl \ + tcl-dev \ + tk \ + tk-dev \ + uuid-dev \ + wget \ + zlib1g-dev \ + && add-apt-repository universe \ + && apt-get update \ + && apt-get -y install jq \ + && apt-get clean autoclean \ + && apt-get autoremove -y \ + && rm -rf /var/lib/apt/lists/* \ + && rm -f /var/cache/apt/archives/*.deb + + +COPY fetch_gpg_keys.sh /tmp +# Install the desired versions of Python. +RUN set -ex \ + && export GNUPGHOME="$(mktemp -d)" \ + && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \ + && /tmp/fetch_gpg_keys.sh \ + && for PYTHON_VERSION in 3.7.8 3.8.5; do \ + wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \ + && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \ + && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \ + && rm -r python-${PYTHON_VERSION}.tar.xz.asc \ + && mkdir -p /usr/src/python-${PYTHON_VERSION} \ + && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \ + && rm python-${PYTHON_VERSION}.tar.xz \ + && cd /usr/src/python-${PYTHON_VERSION} \ + && ./configure \ + --enable-shared \ + # This works only on Python 2.7 and throws a warning on every other + # version, but seems otherwise harmless. + --enable-unicode=ucs4 \ + --with-system-ffi \ + --without-ensurepip \ + && make -j$(nproc) \ + && make install \ + && ldconfig \ + ; done \ + && rm -rf "${GNUPGHOME}" \ + && rm -rf /usr/src/python* \ + && rm -rf ~/.cache/ + +RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ + && python3.7 /tmp/get-pip.py \ + && python3.8 /tmp/get-pip.py \ + && rm /tmp/get-pip.py + +CMD ["python3.7"] diff --git a/.kokoro/docker/docs/fetch_gpg_keys.sh b/.kokoro/docker/docs/fetch_gpg_keys.sh new file mode 100755 index 000000000..d653dd868 --- /dev/null +++ b/.kokoro/docker/docs/fetch_gpg_keys.sh @@ -0,0 +1,45 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# A script to fetch gpg keys with retry. +# Avoid jinja parsing the file. +# + +function retry { + if [[ "${#}" -le 1 ]]; then + echo "Usage: ${0} retry_count commands.." + exit 1 + fi + local retries=${1} + local command="${@:2}" + until [[ "${retries}" -le 0 ]]; do + $command && return 0 + if [[ $? -ne 0 ]]; then + echo "command failed, retrying" + ((retries--)) + fi + done + return 1 +} + +# 3.6.9, 3.7.5 (Ned Deily) +retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ + 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D + +# 3.8.0 (Łukasz Langa) +retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ + E3FF2839C048B25C084DEBE9B26995E310250568 + +# diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg index b1f35bff3..ea96964f1 100644 --- a/.kokoro/docs/common.cfg +++ b/.kokoro/docs/common.cfg @@ -11,12 +11,12 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-storage/.kokoro/trampoline.sh" +build_file: "python-storage/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" + value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" } env_vars: { key: "TRAMPOLINE_BUILD_FILE" @@ -28,6 +28,23 @@ env_vars: { value: "docs-staging" } +env_vars: { + key: "V2_STAGING_BUCKET" + value: "docs-staging-v2-staging" +} + +# It will upload the docker image after successful builds. +env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "true" +} + +# It will always build the docker image. +env_vars: { + key: "TRAMPOLINE_DOCKERFILE" + value: ".kokoro/docker/docs/Dockerfile" +} + # Fetch the token needed for reporting release status to GitHub before_action { fetch_keystore { diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg new file mode 100644 index 000000000..111810782 --- /dev/null +++ b/.kokoro/docs/docs-presubmit.cfg @@ -0,0 +1,17 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "STAGING_BUCKET" + value: "gcloud-python-test" +} + +env_vars: { + key: "V2_STAGING_BUCKET" + value: "gcloud-python-test" +} + +# We only upload the image in the main `docs` build. +env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "false" +} diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index 45091eddd..8acb14e80 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -18,26 +18,16 @@ set -eo pipefail # Disable buffering, so that the logs stream through. 
export PYTHONUNBUFFERED=1 -cd github/python-storage - -# Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --user --upgrade --quiet nox +python3 -m nox --version # build docs nox -s docs -python3 -m pip install gcp-docuploader - -# install a json parser -sudo apt-get update -sudo apt-get -y install software-properties-common -sudo add-apt-repository universe -sudo apt-get update -sudo apt-get -y install jq +python3 -m pip install --user gcp-docuploader # create metadata python3 -m docuploader create-metadata \ @@ -52,4 +42,23 @@ python3 -m docuploader create-metadata \ cat docs.metadata # upload docs -python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging +python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" + + +# docfx yaml files +nox -s docfx + +# create metadata. +python3 -m docuploader create-metadata \ + --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ + --version=$(python3 setup.py --version) \ + --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ + --distribution-name=$(python3 setup.py --name) \ + --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ + --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ + --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) + +cat docs.metadata + +# upload docs +python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh new file mode 100755 index 000000000..719bcd5ba --- /dev/null +++ b/.kokoro/trampoline_v2.sh @@ -0,0 +1,487 @@ +#!/usr/bin/env bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# trampoline_v2.sh +# +# This script does 3 things. +# +# 1. Prepare the Docker image for the test +# 2. Run the Docker with appropriate flags to run the test +# 3. Upload the newly built Docker image +# +# in a way that is somewhat compatible with trampoline_v1. +# +# To run this script, first download few files from gcs to /dev/shm. +# (/dev/shm is passed into the container as KOKORO_GFILE_DIR). +# +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm +# +# Then run the script. +# .kokoro/trampoline_v2.sh +# +# These environment variables are required: +# TRAMPOLINE_IMAGE: The docker image to use. +# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. 
+# +# You can optionally change these environment variables: +# TRAMPOLINE_IMAGE_UPLOAD: +# (true|false): Whether to upload the Docker image after the +# successful builds. +# TRAMPOLINE_BUILD_FILE: The script to run in the docker container. +# TRAMPOLINE_WORKSPACE: The workspace path in the docker container. +# Defaults to /workspace. +# Potentially there are some repo specific envvars in .trampolinerc in +# the project root. + + +set -euo pipefail + +TRAMPOLINE_VERSION="2.0.5" + +if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then + readonly IO_COLOR_RED="$(tput setaf 1)" + readonly IO_COLOR_GREEN="$(tput setaf 2)" + readonly IO_COLOR_YELLOW="$(tput setaf 3)" + readonly IO_COLOR_RESET="$(tput sgr0)" +else + readonly IO_COLOR_RED="" + readonly IO_COLOR_GREEN="" + readonly IO_COLOR_YELLOW="" + readonly IO_COLOR_RESET="" +fi + +function function_exists { + [ $(LC_ALL=C type -t $1)"" == "function" ] +} + +# Logs a message using the given color. The first argument must be one +# of the IO_COLOR_* variables defined above, such as +# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the +# given color. The log message will also have an RFC-3339 timestamp +# prepended (in UTC). You can disable the color output by setting +# TERM=vt100. +function log_impl() { + local color="$1" + shift + local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")" + echo "================================================================" + echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}" + echo "================================================================" +} + +# Logs the given message with normal coloring and a timestamp. +function log() { + log_impl "${IO_COLOR_RESET}" "$@" +} + +# Logs the given message in green with a timestamp. +function log_green() { + log_impl "${IO_COLOR_GREEN}" "$@" +} + +# Logs the given message in yellow with a timestamp. +function log_yellow() { + log_impl "${IO_COLOR_YELLOW}" "$@" +} + +# Logs the given message in red with a timestamp. +function log_red() { + log_impl "${IO_COLOR_RED}" "$@" +} + +readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX) +readonly tmphome="${tmpdir}/h" +mkdir -p "${tmphome}" + +function cleanup() { + rm -rf "${tmpdir}" +} +trap cleanup EXIT + +RUNNING_IN_CI="${RUNNING_IN_CI:-false}" + +# The workspace in the container, defaults to /workspace. +TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}" + +pass_down_envvars=( + # TRAMPOLINE_V2 variables. + # Tells scripts whether they are running as part of CI or not. + "RUNNING_IN_CI" + # Indicates which CI system we're in. + "TRAMPOLINE_CI" + # Indicates the version of the script. + "TRAMPOLINE_VERSION" +) + +log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}" + +# Detect which CI systems we're in. If we're in any of the CI systems +# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be +# the name of the CI system. Both envvars will be passing down to the +# container for telling which CI system we're in. +if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then + # descriptive env var for indicating it's on CI. + RUNNING_IN_CI="true" + TRAMPOLINE_CI="kokoro" + if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then + if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then + log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting." + exit 1 + fi + # This service account will be activated later. 
+ TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" + else + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + gcloud auth list + fi + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet + fi + pass_down_envvars+=( + # KOKORO dynamic variables. + "KOKORO_BUILD_NUMBER" + "KOKORO_BUILD_ID" + "KOKORO_JOB_NAME" + "KOKORO_GIT_COMMIT" + "KOKORO_GITHUB_COMMIT" + "KOKORO_GITHUB_PULL_REQUEST_NUMBER" + "KOKORO_GITHUB_PULL_REQUEST_COMMIT" + # For Build Cop Bot + "KOKORO_GITHUB_COMMIT_URL" + "KOKORO_GITHUB_PULL_REQUEST_URL" + ) +elif [[ "${TRAVIS:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="travis" + pass_down_envvars+=( + "TRAVIS_BRANCH" + "TRAVIS_BUILD_ID" + "TRAVIS_BUILD_NUMBER" + "TRAVIS_BUILD_WEB_URL" + "TRAVIS_COMMIT" + "TRAVIS_COMMIT_MESSAGE" + "TRAVIS_COMMIT_RANGE" + "TRAVIS_JOB_NAME" + "TRAVIS_JOB_NUMBER" + "TRAVIS_JOB_WEB_URL" + "TRAVIS_PULL_REQUEST" + "TRAVIS_PULL_REQUEST_BRANCH" + "TRAVIS_PULL_REQUEST_SHA" + "TRAVIS_PULL_REQUEST_SLUG" + "TRAVIS_REPO_SLUG" + "TRAVIS_SECURE_ENV_VARS" + "TRAVIS_TAG" + ) +elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="github-workflow" + pass_down_envvars+=( + "GITHUB_WORKFLOW" + "GITHUB_RUN_ID" + "GITHUB_RUN_NUMBER" + "GITHUB_ACTION" + "GITHUB_ACTIONS" + "GITHUB_ACTOR" + "GITHUB_REPOSITORY" + "GITHUB_EVENT_NAME" + "GITHUB_EVENT_PATH" + "GITHUB_SHA" + "GITHUB_REF" + "GITHUB_HEAD_REF" + "GITHUB_BASE_REF" + ) +elif [[ "${CIRCLECI:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="circleci" + pass_down_envvars+=( + "CIRCLE_BRANCH" + "CIRCLE_BUILD_NUM" + "CIRCLE_BUILD_URL" + "CIRCLE_COMPARE_URL" + "CIRCLE_JOB" + "CIRCLE_NODE_INDEX" + "CIRCLE_NODE_TOTAL" + "CIRCLE_PREVIOUS_BUILD_NUM" + "CIRCLE_PROJECT_REPONAME" + "CIRCLE_PROJECT_USERNAME" + "CIRCLE_REPOSITORY_URL" + "CIRCLE_SHA1" + "CIRCLE_STAGE" + "CIRCLE_USERNAME" + "CIRCLE_WORKFLOW_ID" + "CIRCLE_WORKFLOW_JOB_ID" + "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" + "CIRCLE_WORKFLOW_WORKSPACE_ID" + ) +fi + +# Configure the service account for pulling the docker image. +function repo_root() { + local dir="$1" + while [[ ! -d "${dir}/.git" ]]; do + dir="$(dirname "$dir")" + done + echo "${dir}" +} + +# Detect the project root. In CI builds, we assume the script is in +# the git tree and traverse from there, otherwise, traverse from `pwd` +# to find `.git` directory. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + PROGRAM_PATH="$(realpath "$0")" + PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" + PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" +else + PROJECT_ROOT="$(repo_root $(pwd))" +fi + +log_yellow "Changing to the project root: ${PROJECT_ROOT}." +cd "${PROJECT_ROOT}" + +# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need +# to use this environment variable in `PROJECT_ROOT`. +if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then + + mkdir -p "${tmpdir}/gcloud" + gcloud_config_dir="${tmpdir}/gcloud" + + log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." + export CLOUDSDK_CONFIG="${gcloud_config_dir}" + + log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." + gcloud auth activate-service-account \ + --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet +fi + +required_envvars=( + # The basic trampoline configurations. 
+ "TRAMPOLINE_IMAGE" + "TRAMPOLINE_BUILD_FILE" +) + +if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then + source "${PROJECT_ROOT}/.trampolinerc" +fi + +log_yellow "Checking environment variables." +for e in "${required_envvars[@]}" +do + if [[ -z "${!e:-}" ]]; then + log "Missing ${e} env var. Aborting." + exit 1 + fi +done + +# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 +# script: e.g. "github/repo-name/.kokoro/run_tests.sh" +TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" +log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" + +# ignore error on docker operations and test execution +set +e + +log_yellow "Preparing Docker image." +# We only download the docker image in CI builds. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + # Download the docker image specified by `TRAMPOLINE_IMAGE` + + # We may want to add --max-concurrent-downloads flag. + + log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." + if docker pull "${TRAMPOLINE_IMAGE}"; then + log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="true" + else + log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="false" + fi +else + # For local run, check if we have the image. + if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then + has_image="true" + else + has_image="false" + fi +fi + + +# The default user for a Docker container has uid 0 (root). To avoid +# creating root-owned files in the build directory we tell docker to +# use the current user ID. +user_uid="$(id -u)" +user_gid="$(id -g)" +user_name="$(id -un)" + +# To allow docker in docker, we add the user to the docker group in +# the host os. +docker_gid=$(cut -d: -f3 < <(getent group docker)) + +update_cache="false" +if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then + # Build the Docker image from the source. + context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") + docker_build_flags=( + "-f" "${TRAMPOLINE_DOCKERFILE}" + "-t" "${TRAMPOLINE_IMAGE}" + "--build-arg" "UID=${user_uid}" + "--build-arg" "USERNAME=${user_name}" + ) + if [[ "${has_image}" == "true" ]]; then + docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") + fi + + log_yellow "Start building the docker image." + if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then + echo "docker build" "${docker_build_flags[@]}" "${context_dir}" + fi + + # ON CI systems, we want to suppress docker build logs, only + # output the logs when it fails. + if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + if docker build "${docker_build_flags[@]}" "${context_dir}" \ + > "${tmpdir}/docker_build.log" 2>&1; then + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + cat "${tmpdir}/docker_build.log" + fi + + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + log_yellow "Dumping the build logs:" + cat "${tmpdir}/docker_build.log" + exit 1 + fi + else + if docker build "${docker_build_flags[@]}" "${context_dir}"; then + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + exit 1 + fi + fi +else + if [[ "${has_image}" != "true" ]]; then + log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting." + exit 1 + fi +fi + +# We use an array for the flags so they are easier to document. +docker_flags=( + # Remove the container after it exists. + "--rm" + + # Use the host network. + "--network=host" + + # Run in priviledged mode. 
We are not using docker for sandboxing or + # isolation, just for packaging our dev tools. + "--privileged" + + # Run the docker script with the user id. Because the docker image gets to + # write in ${PWD} you typically want this to be your user id. + # To allow docker in docker, we need to use docker gid on the host. + "--user" "${user_uid}:${docker_gid}" + + # Pass down the USER. + "--env" "USER=${user_name}" + + # Mount the project directory inside the Docker container. + "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}" + "--workdir" "${TRAMPOLINE_WORKSPACE}" + "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}" + + # Mount the temporary home directory. + "--volume" "${tmphome}:/h" + "--env" "HOME=/h" + + # Allow docker in docker. + "--volume" "/var/run/docker.sock:/var/run/docker.sock" + + # Mount the /tmp so that docker in docker can mount the files + # there correctly. + "--volume" "/tmp:/tmp" + # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR + # TODO(tmatsuo): This part is not portable. + "--env" "TRAMPOLINE_SECRET_DIR=/secrets" + "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile" + "--env" "KOKORO_GFILE_DIR=/secrets/gfile" + "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore" + "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore" +) + +# Add an option for nicer output if the build gets a tty. +if [[ -t 0 ]]; then + docker_flags+=("-it") +fi + +# Passing down env vars +for e in "${pass_down_envvars[@]}" +do + if [[ -n "${!e:-}" ]]; then + docker_flags+=("--env" "${e}=${!e}") + fi +done + +# If arguments are given, all arguments will become the commands run +# in the container, otherwise run TRAMPOLINE_BUILD_FILE. +if [[ $# -ge 1 ]]; then + log_yellow "Running the given commands '" "${@:1}" "' in the container." + readonly commands=("${@:1}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" +else + log_yellow "Running the tests in a Docker container." + docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" +fi + + +test_retval=$? + +if [[ ${test_retval} -eq 0 ]]; then + log_green "Build finished with ${test_retval}" +else + log_red "Build finished with ${test_retval}" +fi + +# Only upload it when the test passes. +if [[ "${update_cache}" == "true" ]] && \ + [[ $test_retval == 0 ]] && \ + [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then + log_yellow "Uploading the Docker image." + if docker push "${TRAMPOLINE_IMAGE}"; then + log_green "Finished uploading the Docker image." + else + log_red "Failed uploading the Docker image." + fi + # Call trampoline_after_upload_hook if it's defined. + if function_exists trampoline_after_upload_hook; then + trampoline_after_upload_hook + fi + +fi + +exit "${test_retval}" diff --git a/.trampolinerc b/.trampolinerc new file mode 100644 index 000000000..995ee2911 --- /dev/null +++ b/.trampolinerc @@ -0,0 +1,51 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Template for .trampolinerc + +# Add required env vars here. +required_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" +) + +# Add env vars which are passed down into the container here. +pass_down_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" +) + +# Prevent unintentional override on the default image. +if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ + [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." + exit 1 +fi + +# Define the default value if it makes sense. +if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then + TRAMPOLINE_IMAGE_UPLOAD="" +fi + +if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + TRAMPOLINE_IMAGE="" +fi + +if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then + TRAMPOLINE_DOCKERFILE="" +fi + +if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then + TRAMPOLINE_BUILD_FILE="" +fi diff --git a/docs/conf.py b/docs/conf.py index 8d2f1e15d..e25583dc4 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -20,6 +20,9 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) +# For plugins that can not read conf.py. +sys.path.insert(0, os.path.abspath(".")) + __version__ = "" # -- General configuration ------------------------------------------------ diff --git a/noxfile.py b/noxfile.py index fd120fd6d..2ead2eca3 100644 --- a/noxfile.py +++ b/noxfile.py @@ -100,6 +100,10 @@ def system(session): """Run the system test suite.""" system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") # Sanity check: Only run tests if the environment variable is set. 
if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") @@ -165,3 +169,36 @@ def docs(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install("sphinx", "alabaster", "recommonmark", "sphinx-docfx-yaml") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/synth.metadata b/synth.metadata index d1fc5fa12..7fcae0740 100644 --- a/synth.metadata +++ b/synth.metadata @@ -3,15 +3,15 @@ { "git": { "name": ".", - "remote": "git@github.com:googleapis/python-storage", - "sha": "0709ad5121098af68faf2432d8960650d238d8cd" + "remote": "https://github.com/googleapis/python-storage.git", + "sha": "e089b286fcf6bc60cbc6b8639ed20e07c5e7c528" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "303271797a360f8a439203413f13a160f2f5b3b4" + "sha": "bfcdbe0da977b2de6c1c0471bb6dc2f1e13bf669" } } ] From e1f91fcca6c001bc3b0c5f759a7a003fcf60c0a6 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 4 Aug 2020 23:10:03 -0400 Subject: [PATCH 05/19] feat: pass 'client_options' to base class ctor (#225) packaging: pin 'google-cloud-core >= 1.4.0' Closes #210 --- google/cloud/storage/client.py | 10 +++++++++- setup.py | 2 +- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py index 118377b7c..6c7fa73c8 100644 --- a/google/cloud/storage/client.py +++ b/google/cloud/storage/client.py @@ -79,6 +79,7 @@ class Client(ClientWithProject): requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own library or partner tool. + :type client_options: :class:`~google.api_core.client_options.ClientOptions` or :class:`dict` :param client_options: (Optional) Client options used to set user options on the client. API Endpoint should be set through client_options. 
@@ -100,15 +101,21 @@ def __init__( client_options=None, ): self._base_connection = None + if project is None: no_project = True project = "" else: no_project = False + if project is _marker: project = None + super(Client, self).__init__( - project=project, credentials=credentials, _http=_http + project=project, + credentials=credentials, + client_options=client_options, + _http=_http, ) kw_args = {"client_info": client_info} @@ -126,6 +133,7 @@ def __init__( if no_project: self.project = None + self._connection = Connection(self, **kw_args) self._batch_stack = _LocalStack() diff --git a/setup.py b/setup.py index 91cb1dcc8..872b66c4c 100644 --- a/setup.py +++ b/setup.py @@ -30,7 +30,7 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "google-auth >= 1.11.0, < 2.0dev", - "google-cloud-core >= 1.2.0, < 2.0dev", + "google-cloud-core >= 1.4.0, < 2.0dev", "google-resumable-media >= 0.6.0, < 2.0dev", ] extras = {} From e8a8638421c862bdf037b2b6e92e76f00caf00a1 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 10 Aug 2020 16:54:32 -0400 Subject: [PATCH 06/19] tests: pin g-c-kms to un-break systests (#232) See: #226 --- noxfile.py | 2 +- synth.py | 3 ++- tests/system/test_system.py | 8 -------- 3 files changed, 3 insertions(+), 10 deletions(-) diff --git a/noxfile.py b/noxfile.py index 2ead2eca3..694b5e627 100644 --- a/noxfile.py +++ b/noxfile.py @@ -125,7 +125,7 @@ def system(session): "google-cloud-testutils", "google-cloud-iam", "google-cloud-pubsub", - "google-cloud-kms", + "google-cloud-kms < 2.0dev", ) session.install("-e", ".") diff --git a/synth.py b/synth.py index 296a6311b..577248cba 100644 --- a/synth.py +++ b/synth.py @@ -29,7 +29,8 @@ system_test_external_dependencies=[ "google-cloud-iam", "google-cloud-pubsub", - "google-cloud-kms", + # See: https://github.com/googleapis/python-storage/issues/226 + "google-cloud-kms < 2.0dev", ], ) s.move( diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 84e1fd061..e48f4e230 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -1829,14 +1829,6 @@ def test_access_to_public_bucket(self): retry_429_503(blob.download_to_file)(stream) -_KMS_2_0_BREAKAGE_MESSAGE = """\ -KMS 2.0.0 incompatible with our test setup. - -See https://github.com/googleapis/python-storage/issues/226 -""" - - -@unittest.skipIf(six.PY3, reason=_KMS_2_0_BREAKAGE_MESSAGE) class TestKMSIntegration(TestStorageFiles): FILENAMES = ("file01.txt",) From 73107c35f23c4a358e957c2b8188300a7fa958fe Mon Sep 17 00:00:00 2001 From: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Date: Tue, 11 Aug 2020 02:56:07 +0530 Subject: [PATCH 07/19] feat: rename 'Blob.download_as_{string,bytes}', add 'Blob.download_as_text' (#182) Leave 'Blob.download_as_string' as a deprecated alias for 'download_as_bytes'. 
Co-authored-by: Tres Seaver --- google/cloud/storage/blob.py | 176 ++++++++++++++++++++++++++++++++++- tests/system/test_system.py | 89 ++++++++++-------- tests/unit/test_blob.py | 130 +++++++++++++++++++++++--- 3 files changed, 340 insertions(+), 55 deletions(-) diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index 07a17867c..1380f41bb 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -1100,7 +1100,7 @@ def download_to_filename( mtime = updated.timestamp() os.utime(file_obj.name, (mtime, mtime)) - def download_as_string( + def download_as_bytes( self, client=None, start=None, @@ -1180,6 +1180,180 @@ def download_as_string( ) return string_buffer.getvalue() + def download_as_string( + self, + client=None, + start=None, + end=None, + raw_download=False, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, + ): + """(Deprecated) Download the contents of this blob as a bytes object. + + If :attr:`user_project` is set on the bucket, bills the API request + to that project. + + .. note:: + Deprecated alias for :meth:`download_as_bytes`. + + :type client: :class:`~google.cloud.storage.client.Client` or + ``NoneType`` + :param client: (Optional) The client to use. If not passed, falls back + to the ``client`` stored on the blob's bucket. + + :type start: int + :param start: (Optional) The first byte in a range to be downloaded. + + :type end: int + :param end: (Optional) The last byte in a range to be downloaded. + + :type raw_download: bool + :param raw_download: + (Optional) If true, download the object without any expansion. + + :type if_generation_match: long + :param if_generation_match: (Optional) Make the operation conditional on whether + the blob's current generation matches the given value. + Setting to 0 makes the operation succeed only if there + are no live versions of the blob. + + :type if_generation_not_match: long + :param if_generation_not_match: (Optional) Make the operation conditional on whether + the blob's current generation does not match the given + value. If no live blob exists, the precondition fails. + Setting to 0 makes the operation succeed only if there + is a live version of the blob. + + :param if_metageneration_match: (Optional) Make the operation conditional on whether the + blob's current metageneration matches the given value. + + :type if_metageneration_not_match: long + :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the + blob's current metageneration does not match the given value. + + :type timeout: float or tuple + :param timeout: + (Optional) The number of seconds the transport should wait for the + server response. Depending on the retry strategy, a request may be + repeated several times using the same timeout each time. + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. + + :rtype: bytes + :returns: The data stored in this blob. + + :raises: :class:`google.cloud.exceptions.NotFound` + """ + warnings.warn( + "Blob.download_as_string() is deprecated and will be removed in future." 
+ "Use Blob.download_as_bytes() instead.", + PendingDeprecationWarning, + stacklevel=1, + ) + return self.download_as_bytes( + client=client, + start=start, + end=end, + raw_download=raw_download, + if_generation_match=if_generation_match, + if_generation_not_match=if_generation_not_match, + if_metageneration_match=if_metageneration_match, + if_metageneration_not_match=if_metageneration_not_match, + timeout=timeout, + ) + + def download_as_text( + self, + client=None, + start=None, + end=None, + raw_download=False, + encoding="utf-8", + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, + ): + """Download the contents of this blob as a string. + + If :attr:`user_project` is set on the bucket, bills the API request + to that project. + + :type client: :class:`~google.cloud.storage.client.Client` or + ``NoneType`` + :param client: (Optional) The client to use. If not passed, falls back + to the ``client`` stored on the blob's bucket. + + :type start: int + :param start: (Optional) The first byte in a range to be downloaded. + + :type end: int + :param end: (Optional) The last byte in a range to be downloaded. + + :type raw_download: bool + :param raw_download: + (Optional) If true, download the object without any expansion. + + :type encoding: str + :param encoding: (Optional) The data of the blob will be decoded by + encoding method. Defaults to UTF-8. Apply only + if the value of ``blob.content_encoding`` is None. + + :type if_generation_match: long + :param if_generation_match: (Optional) Make the operation conditional on whether + the blob's current generation matches the given value. + Setting to 0 makes the operation succeed only if there + are no live versions of the blob. + + :type if_generation_not_match: long + :param if_generation_not_match: (Optional) Make the operation conditional on whether + the blob's current generation does not match the given + value. If no live blob exists, the precondition fails. + Setting to 0 makes the operation succeed only if there + is a live version of the blob. + + :param if_metageneration_match: (Optional) Make the operation conditional on whether the + blob's current metageneration matches the given value. + + :type if_metageneration_not_match: long + :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the + blob's current metageneration does not match the given value. + + :type timeout: float or tuple + :param timeout: + (Optional) The number of seconds the transport should wait for the + server response. Depending on the retry strategy, a request may be + repeated several times using the same timeout each time. + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. + + :rtype: text + :returns: The data stored in this blob. + + :raises: :class:`google.cloud.exceptions.NotFound` + """ + data = self.download_as_bytes( + client=client, + start=start, + end=end, + raw_download=raw_download, + if_generation_match=if_generation_match, + if_generation_not_match=if_generation_not_match, + if_metageneration_match=if_metageneration_match, + if_metageneration_not_match=if_metageneration_not_match, + timeout=timeout, + ) + + if self.content_encoding: + return data.decode(self.content_encoding) + else: + return data.decode(encoding) + def _get_content_type(self, content_type, filename=None): """Determine the content type from the current object. 
diff --git a/tests/system/test_system.py b/tests/system/test_system.py index e48f4e230..3fb701d39 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -421,8 +421,8 @@ def test_copy_existing_file_with_user_project(self): ) to_delete.append(new_blob) - base_contents = blob.download_as_string() - copied_contents = new_blob.download_as_string() + base_contents = blob.download_as_bytes() + copied_contents = new_blob.download_as_bytes() self.assertEqual(base_contents, copied_contents) finally: for blob in to_delete: @@ -451,8 +451,8 @@ def test_copy_file_with_generation_match(self): ) to_delete.append(new_blob) - base_contents = blob.download_as_string() - copied_contents = new_blob.download_as_string() + base_contents = blob.download_as_bytes() + copied_contents = new_blob.download_as_bytes() self.assertEqual(base_contents, copied_contents) finally: for blob in to_delete: @@ -481,8 +481,8 @@ def test_copy_file_with_metageneration_match(self): ) to_delete.append(new_blob) - base_contents = blob.download_as_string() - copied_contents = new_blob.download_as_string() + base_contents = blob.download_as_bytes() + copied_contents = new_blob.download_as_bytes() self.assertEqual(base_contents, copied_contents) finally: for blob in to_delete: @@ -508,7 +508,7 @@ def test_bucket_get_blob_with_user_project(self): to_add.upload_from_string(data) try: found = with_user_project.get_blob("blob-name") - self.assertEqual(found.download_as_string(), data) + self.assertEqual(found.download_as_bytes(), data) finally: to_add.delete() @@ -623,8 +623,8 @@ def test_crud_blob_w_user_project(self): blob.reload() # Exercise 'objects.get' (media) w/ userProject. - self.assertEqual(blob0.download_as_string(), file_contents) - self.assertEqual(blob1.download_as_string(), b"gen1") + self.assertEqual(blob0.download_as_bytes(), file_contents) + self.assertEqual(blob1.download_as_bytes(), b"gen1") # Exercise 'objects.patch' w/ userProject. blob0.content_language = "en" @@ -684,10 +684,10 @@ def test_crud_blob_w_generation_match(self): # Exercise 'objects.get' (media) w/ generation match. self.assertEqual( - blob0.download_as_string(if_generation_match=gen0), file_contents + blob0.download_as_bytes(if_generation_match=gen0), file_contents ) self.assertEqual( - blob1.download_as_string(if_generation_not_match=gen0), b"gen1" + blob1.download_as_bytes(if_generation_not_match=gen0), b"gen1" ) # Exercise 'objects.patch' w/ generation match. 
@@ -825,8 +825,8 @@ def test_copy_existing_file(self): ) self.case_blobs_to_delete.append(new_blob) - base_contents = blob.download_as_string() - copied_contents = new_blob.download_as_string() + base_contents = blob.download_as_bytes() + copied_contents = new_blob.download_as_bytes() self.assertEqual(base_contents, copied_contents) def test_download_blob_w_uri(self): @@ -846,6 +846,15 @@ def test_download_blob_w_uri(self): self.assertEqual(file_contents, stored_contents) + def test_download_blob_as_text(self): + blob = self.bucket.blob("MyBuffer") + file_contents = "Hello World" + blob.upload_from_string(file_contents) + self.case_blobs_to_delete.append(blob) + + stored_contents = blob.download_as_text() + self.assertEqual(file_contents, stored_contents) + def test_upload_gzip_encoded_download_raw(self): payload = b"DEADBEEF" * 1000 raw_stream = io.BytesIO() @@ -857,10 +866,10 @@ def test_upload_gzip_encoded_download_raw(self): blob.content_encoding = "gzip" blob.upload_from_file(raw_stream, rewind=True) - expanded = blob.download_as_string() + expanded = blob.download_as_bytes() self.assertEqual(expanded, payload) - raw = blob.download_as_string(raw_download=True) + raw = blob.download_as_bytes(raw_download=True) self.assertEqual(raw, zipped) def test_resumable_upload_with_generation_match(self): @@ -920,7 +929,7 @@ def test_fetch_object_and_check_content(self): for blob_name, file_contents in test_data.items(): blob = bucket.blob(blob_name) self.assertEqual(blob.name, blob_name) - self.assertEqual(blob.download_as_string(), file_contents) + self.assertEqual(blob.download_as_bytes(), file_contents) class TestStorageListFiles(TestStorageFiles): @@ -1410,7 +1419,7 @@ def test_compose_create_new_blob(self): destination.compose([source_1, source_2]) self.case_blobs_to_delete.append(destination) - composed = destination.download_as_string() + composed = destination.download_as_bytes() self.assertEqual(composed, SOURCE_1 + SOURCE_2) def test_compose_create_new_blob_wo_content_type(self): @@ -1430,7 +1439,7 @@ def test_compose_create_new_blob_wo_content_type(self): self.case_blobs_to_delete.append(destination) self.assertIsNone(destination.content_type) - composed = destination.download_as_string() + composed = destination.download_as_bytes() self.assertEqual(composed, SOURCE_1 + SOURCE_2) def test_compose_replace_existing_blob(self): @@ -1447,7 +1456,7 @@ def test_compose_replace_existing_blob(self): original.compose([original, to_append]) - composed = original.download_as_string() + composed = original.download_as_bytes() self.assertEqual(composed, BEFORE + TO_APPEND) def test_compose_with_generation_match(self): @@ -1475,7 +1484,7 @@ def test_compose_with_generation_match(self): if_metageneration_match=[original.metageneration, to_append.metageneration], ) - composed = original.download_as_string() + composed = original.download_as_bytes() self.assertEqual(composed, BEFORE + TO_APPEND) @unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.") @@ -1501,7 +1510,7 @@ def test_compose_with_user_project(self): destination.content_type = "text/plain" destination.compose([source_1, source_2]) - composed = destination.download_as_string() + composed = destination.download_as_bytes() self.assertEqual(composed, SOURCE_1 + SOURCE_2) finally: retry_429_harder(created.delete)(force=True) @@ -1517,7 +1526,7 @@ def test_rewrite_create_new_blob_add_encryption_key(self): source = self.bucket.blob("source") source.upload_from_filename(file_data["path"]) 
self.case_blobs_to_delete.append(source) - source_data = source.download_as_string() + source_data = source.download_as_bytes() KEY = os.urandom(32) dest = self.bucket.blob("dest", encryption_key=KEY) @@ -1528,7 +1537,7 @@ def test_rewrite_create_new_blob_add_encryption_key(self): self.assertEqual(rewritten, len(source_data)) self.assertEqual(total, len(source_data)) - self.assertEqual(source.download_as_string(), dest.download_as_string()) + self.assertEqual(source.download_as_bytes(), dest.download_as_bytes()) def test_rewrite_rotate_encryption_key(self): BLOB_NAME = "rotating-keys" @@ -1538,7 +1547,7 @@ def test_rewrite_rotate_encryption_key(self): source = self.bucket.blob(BLOB_NAME, encryption_key=SOURCE_KEY) source.upload_from_filename(file_data["path"]) self.case_blobs_to_delete.append(source) - source_data = source.download_as_string() + source_data = source.download_as_bytes() DEST_KEY = os.urandom(32) dest = self.bucket.blob(BLOB_NAME, encryption_key=DEST_KEY) @@ -1550,7 +1559,7 @@ def test_rewrite_rotate_encryption_key(self): self.assertEqual(rewritten, len(source_data)) self.assertEqual(total, len(source_data)) - self.assertEqual(dest.download_as_string(), source_data) + self.assertEqual(dest.download_as_bytes(), source_data) @unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.") def test_rewrite_add_key_with_user_project(self): @@ -1566,7 +1575,7 @@ def test_rewrite_add_key_with_user_project(self): source = with_user_project.blob("source") source.upload_from_filename(file_data["path"]) - source_data = source.download_as_string() + source_data = source.download_as_bytes() KEY = os.urandom(32) dest = with_user_project.blob("dest", encryption_key=KEY) @@ -1576,7 +1585,7 @@ def test_rewrite_add_key_with_user_project(self): self.assertEqual(rewritten, len(source_data)) self.assertEqual(total, len(source_data)) - self.assertEqual(source.download_as_string(), dest.download_as_string()) + self.assertEqual(source.download_as_bytes(), dest.download_as_bytes()) finally: retry_429_harder(created.delete)(force=True) @@ -1596,7 +1605,7 @@ def test_rewrite_rotate_with_user_project(self): SOURCE_KEY = os.urandom(32) source = with_user_project.blob(BLOB_NAME, encryption_key=SOURCE_KEY) source.upload_from_filename(file_data["path"]) - source_data = source.download_as_string() + source_data = source.download_as_bytes() DEST_KEY = os.urandom(32) dest = with_user_project.blob(BLOB_NAME, encryption_key=DEST_KEY) @@ -1606,7 +1615,7 @@ def test_rewrite_rotate_with_user_project(self): self.assertEqual(rewritten, len(source_data)) self.assertEqual(total, len(source_data)) - self.assertEqual(dest.download_as_string(), source_data) + self.assertEqual(dest.download_as_bytes(), source_data) finally: retry_429_harder(created.delete)(force=True) @@ -1622,7 +1631,7 @@ def test_rewrite_with_generation_match(self): source = bucket.blob(BLOB_NAME) source.upload_from_filename(file_data["path"]) - source_data = source.download_as_string() + source_data = source.download_as_bytes() dest = bucket.blob(BLOB_NAME) @@ -1640,7 +1649,7 @@ def test_rewrite_with_generation_match(self): self.assertEqual(token, None) self.assertEqual(rewritten, len(source_data)) self.assertEqual(total, len(source_data)) - self.assertEqual(dest.download_as_string(), source_data) + self.assertEqual(dest.download_as_bytes(), source_data) finally: retry_429_harder(created.delete)(force=True) @@ -1907,7 +1916,7 @@ def test_blob_w_explicit_kms_key_name(self): blob.upload_from_filename(file_data["path"]) 
self.case_blobs_to_delete.append(blob) with open(file_data["path"], "rb") as _file_data: - self.assertEqual(blob.download_as_string(), _file_data.read()) + self.assertEqual(blob.download_as_bytes(), _file_data.read()) # We don't know the current version of the key. self.assertTrue(blob.kms_key_name.startswith(kms_key_name)) @@ -1935,7 +1944,7 @@ def test_bucket_w_default_kms_key_name(self): defaulted_blob.upload_from_filename(file_data["path"]) self.case_blobs_to_delete.append(defaulted_blob) - self.assertEqual(defaulted_blob.download_as_string(), contents) + self.assertEqual(defaulted_blob.download_as_bytes(), contents) # We don't know the current version of the key. self.assertTrue(defaulted_blob.kms_key_name.startswith(kms_key_name)) @@ -1947,7 +1956,7 @@ def test_bucket_w_default_kms_key_name(self): override_blob.upload_from_filename(file_data["path"]) self.case_blobs_to_delete.append(override_blob) - self.assertEqual(override_blob.download_as_string(), contents) + self.assertEqual(override_blob.download_as_bytes(), contents) # We don't know the current version of the key. self.assertTrue(override_blob.kms_key_name.startswith(alt_kms_key_name)) @@ -1958,7 +1967,7 @@ def test_bucket_w_default_kms_key_name(self): alt_blob.upload_from_filename(file_data["path"]) self.case_blobs_to_delete.append(alt_blob) - self.assertEqual(alt_blob.download_as_string(), contents) + self.assertEqual(alt_blob.download_as_bytes(), contents) # We don't know the current version of the key. self.assertTrue(alt_blob.kms_key_name.startswith(alt_kms_key_name)) @@ -1969,7 +1978,7 @@ def test_bucket_w_default_kms_key_name(self): cleartext_blob.upload_from_filename(file_data["path"]) self.case_blobs_to_delete.append(cleartext_blob) - self.assertEqual(cleartext_blob.download_as_string(), contents) + self.assertEqual(cleartext_blob.download_as_bytes(), contents) self.assertIsNone(cleartext_blob.kms_key_name) def test_rewrite_rotate_csek_to_cmek(self): @@ -1980,7 +1989,7 @@ def test_rewrite_rotate_csek_to_cmek(self): source = self.bucket.blob(BLOB_NAME, encryption_key=SOURCE_KEY) source.upload_from_filename(file_data["path"]) self.case_blobs_to_delete.append(source) - source_data = source.download_as_string() + source_data = source.download_as_bytes() kms_key_name = self._kms_key_name() @@ -2002,7 +2011,7 @@ def test_rewrite_rotate_csek_to_cmek(self): self.assertEqual(rewritten, len(source_data)) self.assertEqual(total, len(source_data)) - self.assertEqual(dest.download_as_string(), source_data) + self.assertEqual(dest.download_as_bytes(), source_data) def test_upload_new_blob_w_bucket_cmek_enabled(self): blob_name = "test-blob" @@ -2022,7 +2031,7 @@ def test_upload_new_blob_w_bucket_cmek_enabled(self): blob.upload_from_string(alt_payload, if_generation_match=blob.generation) self.case_blobs_to_delete.append(blob) - self.assertEqual(blob.download_as_string(), alt_payload) + self.assertEqual(blob.download_as_bytes(), alt_payload) self.bucket.default_kms_key_name = None self.bucket.patch() @@ -2250,7 +2259,7 @@ def test_new_bucket_w_ubla(self): blob.upload_from_string(payload) found = bucket.get_blob(blob_name) - self.assertEqual(found.download_as_string(), payload) + self.assertEqual(found.download_as_bytes(), payload) blob_acl = blob.acl with self.assertRaises(exceptions.BadRequest): diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index cd4a129d1..68011e438 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -1417,7 +1417,7 @@ def test_download_to_filename_w_key(self): stream = 
blob._do_download.mock_calls[0].args[1] self.assertEqual(stream.name, temp.name) - def _download_as_string_helper(self, raw_download, timeout=None): + def _download_as_bytes_helper(self, raw_download, timeout=None): blob_name = "blob-name" client = mock.Mock(spec=["_http"]) bucket = _Bucket(client) @@ -1428,13 +1428,10 @@ def _download_as_string_helper(self, raw_download, timeout=None): if timeout is None: expected_timeout = self._get_default_timeout() - fetched = blob.download_as_string(raw_download=raw_download) + fetched = blob.download_as_bytes(raw_download=raw_download) else: expected_timeout = timeout - fetched = blob.download_as_string( - raw_download=raw_download, timeout=timeout - ) - + fetched = blob.download_as_bytes(raw_download=raw_download, timeout=timeout) self.assertEqual(fetched, b"") headers = {"accept-encoding": "gzip"} @@ -1501,7 +1498,7 @@ def test_download_as_string_w_hash_response_header_none(self): self.assertIsNone(blob.md5_hash) self.assertIsNone(blob.crc32c) - def test_download_as_string_w_generation_match(self): + def test_download_as_bytes_w_generation_match(self): GENERATION_NUMBER = 6 MEDIA_LINK = "http://example.com/media/" @@ -1511,7 +1508,7 @@ def test_download_as_string_w_generation_match(self): ) blob.download_to_file = mock.Mock() - fetched = blob.download_as_string(if_generation_match=GENERATION_NUMBER) + fetched = blob.download_as_bytes(if_generation_match=GENERATION_NUMBER) self.assertEqual(fetched, b"") blob.download_to_file.assert_called_once_with( @@ -1527,14 +1524,119 @@ def test_download_as_string_w_generation_match(self): timeout=self._get_default_timeout(), ) - def test_download_as_string_wo_raw(self): - self._download_as_string_helper(raw_download=False) + def test_download_as_bytes_wo_raw(self): + self._download_as_bytes_helper(raw_download=False) + + def test_download_as_bytes_w_raw(self): + self._download_as_bytes_helper(raw_download=True) + + def test_download_as_byte_w_custom_timeout(self): + self._download_as_bytes_helper(raw_download=False, timeout=9.58) + + def _download_as_text_helper(self, raw_download, encoding=None, timeout=None): + blob_name = "blob-name" + client = mock.Mock(spec=["_http"]) + bucket = _Bucket(client) + media_link = "http://example.com/media/" + properties = {"mediaLink": media_link} + if encoding: + properties["contentEncoding"] = encoding + blob = self._make_one(blob_name, bucket=bucket, properties=properties) + blob._do_download = mock.Mock() + + if timeout is None: + expected_timeout = self._get_default_timeout() + fetched = blob.download_as_text(raw_download=raw_download) + else: + expected_timeout = timeout + fetched = blob.download_as_text(raw_download=raw_download, timeout=timeout) + + self.assertEqual(fetched, "") + + headers = {"accept-encoding": "gzip"} + blob._do_download.assert_called_once_with( + client._http, + mock.ANY, + media_link, + headers, + None, + None, + raw_download, + timeout=expected_timeout, + ) + stream = blob._do_download.mock_calls[0].args[1] + self.assertIsInstance(stream, io.BytesIO) + + def test_download_as_text_w_generation_match(self): + GENERATION_NUMBER = 6 + MEDIA_LINK = "http://example.com/media/" + + client = mock.Mock(spec=["_http"]) + blob = self._make_one( + "blob-name", bucket=_Bucket(client), properties={"mediaLink": MEDIA_LINK} + ) + blob.download_to_file = mock.Mock() + + fetched = 
blob.download_as_text(if_generation_match=GENERATION_NUMBER) + self.assertEqual(fetched, "") + + blob.download_to_file.assert_called_once_with( + mock.ANY, + client=None, + start=None, + end=None, + raw_download=False, + if_generation_match=GENERATION_NUMBER, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=self._get_default_timeout(), + ) + + def test_download_as_text_wo_raw(self): + self._download_as_text_helper(raw_download=False) + + def test_download_as_text_w_raw(self): + self._download_as_text_helper(raw_download=True) - def test_download_as_string_w_raw(self): - self._download_as_string_helper(raw_download=True) + def test_download_as_text_w_custom_timeout(self): + self._download_as_text_helper(raw_download=False, timeout=9.58) - def test_download_as_string_w_custom_timeout(self): - self._download_as_string_helper(raw_download=False, timeout=9.58) + def test_download_as_text_w_encoding(self): + self._download_as_text_helper(raw_download=False, encoding="utf-8") + + @mock.patch("warnings.warn") + def test_download_as_string(self, mock_warn): + MEDIA_LINK = "http://example.com/media/" + + client = mock.Mock(spec=["_http"]) + blob = self._make_one( + "blob-name", bucket=_Bucket(client), properties={"mediaLink": MEDIA_LINK} + ) + blob.download_to_file = mock.Mock() + + fetched = blob.download_as_string() + self.assertEqual(fetched, b"") + + blob.download_to_file.assert_called_once_with( + mock.ANY, + client=None, + start=None, + end=None, + raw_download=False, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=self._get_default_timeout(), + ) + + mock_warn.assert_called_with( + "Blob.download_as_string() is deprecated and will be removed in future." + "Use Blob.download_as_bytes() instead.", + PendingDeprecationWarning, + stacklevel=1, + ) def test__get_content_type_explicit(self): blob = self._make_one(u"blob-name", bucket=None) From 351413233cba0741fcda08757ad4e2555044e1ad Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 11 Aug 2020 15:34:16 -0700 Subject: [PATCH 08/19] chore(py_library): add some excludes in docs/conf.py (#233) This should fix build failures in python-bigquery. Example: https://github.com/googleapis/python-bigquery/pull/205 Source-Author: Takashi Matsuo Source-Date: Mon Aug 3 15:08:00 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: 4f8f5dc24af79694887385015294e4dbb214c352 Source-Link: https://github.com/googleapis/synthtool/commit/4f8f5dc24af79694887385015294e4dbb214c352 Co-authored-by: Tres Seaver --- docs/conf.py | 8 +++++++- synth.metadata | 4 ++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index e25583dc4..e9460a533 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -21,6 +21,7 @@ sys.path.insert(0, os.path.abspath("..")) # For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 sys.path.insert(0, os.path.abspath(".")) __version__ = "" @@ -93,7 +94,12 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. 
-exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. diff --git a/synth.metadata b/synth.metadata index 7fcae0740..e92268768 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,14 +4,14 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-storage.git", - "sha": "e089b286fcf6bc60cbc6b8639ed20e07c5e7c528" + "sha": "e1f91fcca6c001bc3b0c5f759a7a003fcf60c0a6" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "bfcdbe0da977b2de6c1c0471bb6dc2f1e13bf669" + "sha": "4f8f5dc24af79694887385015294e4dbb214c352" } } ] From acea15b49baaa0acdbeb615d9213d8bb3762e7f7 Mon Sep 17 00:00:00 2001 From: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Date: Thu, 13 Aug 2020 23:56:22 +0530 Subject: [PATCH 09/19] chore: pin google-cloud-ccore to 1.4.1 (#242) --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 872b66c4c..e74f251cc 100644 --- a/setup.py +++ b/setup.py @@ -30,7 +30,7 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "google-auth >= 1.11.0, < 2.0dev", - "google-cloud-core >= 1.4.0, < 2.0dev", + "google-cloud-core >= 1.4.1, < 2.0dev", "google-resumable-media >= 0.6.0, < 2.0dev", ] extras = {} From 23cfb65c3a3b10759c67846e162e4ed77a3f5307 Mon Sep 17 00:00:00 2001 From: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Date: Sat, 15 Aug 2020 00:34:00 +0530 Subject: [PATCH 10/19] fix: extract hashes correctly during download (#238) Co-authored-by: Tres Seaver --- google/cloud/storage/blob.py | 2 +- tests/system/test_system.py | 12 ++++++++++++ tests/unit/test_blob.py | 4 ++-- 3 files changed, 15 insertions(+), 3 deletions(-) diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index 1380f41bb..c2ff5790e 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -804,7 +804,7 @@ def _extract_headers_from_download(self, response): digests = {} for encoded_digest in x_goog_hash.split(","): - match = re.match(r"(crc32c|md5)=([\w\d]+)==", encoded_digest) + match = re.match(r"(crc32c|md5)=([\w\d/]+={0,3})", encoded_digest) if match: method, digest = match.groups() digests[method] = digest diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 3fb701d39..d14187baa 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -910,6 +910,18 @@ def test_upload_blob_owner(self): owner = same_blob.owner self.assertIn(user_email, owner["entity"]) + def test_blob_crc32_md5_hash(self): + blob = self.bucket.blob("MyBuffer") + file_contents = b"Hello World" + blob.upload_from_string(file_contents) + self.case_blobs_to_delete.append(blob) + + download_blob = self.bucket.blob("MyBuffer") + + self.assertEqual(download_blob.download_as_string(), file_contents) + self.assertEqual(download_blob.crc32c, blob.crc32c) + self.assertEqual(download_blob.md5_hash, blob.md5_hash) + class TestUnicode(unittest.TestCase): @vpcsc_config.skip_if_inside_vpcsc diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index 68011e438..20e20abe4 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -1476,8 +1476,8 @@ def 
test_download_as_string_w_response_headers(self): self.assertEqual(blob.content_encoding, "gzip") self.assertEqual(blob.cache_control, "max-age=1337;public") self.assertEqual(blob.storage_class, "STANDARD") - self.assertEqual(blob.md5_hash, "CS9tHYTtyFntzj7B9nkkJQ") - self.assertEqual(blob.crc32c, "4gcgLQ") + self.assertEqual(blob.md5_hash, "CS9tHYTtyFntzj7B9nkkJQ==") + self.assertEqual(blob.crc32c, "4gcgLQ==") def test_download_as_string_w_hash_response_header_none(self): blob_name = "blob-name" From cd95200661dc8c94af4f5803245204b03696c7fc Mon Sep 17 00:00:00 2001 From: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Date: Tue, 18 Aug 2020 01:17:29 +0530 Subject: [PATCH 11/19] test: add retry for bucket cmek enabled system test (#245) --- tests/system/test_system.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index d14187baa..c881c3da7 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -2037,6 +2037,7 @@ def test_upload_new_blob_w_bucket_cmek_enabled(self): blob = self.bucket.blob(blob_name) blob.upload_from_string(payload) + retry_429_harder(blob.reload)() # We don't know the current version of the key. self.assertTrue(blob.kms_key_name.startswith(kms_key_name)) @@ -2046,7 +2047,7 @@ def test_upload_new_blob_w_bucket_cmek_enabled(self): self.assertEqual(blob.download_as_bytes(), alt_payload) self.bucket.default_kms_key_name = None - self.bucket.patch() + retry_429_harder(self.bucket.patch)() self.assertIsNone(self.bucket.default_kms_key_name) From 413f7b55827c7d1e1d915a96568cd19c4a3868fa Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 24 Aug 2020 17:12:18 -0400 Subject: [PATCH 12/19] tests: replace unsafe 'tempfile.mktemp' usage (#248) Closes #247 --- tests/system/test_system.py | 63 ++++++++++++++++++++----------------- 1 file changed, 34 insertions(+), 29 deletions(-) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index c881c3da7..25b347f83 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -568,12 +568,12 @@ def test_large_encrypted_file_write_from_stream(self): md5_hash = md5_hash.encode("utf-8") self.assertEqual(md5_hash, file_data["hash"]) - temp_filename = tempfile.mktemp() - with open(temp_filename, "wb") as file_obj: - blob.download_to_file(file_obj) + with tempfile.NamedTemporaryFile() as temp_f: + with open(temp_f.name, "wb") as file_obj: + blob.download_to_file(file_obj) - with open(temp_filename, "rb") as file_obj: - md5_temp_hash = _base64_md5hash(file_obj) + with open(temp_f.name, "rb") as file_obj: + md5_temp_hash = _base64_md5hash(file_obj) self.assertEqual(md5_temp_hash, file_data["hash"]) @@ -777,12 +777,14 @@ def test_direct_write_and_read_into_file(self): same_blob = self.bucket.blob("MyBuffer") same_blob.reload() # Initialize properties. - temp_filename = tempfile.mktemp() - with open(temp_filename, "wb") as file_obj: - same_blob.download_to_file(file_obj) - with open(temp_filename, "rb") as file_obj: - stored_contents = file_obj.read() + with tempfile.NamedTemporaryFile() as temp_f: + + with open(temp_f.name, "wb") as file_obj: + same_blob.download_to_file(file_obj) + + with open(temp_f.name, "rb") as file_obj: + stored_contents = file_obj.read() self.assertEqual(file_contents, stored_contents) @@ -796,21 +798,23 @@ def test_download_w_generation_match(self): same_blob = self.bucket.blob("MyBuffer") same_blob.reload() # Initialize properties. 
- temp_filename = tempfile.mktemp() - with open(temp_filename, "wb") as file_obj: - with self.assertRaises(google.api_core.exceptions.PreconditionFailed): + + with tempfile.NamedTemporaryFile() as temp_f: + + with open(temp_f.name, "wb") as file_obj: + with self.assertRaises(google.api_core.exceptions.PreconditionFailed): + same_blob.download_to_file( + file_obj, if_generation_match=WRONG_GENERATION_NUMBER + ) + same_blob.download_to_file( - file_obj, if_generation_match=WRONG_GENERATION_NUMBER + file_obj, + if_generation_match=blob.generation, + if_metageneration_match=blob.metageneration, ) - same_blob.download_to_file( - file_obj, - if_generation_match=blob.generation, - if_metageneration_match=blob.metageneration, - ) - - with open(temp_filename, "rb") as file_obj: - stored_contents = file_obj.read() + with open(temp_f.name, "rb") as file_obj: + stored_contents = file_obj.read() self.assertEqual(file_contents, stored_contents) @@ -835,14 +839,15 @@ def test_download_blob_w_uri(self): blob.upload_from_string(file_contents) self.case_blobs_to_delete.append(blob) - temp_filename = tempfile.mktemp() - with open(temp_filename, "wb") as file_obj: - Config.CLIENT.download_blob_to_file( - "gs://" + self.bucket.name + "/MyBuffer", file_obj - ) + with tempfile.NamedTemporaryFile() as temp_f: + + with open(temp_f.name, "wb") as file_obj: + Config.CLIENT.download_blob_to_file( + "gs://" + self.bucket.name + "/MyBuffer", file_obj + ) - with open(temp_filename, "rb") as file_obj: - stored_contents = file_obj.read() + with open(temp_f.name, "rb") as file_obj: + stored_contents = file_obj.read() self.assertEqual(file_contents, stored_contents) From 136c0970f8ef7ad4751104e3b8b7dd3204220a67 Mon Sep 17 00:00:00 2001 From: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Date: Tue, 25 Aug 2020 22:31:08 +0530 Subject: [PATCH 13/19] feat(storage): add support of daysSinceNoncurrentTime and noncurrentTimeBefore (#162) * feat(storage): add support of daysSinceNoncurrentTime and noncurrentTimeBefore * feat(storage): fix code coverage * feat(storage): add custom method to convert datetime to string * feat(storage): remove custom method as server support microsec * feat(storage): change the return type of noncurrent_time_before * feat(storage): change non_current_time type from datetime to date * feat: nit Co-authored-by: Jonathan Lui Co-authored-by: Frank Natividad --- google/cloud/storage/bucket.py | 37 +++++++++++++++++++++++++++++-- tests/system/test_system.py | 17 +++++++++++++-- tests/unit/test_bucket.py | 40 ++++++++++++++++++++++++++++++++++ 3 files changed, 90 insertions(+), 4 deletions(-) diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py index ad7eaf6df..a0ef863bb 100644 --- a/google/cloud/storage/bucket.py +++ b/google/cloud/storage/bucket.py @@ -148,7 +148,7 @@ class LifecycleRuleConditions(dict): See: https://cloud.google.com/storage/docs/lifecycle :type age: int - :param age: (Optional) Apply rule action to items whos age, in days, + :param age: (Optional) Apply rule action to items whose age, in days, exceeds this value. :type created_before: datetime.date @@ -170,6 +170,19 @@ class LifecycleRuleConditions(dict): :param number_of_newer_versions: (Optional) Apply rule action to versioned items having N newer versions. + :type days_since_noncurrent_time: int + :param days_since_noncurrent_time: (Optional) Apply rule action to items whose number of days + elapsed since the non current timestamp. 
This condition + is relevant only for versioned objects. The value of the field + must be a non negative integer. If it's zero, the object version + will become eligible for lifecycle action as soon as it becomes + non current. + + :type noncurrent_time_before: :class:`datetime.date` + :param noncurrent_time_before: (Optional) Date object parsed from RFC3339 valid date, apply + rule action to items whose non current time is before this date. + This condition is relevant only for versioned objects, e.g, 2019-03-16. + :raises ValueError: if no arguments are passed. """ @@ -180,6 +193,8 @@ def __init__( is_live=None, matches_storage_class=None, number_of_newer_versions=None, + days_since_noncurrent_time=None, + noncurrent_time_before=None, _factory=False, ): conditions = {} @@ -202,6 +217,12 @@ def __init__( if not _factory and not conditions: raise ValueError("Supply at least one condition") + if days_since_noncurrent_time is not None: + conditions["daysSinceNoncurrentTime"] = days_since_noncurrent_time + + if noncurrent_time_before is not None: + conditions["noncurrentTimeBefore"] = noncurrent_time_before.isoformat() + super(LifecycleRuleConditions, self).__init__(conditions) @classmethod @@ -245,6 +266,18 @@ def number_of_newer_versions(self): """Conditon's 'number_of_newer_versions' value.""" return self.get("numNewerVersions") + @property + def days_since_noncurrent_time(self): + """Conditon's 'days_since_noncurrent_time' value.""" + return self.get("daysSinceNoncurrentTime") + + @property + def noncurrent_time_before(self): + """Conditon's 'noncurrent_time_before' value.""" + before = self.get("noncurrentTimeBefore") + if before is not None: + return datetime_helpers.from_iso8601_date(before) + class LifecycleRuleDelete(dict): """Map a lifecycle rule deleting matching items. @@ -274,7 +307,7 @@ def from_api_repr(cls, resource): class LifecycleRuleSetStorageClass(dict): - """Map a lifecycle rule upating storage class of matching items. + """Map a lifecycle rule updating storage class of matching items. :type storage_class: str, one of :attr:`Bucket.STORAGE_CLASSES`. :param storage_class: new storage class to assign to matching items. 
diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 25b347f83..e5ddc648a 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -192,14 +192,22 @@ def test_bucket_create_w_alt_storage_class(self): self.assertEqual(created.storage_class, constants.ARCHIVE_STORAGE_CLASS) def test_lifecycle_rules(self): + import datetime from google.cloud.storage import constants new_bucket_name = "w-lifcycle-rules" + unique_resource_id("-") + noncurrent_before = datetime.date(2018, 8, 1) self.assertRaises( exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name ) bucket = Config.CLIENT.bucket(new_bucket_name) - bucket.add_lifecycle_delete_rule(age=42) + bucket.add_lifecycle_delete_rule( + age=42, + number_of_newer_versions=3, + days_since_noncurrent_time=2, + noncurrent_time_before=noncurrent_before, + ) + bucket.add_lifecycle_set_storage_class_rule( constants.COLDLINE_STORAGE_CLASS, is_live=False, @@ -207,7 +215,12 @@ def test_lifecycle_rules(self): ) expected_rules = [ - LifecycleRuleDelete(age=42), + LifecycleRuleDelete( + age=42, + number_of_newer_versions=3, + days_since_noncurrent_time=2, + noncurrent_time_before=noncurrent_before, + ), LifecycleRuleSetStorageClass( constants.COLDLINE_STORAGE_CLASS, is_live=False, diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py index 3c5f2e68d..2336416c4 100644 --- a/tests/unit/test_bucket.py +++ b/tests/unit/test_bucket.py @@ -77,6 +77,7 @@ def test_ctor_w_created_before_and_is_live(self): self.assertEqual(conditions.is_live, False) self.assertIsNone(conditions.matches_storage_class) self.assertIsNone(conditions.number_of_newer_versions) + self.assertIsNone(conditions.noncurrent_time_before) def test_ctor_w_number_of_newer_versions(self): conditions = self._make_one(number_of_newer_versions=3) @@ -88,17 +89,54 @@ def test_ctor_w_number_of_newer_versions(self): self.assertIsNone(conditions.matches_storage_class) self.assertEqual(conditions.number_of_newer_versions, 3) + def test_ctor_w_days_since_noncurrent_time(self): + conditions = self._make_one( + number_of_newer_versions=3, days_since_noncurrent_time=2 + ) + expected = {"numNewerVersions": 3, "daysSinceNoncurrentTime": 2} + self.assertEqual(dict(conditions), expected) + self.assertIsNone(conditions.age) + self.assertIsNone(conditions.created_before) + self.assertIsNone(conditions.is_live) + self.assertIsNone(conditions.matches_storage_class) + self.assertEqual(conditions.number_of_newer_versions, 3) + self.assertEqual(conditions.days_since_noncurrent_time, 2) + + def test_ctor_w_noncurrent_time_before(self): + import datetime + + noncurrent_before = datetime.date(2018, 8, 1) + conditions = self._make_one( + number_of_newer_versions=3, noncurrent_time_before=noncurrent_before + ) + + expected = { + "numNewerVersions": 3, + "noncurrentTimeBefore": noncurrent_before.isoformat(), + } + self.assertEqual(dict(conditions), expected) + self.assertIsNone(conditions.age) + self.assertIsNone(conditions.created_before) + self.assertIsNone(conditions.is_live) + self.assertIsNone(conditions.matches_storage_class) + self.assertEqual(conditions.number_of_newer_versions, 3) + self.assertEqual(conditions.noncurrent_time_before, noncurrent_before) + def test_from_api_repr(self): import datetime + noncurrent_before = datetime.date(2018, 8, 1) before = datetime.date(2018, 8, 1) klass = self._get_target_class() + resource = { "age": 10, "createdBefore": "2018-08-01", "isLive": True, "matchesStorageClass": ["COLDLINE"], "numNewerVersions": 3, + 
"daysSinceNoncurrentTime": 2, + "noncurrentTimeBefore": noncurrent_before.isoformat(), } conditions = klass.from_api_repr(resource) self.assertEqual(conditions.age, 10) @@ -106,6 +144,8 @@ def test_from_api_repr(self): self.assertEqual(conditions.is_live, True) self.assertEqual(conditions.matches_storage_class, ["COLDLINE"]) self.assertEqual(conditions.number_of_newer_versions, 3) + self.assertEqual(conditions.days_since_noncurrent_time, 2) + self.assertEqual(conditions.noncurrent_time_before, noncurrent_before) class Test_LifecycleRuleDelete(unittest.TestCase): From 3465d08e098edb250dee5e97d1fb9ded8bae5700 Mon Sep 17 00:00:00 2001 From: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Date: Tue, 25 Aug 2020 23:24:02 +0530 Subject: [PATCH 14/19] fix: change datetime.now to utcnow (#251) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes #228 🦕 I think it's related to https://github.com/googleapis/python-storage/issues/244 issue so changed datetime.now() to datetime.utcnow() --- tests/system/test_system.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index e5ddc648a..e92ae3254 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -2368,7 +2368,7 @@ def test_get_signed_policy_v4(self): {"bucket": bucket_name}, ["starts-with", "$Content-Type", "text/pla"], ], - expiration=datetime.datetime.now() + datetime.timedelta(hours=1), + expiration=datetime.datetime.utcnow() + datetime.timedelta(hours=1), fields={"content-type": "text/plain"}, ) with open(blob_name, "r") as f: @@ -2395,7 +2395,7 @@ def test_get_signed_policy_v4_invalid_field(self): {"bucket": bucket_name}, ["starts-with", "$Content-Type", "text/pla"], ], - expiration=datetime.datetime.now() + datetime.timedelta(hours=1), + expiration=datetime.datetime.utcnow() + datetime.timedelta(hours=1), fields={"x-goog-random": "invalid_field", "content-type": "text/plain"}, ) with open(blob_name, "r") as f: From a8de5868f32b45868f178f420138fcd2fe42f5fd Mon Sep 17 00:00:00 2001 From: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Date: Wed, 26 Aug 2020 23:11:18 +0530 Subject: [PATCH 15/19] feat: error message return from api (#235) * feat(storage): error message retyrn from api * feat: add comment for clarification * fix: remove unknown error Co-authored-by: Tres Seaver Co-authored-by: Frank Natividad --- google/cloud/storage/blob.py | 8 +++++++- tests/unit/test_blob.py | 12 +++++++++--- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index c2ff5790e..08a86a52d 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -3427,7 +3427,13 @@ def _raise_from_invalid_response(error): to the failed status code """ response = error.response - error_message = str(error) + + # The 'response.text' gives the actual reason of error, where 'error' gives + # the message of expected status code. 
+ if response.text: + error_message = response.text + ": " + str(error) + else: + error_message = str(error) message = u"{method} {url}: {error}".format( method=response.request.method, url=response.request.url, error=error_message diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index 20e20abe4..b2589499d 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -4356,7 +4356,7 @@ def _call_fut(error): return _raise_from_invalid_response(error) - def _helper(self, message, code=http_client.BAD_REQUEST, args=()): + def _helper(self, message, code=http_client.BAD_REQUEST, reason=None, args=()): import requests from google.resumable_media import InvalidResponse @@ -4364,6 +4364,7 @@ def _helper(self, message, code=http_client.BAD_REQUEST, args=()): response = requests.Response() response.request = requests.Request("GET", "http://example.com").prepare() + response._content = reason response.status_code = code error = InvalidResponse(response, message, *args) @@ -4381,9 +4382,14 @@ def test_default(self): def test_w_206_and_args(self): message = "Failure" + reason = b"Not available" args = ("one", "two") - exc_info = self._helper(message, code=http_client.PARTIAL_CONTENT, args=args) - expected = "GET http://example.com/: {}".format((message,) + args) + exc_info = self._helper( + message, code=http_client.PARTIAL_CONTENT, reason=reason, args=args + ) + expected = "GET http://example.com/: {}: {}".format( + reason.decode("utf-8"), (message,) + args + ) self.assertEqual(exc_info.exception.message, expected) self.assertEqual(exc_info.exception.errors, []) From 180873de139f7f8e00b7bef423bc15760cf68cc2 Mon Sep 17 00:00:00 2001 From: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Date: Wed, 26 Aug 2020 23:30:23 +0530 Subject: [PATCH 16/19] feat: add support for 'Blob.custom_time' and lifecycle rules (#199) * feat(storage): add support of custom time metadata and timestamp * feat(storage): change the return type of custom_time_before * feat(storage): add setter method * feat(storage): add test for None value * feat(storage): changes in unittest * feat(storage): change custom_time type to date * feat: change custom_time to datetime * feat: nit Co-authored-by: Jonathan Lui Co-authored-by: Tres Seaver Co-authored-by: Frank Natividad --- google/cloud/storage/blob.py | 34 ++++++++++++++++++++++++++ google/cloud/storage/bucket.py | 32 +++++++++++++++++++++++++ tests/system/test_system.py | 7 +++++- tests/unit/test_blob.py | 44 ++++++++++++++++++++++++++++++++++ tests/unit/test_bucket.py | 40 ++++++++++++++++++++++++++++++- 5 files changed, 155 insertions(+), 2 deletions(-) diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index 08a86a52d..b8f01f63f 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -52,6 +52,7 @@ from google.api_core.iam import Policy from google.cloud import exceptions from google.cloud._helpers import _bytes_to_unicode +from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud._helpers import _rfc3339_to_datetime from google.cloud._helpers import _to_bytes from google.cloud.exceptions import NotFound @@ -3348,6 +3349,39 @@ def updated(self): if value is not None: return _rfc3339_to_datetime(value) + @property + def custom_time(self): + """Retrieve the custom time for the object. 
+ + See https://cloud.google.com/storage/docs/json_api/v1/objects + + :rtype: :class:`datetime.datetime` or ``NoneType`` + :returns: Datetime object parsed from RFC3339 valid timestamp, or + ``None`` if the blob's resource has not been loaded from + the server (see :meth:`reload`). + """ + value = self._properties.get("customTime") + if value is not None: + return _rfc3339_to_datetime(value) + + @custom_time.setter + def custom_time(self, value): + """Set the custom time for the object. Once set it can't be unset + and only changed to a custom datetime in the future. If the + custom_time must be unset, you must either perform a rewrite operation + or upload the data again. + + See https://cloud.google.com/storage/docs/json_api/v1/objects + + :type value: :class:`datetime.datetime` + :param value: (Optional) Set the custom time of blob. Datetime object + parsed from RFC3339 valid timestamp. + """ + if value is not None: + value = _datetime_to_rfc3339(value) + + self._properties["customTime"] = value + def _get_encryption_headers(key, source=False): """Builds customer encryption key headers diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py index a0ef863bb..e68703fac 100644 --- a/google/cloud/storage/bucket.py +++ b/google/cloud/storage/bucket.py @@ -170,6 +170,18 @@ class LifecycleRuleConditions(dict): :param number_of_newer_versions: (Optional) Apply rule action to versioned items having N newer versions. + :type days_since_custom_time: int + :param days_since_custom_time: (Optional) Apply rule action to items whose number of days + elapsed since the custom timestamp. This condition is relevant + only for versioned objects. The value of the field must be a non + negative integer. If it's zero, the object version will become + eligible for lifecycle action as soon as it becomes custom. + + :type custom_time_before: :class:`datetime.date` + :param custom_time_before: (Optional) Date object parsed from RFC3339 valid date, apply rule action + to items whose custom time is before this date. This condition is relevant + only for versioned objects, e.g., 2019-03-16. + :type days_since_noncurrent_time: int :param days_since_noncurrent_time: (Optional) Apply rule action to items whose number of days elapsed since the non current timestamp. 
This condition @@ -193,6 +205,8 @@ def __init__( is_live=None, matches_storage_class=None, number_of_newer_versions=None, + days_since_custom_time=None, + custom_time_before=None, days_since_noncurrent_time=None, noncurrent_time_before=None, _factory=False, @@ -214,6 +228,12 @@ def __init__( if number_of_newer_versions is not None: conditions["numNewerVersions"] = number_of_newer_versions + if days_since_custom_time is not None: + conditions["daysSinceCustomTime"] = days_since_custom_time + + if custom_time_before is not None: + conditions["customTimeBefore"] = custom_time_before.isoformat() + if not _factory and not conditions: raise ValueError("Supply at least one condition") @@ -266,6 +286,18 @@ def number_of_newer_versions(self): """Conditon's 'number_of_newer_versions' value.""" return self.get("numNewerVersions") + @property + def days_since_custom_time(self): + """Conditon's 'days_since_custom_time' value.""" + return self.get("daysSinceCustomTime") + + @property + def custom_time_before(self): + """Conditon's 'custom_time_before' value.""" + before = self.get("customTimeBefore") + if before is not None: + return datetime_helpers.from_iso8601_date(before) + @property def days_since_noncurrent_time(self): """Conditon's 'days_since_noncurrent_time' value.""" diff --git a/tests/system/test_system.py b/tests/system/test_system.py index e92ae3254..7d6e79b07 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -196,7 +196,9 @@ def test_lifecycle_rules(self): from google.cloud.storage import constants new_bucket_name = "w-lifcycle-rules" + unique_resource_id("-") + custom_time_before = datetime.date(2018, 8, 1) noncurrent_before = datetime.date(2018, 8, 1) + self.assertRaises( exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name ) @@ -204,10 +206,11 @@ def test_lifecycle_rules(self): bucket.add_lifecycle_delete_rule( age=42, number_of_newer_versions=3, + days_since_custom_time=2, + custom_time_before=custom_time_before, days_since_noncurrent_time=2, noncurrent_time_before=noncurrent_before, ) - bucket.add_lifecycle_set_storage_class_rule( constants.COLDLINE_STORAGE_CLASS, is_live=False, @@ -218,6 +221,8 @@ def test_lifecycle_rules(self): LifecycleRuleDelete( age=42, number_of_newer_versions=3, + days_since_custom_time=2, + custom_time_before=custom_time_before, days_since_noncurrent_time=2, noncurrent_time_before=noncurrent_before, ), diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index b2589499d..d4b60a28c 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -157,6 +157,7 @@ def _set_properties_helper(self, kms_key_name=None): "crc32c": CRC32C, "componentCount": COMPONENT_COUNT, "etag": ETAG, + "customTime": NOW, } if kms_key_name is not None: @@ -188,6 +189,7 @@ def _set_properties_helper(self, kms_key_name=None): self.assertEqual(blob.crc32c, CRC32C) self.assertEqual(blob.component_count, COMPONENT_COUNT) self.assertEqual(blob.etag, ETAG) + self.assertEqual(blob.custom_time, now) if kms_key_name is not None: self.assertEqual(blob.kms_key_name, kms_key_name) @@ -4248,6 +4250,48 @@ def test_updated_unset(self): blob = self._make_one("blob-name", bucket=BUCKET) self.assertIsNone(blob.updated) + def test_custom_time_getter(self): + from google.cloud._helpers import _RFC3339_MICROS + from google.cloud._helpers import UTC + + BLOB_NAME = "blob-name" + bucket = _Bucket() + TIMESTAMP = datetime.datetime(2014, 11, 5, 20, 34, 37, tzinfo=UTC) + TIME_CREATED = TIMESTAMP.strftime(_RFC3339_MICROS) + properties = {"customTime": 
TIME_CREATED} + blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) + self.assertEqual(blob.custom_time, TIMESTAMP) + + def test_custom_time_setter(self): + from google.cloud._helpers import UTC + + BLOB_NAME = "blob-name" + bucket = _Bucket() + TIMESTAMP = datetime.datetime(2014, 11, 5, 20, 34, 37, tzinfo=UTC) + blob = self._make_one(BLOB_NAME, bucket=bucket) + self.assertIsNone(blob.custom_time) + blob.custom_time = TIMESTAMP + self.assertEqual(blob.custom_time, TIMESTAMP) + + def test_custom_time_setter_none_value(self): + from google.cloud._helpers import _RFC3339_MICROS + from google.cloud._helpers import UTC + + BLOB_NAME = "blob-name" + bucket = _Bucket() + TIMESTAMP = datetime.datetime(2014, 11, 5, 20, 34, 37, tzinfo=UTC) + TIME_CREATED = TIMESTAMP.strftime(_RFC3339_MICROS) + properties = {"customTime": TIME_CREATED} + blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) + self.assertEqual(blob.custom_time, TIMESTAMP) + blob.custom_time = None + self.assertIsNone(blob.custom_time) + + def test_custom_time_unset(self): + BUCKET = object() + blob = self._make_one("blob-name", bucket=BUCKET) + self.assertIsNone(blob.custom_time) + def test_from_string_w_valid_uri(self): from google.cloud.storage.blob import Blob diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py index 2336416c4..38a358da4 100644 --- a/tests/unit/test_bucket.py +++ b/tests/unit/test_bucket.py @@ -77,6 +77,8 @@ def test_ctor_w_created_before_and_is_live(self): self.assertEqual(conditions.is_live, False) self.assertIsNone(conditions.matches_storage_class) self.assertIsNone(conditions.number_of_newer_versions) + self.assertIsNone(conditions.days_since_custom_time) + self.assertIsNone(conditions.custom_time_before) self.assertIsNone(conditions.noncurrent_time_before) def test_ctor_w_number_of_newer_versions(self): @@ -89,6 +91,19 @@ def test_ctor_w_number_of_newer_versions(self): self.assertIsNone(conditions.matches_storage_class) self.assertEqual(conditions.number_of_newer_versions, 3) + def test_ctor_w_days_since_custom_time(self): + conditions = self._make_one( + number_of_newer_versions=3, days_since_custom_time=2 + ) + expected = {"numNewerVersions": 3, "daysSinceCustomTime": 2} + self.assertEqual(dict(conditions), expected) + self.assertIsNone(conditions.age) + self.assertIsNone(conditions.created_before) + self.assertIsNone(conditions.is_live) + self.assertIsNone(conditions.matches_storage_class) + self.assertEqual(conditions.number_of_newer_versions, 3) + self.assertEqual(conditions.days_since_custom_time, 2) + def test_ctor_w_days_since_noncurrent_time(self): conditions = self._make_one( number_of_newer_versions=3, days_since_noncurrent_time=2 @@ -102,6 +117,25 @@ def test_ctor_w_days_since_noncurrent_time(self): self.assertEqual(conditions.number_of_newer_versions, 3) self.assertEqual(conditions.days_since_noncurrent_time, 2) + def test_ctor_w_custom_time_before(self): + import datetime + + custom_time_before = datetime.date(2018, 8, 1) + conditions = self._make_one( + number_of_newer_versions=3, custom_time_before=custom_time_before + ) + expected = { + "numNewerVersions": 3, + "customTimeBefore": custom_time_before.isoformat(), + } + self.assertEqual(dict(conditions), expected) + self.assertIsNone(conditions.age) + self.assertIsNone(conditions.created_before) + self.assertIsNone(conditions.is_live) + self.assertIsNone(conditions.matches_storage_class) + self.assertEqual(conditions.number_of_newer_versions, 3) + self.assertEqual(conditions.custom_time_before, 
custom_time_before) + def test_ctor_w_noncurrent_time_before(self): import datetime @@ -125,16 +159,18 @@ def test_ctor_w_noncurrent_time_before(self): def test_from_api_repr(self): import datetime + custom_time_before = datetime.date(2018, 8, 1) noncurrent_before = datetime.date(2018, 8, 1) before = datetime.date(2018, 8, 1) klass = self._get_target_class() - resource = { "age": 10, "createdBefore": "2018-08-01", "isLive": True, "matchesStorageClass": ["COLDLINE"], "numNewerVersions": 3, + "daysSinceCustomTime": 2, + "customTimeBefore": custom_time_before.isoformat(), "daysSinceNoncurrentTime": 2, "noncurrentTimeBefore": noncurrent_before.isoformat(), } @@ -144,6 +180,8 @@ def test_from_api_repr(self): self.assertEqual(conditions.is_live, True) self.assertEqual(conditions.matches_storage_class, ["COLDLINE"]) self.assertEqual(conditions.number_of_newer_versions, 3) + self.assertEqual(conditions.days_since_custom_time, 2) + self.assertEqual(conditions.custom_time_before, custom_time_before) self.assertEqual(conditions.days_since_noncurrent_time, 2) self.assertEqual(conditions.noncurrent_time_before, noncurrent_before) From 23b7d1c3155deae3c804c510dee3a7cec97cd46c Mon Sep 17 00:00:00 2001 From: Andrew Gorcester Date: Wed, 26 Aug 2020 12:20:49 -0700 Subject: [PATCH 17/19] feat: add configurable checksumming for blob uploads and downloads (#246) Co-authored-by: Tres Seaver Co-authored-by: Frank Natividad --- google/cloud/storage/blob.py | 185 ++++++++++++++++++++++++++++++++++- setup.py | 2 +- tests/system/test_system.py | 89 +++++++++++++++++ tests/unit/test_blob.py | 82 ++++++++++++++-- 4 files changed, 344 insertions(+), 14 deletions(-) diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index b8f01f63f..2940b52aa 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -29,6 +29,7 @@ import copy import hashlib from io import BytesIO +import logging import mimetypes import os import re @@ -108,6 +109,11 @@ _READ_LESS_THAN_SIZE = ( "Size {:d} was specified but the file-like object only had " "{:d} bytes remaining." ) +_CHUNKED_DOWNLOAD_CHECKSUM_MESSAGE = ( + "A checksum of type `{}` was requested, but checksumming is not available " + "for downloads when chunk_size is set." +) + _DEFAULT_CHUNKSIZE = 104857600 # 1024 * 1024 B * 100 = 100 MB _MAX_MULTIPART_SIZE = 8388608 # 8 MB @@ -823,6 +829,7 @@ def _do_download( end=None, raw_download=False, timeout=_DEFAULT_TIMEOUT, + checksum="md5", ): """Perform a download without any error handling. @@ -860,6 +867,17 @@ def _do_download( repeated several times using the same timeout each time. Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. + + :type checksum: str + :param checksum: + (Optional) The type of checksum to compute to verify the integrity + of the object. The response headers must contain a checksum of the + requested type. If the headers lack an appropriate checksum (for + instance in the case of transcoded or ranged downloads where the + remote service does not know the correct checksum, including + downloads where chunk_size is set) an INFO-level log will be + emitted. Supported values are "md5", "crc32c" and None. The default + is "md5". 
""" if self.chunk_size is None: if raw_download: @@ -868,12 +886,21 @@ def _do_download( klass = Download download = klass( - download_url, stream=file_obj, headers=headers, start=start, end=end + download_url, + stream=file_obj, + headers=headers, + start=start, + end=end, + checksum=checksum, ) response = download.consume(transport, timeout=timeout) self._extract_headers_from_download(response) else: + if checksum: + msg = _CHUNKED_DOWNLOAD_CHECKSUM_MESSAGE.format(checksum) + logging.info(msg) + if raw_download: klass = RawChunkedDownload else: @@ -903,6 +930,7 @@ def download_to_file( if_metageneration_match=None, if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, + checksum="md5", ): """Download the contents of this blob into a file-like object. @@ -979,6 +1007,17 @@ def download_to_file( Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. + :type checksum: str + :param checksum: + (Optional) The type of checksum to compute to verify the integrity + of the object. The response headers must contain a checksum of the + requested type. If the headers lack an appropriate checksum (for + instance in the case of transcoded or ranged downloads where the + remote service does not know the correct checksum, including + downloads where chunk_size is set) an INFO-level log will be + emitted. Supported values are "md5", "crc32c" and None. The default + is "md5". + :raises: :class:`google.cloud.exceptions.NotFound` """ client = self._require_client(client) @@ -1004,6 +1043,7 @@ def download_to_file( end, raw_download, timeout=timeout, + checksum=checksum, ) except resumable_media.InvalidResponse as exc: _raise_from_invalid_response(exc) @@ -1020,6 +1060,7 @@ def download_to_filename( if_metageneration_match=None, if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, + checksum="md5", ): """Download the contents of this blob into a named file. @@ -1072,6 +1113,17 @@ def download_to_filename( Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. + :type checksum: str + :param checksum: + (Optional) The type of checksum to compute to verify the integrity + of the object. The response headers must contain a checksum of the + requested type. If the headers lack an appropriate checksum (for + instance in the case of transcoded or ranged downloads where the + remote service does not know the correct checksum, including + downloads where chunk_size is set) an INFO-level log will be + emitted. Supported values are "md5", "crc32c" and None. The default + is "md5". + :raises: :class:`google.cloud.exceptions.NotFound` """ try: @@ -1087,6 +1139,7 @@ def download_to_filename( if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, timeout=timeout, + checksum=checksum, ) except resumable_media.DataCorruption: # Delete the corrupt downloaded file. @@ -1112,6 +1165,7 @@ def download_as_bytes( if_metageneration_match=None, if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, + checksum="md5", ): """Download the contents of this blob as a bytes object. @@ -1161,6 +1215,17 @@ def download_as_bytes( Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. + :type checksum: str + :param checksum: + (Optional) The type of checksum to compute to verify the integrity + of the object. 
The response headers must contain a checksum of the + requested type. If the headers lack an appropriate checksum (for + instance in the case of transcoded or ranged downloads where the + remote service does not know the correct checksum, including + downloads where chunk_size is set) an INFO-level log will be + emitted. Supported values are "md5", "crc32c" and None. The default + is "md5". + :rtype: bytes :returns: The data stored in this blob. @@ -1178,6 +1243,7 @@ def download_as_bytes( if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, timeout=timeout, + checksum=checksum, ) return string_buffer.getvalue() @@ -1453,6 +1519,7 @@ def _do_multipart_upload( if_metageneration_match, if_metageneration_not_match, timeout=_DEFAULT_TIMEOUT, + checksum=None, ): """Perform a multipart upload. @@ -1514,6 +1581,14 @@ def _do_multipart_upload( Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. + :type checksum: str + :param checksum: + (Optional) The type of checksum to compute to verify + the integrity of the object. The request metadata will be amended + to include the computed value. Using this option will override a + manually-set checksum value. Supported values are "md5", + "crc32c" and None. The default is None. + :rtype: :class:`~requests.Response` :returns: The "200 OK" response object returned after the multipart upload request. @@ -1569,7 +1644,7 @@ def _do_multipart_upload( ) upload_url = _add_query_parameters(base_url, name_value_pairs) - upload = MultipartUpload(upload_url, headers=headers) + upload = MultipartUpload(upload_url, headers=headers, checksum=checksum) if num_retries is not None: upload._retry_strategy = resumable_media.RetryStrategy( @@ -1597,6 +1672,7 @@ def _initiate_resumable_upload( if_metageneration_match=None, if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, + checksum=None, ): """Initiate a resumable upload. @@ -1671,6 +1747,16 @@ def _initiate_resumable_upload( Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. + :type checksum: str + :param checksum: + (Optional) The type of checksum to compute to verify + the integrity of the object. After the upload is complete, the + server-computed checksum of the resulting object will be checked + and google.resumable_media.common.DataCorruption will be raised on + a mismatch. On a validation failure, the client will attempt to + delete the uploaded object automatically. Supported values + are "md5", "crc32c" and None. The default is None. + :rtype: tuple :returns: Pair of @@ -1727,7 +1813,9 @@ def _initiate_resumable_upload( ) upload_url = _add_query_parameters(base_url, name_value_pairs) - upload = ResumableUpload(upload_url, chunk_size, headers=headers) + upload = ResumableUpload( + upload_url, chunk_size, headers=headers, checksum=checksum + ) if num_retries is not None: upload._retry_strategy = resumable_media.RetryStrategy( @@ -1759,6 +1847,7 @@ def _do_resumable_upload( if_metageneration_match, if_metageneration_not_match, timeout=_DEFAULT_TIMEOUT, + checksum=None, ): """Perform a resumable upload. @@ -1823,6 +1912,16 @@ def _do_resumable_upload( Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. + :type checksum: str + :param checksum: + (Optional) The type of checksum to compute to verify + the integrity of the object. 
After the upload is complete, the + server-computed checksum of the resulting object will be checked + and google.resumable_media.common.DataCorruption will be raised on + a mismatch. On a validation failure, the client will attempt to + delete the uploaded object automatically. Supported values + are "md5", "crc32c" and None. The default is None. + :rtype: :class:`~requests.Response` :returns: The "200 OK" response object returned after the final chunk is uploaded. @@ -1839,10 +1938,16 @@ def _do_resumable_upload( if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, timeout=timeout, + checksum=checksum, ) while not upload.finished: - response = upload.transmit_next_chunk(transport, timeout=timeout) + try: + response = upload.transmit_next_chunk(transport, timeout=timeout) + except resumable_media.DataCorruption: + # Attempt to delete the corrupted object. + self.delete() + raise return response @@ -1859,6 +1964,7 @@ def _do_upload( if_metageneration_match, if_metageneration_not_match, timeout=_DEFAULT_TIMEOUT, + checksum=None, ): """Determine an upload strategy and then perform the upload. @@ -1924,6 +2030,19 @@ def _do_upload( Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. + :type checksum: str + :param checksum: + (Optional) The type of checksum to compute to verify + the integrity of the object. If the upload is completed in a single + request, the checksum will be entirely precomputed and the remote + server will handle verification and error handling. If the upload + is too large and must be transmitted in multiple requests, the + checksum will be incrementally computed and the client will handle + verification and error handling, raising + google.resumable_media.common.DataCorruption on a mismatch and + attempting to delete the corrupted file. Supported values are + "md5", "crc32c" and None. The default is None. + :rtype: dict :returns: The parsed JSON from the "200 OK" response. This will be the **only** response in the multipart case and it will be the @@ -1942,6 +2061,7 @@ def _do_upload( if_metageneration_match, if_metageneration_not_match, timeout=timeout, + checksum=checksum, ) else: response = self._do_resumable_upload( @@ -1956,6 +2076,7 @@ def _do_upload( if_metageneration_match, if_metageneration_not_match, timeout=timeout, + checksum=checksum, ) return response.json() @@ -1974,6 +2095,7 @@ def upload_from_file( if_metageneration_match=None, if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, + checksum=None, ): """Upload the contents of this blob from a file-like object. @@ -2068,6 +2190,19 @@ def upload_from_file( Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. + :type checksum: str + :param checksum: + (Optional) The type of checksum to compute to verify + the integrity of the object. If the upload is completed in a single + request, the checksum will be entirely precomputed and the remote + server will handle verification and error handling. If the upload + is too large and must be transmitted in multiple requests, the + checksum will be incrementally computed and the client will handle + verification and error handling, raising + google.resumable_media.common.DataCorruption on a mismatch and + attempting to delete the corrupted file. Supported values are + "md5", "crc32c" and None. The default is None. 
+ :raises: :class:`~google.cloud.exceptions.GoogleCloudError` if the upload response returns an error status. @@ -2094,6 +2229,7 @@ def upload_from_file( if_metageneration_match, if_metageneration_not_match, timeout=timeout, + checksum=checksum, ) self._set_properties(created_json) except resumable_media.InvalidResponse as exc: @@ -2110,6 +2246,7 @@ def upload_from_filename( if_metageneration_match=None, if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, + checksum=None, ): """Upload this blob's contents from the content of a named file. @@ -2176,6 +2313,19 @@ def upload_from_filename( repeated several times using the same timeout each time. Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. + + :type checksum: str + :param checksum: + (Optional) The type of checksum to compute to verify + the integrity of the object. If the upload is completed in a single + request, the checksum will be entirely precomputed and the remote + server will handle verification and error handling. If the upload + is too large and must be transmitted in multiple requests, the + checksum will be incrementally computed and the client will handle + verification and error handling, raising + google.resumable_media.common.DataCorruption on a mismatch and + attempting to delete the corrupted file. Supported values are + "md5", "crc32c" and None. The default is None. """ content_type = self._get_content_type(content_type, filename=filename) @@ -2192,6 +2342,7 @@ def upload_from_filename( if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, timeout=timeout, + checksum=checksum, ) def upload_from_string( @@ -2205,6 +2356,7 @@ def upload_from_string( if_metageneration_match=None, if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, + checksum=None, ): """Upload contents of this blob from the provided string. @@ -2266,6 +2418,19 @@ def upload_from_string( repeated several times using the same timeout each time. Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. + + :type checksum: str + :param checksum: + (Optional) The type of checksum to compute to verify + the integrity of the object. If the upload is completed in a single + request, the checksum will be entirely precomputed and the remote + server will handle verification and error handling. If the upload + is too large and must be transmitted in multiple requests, the + checksum will be incrementally computed and the client will handle + verification and error handling, raising + google.resumable_media.common.DataCorruption on a mismatch and + attempting to delete the corrupted file. Supported values are + "md5", "crc32c" and None. The default is None. """ data = _to_bytes(data, encoding="utf-8") string_buffer = BytesIO(data) @@ -2289,6 +2454,7 @@ def create_resumable_upload_session( origin=None, client=None, timeout=_DEFAULT_TIMEOUT, + checksum=None, ): """Create a resumable upload session. @@ -2354,6 +2520,16 @@ def create_resumable_upload_session( Can also be passed as a tuple (connect_timeout, read_timeout). See :meth:`requests.Session.request` documentation for details. + :type checksum: str + :param checksum: + (Optional) The type of checksum to compute to verify + the integrity of the object. 
After the upload is complete, the + server-computed checksum of the resulting object will be checked + and google.resumable_media.common.DataCorruption will be raised on + a mismatch. On a validation failure, the client will attempt to + delete the uploaded object automatically. Supported values + are "md5", "crc32c" and None. The default is None. + :rtype: str :returns: The resumable upload session URL. The upload can be completed by making an HTTP PUT request with the @@ -2383,6 +2559,7 @@ def create_resumable_upload_session( extra_headers=extra_headers, chunk_size=self._CHUNK_SIZE_MULTIPLE, timeout=timeout, + checksum=checksum, ) return upload.resumable_url diff --git a/setup.py b/setup.py index e74f251cc..ce0ebbecf 100644 --- a/setup.py +++ b/setup.py @@ -31,7 +31,7 @@ dependencies = [ "google-auth >= 1.11.0, < 2.0dev", "google-cloud-core >= 1.4.1, < 2.0dev", - "google-resumable-media >= 0.6.0, < 2.0dev", + "google-resumable-media >= 1.0.0, < 2.0dev", ] extras = {} diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 7d6e79b07..e6636b41d 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -22,6 +22,7 @@ import tempfile import time import unittest +import mock import requests import six @@ -33,6 +34,8 @@ from google.cloud.storage.bucket import LifecycleRuleDelete from google.cloud.storage.bucket import LifecycleRuleSetStorageClass from google.cloud import kms +from google import resumable_media +import google.auth import google.api_core from google.api_core import path_template import google.oauth2 @@ -573,6 +576,37 @@ def test_large_file_write_from_stream(self): md5_hash = md5_hash.encode("utf-8") self.assertEqual(md5_hash, file_data["hash"]) + def test_large_file_write_from_stream_with_checksum(self): + blob = self.bucket.blob("LargeFile") + + file_data = self.FILES["big"] + with open(file_data["path"], "rb") as file_obj: + blob.upload_from_file(file_obj, checksum="crc32c") + self.case_blobs_to_delete.append(blob) + + md5_hash = blob.md5_hash + if not isinstance(md5_hash, six.binary_type): + md5_hash = md5_hash.encode("utf-8") + self.assertEqual(md5_hash, file_data["hash"]) + + def test_large_file_write_from_stream_with_failed_checksum(self): + blob = self.bucket.blob("LargeFile") + + file_data = self.FILES["big"] + + # Intercept the digest processing at the last stage and replace it with garbage. + # This is done with a patch to monkey-patch the resumable media library's checksum + # processing; it does not mock a remote interface like a unit test would. The + # remote API is still exercised. 
+ with open(file_data["path"], "rb") as file_obj: + with mock.patch( + "google.resumable_media._helpers.prepare_checksum_digest", + return_value="FFFFFF==", + ): + with self.assertRaises(resumable_media.DataCorruption): + blob.upload_from_file(file_obj, checksum="crc32c") + self.assertFalse(blob.exists()) + def test_large_encrypted_file_write_from_stream(self): blob = self.bucket.blob("LargeFile", encryption_key=self.ENCRYPTION_KEY) @@ -607,6 +641,32 @@ def test_small_file_write_from_filename(self): md5_hash = md5_hash.encode("utf-8") self.assertEqual(md5_hash, file_data["hash"]) + def test_small_file_write_from_filename_with_checksum(self): + blob = self.bucket.blob("SmallFile") + + file_data = self.FILES["simple"] + blob.upload_from_filename(file_data["path"], checksum="crc32c") + self.case_blobs_to_delete.append(blob) + + md5_hash = blob.md5_hash + if not isinstance(md5_hash, six.binary_type): + md5_hash = md5_hash.encode("utf-8") + self.assertEqual(md5_hash, file_data["hash"]) + + def test_small_file_write_from_filename_with_failed_checksum(self): + blob = self.bucket.blob("SmallFile") + + file_data = self.FILES["simple"] + # Intercept the digest processing at the last stage and replace it with garbage + with mock.patch( + "google.resumable_media._helpers.prepare_checksum_digest", + return_value="FFFFFF==", + ): + with self.assertRaises(google.api_core.exceptions.BadRequest): + blob.upload_from_filename(file_data["path"], checksum="crc32c") + + self.assertFalse(blob.exists()) + @unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.") def test_crud_blob_w_user_project(self): with_user_project = Config.CLIENT.bucket( @@ -836,6 +896,35 @@ def test_download_w_generation_match(self): self.assertEqual(file_contents, stored_contents) + def test_download_w_failed_crc32c_checksum(self): + blob = self.bucket.blob("FailedChecksumBlob") + file_contents = b"Hello World" + blob.upload_from_string(file_contents) + self.case_blobs_to_delete.append(blob) + + with tempfile.NamedTemporaryFile() as temp_f: + # Intercept the digest processing at the last stage and replace it with garbage. + # This is done with a patch to monkey-patch the resumable media library's checksum + # processing; it does not mock a remote interface like a unit test would. The + # remote API is still exercised. 
+ with mock.patch( + "google.resumable_media._helpers.prepare_checksum_digest", + return_value="FFFFFF==", + ): + with self.assertRaises(resumable_media.DataCorruption): + blob.download_to_filename(temp_f.name, checksum="crc32c") + + # Confirm the file was deleted on failure + self.assertFalse(os.path.isfile(temp_f.name)) + + # Now download with checksumming turned off + blob.download_to_filename(temp_f.name, checksum=None) + + with open(temp_f.name, "rb") as file_obj: + stored_contents = file_obj.read() + + self.assertEqual(file_contents, stored_contents) + def test_copy_existing_file(self): filename = self.FILES["logo"]["path"] blob = storage.Blob("CloudLogo", bucket=self.bucket) diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index d4b60a28c..9bf60d42d 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -986,11 +986,21 @@ def _do_download_helper_wo_chunks(self, w_range, raw_download, timeout=None): if w_range: patched.assert_called_once_with( - download_url, stream=file_obj, headers=headers, start=1, end=3 + download_url, + stream=file_obj, + headers=headers, + start=1, + end=3, + checksum="md5", ) else: patched.assert_called_once_with( - download_url, stream=file_obj, headers=headers, start=None, end=None + download_url, + stream=file_obj, + headers=headers, + start=None, + end=None, + checksum="md5", ) patched.return_value.consume.assert_called_once_with( @@ -1014,7 +1024,9 @@ def test__do_download_wo_chunks_w_custom_timeout(self): w_range=False, raw_download=False, timeout=9.58 ) - def _do_download_helper_w_chunks(self, w_range, raw_download, timeout=None): + def _do_download_helper_w_chunks( + self, w_range, raw_download, timeout=None, checksum="md5" + ): blob_name = "blob-name" client = mock.Mock(_credentials=_make_credentials(), spec=["_credentials"]) bucket = _Bucket(client) @@ -1057,6 +1069,7 @@ def side_effect(*args, **kwargs): start=1, end=3, raw_download=raw_download, + checksum=checksum, **timeout_kwarg ) else: @@ -1066,6 +1079,7 @@ def side_effect(*args, **kwargs): download_url, headers, raw_download=raw_download, + checksum=checksum, **timeout_kwarg ) @@ -1096,6 +1110,24 @@ def test__do_download_w_chunks_w_range_w_raw(self): def test__do_download_w_chunks_w_custom_timeout(self): self._do_download_helper_w_chunks(w_range=True, raw_download=True, timeout=9.58) + def test__do_download_w_chunks_w_checksum(self): + from google.cloud.storage import blob as blob_module + + with mock.patch("logging.info") as patch: + self._do_download_helper_w_chunks( + w_range=False, raw_download=False, checksum="md5" + ) + patch.assert_called_once_with( + blob_module._CHUNKED_DOWNLOAD_CHECKSUM_MESSAGE.format("md5") + ) + + def test__do_download_w_chunks_wo_checksum(self): + with mock.patch("logging.info") as patch: + self._do_download_helper_w_chunks( + w_range=False, raw_download=False, checksum=None + ) + patch.assert_not_called() + def test_download_to_file_with_failure(self): import requests from google.resumable_media import InvalidResponse @@ -1132,6 +1164,7 @@ def test_download_to_file_with_failure(self): None, False, timeout=self._get_default_timeout(), + checksum="md5", ) def test_download_to_file_wo_media_link(self): @@ -1162,6 +1195,7 @@ def test_download_to_file_wo_media_link(self): None, False, timeout=self._get_default_timeout(), + checksum="md5", ) def test_download_to_file_w_generation_match(self): @@ -1191,6 +1225,7 @@ def test_download_to_file_w_generation_match(self): None, False, timeout=self._get_default_timeout(), + checksum="md5", ) def 
_download_to_file_helper(self, use_chunks, raw_download, timeout=None): @@ -1228,6 +1263,7 @@ def _download_to_file_helper(self, use_chunks, raw_download, timeout=None): None, raw_download, timeout=expected_timeout, + checksum="md5", ) def test_download_to_file_wo_chunks_wo_raw(self): @@ -1293,6 +1329,7 @@ def _download_to_filename_helper(self, updated, raw_download, timeout=None): None, raw_download, timeout=expected_timeout, + checksum="md5", ) stream = blob._do_download.mock_calls[0].args[1] self.assertEqual(stream.name, temp.name) @@ -1324,6 +1361,7 @@ def test_download_to_filename_w_generation_match(self): None, False, timeout=self._get_default_timeout(), + checksum="md5", ) def test_download_to_filename_w_updated_wo_raw(self): @@ -1381,6 +1419,7 @@ def test_download_to_filename_corrupted(self): None, False, timeout=self._get_default_timeout(), + checksum="md5", ) stream = blob._do_download.mock_calls[0].args[1] self.assertEqual(stream.name, filename) @@ -1415,6 +1454,7 @@ def test_download_to_filename_w_key(self): None, False, timeout=self._get_default_timeout(), + checksum="md5", ) stream = blob._do_download.mock_calls[0].args[1] self.assertEqual(stream.name, temp.name) @@ -1446,6 +1486,7 @@ def _download_as_bytes_helper(self, raw_download, timeout=None): None, raw_download, timeout=expected_timeout, + checksum="md5", ) stream = blob._do_download.mock_calls[0].args[1] self.assertIsInstance(stream, io.BytesIO) @@ -1524,6 +1565,7 @@ def test_download_as_bytes_w_generation_match(self): if_metageneration_match=None, if_metageneration_not_match=None, timeout=self._get_default_timeout(), + checksum="md5", ) def test_download_as_bytes_wo_raw(self): @@ -1565,6 +1607,7 @@ def _download_as_text_helper(self, raw_download, encoding=None, timeout=None): None, raw_download, timeout=expected_timeout, + checksum="md5", ) stream = blob._do_download.mock_calls[0].args[1] self.assertIsInstance(stream, io.BytesIO) @@ -1593,6 +1636,7 @@ def test_download_as_text_w_generation_match(self): if_metageneration_match=None, if_metageneration_not_match=None, timeout=self._get_default_timeout(), + checksum="md5", ) def test_download_as_text_wo_raw(self): @@ -1631,6 +1675,7 @@ def test_download_as_string(self, mock_warn): if_metageneration_match=None, if_metageneration_not_match=None, timeout=self._get_default_timeout(), + checksum="md5", ) mock_warn.assert_called_with( @@ -2132,7 +2177,9 @@ def test__initiate_resumable_upload_with_generation_not_match(self): def test__initiate_resumable_upload_with_predefined_acl(self): self._initiate_resumable_helper(predefined_acl="private") - def _make_resumable_transport(self, headers1, headers2, headers3, total_bytes): + def _make_resumable_transport( + self, headers1, headers2, headers3, total_bytes, data_corruption=False + ): from google import resumable_media fake_transport = mock.Mock(spec=["request"]) @@ -2142,9 +2189,12 @@ def _make_resumable_transport(self, headers1, headers2, headers3, total_bytes): resumable_media.PERMANENT_REDIRECT, headers2 ) json_body = '{{"size": "{:d}"}}'.format(total_bytes) - fake_response3 = self._mock_requests_response( - http_client.OK, headers3, content=json_body.encode("utf-8") - ) + if data_corruption: + fake_response3 = resumable_media.DataCorruption(None) + else: + fake_response3 = self._mock_requests_response( + http_client.OK, headers3, content=json_body.encode("utf-8") + ) responses = [fake_response1, fake_response2, fake_response3] fake_transport.request.side_effect = responses @@ -2255,6 +2305,7 @@ def _do_resumable_helper( 
if_metageneration_match=None, if_metageneration_not_match=None, timeout=None, + data_corruption=False, ): bucket = _Bucket(name="yesterday") blob = self._make_one(u"blob-name", bucket=bucket) @@ -2274,7 +2325,7 @@ def _do_resumable_helper( headers1 = {"location": resumable_url} headers2 = {"range": "bytes=0-{:d}".format(blob.chunk_size - 1)} transport, responses = self._make_resumable_transport( - headers1, headers2, {}, total_bytes + headers1, headers2, {}, total_bytes, data_corruption=data_corruption ) # Create some mock arguments and call the method under test. @@ -2363,6 +2414,16 @@ def test__do_resumable_upload_with_retry(self): def test__do_resumable_upload_with_predefined_acl(self): self._do_resumable_helper(predefined_acl="private") + def test__do_resumable_upload_with_data_corruption(self): + from google.resumable_media import DataCorruption + + with mock.patch("google.cloud.storage.blob.Blob.delete") as patch: + try: + self._do_resumable_helper(data_corruption=True) + except Exception as e: + self.assertTrue(patch.called) + self.assertIsInstance(e, DataCorruption) + def _do_upload_helper( self, chunk_size=None, @@ -2434,6 +2495,7 @@ def _do_upload_helper( if_metageneration_match, if_metageneration_not_match, timeout=expected_timeout, + checksum=None, ) blob._do_resumable_upload.assert_not_called() else: @@ -2450,6 +2512,7 @@ def _do_upload_helper( if_metageneration_match, if_metageneration_not_match, timeout=expected_timeout, + checksum=None, ) def test__do_upload_uses_multipart(self): @@ -2526,6 +2589,7 @@ def _upload_from_file_helper(self, side_effect=None, **kwargs): if_metageneration_match, if_metageneration_not_match, timeout=expected_timeout, + checksum=None, ) return stream @@ -2586,7 +2650,7 @@ def _do_upload_mock_call_helper( self.assertIsNone(pos_args[9]) # if_metageneration_not_match expected_timeout = self._get_default_timeout() if timeout is None else timeout - self.assertEqual(kwargs, {"timeout": expected_timeout}) + self.assertEqual(kwargs, {"timeout": expected_timeout, "checksum": None}) return pos_args[1] From e722376371cb8a3acc46d6c84fb41f4e874f41aa Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 26 Aug 2020 15:42:14 -0400 Subject: [PATCH 18/19] fix: repair mal-formed docstring (#255) Co-authored-by: Frank Natividad --- google/cloud/storage/blob.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index 2940b52aa..28b16682d 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -3543,10 +3543,13 @@ def custom_time(self): @custom_time.setter def custom_time(self, value): - """Set the custom time for the object. Once set it can't be unset - and only changed to a custom datetime in the future. If the - custom_time must be unset, you must either perform a rewrite operation - or upload the data again. + """Set the custom time for the object. + + Once set on the server side object, this value can't be unset, but may + only changed to a custom datetime in the future. + + If :attr:`custom_time` must be unset, either perform a rewrite + operation or upload the data again. 
See https://cloud.google.com/storage/docs/json_api/v1/objects From 22eeb2f66e1372798a07c97aa30236c178e1da1b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 26 Aug 2020 14:26:08 -0700 Subject: [PATCH 19/19] chore: release 1.31.0 (#231) * chore: updated CHANGELOG.md [ci skip] * chore: updated setup.cfg [ci skip] * chore: updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 24 ++++++++++++++++++++++++ setup.py | 2 +- 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0e117c6b1..84b9303c8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,30 @@ [1]: https://pypi.org/project/google-cloud-storage/#history +## [1.31.0](https://www.github.com/googleapis/python-storage/compare/v1.30.0...v1.31.0) (2020-08-26) + + +### Features + +* add configurable checksumming for blob uploads and downloads ([#246](https://www.github.com/googleapis/python-storage/issues/246)) ([23b7d1c](https://www.github.com/googleapis/python-storage/commit/23b7d1c3155deae3c804c510dee3a7cec97cd46c)) +* add support for 'Blob.custom_time' and lifecycle rules ([#199](https://www.github.com/googleapis/python-storage/issues/199)) ([180873d](https://www.github.com/googleapis/python-storage/commit/180873de139f7f8e00b7bef423bc15760cf68cc2)) +* error message return from api ([#235](https://www.github.com/googleapis/python-storage/issues/235)) ([a8de586](https://www.github.com/googleapis/python-storage/commit/a8de5868f32b45868f178f420138fcd2fe42f5fd)) +* **storage:** add support of daysSinceNoncurrentTime and noncurrentTimeBefore ([#162](https://www.github.com/googleapis/python-storage/issues/162)) ([136c097](https://www.github.com/googleapis/python-storage/commit/136c0970f8ef7ad4751104e3b8b7dd3204220a67)) +* pass 'client_options' to base class ctor ([#225](https://www.github.com/googleapis/python-storage/issues/225)) ([e1f91fc](https://www.github.com/googleapis/python-storage/commit/e1f91fcca6c001bc3b0c5f759a7a003fcf60c0a6)), closes [#210](https://www.github.com/googleapis/python-storage/issues/210) +* rename 'Blob.download_as_{string,bytes}', add 'Blob.download_as_text' ([#182](https://www.github.com/googleapis/python-storage/issues/182)) ([73107c3](https://www.github.com/googleapis/python-storage/commit/73107c35f23c4a358e957c2b8188300a7fa958fe)) + + +### Bug Fixes + +* change datetime.now to utcnow ([#251](https://www.github.com/googleapis/python-storage/issues/251)) 
([3465d08](https://www.github.com/googleapis/python-storage/commit/3465d08e098edb250dee5e97d1fb9ded8bae5700)), closes [#228](https://www.github.com/googleapis/python-storage/issues/228) +* extract hashes correctly during download ([#238](https://www.github.com/googleapis/python-storage/issues/238)) ([23cfb65](https://www.github.com/googleapis/python-storage/commit/23cfb65c3a3b10759c67846e162e4ed77a3f5307)) +* repair mal-formed docstring ([#255](https://www.github.com/googleapis/python-storage/issues/255)) ([e722376](https://www.github.com/googleapis/python-storage/commit/e722376371cb8a3acc46d6c84fb41f4e874f41aa)) + + +### Documentation + +* update docs build (via synth) ([#222](https://www.github.com/googleapis/python-storage/issues/222)) ([4c5adfa](https://www.github.com/googleapis/python-storage/commit/4c5adfa6e05bf018d72ee1a7e99679fd55f2c662)) + ## [1.30.0](https://www.github.com/googleapis/python-storage/compare/v1.29.0...v1.30.0) (2020-07-24) diff --git a/setup.py b/setup.py index ce0ebbecf..fd678eee0 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-storage" description = "Google Cloud Storage API client library" -version = "1.30.0" +version = "1.31.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta'
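Illustrative usage of the new checksum option (a sketch, not part of the patch series above): the example below shows how the keyword added by these commits is expected to be used, based on the docstrings and system tests introduced here. The bucket name, object name, and local file paths are hypothetical placeholders, and a default-configured client with access to an existing bucket is assumed.

    from google.cloud import storage
    from google.resumable_media import DataCorruption

    client = storage.Client()
    bucket = client.bucket("my-example-bucket")   # hypothetical bucket name
    blob = bucket.blob("example-object")          # hypothetical object name

    # Upload with integrity checking. For a single-request (multipart) upload
    # the digest is precomputed and verified server-side; for a resumable
    # upload the client checks the server-reported checksum after the final
    # chunk and deletes the uploaded object on a mismatch.
    blob.upload_from_filename("local-file.bin", checksum="crc32c")

    # Download with client-side verification. On a digest mismatch the
    # partially written file is removed and DataCorruption is raised.
    try:
        blob.download_to_filename("restored-file.bin", checksum="crc32c")
    except DataCorruption:
        print("crc32c mismatch detected while downloading example-object")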