diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 108063d4d..ff5126c18 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4ee57a76a176ede9087c14330c625a71553cf9c72828b2c0ca12f5338171ba60 + digest: sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 diff --git a/.github/release-please.yml b/.github/release-please.yml index 4507ad059..466597e5b 100644 --- a/.github/release-please.yml +++ b/.github/release-please.yml @@ -1 +1,2 @@ releaseType: python +handleGHRelease: true diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml new file mode 100644 index 000000000..d4ca94189 --- /dev/null +++ b/.github/release-trigger.yml @@ -0,0 +1 @@ +enabled: true diff --git a/.repo-metadata.json b/.repo-metadata.json index 124b40eb9..670aba793 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -2,9 +2,9 @@ "name": "bigquery", "name_pretty": "Google Cloud BigQuery", "product_documentation": "https://cloud.google.com/bigquery", - "client_documentation": "https://googleapis.dev/python/bigquery/latest", + "client_documentation": "https://cloud.google.com/python/docs/reference/bigquery/latest", "issue_tracker": "https://issuetracker.google.com/savedsearches/559654", - "release_level": "ga", + "release_level": "stable", "language": "python", "library_type": "GAPIC_COMBO", "repo": "googleapis/python-bigquery", @@ -12,5 +12,6 @@ "api_id": "bigquery.googleapis.com", "requires_billing": false, "default_version": "v2", - "codeowner_team": "@googleapis/api-bigquery" + "codeowner_team": "@googleapis/api-bigquery", + "api_shortname": "bigquery" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 5ba219d20..6e69fa621 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,18 @@ [1]: https://pypi.org/project/google-cloud-bigquery/#history +## [2.32.0](https://github.com/googleapis/python-bigquery/compare/v2.31.0...v2.32.0) (2022-01-12) + + +### Features + +* support authorized dataset entity ([#1075](https://github.com/googleapis/python-bigquery/issues/1075)) ([c098cd0](https://github.com/googleapis/python-bigquery/commit/c098cd01c755633bfaba7193dd5c044a489a5b61)) + + +### Bug Fixes + +* remove query text from exception message, use `exception.debug_message` instead ([#1105](https://github.com/googleapis/python-bigquery/issues/1105)) ([e23114c](https://github.com/googleapis/python-bigquery/commit/e23114ce362e09ac72f733a640e53a561cc9ce69)) + ## [2.31.0](https://www.github.com/googleapis/python-bigquery/compare/v2.30.1...v2.31.0) (2021-11-24) diff --git a/README.rst b/README.rst index d0ad059a2..bafa06693 100644 --- a/README.rst +++ b/README.rst @@ -52,7 +52,7 @@ dependencies. 
Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.6, < 3.10 +Python >= 3.6, < 3.11 Unsupported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/google/cloud/bigquery/dataset.py b/google/cloud/bigquery/dataset.py index ff015d605..499072de2 100644 --- a/google/cloud/bigquery/dataset.py +++ b/google/cloud/bigquery/dataset.py @@ -77,10 +77,10 @@ def _get_routine_reference(self, routine_id): class AccessEntry(object): """Represents grant of an access role to an entity. - An entry must have exactly one of the allowed :attr:`ENTITY_TYPES`. If - anything but ``view`` or ``routine`` are set, a ``role`` is also required. - ``role`` is omitted for ``view`` and ``routine``, because they are always - read-only. + An entry must have exactly one of the allowed + :class:`google.cloud.bigquery.enums.EntityTypes`. If anything but ``view``, ``routine``, + or ``dataset`` are set, a ``role`` is also required. ``role`` is omitted for ``view``, + ``routine``, ``dataset``, because they are always read-only. See https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets. @@ -88,17 +88,18 @@ class AccessEntry(object): role (str): Role granted to the entity. The following string values are supported: `'READER'`, `'WRITER'`, `'OWNER'`. It may also be - :data:`None` if the ``entity_type`` is ``view`` or ``routine``. + :data:`None` if the ``entity_type`` is ``view``, ``routine``, or ``dataset``. entity_type (str): - Type of entity being granted the role. One of :attr:`ENTITY_TYPES`. + Type of entity being granted the role. See + :class:`google.cloud.bigquery.enums.EntityTypes` for supported types. entity_id (Union[str, Dict[str, str]]): - If the ``entity_type`` is not 'view' or 'routine', the ``entity_id`` - is the ``str`` ID of the entity being granted the role. If the - ``entity_type`` is 'view' or 'routine', the ``entity_id`` is a ``dict`` - representing the view or routine from a different dataset to grant - access to in the following format for views:: + If the ``entity_type`` is not 'view', 'routine', or 'dataset', the + ``entity_id`` is the ``str`` ID of the entity being granted the role. If + the ``entity_type`` is 'view' or 'routine', the ``entity_id`` is a ``dict`` + representing the view or routine from a different dataset to grant access + to in the following format for views:: { 'projectId': string, @@ -114,11 +115,22 @@ class AccessEntry(object): 'routineId': string } + If the ``entity_type`` is 'dataset', the ``entity_id`` is a ``dict`` that includes + a 'dataset' field with a ``dict`` representing the dataset and a 'target_types' + field with a ``str`` value of the dataset's resource type:: + + { + 'dataset': { + 'projectId': string, + 'datasetId': string, + }, + 'target_types: 'VIEWS' + } + Raises: ValueError: - If the ``entity_type`` is not among :attr:`ENTITY_TYPES`, or if a - ``view`` or a ``routine`` has ``role`` set, or a non ``view`` and - non ``routine`` **does not** have a ``role`` set. + If a ``view``, ``routine``, or ``dataset`` has ``role`` set, or a non ``view``, + non ``routine``, and non ``dataset`` **does not** have a ``role`` set. 
Examples: >>> entry = AccessEntry('OWNER', 'userByEmail', 'user@example.com') @@ -131,27 +143,9 @@ class AccessEntry(object): >>> entry = AccessEntry(None, 'view', view) """ - ENTITY_TYPES = frozenset( - [ - "userByEmail", - "groupByEmail", - "domain", - "specialGroup", - "view", - "iamMember", - "routine", - ] - ) - """Allowed entity types.""" - - def __init__(self, role, entity_type, entity_id): - if entity_type not in self.ENTITY_TYPES: - message = "Entity type %r not among: %s" % ( - entity_type, - ", ".join(self.ENTITY_TYPES), - ) - raise ValueError(message) - if entity_type in ("view", "routine"): + def __init__(self, role=None, entity_type=None, entity_id=None): + self._properties = {} + if entity_type in ("view", "routine", "dataset"): if role is not None: raise ValueError( "Role must be None for a %r. Received " @@ -162,7 +156,6 @@ def __init__(self, role, entity_type, entity_id): raise ValueError( "Role must be set for entity " "type %r" % (entity_type,) ) - self._role = role self._entity_type = entity_type self._entity_id = entity_id @@ -214,7 +207,8 @@ def to_api_repr(self): Returns: Dict[str, object]: Access entry represented as an API resource """ - resource = {self._entity_type: self._entity_id} + resource = copy.deepcopy(self._properties) + resource[self._entity_type] = self._entity_id if self._role is not None: resource["role"] = self._role return resource @@ -241,7 +235,10 @@ def from_api_repr(cls, resource: dict) -> "AccessEntry": entity_type, entity_id = entry.popitem() if len(entry) != 0: raise ValueError("Entry has unexpected keys remaining.", entry) - return cls(role, entity_type, entity_id) + + config = cls(role, entity_type, entity_id) + config._properties = copy.deepcopy(resource) + return config class DatasetReference(object): diff --git a/google/cloud/bigquery/enums.py b/google/cloud/bigquery/enums.py index 0eaaffd2e..7fc0a5fd6 100644 --- a/google/cloud/bigquery/enums.py +++ b/google/cloud/bigquery/enums.py @@ -232,6 +232,19 @@ def _make_sql_scalars_enum(): StandardSqlDataTypes = _make_sql_scalars_enum() +class EntityTypes(str, enum.Enum): + """Enum of allowed entity type names in AccessEntry""" + + USER_BY_EMAIL = "userByEmail" + GROUP_BY_EMAIL = "groupByEmail" + DOMAIN = "domain" + DATASET = "dataset" + SPECIAL_GROUP = "specialGroup" + VIEW = "view" + IAM_MEMBER = "iamMember" + ROUTINE = "routine" + + # See also: https://cloud.google.com/bigquery/data-types#legacy_sql_data_types # and https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types class SqlTypeNames(str, enum.Enum): diff --git a/google/cloud/bigquery/job/query.py b/google/cloud/bigquery/job/query.py index 36e388238..2dd945984 100644 --- a/google/cloud/bigquery/job/query.py +++ b/google/cloud/bigquery/job/query.py @@ -66,6 +66,7 @@ _CONTAINS_ORDER_BY = re.compile(r"ORDER\s+BY", re.IGNORECASE) +_EXCEPTION_FOOTER_TEMPLATE = "{message}\n\nLocation: {location}\nJob ID: {job_id}\n" _TIMEOUT_BUFFER_SECS = 0.1 @@ -1196,17 +1197,17 @@ def _blocking_poll(self, timeout=None, **kwargs): super(QueryJob, self)._blocking_poll(timeout=timeout, **kwargs) @staticmethod - def _format_for_exception(query, job_id): + def _format_for_exception(message: str, query: str): """Format a query for the output in exception message. Args: + message (str): The original exception message. query (str): The SQL query to format. - job_id (str): The ID of the job that ran the query. 
Returns: str: A formatted query text. """ - template = "\n\n(job ID: {job_id})\n\n{header}\n\n{ruler}\n{body}\n{ruler}" + template = "{message}\n\n{header}\n\n{ruler}\n{body}\n{ruler}" lines = query.splitlines() max_line_len = max(len(line) for line in lines) @@ -1223,7 +1224,7 @@ def _format_for_exception(query, job_id): "{:4}:{}".format(n, line) for n, line in enumerate(lines, start=1) ) - return template.format(job_id=job_id, header=header, ruler=ruler, body=body) + return template.format(message=message, header=header, ruler=ruler, body=body) def _begin(self, client=None, retry=DEFAULT_RETRY, timeout=None): """API call: begin the job via a POST request @@ -1248,7 +1249,10 @@ def _begin(self, client=None, retry=DEFAULT_RETRY, timeout=None): try: super(QueryJob, self)._begin(client=client, retry=retry, timeout=timeout) except exceptions.GoogleAPICallError as exc: - exc.message += self._format_for_exception(self.query, self.job_id) + exc.message = _EXCEPTION_FOOTER_TEMPLATE.format( + message=exc.message, location=self.location, job_id=self.job_id + ) + exc.debug_message = self._format_for_exception(exc.message, self.query) exc.query_job = self raise @@ -1447,7 +1451,10 @@ def do_get_result(): do_get_result() except exceptions.GoogleAPICallError as exc: - exc.message += self._format_for_exception(self.query, self.job_id) + exc.message = _EXCEPTION_FOOTER_TEMPLATE.format( + message=exc.message, location=self.location, job_id=self.job_id + ) + exc.debug_message = self._format_for_exception(exc.message, self.query) # type: ignore exc.query_job = self # type: ignore raise except requests.exceptions.Timeout as exc: diff --git a/google/cloud/bigquery/version.py b/google/cloud/bigquery/version.py index 6329658af..b8c5af9a2 100644 --- a/google/cloud/bigquery/version.py +++ b/google/cloud/bigquery/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.31.0" +__version__ = "2.32.0" diff --git a/samples/AUTHORING_GUIDE.md b/samples/AUTHORING_GUIDE.md index 55c97b32f..8249522ff 100644 --- a/samples/AUTHORING_GUIDE.md +++ b/samples/AUTHORING_GUIDE.md @@ -1 +1 @@ -See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md \ No newline at end of file +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/AUTHORING_GUIDE.md \ No newline at end of file diff --git a/samples/CONTRIBUTING.md b/samples/CONTRIBUTING.md index 34c882b6f..f5fe2e6ba 100644 --- a/samples/CONTRIBUTING.md +++ b/samples/CONTRIBUTING.md @@ -1 +1 @@ -See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md \ No newline at end of file +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/CONTRIBUTING.md \ No newline at end of file diff --git a/samples/geography/noxfile.py b/samples/geography/noxfile.py index 93a9122cc..3bbef5d54 100644 --- a/samples/geography/noxfile.py +++ b/samples/geography/noxfile.py @@ -14,6 +14,7 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys @@ -184,37 +185,44 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/samples/geography/requirements.txt b/samples/geography/requirements.txt index e2de86673..ca7e38f84 100644 --- a/samples/geography/requirements.txt +++ b/samples/geography/requirements.txt @@ -1,49 +1,47 @@ -attrs==21.2.0 -cachetools==4.2.4 -certifi==2021.5.30 -cffi==1.14.6 -charset-normalizer==2.0.6 -click==8.0.1 +attrs==21.4.0 +certifi==2021.10.8 +cffi==1.15.0 +charset-normalizer==2.0.10 +click==8.0.3 click-plugins==1.1.1 cligj==0.7.2 dataclasses==0.6; python_version < '3.7' Fiona==1.8.20 geojson==2.5.0 -geopandas==0.9.0 -google-api-core==2.0.1 -google-auth==2.2.1 -google-cloud-bigquery==2.27.1 -google-cloud-bigquery-storage==2.9.0 -google-cloud-core==2.0.0 -google-crc32c==1.2.0 -google-resumable-media==2.0.3 -googleapis-common-protos==1.53.0 -grpcio==1.41.0 -idna==3.2 -importlib-metadata==4.8.1 -libcst==0.3.21 +geopandas==0.9.0; python_version < '3.7' +geopandas==0.10.2; python_version >= '3.7' +google-api-core==2.3.2 +google-auth==2.3.3 +google-cloud-bigquery==2.31.0 +google-cloud-bigquery-storage==2.10.1 +google-cloud-core==2.2.1 +google-crc32c==1.3.0 +google-resumable-media==2.1.0 +googleapis-common-protos==1.54.0 +grpcio==1.43.0 +idna==3.3 +libcst==0.3.23 munch==2.5.0 mypy-extensions==0.4.3 -packaging==21.0 +packaging==21.3 pandas==1.1.5; python_version < '3.7' pandas==1.3.4; python_version >= '3.7' -proto-plus==1.19.2 -protobuf==3.18.0 -pyarrow==6.0.0 +proto-plus==1.19.8 +protobuf==3.19.1 +pyarrow==6.0.1 pyasn1==0.4.8 pyasn1-modules==0.2.8 -pycparser==2.20 -pyparsing==2.4.7 +pycparser==2.21 +pyparsing==3.0.6 pyproj==3.0.1; python_version < "3.7" pyproj==3.1.0; python_version > "3.6" python-dateutil==2.8.2 -pytz==2021.1 -PyYAML==5.4.1 -requests==2.26.0 -rsa==4.7.2 +pytz==2021.3 +PyYAML==6.0 +requests==2.27.1 +rsa==4.8 Shapely==1.8.0 six==1.16.0 -typing-extensions==3.10.0.2 +typing-extensions==4.0.1 typing-inspect==0.7.1 urllib3==1.26.7 -zipp==3.6.0 diff --git a/samples/magics/noxfile.py b/samples/magics/noxfile.py index 93a9122cc..3bbef5d54 100644 --- a/samples/magics/noxfile.py +++ 
b/samples/magics/noxfile.py @@ -14,6 +14,7 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys @@ -184,37 +185,44 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/samples/magics/requirements-test.txt b/samples/magics/requirements-test.txt index caa48813a..5b73f1fd5 100644 --- a/samples/magics/requirements-test.txt +++ b/samples/magics/requirements-test.txt @@ -1,3 +1,3 @@ -google-cloud-testutils==1.1.0 +google-cloud-testutils==1.3.1 pytest==6.2.5 mock==4.0.3 diff --git a/samples/magics/requirements.txt b/samples/magics/requirements.txt index 5cc7ec33f..f4337e8fb 100644 --- a/samples/magics/requirements.txt +++ b/samples/magics/requirements.txt @@ -1,11 +1,11 @@ -google-cloud-bigquery-storage==2.9.0 +google-cloud-bigquery-storage==2.10.1 google-auth-oauthlib==0.4.6 -grpcio==1.41.0 +grpcio==1.43.0 ipython==7.16.1; python_version < '3.7' ipython==7.29.0; python_version >= '3.7' matplotlib==3.3.4; python_version < '3.7' matplotlib==3.5.0rc1; python_version >= '3.7' pandas==1.1.5; python_version < '3.7' pandas==1.3.4; python_version >= '3.7' -pyarrow==6.0.0 -pytz==2021.1 +pyarrow==6.0.1 +pytz==2021.3 diff --git a/samples/snippets/README.rst b/samples/snippets/README.rst index 7c3e19e68..05af1e812 100644 --- a/samples/snippets/README.rst +++ b/samples/snippets/README.rst @@ -1,4 +1,3 @@ - .. This file is automatically generated. Do not edit this file directly. Google BigQuery Python Samples @@ -16,11 +15,14 @@ This directory contains samples for Google BigQuery. `Google BigQuery`_ is Googl .. _Google BigQuery: https://cloud.google.com/bigquery/docs +To run the sample, you need to have the `BigQuery Admin` role. + + + Setup ------------------------------------------------------------------------------- - Authentication ++++++++++++++ @@ -31,9 +33,6 @@ credentials for applications. .. _Authentication Getting Started Guide: https://cloud.google.com/docs/authentication/getting-started - - - Install Dependencies ++++++++++++++++++++ @@ -64,15 +63,9 @@ Install Dependencies .. _pip: https://pip.pypa.io/ .. _virtualenv: https://virtualenv.pypa.io/ - - - - - Samples ------------------------------------------------------------------------------- - Quickstart +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -89,8 +82,6 @@ To run this sample: $ python quickstart.py - - Simple Application +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -107,8 +98,6 @@ To run this sample: $ python simple_app.py - - User Credentials +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -124,7 +113,6 @@ To run this sample: $ python user_credentials.py - usage: user_credentials.py [-h] [--launch-browser] project Command-line application to run a query using user credentials. @@ -143,10 +131,6 @@ To run this sample: - - - - The client library ------------------------------------------------------------------------------- @@ -162,5 +146,4 @@ to `browse the source`_ and `report issues`_. https://github.com/GoogleCloudPlatform/google-cloud-python/issues - -.. _Google Cloud SDK: https://cloud.google.com/sdk/ +.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/samples/snippets/authorized_view_tutorial.py b/samples/snippets/authorized_view_tutorial.py index b6a20c6ec..66810c036 100644 --- a/samples/snippets/authorized_view_tutorial.py +++ b/samples/snippets/authorized_view_tutorial.py @@ -24,6 +24,7 @@ def run_authorized_view_tutorial(override_values={}): # Create a source dataset # [START bigquery_avt_create_source_dataset] from google.cloud import bigquery + from google.cloud.bigquery.enums import EntityTypes client = bigquery.Client() source_dataset_id = "github_source_data" @@ -106,7 +107,7 @@ def run_authorized_view_tutorial(override_values={}): # analyst_group_email = 'data_analysts@example.com' access_entries = shared_dataset.access_entries access_entries.append( - bigquery.AccessEntry("READER", "groupByEmail", analyst_group_email) + bigquery.AccessEntry("READER", EntityTypes.GROUP_BY_EMAIL, analyst_group_email) ) shared_dataset.access_entries = access_entries shared_dataset = client.update_dataset( @@ -118,7 +119,7 @@ def run_authorized_view_tutorial(override_values={}): # [START bigquery_avt_source_dataset_access] access_entries = source_dataset.access_entries access_entries.append( - bigquery.AccessEntry(None, "view", view.reference.to_api_repr()) + bigquery.AccessEntry(None, EntityTypes.VIEW, view.reference.to_api_repr()) ) source_dataset.access_entries = access_entries source_dataset = client.update_dataset( diff --git a/samples/snippets/conftest.py b/samples/snippets/conftest.py index 74984f902..e8aa08487 100644 --- a/samples/snippets/conftest.py +++ b/samples/snippets/conftest.py @@ -50,6 +50,11 @@ def dataset_id(bigquery_client: bigquery.Client, project_id: str): bigquery_client.delete_dataset(dataset, delete_contents=True, not_found_ok=True) +@pytest.fixture(scope="session") +def entity_id(bigquery_client: bigquery.Client, dataset_id: str): + return "cloud-developer-relations@google.com" + + @pytest.fixture(scope="session") def dataset_id_us_east1(bigquery_client: bigquery.Client, project_id: str): dataset_id = prefixer.create_prefix() diff --git a/samples/snippets/dataset_access_test.py b/samples/snippets/dataset_access_test.py new file mode 100644 index 000000000..21776c149 --- /dev/null +++ b/samples/snippets/dataset_access_test.py @@ -0,0 +1,48 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import revoke_dataset_access +import update_dataset_access + + +def test_dataset_access_permissions(capsys, dataset_id, entity_id, bigquery_client): + original_dataset = bigquery_client.get_dataset(dataset_id) + update_dataset_access.update_dataset_access(dataset_id, entity_id) + full_dataset_id = "{}.{}".format( + original_dataset.project, original_dataset.dataset_id + ) + + out, err = capsys.readouterr() + assert ( + "Updated dataset '{}' with modified user permissions.".format(full_dataset_id) + in out + ) + + updated_dataset = bigquery_client.get_dataset(dataset_id) + updated_dataset_entries = list(updated_dataset.access_entries) + updated_dataset_entity_ids = {entry.entity_id for entry in updated_dataset_entries} + assert entity_id in updated_dataset_entity_ids + revoke_dataset_access.revoke_dataset_access(dataset_id, entity_id) + revoked_dataset = bigquery_client.get_dataset(dataset_id) + revoked_dataset_entries = list(revoked_dataset.access_entries) + + full_dataset_id = f"{updated_dataset.project}.{updated_dataset.dataset_id}" + out, err = capsys.readouterr() + assert ( + f"Revoked dataset access for '{entity_id}' to ' dataset '{full_dataset_id}.'" + in out + ) + assert len(revoked_dataset_entries) == len(updated_dataset_entries) - 1 + revoked_dataset_entity_ids = {entry.entity_id for entry in revoked_dataset_entries} + assert entity_id not in revoked_dataset_entity_ids diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index 93a9122cc..3bbef5d54 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -14,6 +14,7 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys @@ -184,37 +185,44 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index caa48813a..5b73f1fd5 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1,3 +1,3 @@ -google-cloud-testutils==1.1.0 +google-cloud-testutils==1.3.1 pytest==6.2.5 mock==4.0.3 diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index f79552392..bef333720 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,11 +1,11 @@ -google-cloud-bigquery-storage==2.9.0 +google-cloud-bigquery-storage==2.10.1 google-auth-oauthlib==0.4.6 -grpcio==1.41.0 +grpcio==1.43.0 ipython==7.16.1; python_version < '3.7' ipython==7.29.0; python_version >= '3.7' matplotlib==3.3.4; python_version < '3.7' matplotlib==3.4.1; python_version >= '3.7' pandas==1.1.5; python_version < '3.7' pandas==1.3.4; python_version >= '3.7' -pyarrow==6.0.0 -pytz==2021.1 +pyarrow==6.0.1 +pytz==2021.3 diff --git a/samples/snippets/revoke_dataset_access.py b/samples/snippets/revoke_dataset_access.py new file mode 100644 index 000000000..ce78f5750 --- /dev/null +++ b/samples/snippets/revoke_dataset_access.py @@ -0,0 +1,52 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +def revoke_dataset_access(dataset_id: str, entity_id: str): + original_dataset_id = dataset_id + original_entity_id = entity_id + + # [START bigquery_revoke_dataset_access] + + # TODO(developer): Set dataset_id to the ID of the dataset to fetch. + dataset_id = "your-project.your_dataset" + + # TODO(developer): Set entity_id to the ID of the email or group from whom you are revoking access. + entity_id = "user-or-group-to-remove@example.com" + # [END bigquery_revoke_dataset_access] + dataset_id = original_dataset_id + entity_id = original_entity_id + # [START bigquery_revoke_dataset_access] + + from google.cloud import bigquery + + # Construct a BigQuery client object. + client = bigquery.Client() + + dataset = client.get_dataset(dataset_id) # Make an API request. + + entries = list(dataset.access_entries) + dataset.access_entries = [ + entry for entry in entries if entry.entity_id != entity_id + ] + + dataset = client.update_dataset( + dataset, + # Update just the `access_entries` property of the dataset. + ["access_entries"], + ) # Make an API request. + + full_dataset_id = f"{dataset.project}.{dataset.dataset_id}" + print(f"Revoked dataset access for '{entity_id}' to ' dataset '{full_dataset_id}.'") + # [END bigquery_revoke_dataset_access] diff --git a/samples/snippets/update_dataset_access.py b/samples/snippets/update_dataset_access.py new file mode 100644 index 000000000..1448213a6 --- /dev/null +++ b/samples/snippets/update_dataset_access.py @@ -0,0 +1,72 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def update_dataset_access(dataset_id: str, entity_id: str): + original_dataset_id = dataset_id + original_entity_id = entity_id + + # [START bigquery_update_dataset_access] + + # TODO(developer): Set dataset_id to the ID of the dataset to fetch. + dataset_id = "your-project.your_dataset" + + # TODO(developer): Set entity_id to the ID of the email or group from whom + # you are adding access. Alternatively, to the JSON REST API representation + # of the entity, such as a view's table reference. + entity_id = "user-or-group-to-add@example.com" + + from google.cloud.bigquery.enums import EntityTypes + + # TODO(developer): Set entity_type to the type of entity you are granting access to. + # Common types include: + # + # * "userByEmail" -- A single user or service account. For example "fred@example.com" + # * "groupByEmail" -- A group of users. For example "example@googlegroups.com" + # * "view" -- An authorized view. 
For example + # {"projectId": "p", "datasetId": "d", "tableId": "v"} + # + # For a complete reference, see the REST API reference documentation: + # https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#Dataset.FIELDS.access + entity_type = EntityTypes.GROUP_BY_EMAIL + + # TODO(developer): Set role to a one of the "Basic roles for datasets" + # described here: + # https://cloud.google.com/bigquery/docs/access-control-basic-roles#dataset-basic-roles + role = "READER" + # [END bigquery_update_dataset_access] + dataset_id = original_dataset_id + entity_id = original_entity_id + # [START bigquery_update_dataset_access] + + from google.cloud import bigquery + + # Construct a BigQuery client object. + client = bigquery.Client() + + dataset = client.get_dataset(dataset_id) # Make an API request. + + entries = list(dataset.access_entries) + entries.append( + bigquery.AccessEntry(role=role, entity_type=entity_type, entity_id=entity_id,) + ) + dataset.access_entries = entries + + dataset = client.update_dataset(dataset, ["access_entries"]) # Make an API request. + + full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id) + print( + "Updated dataset '{}' with modified user permissions.".format(full_dataset_id) + ) + # [END bigquery_update_dataset_access] diff --git a/samples/tests/test_copy_table.py b/samples/tests/test_copy_table.py index 0b95c5443..726410e86 100644 --- a/samples/tests/test_copy_table.py +++ b/samples/tests/test_copy_table.py @@ -12,10 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. +import pytest + from .. import copy_table def test_copy_table(capsys, table_with_data_id, random_table_id, client): + pytest.skip("b/210907595: copy fails for shakespeare table") copy_table.copy_table(table_with_data_id, random_table_id) out, err = capsys.readouterr() diff --git a/samples/tests/test_copy_table_cmek.py b/samples/tests/test_copy_table_cmek.py index ac04675c9..63163d563 100644 --- a/samples/tests/test_copy_table_cmek.py +++ b/samples/tests/test_copy_table_cmek.py @@ -12,10 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. +import pytest + from .. import copy_table_cmek def test_copy_table_cmek(capsys, random_table_id, table_with_data_id, kms_key_name): + pytest.skip("b/210907595: copy fails for shakespeare table") copy_table_cmek.copy_table_cmek(random_table_id, table_with_data_id, kms_key_name) out, err = capsys.readouterr() diff --git a/tests/system/test_client.py b/tests/system/test_client.py index 91bcff155..077bb2886 100644 --- a/tests/system/test_client.py +++ b/tests/system/test_client.py @@ -1207,6 +1207,8 @@ def test_extract_table(self): self.assertIn("Bharney Rhubble", got) def test_copy_table(self): + pytest.skip("b/210907595: copy fails for shakespeare table") + # If we create a new table to copy from, the test won't work # because the new rows will be stored in the streaming buffer, # and copy jobs don't read the streaming buffer. 
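
As a point of reference for the access-entry changes above, a minimal sketch of granting the new ``dataset`` entity type (an authorized dataset) might look like the following; the project and dataset IDs are placeholders, and the ``target_types`` value mirrors the string form used in the docstring and unit tests in this change set::

    from google.cloud import bigquery
    from google.cloud.bigquery.enums import EntityTypes

    client = bigquery.Client()

    # Dataset whose tables the authorized dataset should be able to read
    # (placeholder ID).
    source_dataset = client.get_dataset("my-project.source_data")

    # For the "dataset" entity type, entity_id is a dict in the shape documented
    # in the AccessEntry docstring above; role stays None because the entry is
    # read-only.
    entry = bigquery.AccessEntry(
        role=None,
        entity_type=EntityTypes.DATASET,
        entity_id={
            "dataset": {"projectId": "my-project", "datasetId": "authorized_views"},
            "target_types": "VIEWS",
        },
    )

    entries = list(source_dataset.access_entries)
    entries.append(entry)
    source_dataset.access_entries = entries
    source_dataset = client.update_dataset(
        source_dataset, ["access_entries"]
    )  # Make an API request.
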
@@ -1723,7 +1725,7 @@ def test_dbapi_connection_does_not_leak_sockets(self): connection.close() conn_count_end = len(current_process.connections()) - self.assertEqual(conn_count_end, conn_count_start) + self.assertLessEqual(conn_count_end, conn_count_start) def _load_table_for_dml(self, rows, dataset_id, table_id): from google.cloud._testing import _NamedTemporaryFile diff --git a/tests/system/test_pandas.py b/tests/system/test_pandas.py index 1541dd3b9..f3534cd19 100644 --- a/tests/system/test_pandas.py +++ b/tests/system/test_pandas.py @@ -268,7 +268,7 @@ def test_load_table_from_dataframe_w_nulls(bigquery_client, dataset_id): See: https://github.com/googleapis/google-cloud-python/issues/7370 """ # Schema with all scalar types. - scalars_schema = ( + table_schema = ( bigquery.SchemaField("bool_col", "BOOLEAN"), bigquery.SchemaField("bytes_col", "BYTES"), bigquery.SchemaField("date_col", "DATE"), @@ -283,15 +283,6 @@ def test_load_table_from_dataframe_w_nulls(bigquery_client, dataset_id): bigquery.SchemaField("ts_col", "TIMESTAMP"), ) - table_schema = scalars_schema + ( - # TODO: Array columns can't be read due to NULLABLE versus REPEATED - # mode mismatch. See: - # https://issuetracker.google.com/133415569#comment3 - # bigquery.SchemaField("array_col", "INTEGER", mode="REPEATED"), - # TODO: Support writing StructArrays to Parquet. See: - # https://jira.apache.org/jira/browse/ARROW-2587 - # bigquery.SchemaField("struct_col", "RECORD", fields=scalars_schema), - ) num_rows = 100 nulls = [None] * num_rows df_data = [ @@ -372,7 +363,8 @@ def test_load_table_from_dataframe_w_explicit_schema(bigquery_client, dataset_id # See: # https://github.com/googleapis/python-bigquery/issues/61 # https://issuetracker.google.com/issues/151765076 - scalars_schema = ( + table_schema = ( + bigquery.SchemaField("row_num", "INTEGER"), bigquery.SchemaField("bool_col", "BOOLEAN"), bigquery.SchemaField("bytes_col", "BYTES"), bigquery.SchemaField("date_col", "DATE"), @@ -387,17 +379,8 @@ def test_load_table_from_dataframe_w_explicit_schema(bigquery_client, dataset_id bigquery.SchemaField("ts_col", "TIMESTAMP"), ) - table_schema = scalars_schema + ( - # TODO: Array columns can't be read due to NULLABLE versus REPEATED - # mode mismatch. See: - # https://issuetracker.google.com/133415569#comment3 - # bigquery.SchemaField("array_col", "INTEGER", mode="REPEATED"), - # TODO: Support writing StructArrays to Parquet. See: - # https://jira.apache.org/jira/browse/ARROW-2587 - # bigquery.SchemaField("struct_col", "RECORD", fields=scalars_schema), - ) - df_data = [ + ("row_num", [1, 2, 3]), ("bool_col", [True, None, False]), ("bytes_col", [b"abc", None, b"def"]), ("date_col", [datetime.date(1, 1, 1), None, datetime.date(9999, 12, 31)]), @@ -464,6 +447,22 @@ def test_load_table_from_dataframe_w_explicit_schema(bigquery_client, dataset_id assert tuple(table.schema) == table_schema assert table.num_rows == 3 + result = bigquery_client.list_rows(table).to_dataframe() + result.sort_values("row_num", inplace=True) + + # Check that extreme DATE/DATETIME values are loaded correctly. 
+ # https://github.com/googleapis/python-bigquery/issues/1076 + assert result["date_col"][0] == datetime.date(1, 1, 1) + assert result["date_col"][2] == datetime.date(9999, 12, 31) + assert result["dt_col"][0] == datetime.datetime(1, 1, 1, 0, 0, 0) + assert result["dt_col"][2] == datetime.datetime(9999, 12, 31, 23, 59, 59, 999999) + assert result["ts_col"][0] == datetime.datetime( + 1, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc + ) + assert result["ts_col"][2] == datetime.datetime( + 9999, 12, 31, 23, 59, 59, 999999, tzinfo=datetime.timezone.utc + ) + def test_load_table_from_dataframe_w_struct_datatype(bigquery_client, dataset_id): """Test that a DataFrame with struct datatype can be uploaded if a diff --git a/tests/unit/job/test_query.py b/tests/unit/job/test_query.py index 4da035b78..5fb76b9e9 100644 --- a/tests/unit/job/test_query.py +++ b/tests/unit/job/test_query.py @@ -1360,13 +1360,19 @@ def test_result_error(self): exc_job_instance = getattr(exc_info.exception, "query_job", None) self.assertIs(exc_job_instance, job) + # Query text could contain sensitive information, so it must not be + # included in logs / exception representation. full_text = str(exc_info.exception) assert job.job_id in full_text - assert "Query Job SQL Follows" in full_text + assert "Query Job SQL Follows" not in full_text + # It is useful to have query text available, so it is provided in a + # debug_message property. + debug_message = exc_info.exception.debug_message + assert "Query Job SQL Follows" in debug_message for i, line in enumerate(query.splitlines(), start=1): expected_line = "{}:{}".format(i, line) - assert expected_line in full_text + assert expected_line in debug_message def test_result_transport_timeout_error(self): query = textwrap.dedent( @@ -1452,13 +1458,19 @@ def test__begin_error(self): exc_job_instance = getattr(exc_info.exception, "query_job", None) self.assertIs(exc_job_instance, job) + # Query text could contain sensitive information, so it must not be + # included in logs / exception representation. full_text = str(exc_info.exception) assert job.job_id in full_text - assert "Query Job SQL Follows" in full_text + assert "Query Job SQL Follows" not in full_text + # It is useful to have query text available, so it is provided in a + # debug_message property. 
+ debug_message = exc_info.exception.debug_message + assert "Query Job SQL Follows" in debug_message for i, line in enumerate(query.splitlines(), start=1): expected_line = "{}:{}".format(i, line) - assert expected_line in full_text + assert expected_line in debug_message def test__begin_w_timeout(self): PATH = "/projects/%s/jobs" % (self.PROJECT,) diff --git a/tests/unit/test_dataset.py b/tests/unit/test_dataset.py index b3a53a08d..c554782bf 100644 --- a/tests/unit/test_dataset.py +++ b/tests/unit/test_dataset.py @@ -141,6 +141,28 @@ def test_to_api_repr_routine(self): exp_resource = {"routine": routine} self.assertEqual(resource, exp_resource) + def test_to_api_repr_dataset(self): + dataset = { + "dataset": {"projectId": "my-project", "datasetId": "my_dataset"}, + "target_types": "VIEWS", + } + entry = self._make_one(None, "dataset", dataset) + resource = entry.to_api_repr() + exp_resource = {"dataset": dataset} + self.assertEqual(resource, exp_resource) + + def test_to_api_w_incorrect_role(self): + dataset = { + "dataset": { + "projectId": "my-project", + "datasetId": "my_dataset", + "tableId": "my_table", + }, + "target_type": "VIEW", + } + with self.assertRaises(ValueError): + self._make_one("READER", "dataset", dataset) + def test_from_api_repr(self): resource = {"role": "OWNER", "userByEmail": "salmon@example.com"} entry = self._get_target_class().from_api_repr(resource) @@ -150,8 +172,22 @@ def test_from_api_repr(self): def test_from_api_repr_w_unknown_entity_type(self): resource = {"role": "READER", "unknown": "UNKNOWN"} - with self.assertRaises(ValueError): - self._get_target_class().from_api_repr(resource) + entry = self._get_target_class().from_api_repr(resource) + self.assertEqual(entry.role, "READER") + self.assertEqual(entry.entity_type, "unknown") + self.assertEqual(entry.entity_id, "UNKNOWN") + exp_resource = entry.to_api_repr() + self.assertEqual(resource, exp_resource) + + def test_to_api_repr_w_extra_properties(self): + resource = { + "role": "READER", + "userByEmail": "salmon@example.com", + } + entry = self._get_target_class().from_api_repr(resource) + entry._properties["specialGroup"] = resource["specialGroup"] = "projectReaders" + exp_resource = entry.to_api_repr() + self.assertEqual(resource, exp_resource) def test_from_api_repr_entries_w_extra_keys(self): resource = {
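
To round out the exception-handling change above (the job location and ID now go into the exception message, while the formatted query text moves to a ``debug_message`` attribute), a rough sketch of caller-side usage, with a placeholder query and table name, could be::

    from google.api_core import exceptions
    from google.cloud import bigquery

    client = bigquery.Client()

    try:
        # Placeholder query against a missing table to force a failure.
        query_job = client.query(
            "SELECT wrong_field FROM `my-project.my_dataset.missing_table`"
        )
        query_job.result()
    except exceptions.GoogleAPICallError as exc:
        # Safe to log: carries only the error text, location, and job ID.
        print(exc.message)
        # Includes the formatted SQL; keep out of logs if the query is sensitive.
        print(getattr(exc, "debug_message", None))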