diff --git a/google/cloud/logging_v2/_gapic.py b/google/cloud/logging_v2/_gapic.py
index 7a6d70650..3661d3d09 100644
--- a/google/cloud/logging_v2/_gapic.py
+++ b/google/cloud/logging_v2/_gapic.py
@@ -49,10 +49,11 @@ def list_entries(
         *,
         filter_=None,
         order_by=None,
+        max_results=None,
         page_size=None,
         page_token=None,
     ):
-        """Return a page of log entry resources.
+        """Return a generator of log entry resources.
 
         Args:
             resource_names (Sequence[str]): Names of one or more parent resources
@@ -69,14 +70,16 @@ def list_entries(
                 https://cloud.google.com/logging/docs/view/advanced_filters
             order_by (str) One of :data:`~logging_v2.ASCENDING`
                 or :data:`~logging_v2.DESCENDING`.
-            page_size (int): maximum number of entries to return, If not passed,
-                defaults to a value set by the API.
-            page_token (str): opaque marker for the next "page" of entries. If not
-                passed, the API will return the first page of
-                entries.
-
+            max_results (Optional[int]):
+                Optional. The maximum number of entries to return.
+                Must be non-negative: 0 returns no entries, and a negative
+                value raises ValueError. If None, uses API defaults.
+            page_size (int): number of entries to fetch in each API call. Although
+                requests are paged internally, logs are returned by the generator
+                one at a time. If not passed, defaults to a value set by the API.
+            page_token (str): opaque marker for the starting "page" of entries. If not
+                passed, the API will return the first page of entries.
         Returns:
-            Iterator[~logging_v2.LogEntry]
+            Generator[~logging_v2.LogEntry]
         """
         # full resource names are expected by the API
         resource_names = resource_names
@@ -89,19 +92,27 @@ def list_entries(
         )
         response = self._gapic_api.list_log_entries(request=request)
-        page_iter = iter(response)
+        log_iter = iter(response)
 
         # We attach a mutable loggers dictionary so that as Logger
         # objects are created by entry_from_resource, they can be
         # re-used by other log entries from the same logger.
         loggers = {}
 
-        def log_entries_pager(page_iter):
-            for page in page_iter:
-                log_entry_dict = _parse_log_entry(LogEntryPB.pb(page))
+        if max_results is not None and max_results < 0:
+            raise ValueError("max_results must be non-negative")
+
+        # create generator
+        def log_entries_pager(log_iter):
+            i = 0
+            for entry in log_iter:
+                if max_results is not None and i >= max_results:
+                    break
+                log_entry_dict = _parse_log_entry(LogEntryPB.pb(entry))
                 yield entry_from_resource(log_entry_dict, self._client, loggers=loggers)
+                i += 1
 
-        return log_entries_pager(page_iter)
+        return log_entries_pager(log_iter)
 
     def write_entries(
         self,
@@ -175,7 +186,7 @@ def __init__(self, gapic_api, client):
         self._gapic_api = gapic_api
         self._client = client
 
-    def list_sinks(self, parent, *, page_size=0, page_token=None):
+    def list_sinks(self, parent, *, max_results=None, page_size=None, page_token=None):
         """List sinks for the parent resource.
 
         Args:
@@ -187,27 +198,37 @@ def list_sinks(self, parent, *, page_size=0, page_token=None):
                 "organizations/[ORGANIZATION_ID]"
                 "billingAccounts/[BILLING_ACCOUNT_ID]"
                 "folders/[FOLDER_ID]".
-            page_size (Optional[int]): Maximum number of sinks to return, If not passed,
-                defaults to a value set by the API.
-            page_token (Optional[str]): Opaque marker for the next "page" of sinks. If not
-                passed, the API will return the first page of
-                sinks.
+            max_results (Optional[int]):
+                Optional. The maximum number of sinks to return.
+                Must be non-negative: 0 returns no sinks, and a negative
+                value raises ValueError. If None, uses API defaults.
+            page_size (int): number of sinks to fetch in each API call. Although
+                requests are paged internally, sinks are returned by the generator
+                one at a time. If not passed, defaults to a value set by the API.
+            page_token (str): opaque marker for the starting "page" of sinks. If not
+                passed, the API will return the first page of sinks.
 
         Returns:
-            Iterator[~logging_v2.Sink]
+            Generator[~logging_v2.Sink]
         """
         request = ListSinksRequest(
             parent=parent, page_size=page_size, page_token=page_token
         )
         response = self._gapic_api.list_sinks(request)
-        page_iter = iter(response)
+        sink_iter = iter(response)
+
+        if max_results is not None and max_results < 0:
+            raise ValueError("max_results must be non-negative")
 
-        def sinks_pager(page_iter):
-            for page in page_iter:
+        def sinks_pager(sink_iter):
+            i = 0
+            for entry in sink_iter:
+                if max_results is not None and i >= max_results:
+                    break
                 # Convert the GAPIC sink type into the handwritten `Sink` type
-                yield Sink.from_api_repr(LogSink.to_dict(page), client=self._client)
+                yield Sink.from_api_repr(LogSink.to_dict(entry), client=self._client)
+                i += 1
 
-        return sinks_pager(page_iter)
+        return sinks_pager(sink_iter)
 
     def sink_create(
         self, parent, sink_name, filter_, destination, *, unique_writer_identity=False
@@ -347,33 +368,47 @@ def __init__(self, gapic_api, client):
         self._gapic_api = gapic_api
         self._client = client
 
-    def list_metrics(self, project, *, page_size=0, page_token=None):
+    def list_metrics(
+        self, project, *, max_results=None, page_size=None, page_token=None
+    ):
         """List metrics for the project associated with this client.
 
         Args:
             project (str): ID of the project whose metrics are to be listed.
-            page_size (int): Maximum number of metrics to return, If not passed,
-                defaults to a value set by the API.
-            page_token (str): Opaque marker for the next "page" of metrics. If not
-                passed, the API will return the first page of
-                sinks.
+            max_results (Optional[int]):
+                Optional. The maximum number of metrics to return.
+                Must be non-negative: 0 returns no metrics, and a negative
+                value raises ValueError. If None, uses API defaults.
+            page_size (int): number of metrics to fetch in each API call. Although
+                requests are paged internally, metrics are returned by the generator
+                one at a time. If not passed, defaults to a value set by the API.
+            page_token (str): opaque marker for the starting "page" of metrics. If not
+                passed, the API will return the first page of metrics.
 
         Returns:
-            Iterable[logging_v2.Metric]: Iterable of metrics.
+            Generator[logging_v2.Metric]
         """
         path = f"projects/{project}"
         request = ListLogMetricsRequest(
             parent=path, page_size=page_size, page_token=page_token,
         )
         response = self._gapic_api.list_log_metrics(request=request)
-        page_iter = iter(response)
+        metric_iter = iter(response)
+
+        if max_results is not None and max_results < 0:
+            raise ValueError("max_results must be non-negative")
 
-        def metrics_pager(page_iter):
-            for page in page_iter:
+        def metrics_pager(metric_iter):
+            i = 0
+            for entry in metric_iter:
+                if max_results is not None and i >= max_results:
+                    break
                 # Convert GAPIC metrics type into handwritten `Metric` type
-                yield Metric.from_api_repr(LogMetric.to_dict(page), client=self._client)
+                yield Metric.from_api_repr(
+                    LogMetric.to_dict(entry), client=self._client
+                )
+                i += 1
 
-        return metrics_pager(page_iter)
+        return metrics_pager(metric_iter)
 
     def metric_create(self, project, metric_name, filter_, description):
         """Create a metric resource.
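All three GAPIC list methods above share the same cap-and-yield shape. A minimal standalone sketch of that pattern — `capped` is an illustrative name, not part of the library, and any iterable stands in for the GAPIC pager:

```python
# Minimal sketch of the max_results pattern used by the _gapic.py pagers above.
# `capped` is a hypothetical stand-in for list_entries/list_sinks/list_metrics.
def capped(iterable, max_results=None):
    # Validating eagerly means a bad max_results fails at call time,
    # before the caller ever iterates.
    if max_results is not None and max_results < 0:
        raise ValueError("max_results must be non-negative")

    def pager(it):
        i = 0
        for item in it:
            if max_results is not None and i >= max_results:
                break
            yield item
            i += 1

    return pager(iter(iterable))


assert list(capped(range(10), max_results=3)) == [0, 1, 2]
assert list(capped(range(10), max_results=0)) == []  # 0 yields nothing
assert list(capped(range(10))) == list(range(10))  # None means no cap
```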
diff --git a/google/cloud/logging_v2/_http.py b/google/cloud/logging_v2/_http.py
index 68bde346a..21fb38606 100644
--- a/google/cloud/logging_v2/_http.py
+++ b/google/cloud/logging_v2/_http.py
@@ -74,6 +74,7 @@ def list_entries(
         *,
         filter_=None,
         order_by=None,
+        max_results=None,
         page_size=None,
         page_token=None,
     ):
@@ -94,14 +95,16 @@ def list_entries(
                 https://cloud.google.com/logging/docs/view/advanced_filters
             order_by (str) One of :data:`~logging_v2.ASCENDING`
                 or :data:`~logging_v2.DESCENDING`.
-            page_size (int): maximum number of entries to return, If not passed,
-                defaults to a value set by the API.
-            page_token (str): opaque marker for the next "page" of entries. If not
-                passed, the API will return the first page of
-                entries.
-
+            max_results (Optional[int]):
+                Optional. The maximum number of entries to return.
+                Must be non-negative: 0 returns no entries, and a negative
+                value raises ValueError. If None, uses API defaults.
+            page_size (int): number of entries to fetch in each API call. Although
+                requests are paged internally, logs are returned by the generator
+                one at a time. If not passed, defaults to a value set by the API.
+            page_token (str): opaque marker for the starting "page" of entries. If not
+                passed, the API will return the first page of entries.
         Returns:
-            Iterator[~logging_v2.LogEntry]
+            Generator[~logging_v2.LogEntry]
         """
         extra_params = {"resourceNames": resource_names}
@@ -131,7 +134,8 @@ def list_entries(
         )
         # This method uses POST to make a read-only request.
         iterator._HTTP_METHOD = "POST"
-        return iterator
+
+        return _entries_pager(iterator, max_results)
 
     def write_entries(
         self,
@@ -219,7 +223,7 @@ def __init__(self, client):
         self._client = client
         self.api_request = client._connection.api_request
 
-    def list_sinks(self, parent, *, page_size=None, page_token=None):
+    def list_sinks(self, parent, *, max_results=None, page_size=None, page_token=None):
         """List sinks for the parent resource.
 
         See
@@ -234,14 +238,17 @@ def list_sinks(self, parent, *, page_size=None, page_token=None):
                 "organizations/[ORGANIZATION_ID]"
                 "billingAccounts/[BILLING_ACCOUNT_ID]"
                 "folders/[FOLDER_ID]".
-            page_size (Optional[int]): Maximum number of sinks to return, If not passed,
-                defaults to a value set by the API.
-            page_token (Optional[str]): Opaque marker for the next "page" of sinks. If not
-                passed, the API will return the first page of
-                sinks.
+            max_results (Optional[int]):
+                Optional. The maximum number of sinks to return.
+                Must be non-negative: 0 returns no sinks, and a negative
+                value raises ValueError. If None, uses API defaults.
+            page_size (int): number of sinks to fetch in each API call. Although
+                requests are paged internally, sinks are returned by the generator
+                one at a time. If not passed, defaults to a value set by the API.
+            page_token (str): opaque marker for the starting "page" of sinks. If not
+                passed, the API will return the first page of sinks.
 
         Returns:
-            Iterator[~logging_v2.Sink]
+            Generator[~logging_v2.Sink]
         """
         extra_params = {}
@@ -249,7 +256,7 @@ def list_sinks(self, parent, *, page_size=None, page_token=None):
             extra_params["pageSize"] = page_size
 
         path = f"/{parent}/sinks"
-        return page_iterator.HTTPIterator(
+        iterator = page_iterator.HTTPIterator(
             client=self._client,
             api_request=self._client._connection.api_request,
             path=path,
@@ -259,6 +266,8 @@ def list_sinks(self, parent, *, page_size=None, page_token=None):
             extra_params=extra_params,
         )
 
+        return _entries_pager(iterator, max_results)
+
     def sink_create(
         self, parent, sink_name, filter_, destination, *, unique_writer_identity=False
     ):
@@ -373,24 +382,27 @@ def __init__(self, client):
         self._client = client
         self.api_request = client._connection.api_request
 
-    def list_metrics(self, project, *, page_size=None, page_token=None):
+    def list_metrics(
+        self, project, *, max_results=None, page_size=None, page_token=None
+    ):
         """List metrics for the project associated with this client.
 
         See
         https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/list
 
         Args:
-            page_size (Optional[int]): The maximum number of sinks in each
-                page of results from this request. Non-positive values are ignored. Defaults to a
-                sensible value set by the API.
-            page_token (Optional[str]): If present, return the next batch of sinks, using the
-                value, which must correspond to the ``nextPageToken`` value
-                returned in the previous response. Deprecated: use the ``pages``
-                property of the returned iterator instead of manually passing the
-                token.
+            max_results (Optional[int]):
+                Optional. The maximum number of metrics to return.
+                Must be non-negative: 0 returns no metrics, and a negative
+                value raises ValueError. If None, uses API defaults.
+            page_size (int): number of metrics to fetch in each API call. Although
+                requests are paged internally, metrics are returned by the generator
+                one at a time. If not passed, defaults to a value set by the API.
+            page_token (str): opaque marker for the starting "page" of metrics. If not
+                passed, the API will return the first page of metrics.
 
         Returns:
-            Iterator[google.cloud.logging_v2.metric.Metric]
+            Generator[logging_v2.Metric]
+        """
 
         extra_params = {}
@@ -398,7 +410,7 @@ def list_metrics(self, project, *, page_size=None, page_token=None):
             extra_params["pageSize"] = page_size
 
         path = f"/projects/{project}/metrics"
-        return page_iterator.HTTPIterator(
+        iterator = page_iterator.HTTPIterator(
             client=self._client,
             api_request=self._client._connection.api_request,
             path=path,
@@ -407,6 +419,7 @@ def list_metrics(self, project, *, page_size=None, page_token=None):
             page_token=page_token,
             extra_params=extra_params,
         )
+        return _entries_pager(iterator, max_results)
 
     def metric_create(self, project, metric_name, filter_, description):
         """Create a metric resource.
@@ -469,6 +482,18 @@ def metric_delete(self, project, metric_name):
         self.api_request(method="DELETE", path=target)
 
 
+def _entries_pager(page_iter, max_results=None):
+    """Wrap an iterator, yielding at most ``max_results`` of its items."""
+    if max_results is not None and max_results < 0:
+        raise ValueError("max_results must be non-negative")
+
+    i = 0
+    for item in page_iter:
+        if max_results is not None and i >= max_results:
+            break
+        yield item
+        i += 1
+
+
 def _item_to_entry(iterator, resource, loggers):
     """Convert a log entry resource to the native object.
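One behavioral wrinkle worth noting: `_entries_pager` is itself a generator function, so unlike the GAPIC methods above (which validate before returning a generator), a negative `max_results` on the HTTP path only raises once iteration begins. That is why the HTTP unit tests later in this diff call `list(...)` inside `assertRaises`. A small sketch of the difference, assuming a generator shaped like `_entries_pager`:

```python
# Sketch of the lazy-raise behavior, assuming a generator function shaped
# like _entries_pager above; nothing in a generator body runs until next().
def lazy_pager(items, max_results=None):
    if max_results is not None and max_results < 0:
        raise ValueError("max_results must be non-negative")
    for i, item in enumerate(items):
        if max_results is not None and i >= max_results:
            break
        yield item


gen = lazy_pager([1, 2, 3], max_results=-1)  # no error yet: body has not run
try:
    next(gen)  # ValueError surfaces here, on first iteration
except ValueError as exc:
    print(exc)  # -> max_results must be non-negative
```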
diff --git a/google/cloud/logging_v2/client.py b/google/cloud/logging_v2/client.py
index 8b92e8e8c..0dbc85a26 100644
--- a/google/cloud/logging_v2/client.py
+++ b/google/cloud/logging_v2/client.py
@@ -204,10 +204,11 @@ def list_entries(
         resource_names=None,
         filter_=None,
         order_by=None,
+        max_results=None,
         page_size=None,
         page_token=None,
     ):
-        """Return a page of log entry resources.
+        """Return a generator of log entry resources.
 
         Args:
             resource_names (Sequence[str]): Names of one or more parent resources
@@ -226,14 +227,17 @@ def list_entries(
                 https://cloud.google.com/logging/docs/view/advanced_filters
             order_by (str) One of :data:`~logging_v2.ASCENDING`
                 or :data:`~logging_v2.DESCENDING`.
-            page_size (int): maximum number of entries to return, If not passed,
-                defaults to a value set by the API.
-            page_token (str): opaque marker for the next "page" of entries. If not
-                passed, the API will return the first page of
-                entries.
+            max_results (Optional[int]):
+                Optional. The maximum number of entries to return.
+                Must be non-negative: 0 returns no entries, and a negative
+                value raises ValueError. If None, uses API defaults.
+            page_size (int): number of entries to fetch in each API call. Although
+                requests are paged internally, logs are returned by the generator
+                one at a time. If not passed, defaults to a value set by the API.
+            page_token (str): opaque marker for the starting "page" of entries. If not
+                passed, the API will return the first page of entries.
 
         Returns:
-            Iterator[~logging_v2.LogEntry]
+            Generator[~logging_v2.LogEntry]
         """
         if resource_names is None:
             resource_names = [f"projects/{self.project}"]
@@ -243,6 +247,7 @@ def list_entries(
             resource_names=resource_names,
             filter_=filter_,
             order_by=order_by,
+            max_results=max_results,
             page_size=page_size,
             page_token=page_token,
         )
@@ -266,7 +271,9 @@ def sink(self, name, *, filter_=None, destination=None):
         """
         return Sink(name, filter_=filter_, destination=destination, client=self)
 
-    def list_sinks(self, *, parent=None, page_size=None, page_token=None):
+    def list_sinks(
+        self, *, parent=None, max_results=None, page_size=None, page_token=None
+    ):
         """List sinks for a parent resource.
 
         See
@@ -283,22 +290,25 @@ def list_sinks(self, *, parent=None, page_size=None, page_token=None):
                 "folders/[FOLDER_ID]".
 
                 If not passed, defaults to the project bound to the API's client.
-            page_size (Optional[int]): The maximum number of sinks in each
-                page of results from this request. Non-positive values are ignored. Defaults to a
-                sensible value set by the API.
-            page_token (Optional[str]): If present, return the next batch of sinks, using the
-                value, which must correspond to the ``nextPageToken`` value
-                returned in the previous response. Deprecated: use the ``pages``
-                property of the returned iterator instead of manually passing the
-                token.
+            max_results (Optional[int]):
+                Optional. The maximum number of sinks to return.
+                Must be non-negative: 0 returns no sinks, and a negative
+                value raises ValueError. If None, uses API defaults.
+            page_size (int): number of sinks to fetch in each API call. Although
+                requests are paged internally, sinks are returned by the generator
+                one at a time. If not passed, defaults to a value set by the API.
+            page_token (str): opaque marker for the starting "page" of sinks. If not
+                passed, the API will return the first page of sinks.
 
         Returns:
-            Iterator[~logging_v2.sink.Sink]
+            Generator[~logging_v2.Sink]
         """
         if parent is None:
             parent = f"projects/{self.project}"
         return self.sinks_api.list_sinks(
-            parent=parent, page_size=page_size, page_token=page_token
+            parent=parent,
+            max_results=max_results,
+            page_size=page_size,
+            page_token=page_token,
         )
 
     def metric(self, name, *, filter_=None, description=""):
@@ -319,27 +329,30 @@ def metric(self, name, *, filter_=None, description=""):
         """
         return Metric(name, filter_=filter_, client=self, description=description)
 
-    def list_metrics(self, *, page_size=None, page_token=None):
+    def list_metrics(self, *, max_results=None, page_size=None, page_token=None):
         """List metrics for the project associated with this client.
 
         See
         https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/list
 
         Args:
-            page_size (Optional[int]): The maximum number of sinks in each
-                page of results from this request. Non-positive values are ignored. Defaults to a
-                sensible value set by the API.
-            page_token (Optional[str]): If present, return the next batch of sinks, using the
-                value, which must correspond to the ``nextPageToken`` value
-                returned in the previous response. Deprecated: use the ``pages``
-                property of the returned iterator instead of manually passing the
-                token.
+            max_results (Optional[int]):
+                Optional. The maximum number of metrics to return.
+                Must be non-negative: 0 returns no metrics, and a negative
+                value raises ValueError. If None, uses API defaults.
+            page_size (int): number of metrics to fetch in each API call. Although
+                requests are paged internally, metrics are returned by the generator
+                one at a time. If not passed, defaults to a value set by the API.
+            page_token (str): opaque marker for the starting "page" of metrics. If not
+                passed, the API will return the first page of metrics.
 
         Returns:
-            Iterator[~logging_v2.metric.Metric]
+            Generator[logging_v2.Metric]
         """
         return self.metrics_api.list_metrics(
-            self.project, page_size=page_size, page_token=page_token
+            self.project,
+            max_results=max_results,
+            page_size=page_size,
+            page_token=page_token,
         )
 
     def get_default_handler(self, **kw):
diff --git a/google/cloud/logging_v2/logger.py b/google/cloud/logging_v2/logger.py
index 01221fc7b..404871bef 100644
--- a/google/cloud/logging_v2/logger.py
+++ b/google/cloud/logging_v2/logger.py
@@ -264,10 +264,11 @@ def list_entries(
         resource_names=None,
         filter_=None,
         order_by=None,
+        max_results=None,
         page_size=None,
         page_token=None,
     ):
-        """Return a page of log entries.
+        """Return a generator of log entry resources.
 
         See
         https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list
 
@@ -289,19 +290,16 @@ def list_entries(
                 By default, a 24 hour filter is applied.
             order_by (Optional[str]): One of :data:`~logging_v2.ASCENDING`
                 or :data:`~logging_v2.DESCENDING`.
-            page_size (Optional[int]):
-                Optional. The maximum number of entries in each page of results
-                from this request. Non-positive values are ignored. Defaults
-                to a sensible value set by the API.
-            page_token (Optional[str]):
-                Optional. If present, return the next batch of entries, using
-                the value, which must correspond to the ``nextPageToken`` value
-                returned in the previous response. Deprecated: use the ``pages``
-                property of the returned iterator instead of manually passing
-                the token.
-
+            max_results (Optional[int]):
+                Optional. The maximum number of entries to return.
+                Must be non-negative: 0 returns no entries, and a negative
+                value raises ValueError. If None, uses API defaults.
+ page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterator[~logging_v2.entries.LogEntry] + Generator[~logging_v2.LogEntry] """ if resource_names is None: @@ -317,6 +315,7 @@ def list_entries( resource_names=resource_names, filter_=filter_, order_by=order_by, + max_results=max_results, page_size=page_size, page_token=page_token, ) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 365e94215..4f0233d74 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -36,6 +36,7 @@ from google.cloud.logging_v2.handlers.transports import SyncTransport from google.cloud.logging_v2 import client from google.cloud.logging_v2.resource import Resource +from google.cloud.logging_v2.entries import TextEntry from google.protobuf.struct_pb2 import Struct, Value, ListValue, NullValue @@ -97,11 +98,13 @@ class Config(object): """ CLIENT = None + HTTP_CLIENT = None use_mtls = os.environ.get("GOOGLE_API_USE_MTLS_ENDPOINT", "never") def setUpModule(): Config.CLIENT = client.Client() + Config.HTTP_CLIENT = client.Client(_use_grpc=False) # Skip the test cases using bigquery, storage and pubsub clients for mTLS testing. @@ -186,34 +189,34 @@ def test_list_entry_with_auditlog(self): audit_dict = { "@type": type_url, "methodName": "test", - "requestMetadata": {"callerIp": "::1", "callerSuppliedUserAgent": "test"}, "resourceName": "test", "serviceName": "test", - "status": {"code": 0}, } audit_struct = self._dict_to_struct(audit_dict) - logger = Config.CLIENT.logger(f"audit-proto-{uuid.uuid1()}") - logger.log_proto(audit_struct) - - # retrieve log - retry = RetryErrors((TooManyRequests, StopIteration), max_tries=8) - protobuf_entry = retry(lambda: next(logger.list_entries()))() - - self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) - self.assertIsNone(protobuf_entry.payload_pb) - self.assertIsInstance(protobuf_entry.payload_json, dict) - self.assertEqual(protobuf_entry.payload_json["@type"], type_url) - self.assertEqual( - protobuf_entry.payload_json["methodName"], audit_dict["methodName"] - ) - self.assertEqual( - protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url - ) - self.assertEqual( - protobuf_entry.to_api_repr()["protoPayload"]["methodName"], - audit_dict["methodName"], - ) + gapic_logger = Config.CLIENT.logger(f"audit-proto-{uuid.uuid1()}") + http_logger = Config.HTTP_CLIENT.logger(f"audit-proto-{uuid.uuid1()}-http") + for logger in [gapic_logger, http_logger]: + logger.log_proto(audit_struct) + + # retrieve log + retry = RetryErrors((TooManyRequests, StopIteration), max_tries=8) + protobuf_entry = retry(lambda: next(logger.list_entries()))() + + self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) + self.assertIsNone(protobuf_entry.payload_pb) + self.assertIsInstance(protobuf_entry.payload_json, dict) + self.assertEqual(protobuf_entry.payload_json["@type"], type_url) + self.assertEqual( + protobuf_entry.payload_json["methodName"], audit_dict["methodName"] + ) + self.assertEqual( + protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url + ) + self.assertEqual( + protobuf_entry.to_api_repr()["protoPayload"]["methodName"], + audit_dict["methodName"], + ) def test_list_entry_with_requestlog(self): """ @@ -244,63 +247,70 @@ def 
test_list_entry_with_requestlog(self):
         }
         req_struct = self._dict_to_struct(req_dict)
 
-        logger = Config.CLIENT.logger(f"req-proto-{uuid.uuid1()}")
-        logger.log_proto(req_struct)
-
-        # retrieve log
-        retry = RetryErrors((TooManyRequests, StopIteration), max_tries=8)
-        protobuf_entry = retry(lambda: next(logger.list_entries()))()
-
-        self.assertIsInstance(protobuf_entry, entries.ProtobufEntry)
-        self.assertIsNone(protobuf_entry.payload_pb)
-        self.assertIsInstance(protobuf_entry.payload_json, dict)
-        self.assertEqual(protobuf_entry.payload_json["@type"], type_url)
-        self.assertEqual(
-            protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url
-        )
+        gapic_logger = Config.CLIENT.logger(f"req-proto-{uuid.uuid1()}")
+        http_logger = Config.HTTP_CLIENT.logger(f"req-proto-{uuid.uuid1()}-http")
+        for logger in [gapic_logger, http_logger]:
+            logger.log_proto(req_struct)
+
+            # retrieve log
+            retry = RetryErrors((TooManyRequests, StopIteration), max_tries=8)
+            protobuf_entry = retry(lambda: next(logger.list_entries()))()
+
+            self.assertIsInstance(protobuf_entry, entries.ProtobufEntry)
+            self.assertIsNone(protobuf_entry.payload_pb)
+            self.assertIsInstance(protobuf_entry.payload_json, dict)
+            self.assertEqual(protobuf_entry.payload_json["@type"], type_url)
+            self.assertEqual(
+                protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url
+            )
 
     def test_log_text(self):
         TEXT_PAYLOAD = "System test: test_log_text"
-        logger = Config.CLIENT.logger(self._logger_name("log_text"))
-        self.to_delete.append(logger)
-        logger.log_text(TEXT_PAYLOAD)
-        entries = _list_entries(logger)
-        self.assertEqual(len(entries), 1)
-        self.assertEqual(entries[0].payload, TEXT_PAYLOAD)
+        gapic_logger = Config.CLIENT.logger(self._logger_name("log_text"))
+        http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_http"))
+        for logger in [gapic_logger, http_logger]:
+            self.to_delete.append(logger)
+            logger.log_text(TEXT_PAYLOAD)
+            entries = _list_entries(logger)
+            self.assertEqual(len(entries), 1)
+            self.assertEqual(entries[0].payload, TEXT_PAYLOAD)
+            self.assertIsInstance(entries[0], TextEntry)
 
     def test_log_text_with_timestamp(self):
         text_payload = "System test: test_log_text_with_timestamp"
-        logger = Config.CLIENT.logger(self._logger_name("log_text_ts"))
+        gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_ts"))
+        http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_ts_http"))
         now = datetime.utcnow()
-
-        self.to_delete.append(logger)
-
-        logger.log_text(text_payload, timestamp=now)
-        entries = _list_entries(logger)
-        self.assertEqual(len(entries), 1)
-        self.assertEqual(entries[0].payload, text_payload)
-        self.assertEqual(entries[0].timestamp, now.replace(tzinfo=UTC))
-        self.assertIsInstance(entries[0].received_timestamp, datetime)
+        for logger in [gapic_logger, http_logger]:
+            self.to_delete.append(logger)
+            logger.log_text(text_payload, timestamp=now)
+            entries = _list_entries(logger)
+            self.assertEqual(len(entries), 1)
+            self.assertEqual(entries[0].payload, text_payload)
+            self.assertEqual(entries[0].timestamp, now.replace(tzinfo=UTC))
+            self.assertIsInstance(entries[0].received_timestamp, datetime)
 
     def test_log_text_with_resource(self):
         text_payload = "System test: test_log_text_with_timestamp"
-        logger = Config.CLIENT.logger(self._logger_name("log_text_res"))
+        gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_res"))
+        http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_res_http"))
         now = datetime.utcnow()
-        resource = Resource(
-            type="gae_app",
-
labels={"module_id": "default", "version_id": "test", "zone": ""}, - ) + for logger in [gapic_logger, http_logger]: + resource = Resource( + type="gae_app", + labels={"module_id": "default", "version_id": "test", "zone": ""}, + ) - self.to_delete.append(logger) + self.to_delete.append(logger) - logger.log_text(text_payload, timestamp=now, resource=resource) - entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, text_payload) - # project_id is output only so we don't want it in assertion - del entries[0].resource.labels["project_id"] - self.assertEqual(entries[0].resource, resource) + logger.log_text(text_payload, timestamp=now, resource=resource) + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, text_payload) + # project_id is output only so we don't want it in assertion + del entries[0].resource.labels["project_id"] + self.assertEqual(entries[0].resource, resource) def test_log_text_w_metadata(self): TEXT_PAYLOAD = "System test: test_log_text" @@ -310,35 +320,42 @@ def test_log_text_w_metadata(self): URI = "https://api.example.com/endpoint" STATUS = 500 REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - logger = Config.CLIENT.logger(self._logger_name("log_text_md")) - self.to_delete.append(logger) - - logger.log_text( - TEXT_PAYLOAD, insert_id=INSERT_ID, severity=SEVERITY, http_request=REQUEST - ) - entries = _list_entries(logger) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_md")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_md_http")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) + + logger.log_text( + TEXT_PAYLOAD, + insert_id=INSERT_ID, + severity=SEVERITY, + http_request=REQUEST, + ) + entries = _list_entries(logger) - self.assertEqual(len(entries), 1) + self.assertEqual(len(entries), 1) - entry = entries[0] - self.assertEqual(entry.payload, TEXT_PAYLOAD) - self.assertEqual(entry.insert_id, INSERT_ID) - self.assertEqual(entry.severity, SEVERITY) + entry = entries[0] + self.assertEqual(entry.payload, TEXT_PAYLOAD) + self.assertEqual(entry.insert_id, INSERT_ID) + self.assertEqual(entry.severity, SEVERITY) - request = entry.http_request - self.assertEqual(request["requestMethod"], METHOD) - self.assertEqual(request["requestUrl"], URI) - self.assertEqual(request["status"], STATUS) + request = entry.http_request + self.assertEqual(request["requestMethod"], METHOD) + self.assertEqual(request["requestUrl"], URI) + self.assertEqual(request["status"], STATUS) def test_log_struct(self): - logger = Config.CLIENT.logger(self._logger_name("log_struct")) - self.to_delete.append(logger) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_struct")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_struct_http")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) - logger.log_struct(self.JSON_PAYLOAD) - entries = _list_entries(logger) + logger.log_struct(self.JSON_PAYLOAD) + entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) def test_log_struct_w_metadata(self): INSERT_ID = "INSERTID" @@ -347,54 +364,63 @@ def test_log_struct_w_metadata(self): URI = 
"https://api.example.com/endpoint" STATUS = 500 REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - logger = Config.CLIENT.logger(self._logger_name("log_struct_md")) - self.to_delete.append(logger) - - logger.log_struct( - self.JSON_PAYLOAD, - insert_id=INSERT_ID, - severity=SEVERITY, - http_request=REQUEST, - ) - entries = _list_entries(logger) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_struct_md")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_struct_md_http")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) + + logger.log_struct( + self.JSON_PAYLOAD, + insert_id=INSERT_ID, + severity=SEVERITY, + http_request=REQUEST, + ) + entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) - self.assertEqual(entries[0].insert_id, INSERT_ID) - self.assertEqual(entries[0].severity, SEVERITY) - request = entries[0].http_request - self.assertEqual(request["requestMethod"], METHOD) - self.assertEqual(request["requestUrl"], URI) - self.assertEqual(request["status"], STATUS) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) + self.assertEqual(entries[0].insert_id, INSERT_ID) + self.assertEqual(entries[0].severity, SEVERITY) + request = entries[0].http_request + self.assertEqual(request["requestMethod"], METHOD) + self.assertEqual(request["requestUrl"], URI) + self.assertEqual(request["status"], STATUS) def test_log_w_text(self): TEXT_PAYLOAD = "System test: test_log_w_text" - logger = Config.CLIENT.logger(self._logger_name("log_w_text")) - self.to_delete.append(logger) - logger.log(TEXT_PAYLOAD) - entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, TEXT_PAYLOAD) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_w_text")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_w_text")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) + logger.log(TEXT_PAYLOAD) + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, TEXT_PAYLOAD) def test_log_w_struct(self): - logger = Config.CLIENT.logger(self._logger_name("log_w_struct")) - self.to_delete.append(logger) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_w_struct")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_w_struct_http")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) - logger.log(self.JSON_PAYLOAD) - entries = _list_entries(logger) + logger.log(self.JSON_PAYLOAD) + entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) def test_log_empty(self): - logger = Config.CLIENT.logger(self._logger_name("log_empty")) - self.to_delete.append(logger) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_empty")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_empty_http")) - logger.log() - entries = _list_entries(logger) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) - self.assertEqual(len(entries), 1) - self.assertIsNone(entries[0].payload) + logger.log() + entries = _list_entries(logger) + + self.assertEqual(len(entries), 1) + self.assertIsNone(entries[0].payload) def 
test_log_handler_async(self): LOG_MESSAGE = "It was the worst of times" @@ -722,6 +748,51 @@ def test_update_sink(self): self.assertEqual(sink.filter_, UPDATED_FILTER) self.assertEqual(sink.destination, dataset_uri) + def test_api_equality_list_logs(self): + unique_id = uuid.uuid1() + gapic_logger = Config.CLIENT.logger(f"api-list-{unique_id}") + http_logger = Config.HTTP_CLIENT.logger(f"api-list-{unique_id}") + # write logs + log_count = 5 + for i in range(log_count): + gapic_logger.log_text(f"test {i}") + + def retryable(): + max_results = 3 + gapic_generator = gapic_logger.list_entries(max_results=max_results) + http_generator = http_logger.list_entries(max_results=max_results) + # returned objects should be consistent + self.assertEqual(type(gapic_generator), type(http_generator)) + gapic_list, http_list = list(gapic_generator), list(http_generator) + # max_results should limit the number of logs returned + self.assertEqual(len(gapic_list), max_results) + self.assertEqual(len(http_list), max_results) + # returned logs should be the same + self.assertEqual(gapic_list[0].insert_id, http_list[0].insert_id) + # should return in ascending order + self.assertEqual(gapic_list[0].payload, "test 0") + # test reverse ordering + gapic_generator = gapic_logger.list_entries( + max_results=max_results, order_by=google.cloud.logging_v2.DESCENDING + ) + http_generator = http_logger.list_entries( + max_results=max_results, order_by=google.cloud.logging_v2.DESCENDING + ) + gapic_list, http_list = list(gapic_generator), list(http_generator) + self.assertEqual(len(gapic_list), max_results) + self.assertEqual(len(http_list), max_results) + # http and gapic results should be consistent + self.assertEqual(gapic_list[0].insert_id, http_list[0].insert_id) + # returned logs should be in descending order + self.assertEqual(gapic_list[0].payload, f"test {log_count-1}") + + RetryErrors( + (ServiceUnavailable, InternalServerError, AssertionError), + delay=2, + backoff=2, + max_tries=3, + )(retryable)() + class _DeleteWrapper(object): def __init__(self, publisher, topic_path): diff --git a/tests/unit/test__gapic.py b/tests/unit/test__gapic.py index 5da1c7122..d8c4bf57e 100644 --- a/tests/unit/test__gapic.py +++ b/tests/unit/test__gapic.py @@ -32,7 +32,7 @@ FILTER = "logName:syslog AND severity>=ERROR" -class Test_LoggingAPI(object): +class Test_LoggingAPI(unittest.TestCase): LOG_NAME = "log_name" LOG_PATH = f"projects/{PROJECT}/logs/{LOG_NAME}" @@ -107,6 +107,49 @@ def test_list_entries_with_options(self): assert request.page_size == 42 assert request.page_token == "token" + def test_list_logs_with_max_results(self): + client = self.make_logging_api() + log_entry_msg = LogEntryPB(log_name=self.LOG_PATH, text_payload="text") + + with mock.patch.object( + type(client._gapic_api.transport.list_log_entries), "__call__" + ) as call: + call.return_value = logging_v2.types.ListLogEntriesResponse( + entries=[log_entry_msg, log_entry_msg] + ) + result = client.list_entries( + [PROJECT_PATH], + filter_=FILTER, + order_by=google.cloud.logging.ASCENDING, + page_size=42, + page_token="token", + max_results=1, + ) + + # Check the request + call.assert_called_once() + assert len(list(result)) == 1 + + def test_list_logs_negative_max_results(self): + client = self.make_logging_api() + + with self.assertRaises(ValueError): + with mock.patch.object( + type(client._gapic_api.transport.list_log_entries), "__call__" + ) as call: + call.return_value = logging_v2.types.ListLogEntriesResponse(entries=[]) + result = client.list_entries( + 
[PROJECT_PATH], + filter_=FILTER, + order_by=google.cloud.logging.ASCENDING, + page_size=42, + page_token="token", + max_results=-1, + ) + # Check the request + list(result) + call.assert_called_once() + def test_write_entries_single(self): client = self.make_logging_api() @@ -141,7 +184,7 @@ def test_logger_delete(self): assert call.call_args.args[0].log_name == self.LOG_PATH -class Test_SinksAPI(object): +class Test_SinksAPI(unittest.TestCase): SINK_NAME = "sink_name" PARENT_PATH = f"projects/{PROJECT}" SINK_PATH = f"projects/{PROJECT}/sinks/{SINK_NAME}" @@ -208,6 +251,40 @@ def test_list_sinks_with_options(self): assert request.page_size == 42 assert request.page_token == "token" + def test_list_sinks_with_max_results(self): + client = self.make_sinks_api() + sink_msg = LogSink( + name=self.SINK_NAME, destination=self.DESTINATION_URI, filter=FILTER + ) + + with mock.patch.object( + type(client._gapic_api.transport.list_sinks), "__call__" + ) as call: + call.return_value = logging_v2.types.ListSinksResponse( + sinks=[sink_msg, sink_msg] + ) + result = client.list_sinks( + self.PARENT_PATH, page_size=42, page_token="token", max_results=1 + ) + # Check the request + call.assert_called_once() + assert len(list(result)) == 1 + + def test_list_sinks_negative_max_results(self): + client = self.make_sinks_api() + + with self.assertRaises(ValueError): + with mock.patch.object( + type(client._gapic_api.transport.list_sinks), "__call__" + ) as call: + call.return_value = logging_v2.types.ListSinksResponse(sinks=[]) + result = client.list_sinks( + self.PARENT_PATH, page_size=42, page_token="token", max_results=-1 + ) + # Check the request + list(result) + call.assert_called_once() + def test_sink_create(self): client = self.make_sinks_api() with mock.patch.object( @@ -315,7 +392,7 @@ def test_sink_delete(self): assert request.sink_name == self.SINK_PATH -class Test_MetricsAPI(object): +class Test_MetricsAPI(unittest.TestCase): METRIC_NAME = "metric_name" METRIC_PATH = f"projects/{PROJECT}/metrics/{METRIC_NAME}" DESCRIPTION = "Description" @@ -379,6 +456,39 @@ def test_list_metrics_options(self): assert request.page_size == 42 assert request.page_token == "token" + def test_list_metrics_with_max_results(self): + client = self.make_metrics_api() + metric = logging_v2.types.LogMetric( + name=self.METRIC_PATH, description=self.DESCRIPTION, filter=FILTER + ) + with mock.patch.object( + type(client._gapic_api.transport.list_log_metrics), "__call__" + ) as call: + call.return_value = logging_v2.types.ListLogMetricsResponse( + metrics=[metric, metric] + ) + result = client.list_metrics( + PROJECT, page_size=42, page_token="token", max_results=1 + ) + # Check the request + call.assert_called_once() + assert len(list(result)) == 1 + + def test_list_metrics_negative_max_results(self): + client = self.make_metrics_api() + + with self.assertRaises(ValueError): + with mock.patch.object( + type(client._gapic_api.transport.list_log_metrics), "__call__" + ) as call: + call.return_value = logging_v2.types.ListLogMetricsResponse(metrics=[]) + result = client.list_metrics( + PROJECT, page_size=42, page_token="token", max_results=-1 + ) + # Check the request + list(result) + call.assert_called_once() + def test_metric_create(self): client = self.make_metrics_api() diff --git a/tests/unit/test__http.py b/tests/unit/test__http.py index e927f6c15..2154b6f57 100644 --- a/tests/unit/test__http.py +++ b/tests/unit/test__http.py @@ -129,16 +129,20 @@ def _make_timestamp(): NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) 
return NOW, _datetime_to_rfc3339_w_nanos(NOW) - def test_list_entries_no_paging(self): + def test_list_entries_with_limits(self): from google.cloud.logging import Client from google.cloud.logging import TextEntry from google.cloud.logging import Logger NOW, TIMESTAMP = self._make_timestamp() IID = "IID" + IID1 = "IID1" + IID2 = "IID2" TEXT = "TEXT" SENT = {"resourceNames": [self.PROJECT_PATH]} - TOKEN = "TOKEN" + PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"} + PROTO_PAYLOAD = PAYLOAD.copy() + PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example" RETURNED = { "entries": [ { @@ -147,24 +151,42 @@ def test_list_entries_no_paging(self): "resource": {"type": "global"}, "timestamp": TIMESTAMP, "logName": f"projects/{self.PROJECT}/logs/{self.LOGGER_NAME}", - } + }, + { + "jsonPayload": PAYLOAD, + "insertId": IID1, + "resource": {"type": "global"}, + "timestamp": TIMESTAMP, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, + { + "protoPayload": PROTO_PAYLOAD, + "insertId": IID2, + "resource": {"type": "global"}, + "timestamp": TIMESTAMP, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, ], - "nextPageToken": TOKEN, } client = Client( project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False ) + # try with negative max_results + with self.assertRaises(ValueError): + client._connection = _Connection(RETURNED) + api = self._make_one(client) + empty = list(api.list_entries([self.PROJECT_PATH], max_results=-1)) + # try with max_results of 0 client._connection = _Connection(RETURNED) api = self._make_one(client) - - iterator = api.list_entries([self.PROJECT_PATH]) - page = next(iterator.pages) - entries = list(page) - token = iterator.next_page_token - - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the entries returned. + empty = list(api.list_entries([self.PROJECT_PATH], max_results=0)) + self.assertEqual(empty, []) + # try with single result + client._connection = _Connection(RETURNED) + api = self._make_one(client) + iterator = api.list_entries([self.PROJECT_PATH], max_results=1) + entries = list(iterator) + # check the entries returned. self.assertEqual(len(entries), 1) entry = entries[0] self.assertIsInstance(entry, TextEntry) @@ -183,7 +205,7 @@ def test_list_entries_no_paging(self): called_with, {"method": "POST", "path": expected_path, "data": SENT} ) - def test_list_entries_w_paging(self): + def test_list_entries(self): from google.cloud.logging import DESCENDING from google.cloud.logging import Client from google.cloud.logging import Logger @@ -241,11 +263,8 @@ def test_list_entries_w_paging(self): page_token=TOKEN, ) entries = list(iterator) - token = iterator.next_page_token - # First check the token. - self.assertIsNone(token) - # Then check the entries returned. + # Check the entries returned. 
self.assertEqual(len(entries), 2) entry1 = entries[0] self.assertIsInstance(entry1, StructEntry) @@ -361,32 +380,38 @@ def test_ctor(self): self.assertIs(api._client, client) self.assertEqual(api.api_request, connection.api_request) - def test_list_sinks_no_paging(self): + def test_list_sinks_max_returned(self): from google.cloud.logging import Sink - TOKEN = "TOKEN" RETURNED = { "sinks": [ { "name": self.SINK_PATH, "filter": self.FILTER, "destination": self.DESTINATION_URI, - } + }, + {"name": "test", "filter": "test", "destination": "test"}, ], - "nextPageToken": TOKEN, } + # try with negative max_results + with self.assertRaises(ValueError): + conn = _Connection(RETURNED) + client = _Client(conn) + api = self._make_one(client) + empty = list(api.list_sinks(self.PROJECT_PATH, max_results=-1)) + # try with max_results of 0 conn = _Connection(RETURNED) client = _Client(conn) api = self._make_one(client) - - iterator = api.list_sinks(self.PROJECT_PATH) - page = next(iterator.pages) - sinks = list(page) - token = iterator.next_page_token - - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the sinks returned. + empty = list(api.list_sinks(self.PROJECT_PATH, max_results=0)) + self.assertEqual(empty, []) + # try with single result + conn = _Connection(RETURNED) + client = _Client(conn) + api = self._make_one(client) + iterator = api.list_sinks(self.PROJECT_PATH, max_results=1) + sinks = list(iterator) + # Check the sinks returned. self.assertEqual(len(sinks), 1) sink = sinks[0] self.assertIsInstance(sink, Sink) @@ -401,7 +426,7 @@ def test_list_sinks_no_paging(self): called_with, {"method": "GET", "path": path, "query_params": {}} ) - def test_list_sinks_w_paging(self): + def test_list_sinks(self): from google.cloud.logging import Sink TOKEN = "TOKEN" @@ -423,11 +448,7 @@ def test_list_sinks_w_paging(self): self.PROJECT_PATH, page_size=PAGE_SIZE, page_token=TOKEN ) sinks = list(iterator) - token = iterator.next_page_token - - # First check the token. - self.assertIsNone(token) - # Then check the sinks returned. + # Check the sinks returned. self.assertEqual(len(sinks), 1) sink = sinks[0] self.assertIsInstance(sink, Sink) @@ -632,26 +653,35 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) - def test_list_metrics_no_paging(self): + def test_list_metrics_max_results(self): from google.cloud.logging import Metric - TOKEN = "TOKEN" RETURNED = { - "metrics": [{"name": self.METRIC_PATH, "filter": self.FILTER}], - "nextPageToken": TOKEN, + "metrics": [ + {"name": self.METRIC_PATH, "filter": self.FILTER}, + {"name": "test", "filter": "test"}, + ], } + # try with negative max_results + with self.assertRaises(ValueError): + conn = _Connection(RETURNED) + client = _Client(conn) + api = self._make_one(client) + empty = list(api.list_metrics(self.PROJECT, max_results=-1)) + # try with max_results of 0 + conn = _Connection(RETURNED) + client = _Client(conn) + api = self._make_one(client) + empty = list(api.list_metrics(self.PROJECT, max_results=0)) + self.assertEqual(empty, []) + # try with single result conn = _Connection(RETURNED) client = _Client(conn) api = self._make_one(client) - iterator = api.list_metrics(self.PROJECT) - page = next(iterator.pages) - metrics = list(page) - token = iterator.next_page_token - - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the metrics returned. + iterator = api.list_metrics(self.PROJECT, max_results=1) + metrics = list(iterator) + # Check the metrics returned. 
self.assertEqual(len(metrics), 1) metric = metrics[0] self.assertIsInstance(metric, Metric) @@ -666,7 +696,7 @@ def test_list_metrics_no_paging(self): called_with, {"method": "GET", "path": path, "query_params": {}} ) - def test_list_metrics_w_paging(self): + def test_list_metrics(self): from google.cloud.logging import Metric TOKEN = "TOKEN" @@ -678,11 +708,7 @@ def test_list_metrics_w_paging(self): iterator = api.list_metrics(self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) metrics = list(iterator) - token = iterator.next_page_token - - # First check the token. - self.assertIsNone(token) - # Then check the metrics returned. + # Check the metrics returned. self.assertEqual(len(metrics), 1) metric = metrics[0] self.assertIsInstance(metric, Metric) diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 46526fb21..1a31e9c0c 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -259,7 +259,6 @@ def test_list_entries_defaults(self): IID = "IID" TEXT = "TEXT" - TOKEN = "TOKEN" ENTRIES = [ { "textPayload": TEXT, @@ -272,13 +271,11 @@ def test_list_entries_defaults(self): client = self._make_one( project=self.PROJECT, credentials=creds, _use_grpc=False ) - returned = {"entries": ENTRIES, "nextPageToken": TOKEN} + returned = {"entries": ENTRIES} client._connection = _Connection(returned) iterator = client.list_entries() - page = next(iterator.pages) - entries = list(page) - token = iterator.next_page_token + entries = list(iterator) self.assertEqual(len(entries), 1) entry = entries[0] @@ -289,7 +286,6 @@ def test_list_entries_defaults(self): self.assertEqual(logger.name, self.LOGGER_NAME) self.assertIs(logger.client, client) self.assertEqual(logger.project, self.PROJECT) - self.assertEqual(token, TOKEN) # check call payload call_payload_no_filter = deepcopy(client._connection._called_with) @@ -342,6 +338,12 @@ def test_list_entries_explicit(self): "resource": {"type": "global"}, "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), }, + { + "protoPayload": "ignored", + "insertId": "ignored", + "resource": {"type": "global"}, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, ] client = self._make_one( project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False @@ -355,13 +357,10 @@ def test_list_entries_explicit(self): order_by=DESCENDING, page_size=PAGE_SIZE, page_token=TOKEN, + max_results=2, ) entries = list(iterator) - token = iterator.next_page_token - - # First, check the token. - self.assertIsNone(token) - # Then check the entries. + # Check the entries. self.assertEqual(len(entries), 2) entry = entries[0] self.assertIsInstance(entry, StructEntry) @@ -423,7 +422,6 @@ def test_list_entries_explicit_timestamp(self): PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"} PROTO_PAYLOAD = PAYLOAD.copy() PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example" - TOKEN = "TOKEN" PAGE_SIZE = 42 ENTRIES = [ { @@ -450,14 +448,9 @@ def test_list_entries_explicit_timestamp(self): filter_=INPUT_FILTER, order_by=DESCENDING, page_size=PAGE_SIZE, - page_token=TOKEN, ) entries = list(iterator) - token = iterator.next_page_token - - # First, check the token. - self.assertIsNone(token) - # Then check the entries. + # Check the entries. 
self.assertEqual(len(entries), 2) entry = entries[0] self.assertIsInstance(entry, StructEntry) @@ -491,7 +484,6 @@ def test_list_entries_explicit_timestamp(self): "filter": INPUT_FILTER, "orderBy": DESCENDING, "pageSize": PAGE_SIZE, - "pageToken": TOKEN, "resourceNames": [f"projects/{PROJECT1}", f"projects/{PROJECT2}"], }, }, @@ -529,7 +521,6 @@ def test_list_sinks_no_paging(self): from google.cloud.logging import Sink PROJECT = "PROJECT" - TOKEN = "TOKEN" SINK_NAME = "sink_name" FILTER = "logName:syslog AND severity>=ERROR" SINKS = [ @@ -538,17 +529,13 @@ def test_list_sinks_no_paging(self): client = self._make_one( project=PROJECT, credentials=_make_credentials(), _use_grpc=False ) - returned = {"sinks": SINKS, "nextPageToken": TOKEN} + returned = {"sinks": SINKS} client._connection = _Connection(returned) iterator = client.list_sinks() - page = next(iterator.pages) - sinks = list(page) - token = iterator.next_page_token + sinks = list(iterator) - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the sinks returned. + # Check the sinks returned. self.assertEqual(len(sinks), 1) sink = sinks[0] self.assertIsInstance(sink, Sink) @@ -573,7 +560,8 @@ def test_list_sinks_with_paging(self): TOKEN = "TOKEN" PAGE_SIZE = 42 SINKS = [ - {"name": SINK_NAME, "filter": FILTER, "destination": self.DESTINATION_URI} + {"name": SINK_NAME, "filter": FILTER, "destination": self.DESTINATION_URI}, + {"name": "test", "filter": "test", "destination": "test"}, ] client = self._make_one( project=PROJECT, credentials=_make_credentials(), _use_grpc=False @@ -581,13 +569,11 @@ def test_list_sinks_with_paging(self): returned = {"sinks": SINKS} client._connection = _Connection(returned) - iterator = client.list_sinks(page_size=PAGE_SIZE, page_token=TOKEN) + iterator = client.list_sinks( + page_size=PAGE_SIZE, page_token=TOKEN, max_results=1 + ) sinks = list(iterator) - token = iterator.next_page_token - - # First check the token. - self.assertIsNone(token) - # Then check the sinks returned. + # Check the sinks returned. self.assertEqual(len(sinks), 1) sink = sinks[0] self.assertIsInstance(sink, Sink) @@ -678,29 +664,27 @@ def test_list_metrics_with_paging(self): from google.cloud.logging import Metric token = "TOKEN" - next_token = "T00KEN" page_size = 42 metrics = [ { "name": self.METRIC_NAME, "filter": self.FILTER, "description": self.DESCRIPTION, - } + }, + {"name": "test", "filter": "test", "description": "test"}, ] client = self._make_one( project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False ) - returned = {"metrics": metrics, "nextPageToken": next_token} + returned = {"metrics": metrics} client._connection = _Connection(returned) # Execute request. - iterator = client.list_metrics(page_size=page_size, page_token=token) - page = next(iterator.pages) - metrics = list(page) - - # First check the token. - self.assertEqual(iterator.next_page_token, next_token) - # Then check the metrics returned. + iterator = client.list_metrics( + page_size=page_size, page_token=token, max_results=1 + ) + metrics = list(iterator) + # Check the metrics returned. 
self.assertEqual(len(metrics), 1) metric = metrics[0] self.assertIsInstance(metric, Metric) diff --git a/tests/unit/test_logger.py b/tests/unit/test_logger.py index 0d8fd1208..ef13c923c 100644 --- a/tests/unit/test_logger.py +++ b/tests/unit/test_logger.py @@ -605,23 +605,18 @@ def test_delete_w_alternate_client(self): def test_list_entries_defaults(self): from google.cloud.logging import Client - TOKEN = "TOKEN" - client = Client( project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False ) - returned = {"nextPageToken": TOKEN} + returned = {} client._connection = _Connection(returned) logger = self._make_one(self.LOGGER_NAME, client=client) iterator = logger.list_entries() - page = next(iterator.pages) - entries = list(page) - token = iterator.next_page_token + entries = list(iterator) self.assertEqual(len(entries), 0) - self.assertEqual(token, TOKEN) LOG_FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) # check call payload @@ -668,10 +663,8 @@ def test_list_entries_explicit(self): page_token=TOKEN, ) entries = list(iterator) - token = iterator.next_page_token self.assertEqual(len(entries), 0) - self.assertIsNone(token) # self.assertEqual(client._listed, LISTED) # check call payload call_payload_no_filter = deepcopy(client._connection._called_with) @@ -728,10 +721,8 @@ def test_list_entries_explicit_timestamp(self): page_token=TOKEN, ) entries = list(iterator) - token = iterator.next_page_token self.assertEqual(len(entries), 0) - self.assertIsNone(token) # self.assertEqual(client._listed, LISTED) # check call payload LOG_FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME,) @@ -751,6 +742,100 @@ def test_list_entries_explicit_timestamp(self): }, ) + def test_list_entries_limit(self): + from google.cloud.logging import DESCENDING + from google.cloud.logging import ProtobufEntry + from google.cloud.logging import StructEntry + from google.cloud.logging import Logger + from google.cloud.logging import Client + + PROJECT1 = "PROJECT1" + PROJECT2 = "PROJECT2" + INPUT_FILTER = "logName:LOGNAME" + IID1 = "IID1" + IID2 = "IID2" + PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"} + PROTO_PAYLOAD = PAYLOAD.copy() + PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example" + TOKEN = "TOKEN" + PAGE_SIZE = 42 + ENTRIES = [ + { + "jsonPayload": PAYLOAD, + "insertId": IID1, + "resource": {"type": "global"}, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, + { + "protoPayload": PROTO_PAYLOAD, + "insertId": IID2, + "resource": {"type": "global"}, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, + { + "protoPayload": "ignored", + "insertId": "ignored", + "resource": {"type": "global"}, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, + ] + client = Client( + project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False + ) + returned = {"entries": ENTRIES} + client._connection = _Connection(returned) + logger = self._make_one(self.LOGGER_NAME, client=client) + + iterator = logger.list_entries( + resource_names=[f"projects/{PROJECT1}", f"projects/{PROJECT2}"], + filter_=INPUT_FILTER, + order_by=DESCENDING, + page_size=PAGE_SIZE, + page_token=TOKEN, + max_results=2, + ) + entries = list(iterator) + # Check the entries. 
+ self.assertEqual(len(entries), 2) + entry = entries[0] + self.assertIsInstance(entry, StructEntry) + self.assertEqual(entry.insert_id, IID1) + self.assertEqual(entry.payload, PAYLOAD) + logger = entry.logger + self.assertIsInstance(logger, Logger) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertIs(logger.client, client) + self.assertEqual(logger.project, self.PROJECT) + + entry = entries[1] + self.assertIsInstance(entry, ProtobufEntry) + self.assertEqual(entry.insert_id, IID2) + self.assertEqual(entry.payload, PROTO_PAYLOAD) + logger = entry.logger + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertIs(logger.client, client) + self.assertEqual(logger.project, self.PROJECT) + + self.assertIs(entries[0].logger, entries[1].logger) + + # check call payload + call_payload_no_filter = deepcopy(client._connection._called_with) + call_payload_no_filter["data"]["filter"] = "removed" + self.assertEqual( + call_payload_no_filter, + { + "path": "/entries:list", + "method": "POST", + "data": { + "filter": "removed", + "orderBy": DESCENDING, + "pageSize": PAGE_SIZE, + "pageToken": TOKEN, + "resourceNames": [f"projects/{PROJECT1}", f"projects/{PROJECT2}"], + }, + }, + ) + class TestBatch(unittest.TestCase):
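Taken together, the diff gives the handwritten entries, sinks, and metrics surfaces a uniform generator-based API. A hypothetical usage sketch of the new parameter — the project ID and filter below are placeholders, not values taken from this diff:

```python
# Hypothetical usage of the new max_results keyword; "my-project" and the
# filter string are placeholders.
from google.cloud import logging_v2

client = logging_v2.Client(project="my-project")

# Stream at most 100 entries, fetched 50 per underlying API call.
for entry in client.list_entries(
    filter_="severity>=ERROR", max_results=100, page_size=50
):
    print(entry.insert_id, entry.payload)

# Sinks and metrics take the same keyword; max_results=None streams everything.
sinks = list(client.list_sinks(max_results=10))
metrics = list(client.list_metrics(max_results=10))
```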