34 changes: 26 additions & 8 deletions gcloud/logging/_gax.py
@@ -32,6 +32,8 @@
from gcloud.exceptions import Conflict
from gcloud.exceptions import NotFound
from gcloud._helpers import _datetime_to_pb_timestamp
from gcloud._helpers import _datetime_to_rfc3339
from gcloud._helpers import _pb_timestamp_to_datetime


class _LoggingAPI(object):
@@ -397,6 +399,22 @@ def _build_paging_options(page_token=None):
return CallOptions(**options)


def _mon_resource_pb_to_mapping(resource_pb):
"""Helper for :func:_log_entry_pb_to_mapping"""
mapping = {
'type': resource_pb.type,
}
if resource_pb.labels:
mapping['labels'] = resource_pb.labels
return mapping


def _pb_timestamp_to_rfc3339(timestamp_pb):
"""Helper for :func:_log_entry_pb_to_mapping"""
timestamp = _pb_timestamp_to_datetime(timestamp_pb)
return _datetime_to_rfc3339(timestamp)


def _log_entry_pb_to_mapping(entry_pb):
"""Helper for :meth:`list_entries`, et aliae

@@ -405,20 +423,20 @@ def _log_entry_pb_to_mapping(entry_pb):
https://github.com/google/protobuf/issues/1351
"""
mapping = {
'log_name': entry_pb.log_name,
'resource': entry_pb.resource,
'logName': entry_pb.log_name,
'resource': _mon_resource_pb_to_mapping(entry_pb.resource),
'severity': entry_pb.severity,
'insert_id': entry_pb.insert_id,
'timestamp': entry_pb.timestamp,
'insertId': entry_pb.insert_id,
'timestamp': _pb_timestamp_to_rfc3339(entry_pb.timestamp),
'labels': entry_pb.labels,
'text_payload': entry_pb.text_payload,
'json_payload': entry_pb.json_payload,
'proto_payload': entry_pb.proto_payload,
'textPayload': entry_pb.text_payload,
'jsonPayload': entry_pb.json_payload,
'protoPayload': entry_pb.proto_payload,
}

if entry_pb.http_request:
request = entry_pb.http_request
mapping['http_request'] = {
mapping['httpRequest'] = {
'request_method': request.request_method,
'request_url': request.request_url,
'status': request.status,
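A quick, illustrative sketch (not part of the diff) of how the new _pb_timestamp_to_rfc3339 helper behaves, using the gcloud._helpers utilities imported at the top of this file to build its input; the datetime value and the printed output are hypothetical:

    from datetime import datetime

    from gcloud._helpers import UTC
    from gcloud._helpers import _datetime_to_pb_timestamp
    from gcloud.logging._gax import _pb_timestamp_to_rfc3339

    # Build a protobuf Timestamp from a timezone-aware datetime, then render it
    # as the RFC 3339 string stored under the mapping's 'timestamp' key.
    now = datetime.utcnow().replace(tzinfo=UTC)
    timestamp_pb = _datetime_to_pb_timestamp(now)
    print(_pb_timestamp_to_rfc3339(timestamp_pb))  # e.g. '2016-04-21T17:09:30.000123Z'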
47 changes: 41 additions & 6 deletions gcloud/logging/client.py
@@ -14,11 +14,31 @@

"""Client for interacting with the Google Cloud Logging API."""

import os

try:
from google.logging.v2.config_service_v2_api import (
ConfigServiceV2Api as GeneratedSinksAPI)
from google.logging.v2.logging_service_v2_api import (
LoggingServiceV2Api as GeneratedLoggingAPI)
from google.logging.v2.metrics_service_v2_api import (
MetricsServiceV2Api as GeneratedMetricsAPI)
from gcloud.logging._gax import _LoggingAPI as GAXLoggingAPI
from gcloud.logging._gax import _MetricsAPI as GAXMetricsAPI
from gcloud.logging._gax import _SinksAPI as GAXSinksAPI
except ImportError: # pragma: NO COVER
_HAVE_GAX = False
GeneratedLoggingAPI = GAXLoggingAPI = None
GeneratedMetricsAPI = GAXMetricsAPI = None
GeneratedSinksAPI = GAXSinksAPI = None
else:
_HAVE_GAX = True

from gcloud.client import JSONClient
from gcloud.logging.connection import Connection
from gcloud.logging.connection import _LoggingAPI
from gcloud.logging.connection import _MetricsAPI
from gcloud.logging.connection import _SinksAPI
from gcloud.logging.connection import _LoggingAPI as JSONLoggingAPI
from gcloud.logging.connection import _MetricsAPI as JSONMetricsAPI
from gcloud.logging.connection import _SinksAPI as JSONSinksAPI
from gcloud.logging.entries import ProtobufEntry
from gcloud.logging.entries import StructEntry
from gcloud.logging.entries import TextEntry
@@ -27,6 +47,9 @@
from gcloud.logging.sink import Sink


_USE_GAX = _HAVE_GAX and (os.environ.get('GCLOUD_ENABLE_GAX') is not None)


class Client(JSONClient):
"""Client to bundle configuration needed for API requests.

@@ -60,7 +83,11 @@ def logging_api(self):
https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs
"""
if self._logging_api is None:
self._logging_api = _LoggingAPI(self.connection)
if _USE_GAX:
generated = GeneratedLoggingAPI()
self._logging_api = GAXLoggingAPI(generated)
else:
self._logging_api = JSONLoggingAPI(self.connection)
return self._logging_api

@property
@@ -71,7 +98,11 @@ def sinks_api(self):
https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks
"""
if self._sinks_api is None:
self._sinks_api = _SinksAPI(self.connection)
if _USE_GAX:
generated = GeneratedSinksAPI()
self._sinks_api = GAXSinksAPI(generated)
else:
self._sinks_api = JSONSinksAPI(self.connection)
return self._sinks_api

@property
@@ -82,7 +113,11 @@ def metrics_api(self):
https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics
"""
if self._metrics_api is None:
self._metrics_api = _MetricsAPI(self.connection)
if _USE_GAX:
generated = GeneratedMetricsAPI()
self._metrics_api = GAXMetricsAPI(generated)
else:
self._metrics_api = JSONMetricsAPI(self.connection)
return self._metrics_api

def logger(self, name):
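A minimal usage sketch (assuming gcloud.logging exports Client, with 'my-project' as a placeholder project ID) of how the GAX transport is selected; because _USE_GAX is evaluated at import time, GCLOUD_ENABLE_GAX must be set before gcloud.logging.client is imported:

    import os

    # Opt in to the GAX transport; only the variable's presence is checked,
    # any value will do.
    os.environ['GCLOUD_ENABLE_GAX'] = '1'

    from gcloud import logging

    client = logging.Client(project='my-project')
    # GAXLoggingAPI when the google.logging.v2 GAX modules import successfully
    # and GCLOUD_ENABLE_GAX is set; JSONLoggingAPI otherwise.
    api = client.logging_api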
46 changes: 32 additions & 14 deletions gcloud/logging/test__gax.py
@@ -64,9 +64,9 @@ def test_list_entries_no_paging(self):
self.assertEqual(len(entries), 1)
entry = entries[0]
self.assertIsInstance(entry, dict)
self.assertEqual(entry['log_name'], self.LOG_NAME)
self.assertEqual(entry['logName'], self.LOG_NAME)
self.assertEqual(entry['resource'], {'type': 'global'})
self.assertEqual(entry['text_payload'], TEXT)
self.assertEqual(entry['textPayload'], TEXT)
self.assertEqual(next_token, TOKEN)

projects, filter_, order_by, page_size, options = (
@@ -94,9 +94,9 @@ def test_list_entries_with_paging(self):
self.assertEqual(len(entries), 1)
entry = entries[0]
self.assertIsInstance(entry, dict)
self.assertEqual(entry['log_name'], self.LOG_NAME)
self.assertEqual(entry['logName'], self.LOG_NAME)
self.assertEqual(entry['resource'], {'type': 'global'})
self.assertEqual(entry['json_payload'], PAYLOAD)
self.assertEqual(entry['jsonPayload'], PAYLOAD)
self.assertEqual(next_token, NEW_TOKEN)

projects, filter_, order_by, page_size, options = (
@@ -108,7 +108,12 @@ def test_list_entries_with_extra_properties(self):
self.assertEqual(options.page_token, TOKEN)

def test_list_entries_with_extra_properties(self):
from datetime import datetime
from gcloud._testing import _GAXPageIterator
from gcloud._helpers import UTC
from gcloud._helpers import _datetime_to_rfc3339
from gcloud._helpers import _datetime_to_pb_timestamp
NOW = datetime.utcnow().replace(tzinfo=UTC)
SIZE = 23
TOKEN = 'TOKEN'
NEW_TOKEN = 'NEW_TOKEN'
@@ -128,6 +133,8 @@ def test_list_entries_with_extra_properties(self):
'operation': operation,
}
ENTRY = _LogEntryPB(self.LOG_NAME, proto_payload=PAYLOAD, **EXTRAS)
ENTRY.resource.labels['foo'] = 'bar'
ENTRY.timestamp = _datetime_to_pb_timestamp(NOW)
response = _GAXPageIterator([ENTRY], NEW_TOKEN)
gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
api = self._makeOne(gax_api)
@@ -138,12 +145,14 @@
self.assertEqual(len(entries), 1)
entry = entries[0]
self.assertIsInstance(entry, dict)
self.assertEqual(entry['log_name'], self.LOG_NAME)
self.assertEqual(entry['resource'], {'type': 'global'})
self.assertEqual(entry['proto_payload'], PAYLOAD)
self.assertEqual(entry['logName'], self.LOG_NAME)
self.assertEqual(entry['resource'],
{'type': 'global', 'labels': {'foo': 'bar'}})
self.assertEqual(entry['protoPayload'], PAYLOAD)
self.assertEqual(entry['severity'], SEVERITY)
self.assertEqual(entry['labels'], LABELS)
self.assertEqual(entry['insert_id'], IID)
self.assertEqual(entry['insertId'], IID)
self.assertEqual(entry['timestamp'], _datetime_to_rfc3339(NOW))
EXPECTED_REQUEST = {
'request_method': request.request_method,
'request_url': request.request_url,
@@ -155,7 +164,7 @@
'remote_ip': request.remote_ip,
'cache_hit': request.cache_hit,
}
self.assertEqual(entry['http_request'], EXPECTED_REQUEST)
self.assertEqual(entry['httpRequest'], EXPECTED_REQUEST)
EXPECTED_OPERATION = {
'producer': operation.producer,
'id': operation.id,
@@ -302,17 +311,19 @@ def test_write_entries_w_extra_properties(self):

def test_write_entries_multiple(self):
# pylint: disable=too-many-statements
import datetime
from google.logging.type.log_severity_pb2 import WARNING
from google.logging.v2.log_entry_pb2 import LogEntry
from google.protobuf.any_pb2 import Any
from google.protobuf.struct_pb2 import Struct
from gcloud._helpers import _datetime_to_rfc3339, UTC
TEXT = 'TEXT'
TIMESTAMP = _LogEntryPB._make_timestamp()
NOW = datetime.datetime.utcnow().replace(tzinfo=UTC)
TIMESTAMP_TYPE_URL = 'type.googleapis.com/google.protobuf.Timestamp'
JSON = {'payload': 'PAYLOAD', 'type': 'json'}
PROTO = {
'@type': TIMESTAMP_TYPE_URL,
'value': TIMESTAMP,
'value': _datetime_to_rfc3339(NOW),
}
PRODUCER = 'PRODUCER'
OPID = 'OPID'
@@ -956,6 +967,13 @@ class _LogEntryOperationPB(object):
id = 'OPID'


class _ResourcePB(object):

def __init__(self, type_='global', **labels):
self.type = type_
self.labels = labels


class _LogEntryPB(object):

severity = 'DEFAULT'
@@ -964,7 +982,7 @@ class _LogEntryPB(object):

def __init__(self, log_name, **kw):
self.log_name = log_name
self.resource = {'type': 'global'}
self.resource = _ResourcePB()
self.timestamp = self._make_timestamp()
self.labels = kw.pop('labels', {})
self.__dict__.update(kw)
@@ -973,9 +991,9 @@ def __init__(self, log_name, **kw):
def _make_timestamp():
from datetime import datetime
from gcloud._helpers import UTC
from gcloud.logging.test_entries import _datetime_to_rfc3339_w_nanos
from gcloud._helpers import _datetime_to_pb_timestamp
NOW = datetime.utcnow().replace(tzinfo=UTC)
return _datetime_to_rfc3339_w_nanos(NOW)
return _datetime_to_pb_timestamp(NOW)


class _LogSinkPB(object):