Commit
Merge pull request googleapis#2589 from dhermes/fix-2552
Allowing for arbitrarily nested dictionaries in _log_entry_mapping_to_pb.
dhermes authored Oct 24, 2016
2 parents 3631554 + b462df9 commit 842c486
Showing 3 changed files with 104 additions and 46 deletions.
9 changes: 3 additions & 6 deletions logging/google/cloud/logging/_gax.py
@@ -14,8 +14,6 @@
 
 """GAX wrapper for Logging API requests."""
 
-import json
-
 from google.gax import CallOptions
 from google.gax import INITIAL_PAGE
 from google.gax.errors import GaxError
@@ -24,7 +22,7 @@
 from google.logging.v2.logging_config_pb2 import LogSink
 from google.logging.v2.logging_metrics_pb2 import LogMetric
 from google.logging.v2.log_entry_pb2 import LogEntry
-from google.protobuf.json_format import Parse
+from google.protobuf.json_format import ParseDict
 from grpc import StatusCode
 
 from google.cloud._helpers import _datetime_to_pb_timestamp
@@ -603,11 +601,10 @@ def _log_entry_mapping_to_pb(mapping):
             entry_pb.labels[key] = value
 
     if 'jsonPayload' in mapping:
-        for key, value in mapping['jsonPayload'].items():
-            entry_pb.json_payload[key] = value
+        ParseDict(mapping['jsonPayload'], entry_pb.json_payload)
 
     if 'protoPayload' in mapping:
-        Parse(json.dumps(mapping['protoPayload']), entry_pb.proto_payload)
+        ParseDict(mapping['protoPayload'], entry_pb.proto_payload)
 
     if 'httpRequest' in mapping:
         _http_request_mapping_to_pb(
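
The crux of the change is the switch to ParseDict: google.protobuf.json_format.ParseDict converts a plain Python dict, including nested dicts and lists, into the corresponding Struct/Value messages recursively, whereas the previous per-key assignment into entry_pb.json_payload only copied top-level values and choked on nested dicts. A minimal standalone sketch of the behavior the new code relies on (the payload below is illustrative, not taken from the library):

    from google.protobuf.json_format import ParseDict
    from google.protobuf.struct_pb2 import Struct

    # Illustrative payload with a nested dict, the kind of value that
    # motivated the fix (branch fix-2552).
    payload = {'message': 'MESSAGE',
               'weather': {'temperature': 75, 'precipitation': False}}

    struct_pb = Struct()
    ParseDict(payload, struct_pb)

    # The nested dict becomes a Value wrapping an inner Struct.
    weather = struct_pb.fields['weather'].struct_value
    assert weather.fields['temperature'].number_value == 75
    assert weather.fields['precipitation'].bool_value is False
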
102 changes: 81 additions & 21 deletions logging/unit_tests/test__gax.py
@@ -80,16 +80,12 @@ def test_list_entries_no_paging(self):
self.assertEqual(page_size, 0)
self.assertIs(options.page_token, INITIAL_PAGE)

def test_list_entries_with_paging(self):
from google.protobuf.struct_pb2 import Value
def _list_entries_with_paging_helper(self, payload, struct_pb):
from google.cloud._testing import _GAXPageIterator

SIZE = 23
TOKEN = 'TOKEN'
NEW_TOKEN = 'NEW_TOKEN'
PAYLOAD = {'message': 'MESSAGE', 'weather': 'sunny'}
struct_pb = _StructPB({
key: Value(string_value=value) for key, value in PAYLOAD.items()
})
response = _GAXPageIterator(
[_LogEntryPB(self.LOG_NAME, json_payload=struct_pb)], NEW_TOKEN)
gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
@@ -103,7 +99,7 @@ def test_list_entries_with_paging(self):
self.assertIsInstance(entry, dict)
self.assertEqual(entry['logName'], self.LOG_NAME)
self.assertEqual(entry['resource'], {'type': 'global'})
self.assertEqual(entry['jsonPayload'], PAYLOAD)
self.assertEqual(entry['jsonPayload'], payload)
self.assertEqual(next_token, NEW_TOKEN)

projects, filter_, order_by, page_size, options = (
@@ -114,6 +110,43 @@
self.assertEqual(page_size, SIZE)
self.assertEqual(options.page_token, TOKEN)

def test_list_entries_with_paging(self):
from google.protobuf.struct_pb2 import Struct
from google.protobuf.struct_pb2 import Value

payload = {'message': 'MESSAGE', 'weather': 'sunny'}
struct_pb = Struct(fields={
key: Value(string_value=value) for key, value in payload.items()
})
self._list_entries_with_paging_helper(payload, struct_pb)

def test_list_entries_with_paging_nested_payload(self):
from google.protobuf.struct_pb2 import Struct
from google.protobuf.struct_pb2 import Value

payload = {}
struct_fields = {}
# Add a simple key.
key = 'message'
payload[key] = 'MESSAGE'
struct_fields[key] = Value(string_value=payload[key])
# Add a nested key.
key = 'weather'
sub_value = {}
sub_fields = {}
sub_key = 'temperature'
sub_value[sub_key] = 75
sub_fields[sub_key] = Value(number_value=sub_value[sub_key])
sub_key = 'precipitation'
sub_value[sub_key] = False
sub_fields[sub_key] = Value(bool_value=sub_value[sub_key])
# Update the parent payload.
payload[key] = sub_value
struct_fields[key] = Value(struct_value=Struct(fields=sub_fields))
# Make the struct_pb for our dict.
struct_pb = Struct(fields=struct_fields)
self._list_entries_with_paging_helper(payload, struct_pb)

def test_list_entries_with_extra_properties(self):
from datetime import datetime
from google.logging.type.log_severity_pb2 import WARNING
@@ -317,18 +350,16 @@ def test_write_entries_w_extra_properties(self):
self.assertIsNone(options)
# pylint: enable=too-many-statements

def test_write_entries_multiple(self):
def _write_entries_multiple_helper(self, json_payload, json_struct_pb):
# pylint: disable=too-many-statements
import datetime
from google.logging.type.log_severity_pb2 import WARNING
from google.logging.v2.log_entry_pb2 import LogEntry
from google.protobuf.any_pb2 import Any
from google.protobuf.struct_pb2 import Struct
from google.cloud._helpers import _datetime_to_rfc3339, UTC
TEXT = 'TEXT'
NOW = datetime.datetime.utcnow().replace(tzinfo=UTC)
TIMESTAMP_TYPE_URL = 'type.googleapis.com/google.protobuf.Timestamp'
JSON = {'payload': 'PAYLOAD', 'type': 'json'}
PROTO = {
'@type': TIMESTAMP_TYPE_URL,
'value': _datetime_to_rfc3339(NOW),
@@ -339,7 +370,7 @@ def test_write_entries_multiple(self):
ENTRIES = [
{'textPayload': TEXT,
'severity': WARNING},
{'jsonPayload': JSON,
{'jsonPayload': json_payload,
'operation': {'producer': PRODUCER, 'id': OPID}},
{'protoPayload': PROTO,
'httpRequest': {'requestUrl': URL}},
@@ -373,10 +404,7 @@ def test_write_entries_multiple(self):
self.assertEqual(entry.log_name, '')
self.assertEqual(entry.resource.type, '')
self.assertEqual(entry.labels, {})
json_struct = entry.json_payload
self.assertIsInstance(json_struct, Struct)
self.assertEqual(json_struct.fields['payload'].string_value,
JSON['payload'])
self.assertEqual(entry.json_payload, json_struct_pb)
operation = entry.operation
self.assertEqual(operation.producer, PRODUCER)
self.assertEqual(operation.id, OPID)
@@ -399,6 +427,44 @@
self.assertIsNone(options)
# pylint: enable=too-many-statements

def test_write_entries_multiple(self):
from google.protobuf.struct_pb2 import Struct
from google.protobuf.struct_pb2 import Value

json_payload = {'payload': 'PAYLOAD', 'type': 'json'}
json_struct_pb = Struct(fields={
key: Value(string_value=value)
for key, value in json_payload.items()
})
self._write_entries_multiple_helper(json_payload, json_struct_pb)

def test_write_entries_multiple_nested_payload(self):
from google.protobuf.struct_pb2 import Struct
from google.protobuf.struct_pb2 import Value

json_payload = {}
struct_fields = {}
# Add a simple key.
key = 'hello'
json_payload[key] = 'me you looking for'
struct_fields[key] = Value(string_value=json_payload[key])
# Add a nested key.
key = 'everything'
sub_value = {}
sub_fields = {}
sub_key = 'answer'
sub_value[sub_key] = 42
sub_fields[sub_key] = Value(number_value=sub_value[sub_key])
sub_key = 'really?'
sub_value[sub_key] = False
sub_fields[sub_key] = Value(bool_value=sub_value[sub_key])
# Update the parent payload.
json_payload[key] = sub_value
struct_fields[key] = Value(struct_value=Struct(fields=sub_fields))
# Make the struct_pb for our dict.
json_struct_pb = Struct(fields=struct_fields)
self._write_entries_multiple_helper(json_payload, json_struct_pb)

def test_logger_delete(self):
LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME)
gax_api = _GAXLoggingAPI()
@@ -1057,12 +1123,6 @@ def __init__(self, type_='global', **labels):
self.labels = labels


class _StructPB(object):

def __init__(self, fields):
self.fields = fields


class _LogEntryPB(object):

severity = 0
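
For reference, the nested-payload tests above build the expected Struct by hand out of Value objects; protobuf messages compare field by field, so a hand-built message is interchangeable with one produced by ParseDict from the plain dict. A quick standalone check of that equivalence (the values here are illustrative, not part of the test suite):

    from google.protobuf.json_format import ParseDict
    from google.protobuf.struct_pb2 import Struct, Value

    payload = {'message': 'MESSAGE',
               'weather': {'temperature': 75, 'precipitation': False}}
    by_hand = Struct(fields={
        'message': Value(string_value='MESSAGE'),
        'weather': Value(struct_value=Struct(fields={
            'temperature': Value(number_value=75),
            'precipitation': Value(bool_value=False),
        })),
    })
    # Equality holds because protobuf message comparison is field-by-field.
    assert by_hand == ParseDict(payload, Struct())
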
39 changes: 20 additions & 19 deletions system_tests/logging_.py
@@ -57,6 +57,15 @@ def setUpModule():

class TestLogging(unittest.TestCase):

JSON_PAYLOAD = {
'message': 'System test: test_log_struct',
'weather': {
'clouds': 'party or partly',
'temperature': 70,
'precipitation': False,
},
}

def setUp(self):
self.to_delete = []
self._handlers_cache = logging.getLogger().handlers[:]
@@ -120,18 +129,14 @@ def test_log_text_w_metadata(self):
self.assertEqual(request['status'], STATUS)

def test_log_struct(self):
JSON_PAYLOAD = {
'message': 'System test: test_log_struct',
'weather': 'partly cloudy',
}
logger = Config.CLIENT.logger(self._logger_name())
self.to_delete.append(logger)

logger.log_struct(JSON_PAYLOAD)
logger.log_struct(self.JSON_PAYLOAD)
entries, _ = self._list_entries(logger)

self.assertEqual(len(entries), 1)
self.assertEqual(entries[0].payload, JSON_PAYLOAD)
self.assertEqual(entries[0].payload, self.JSON_PAYLOAD)

def test_log_handler_async(self):
LOG_MESSAGE = 'It was the worst of times'
@@ -145,12 +150,12 @@ def test_log_handler_async(self):
cloud_logger.addHandler(handler)
cloud_logger.warn(LOG_MESSAGE)
entries, _ = self._list_entries(logger)
JSON_PAYLOAD = {
expected_payload = {
'message': LOG_MESSAGE,
'python_logger': handler.name
}
self.assertEqual(len(entries), 1)
self.assertEqual(entries[0].payload, JSON_PAYLOAD)
self.assertEqual(entries[0].payload, expected_payload)

def test_log_handler_sync(self):
LOG_MESSAGE = 'It was the best of times.'
@@ -169,12 +174,12 @@ def test_log_handler_sync(self):
cloud_logger.warn(LOG_MESSAGE)

entries, _ = self._list_entries(logger)
JSON_PAYLOAD = {
expected_payload = {
'message': LOG_MESSAGE,
'python_logger': LOGGER_NAME
}
self.assertEqual(len(entries), 1)
self.assertEqual(entries[0].payload, JSON_PAYLOAD)
self.assertEqual(entries[0].payload, expected_payload)

def test_log_root_handler(self):
LOG_MESSAGE = 'It was the best of times.'
@@ -188,19 +193,15 @@ def test_log_root_handler(self):
logging.warn(LOG_MESSAGE)

entries, _ = self._list_entries(logger)
JSON_PAYLOAD = {
expected_payload = {
'message': LOG_MESSAGE,
'python_logger': 'root'
}

self.assertEqual(len(entries), 1)
self.assertEqual(entries[0].payload, JSON_PAYLOAD)
self.assertEqual(entries[0].payload, expected_payload)

def test_log_struct_w_metadata(self):
JSON_PAYLOAD = {
'message': 'System test: test_log_struct',
'weather': 'partly cloudy',
}
INSERT_ID = 'INSERTID'
SEVERITY = 'INFO'
METHOD = 'POST'
@@ -214,12 +215,12 @@ def test_log_struct_w_metadata(self):
logger = Config.CLIENT.logger(self._logger_name())
self.to_delete.append(logger)

logger.log_struct(JSON_PAYLOAD, insert_id=INSERT_ID, severity=SEVERITY,
http_request=REQUEST)
logger.log_struct(self.JSON_PAYLOAD, insert_id=INSERT_ID,
severity=SEVERITY, http_request=REQUEST)
entries, _ = self._list_entries(logger)

self.assertEqual(len(entries), 1)
self.assertEqual(entries[0].payload, JSON_PAYLOAD)
self.assertEqual(entries[0].payload, self.JSON_PAYLOAD)
self.assertEqual(entries[0].insert_id, INSERT_ID)
self.assertEqual(entries[0].severity, SEVERITY)
request = entries[0].http_request
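
At the library surface, the change means Logger.log_struct accepts arbitrarily nested dicts, which is what the updated system test exercises via the shared JSON_PAYLOAD. A hedged usage sketch (assumes application-default credentials and a default project; the log name is made up):

    from google.cloud import logging as cloud_logging

    client = cloud_logging.Client()
    logger = client.logger('example-log')  # hypothetical log name

    # The nested dict is converted into a nested Struct payload before
    # the API call, mirroring the system test's JSON_PAYLOAD.
    logger.log_struct({
        'message': 'System test: test_log_struct',
        'weather': {'clouds': 'party or partly',
                    'temperature': 70,
                    'precipitation': False},
    })
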
