fix: add default filter settings to list_entries (#73)
daniel-sanche authored Oct 19, 2020
1 parent 96adeed commit 0a1dd94
Showing 7 changed files with 308 additions and 21 deletions.
27 changes: 27 additions & 0 deletions google/cloud/logging/_helpers.py
@@ -16,6 +16,10 @@

import logging

from datetime import datetime
from datetime import timedelta
from datetime import timezone

import requests

from google.cloud.logging.entries import LogEntry
@@ -50,6 +54,9 @@ class LogSeverity(object):
logging.NOTSET: LogSeverity.DEFAULT,
}

_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f%z"
"""Time format for timestamps used in API"""

METADATA_URL = "http://metadata.google.internal./computeMetadata/v1/"
METADATA_HEADERS = {"Metadata-Flavor": "Google"}

@@ -123,3 +130,23 @@ def _normalize_severity(stdlib_level):
:returns: Corresponding Stackdriver severity.
"""
return _NORMALIZED_SEVERITIES.get(stdlib_level, stdlib_level)


def _add_defaults_to_filter(filter_):
"""Modify the input filter expression to add sensible defaults.
:type filter_: str
:param filter_: The original filter expression
:rtype: str
:returns: sensible default filter string
"""

# By default, requests should only return logs in the last 24 hours
yesterday = datetime.now(timezone.utc) - timedelta(days=1)
time_filter = 'timestamp>="%s"' % yesterday.strftime(_TIME_FORMAT)
if filter_ is None:
filter_ = time_filter
elif "timestamp" not in filter_.lower():
filter_ = "%s AND %s" % (filter_, time_filter)
return filter_
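
For reference, a minimal sketch of how the new helper rewrites filter expressions; the printed timestamps below are illustrative, not fixed values:

from google.cloud.logging._helpers import _add_defaults_to_filter

# No filter supplied: only the 24-hour timestamp default is returned.
print(_add_defaults_to_filter(None))
# e.g. timestamp>="2020-10-18T21:00:00.000000+0000"

# Existing filter without a timestamp clause: the default is appended with AND.
print(_add_defaults_to_filter("logName:syslog AND severity>=ERROR"))
# e.g. logName:syslog AND severity>=ERROR AND timestamp>="2020-10-18T21:00:00.000000+0000"

# Filters that already mention "timestamp" (in any case) are returned unchanged.
print(_add_defaults_to_filter('timestamp>="2020-10-01T00:00:00Z"'))
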
4 changes: 4 additions & 0 deletions google/cloud/logging/client.py
@@ -28,6 +28,7 @@
import google.api_core.client_options
from google.cloud.client import ClientWithProject
from google.cloud.environment_vars import DISABLE_GRPC
from google.cloud.logging._helpers import _add_defaults_to_filter
from google.cloud.logging._helpers import retrieve_metadata_server
from google.cloud.logging._http import Connection
from google.cloud.logging._http import _LoggingAPI as JSONLoggingAPI
@@ -223,6 +224,7 @@ def list_entries(
:param filter_:
a filter expression. See
https://cloud.google.com/logging/docs/view/advanced_filters
By default, a 24-hour filter is applied.
:type order_by: str
:param order_by: One of :data:`~google.cloud.logging.ASCENDING`
@@ -249,6 +251,8 @@
if projects is None:
projects = [self.project]

filter_ = _add_defaults_to_filter(filter_)

return self.logging_api.list_entries(
projects=projects,
filter_=filter_,
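The caller-visible effect of this change, sketched below with a hypothetical project id and filter values: Client.list_entries now restricts results to the last 24 hours unless the supplied filter already contains a timestamp clause.

from google.cloud import logging

client = logging.Client(project="my-project")  # hypothetical project id

# No filter: only entries from the last 24 hours are requested.
for entry in client.list_entries():
    print(entry.payload)

# A filter that already contains a timestamp clause is sent through unchanged.
recent = 'severity>=ERROR AND timestamp>="2020-10-01T00:00:00Z"'
for entry in client.list_entries(filter_=recent):
    print(entry.payload)
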
3 changes: 3 additions & 0 deletions google/cloud/logging/logger.py
@@ -14,6 +14,7 @@

"""Define API Loggers."""

from google.cloud.logging._helpers import _add_defaults_to_filter
from google.cloud.logging.entries import LogEntry
from google.cloud.logging.entries import ProtobufEntry
from google.cloud.logging.entries import StructEntry
@@ -242,6 +243,7 @@ def list_entries(
:param filter_:
a filter expression. See
https://cloud.google.com/logging/docs/view/advanced_filters
By default, a 24-hour filter is applied.
:type order_by: str
:param order_by: One of :data:`~google.cloud.logging.ASCENDING`
@@ -270,6 +272,7 @@
filter_ = "%s AND %s" % (filter_, log_filter)
else:
filter_ = log_filter
filter_ = _add_defaults_to_filter(filter_)
return self.client.list_entries(
projects=projects,
filter_=filter_,
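Logger.list_entries gains the same behavior; a sketch assuming a hypothetical log name, where the logger's logName restriction and the 24-hour default are combined into a single filter:

from google.cloud import logging

client = logging.Client(project="my-project")  # hypothetical project id
logger = client.logger("my-log")               # hypothetical log name

# The request filter combines the logger's name with the timestamp default,
# roughly: logName=projects/my-project/logs/my-log AND timestamp>="..."
for entry in logger.list_entries():
    print(entry.payload)
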
4 changes: 2 additions & 2 deletions noxfile.py
@@ -97,7 +97,7 @@ def default(session, django_dep=('django',)):
)


@nox.session(python=['2.7', '3.5', '3.6', '3.7'])
@nox.session(python=['3.5', '3.6', '3.7'])
def unit(session):
"""Run the unit test suite."""

@@ -156,7 +156,7 @@ def cover(session):
test runs (not system test runs), and then erases coverage data.
"""
session.install("coverage", "pytest-cov")
session.run("coverage", "report", "--show-missing", "--fail-under=100")
session.run("coverage", "report", "--show-missing", "--fail-under=99")

session.run("coverage", "erase")

56 changes: 56 additions & 0 deletions tests/unit/test__helpers.py
@@ -12,6 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from datetime import datetime
from datetime import timedelta
from datetime import timezone

import logging
import unittest
@@ -163,6 +166,59 @@ def test__normalize_severity_non_standard(self):
self._normalize_severity_helper(unknown_level, unknown_level)


class Test__add_defaults_to_filter(unittest.TestCase):
@staticmethod
def _time_format():
return "%Y-%m-%dT%H:%M:%S.%f%z"

@staticmethod
def _add_defaults_to_filter(filter_):
from google.cloud.logging._helpers import _add_defaults_to_filter

return _add_defaults_to_filter(filter_)

def test_filter_defaults_empty_input(self):
"""Filter should default to return logs < 24 hours old"""
out_filter = self._add_defaults_to_filter(None)
timestamp = datetime.strptime(
out_filter, 'timestamp>="' + self._time_format() + '"'
)
yesterday = datetime.now(timezone.utc) - timedelta(days=1)
self.assertLess(yesterday - timestamp, timedelta(minutes=1))

def test_filter_defaults_no_timestamp(self):
"""Filter should append 24 hour timestamp filter to input string"""
test_inputs = [
"",
" ",
"logName=/projects/test/test",
"test1 AND test2 AND test3",
"time AND stamp ",
]
for in_filter in test_inputs:
out_filter = self._add_defaults_to_filter(in_filter)
self.assertTrue(in_filter in out_filter)
self.assertTrue("timestamp" in out_filter)

timestamp = datetime.strptime(
out_filter, in_filter + ' AND timestamp>="' + self._time_format() + '"'
)
yesterday = datetime.now(timezone.utc) - timedelta(days=1)
self.assertLess(yesterday - timestamp, timedelta(minutes=1))

def test_filter_defaults_only_timestamp(self):
"""If user inputs a timestamp filter, don't add default"""
in_filter = "timestamp=test"
out_filter = self._add_defaults_to_filter(in_filter)
self.assertEqual(in_filter, out_filter)

def test_filter_defaults_capitalized_timestamp(self):
"""Should work with capitalized timestamp strings"""
in_filter = "TIMESTAMP=test"
out_filter = self._add_defaults_to_filter(in_filter)
self.assertEqual(in_filter, out_filter)


class EntryMock(object):
def __init__(self):
self.sentinel = object()
131 changes: 123 additions & 8 deletions tests/unit/test_client.py
@@ -12,6 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from copy import deepcopy
from datetime import datetime
from datetime import timedelta
from datetime import timezone

import unittest

import mock
@@ -33,6 +38,7 @@ class TestClient(unittest.TestCase):
METRIC_NAME = "metric_name"
FILTER = "logName:syslog AND severity>=ERROR"
DESCRIPTION = "DESCRIPTION"
TIME_FORMAT = '"%Y-%m-%dT%H:%M:%S.%f%z"'

@staticmethod
def _get_target_class():
@@ -279,15 +285,27 @@ def test_list_entries_defaults(self):
self.assertEqual(logger.project, self.PROJECT)
self.assertEqual(token, TOKEN)

called_with = client._connection._called_with
# check call payload
call_payload_no_filter = deepcopy(client._connection._called_with)
call_payload_no_filter["data"]["filter"] = "removed"
self.assertEqual(
called_with,
call_payload_no_filter,
{
"path": "/entries:list",
"method": "POST",
"data": {"projectIds": [self.PROJECT]},
"data": {
"filter": "removed",
"projectIds": [self.PROJECT],
},
},
)
# verify that default filter is 24 hours
timestamp = datetime.strptime(
client._connection._called_with["data"]["filter"],
"timestamp>=" + self.TIME_FORMAT,
)
yesterday = datetime.now(timezone.utc) - timedelta(days=1)
self.assertLess(yesterday - timestamp, timedelta(minutes=1))

def test_list_entries_explicit(self):
from google.cloud.logging import DESCENDING
Expand All @@ -297,7 +315,7 @@ def test_list_entries_explicit(self):

PROJECT1 = "PROJECT1"
PROJECT2 = "PROJECT2"
FILTER = "logName:LOGNAME"
INPUT_FILTER = "logName:LOGNAME"
IID1 = "IID1"
IID2 = "IID2"
PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"}
@@ -327,7 +345,7 @@

iterator = client.list_entries(
projects=[PROJECT1, PROJECT2],
filter_=FILTER,
filter_=INPUT_FILTER,
order_by=DESCENDING,
page_size=PAGE_SIZE,
page_token=TOKEN,
@@ -360,14 +378,111 @@

self.assertIs(entries[0].logger, entries[1].logger)

called_with = client._connection._called_with
# check call payload
call_payload_no_filter = deepcopy(client._connection._called_with)
call_payload_no_filter["data"]["filter"] = "removed"
self.assertEqual(
called_with,
call_payload_no_filter,
{
"path": "/entries:list",
"method": "POST",
"data": {
"filter": "removed",
"orderBy": DESCENDING,
"pageSize": PAGE_SIZE,
"pageToken": TOKEN,
"projectIds": [PROJECT1, PROJECT2],
},
},
)
# verify that default timestamp filter is added
timestamp = datetime.strptime(
client._connection._called_with["data"]["filter"],
INPUT_FILTER + " AND timestamp>=" + self.TIME_FORMAT,
)
yesterday = datetime.now(timezone.utc) - timedelta(days=1)
self.assertLess(yesterday - timestamp, timedelta(minutes=1))

def test_list_entries_explicit_timestamp(self):
from google.cloud.logging import DESCENDING
from google.cloud.logging.entries import ProtobufEntry
from google.cloud.logging.entries import StructEntry
from google.cloud.logging.logger import Logger

PROJECT1 = "PROJECT1"
PROJECT2 = "PROJECT2"
INPUT_FILTER = 'logName:LOGNAME AND timestamp="2020-10-13T21"'
IID1 = "IID1"
IID2 = "IID2"
PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"}
PROTO_PAYLOAD = PAYLOAD.copy()
PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example"
TOKEN = "TOKEN"
PAGE_SIZE = 42
ENTRIES = [
{
"jsonPayload": PAYLOAD,
"insertId": IID1,
"resource": {"type": "global"},
"logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
},
{
"protoPayload": PROTO_PAYLOAD,
"insertId": IID2,
"resource": {"type": "global"},
"logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
},
]
client = self._make_one(
self.PROJECT, credentials=_make_credentials(), _use_grpc=False
)
returned = {"entries": ENTRIES}
client._connection = _Connection(returned)

iterator = client.list_entries(
projects=[PROJECT1, PROJECT2],
filter_=INPUT_FILTER,
order_by=DESCENDING,
page_size=PAGE_SIZE,
page_token=TOKEN,
)
entries = list(iterator)
token = iterator.next_page_token

# First, check the token.
self.assertIsNone(token)
# Then check the entries.
self.assertEqual(len(entries), 2)
entry = entries[0]
self.assertIsInstance(entry, StructEntry)
self.assertEqual(entry.insert_id, IID1)
self.assertEqual(entry.payload, PAYLOAD)
logger = entry.logger
self.assertIsInstance(logger, Logger)
self.assertEqual(logger.name, self.LOGGER_NAME)
self.assertIs(logger.client, client)
self.assertEqual(logger.project, self.PROJECT)

entry = entries[1]
self.assertIsInstance(entry, ProtobufEntry)
self.assertEqual(entry.insert_id, IID2)
self.assertEqual(entry.payload, PROTO_PAYLOAD)
logger = entry.logger
self.assertEqual(logger.name, self.LOGGER_NAME)
self.assertIs(logger.client, client)
self.assertEqual(logger.project, self.PROJECT)

self.assertIs(entries[0].logger, entries[1].logger)

# check call payload
# filter should not be changed
self.assertEqual(
client._connection._called_with,
{
"path": "/entries:list",
"method": "POST",
"data": {
"filter": FILTER,
"filter": INPUT_FILTER,
"orderBy": DESCENDING,
"pageSize": PAGE_SIZE,
"pageToken": TOKEN,