Skip to content

feat: Common Functionality for SnubaEvents #12422

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Mar 18, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 2 additions & 3 deletions src/sentry/api/endpoints/group_events.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,11 +14,10 @@
from sentry.api.bases import GroupEndpoint
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.api.helpers.environments import get_environments
from sentry.api.serializers.models.event import SnubaEvent
from sentry.api.serializers import serialize
from sentry.api.paginator import DateTimePaginator, GenericOffsetPaginator
from sentry.api.utils import get_date_range_from_params
from sentry.models import Event, Group
from sentry.models import Event, Group, SnubaEvent
from sentry.search.utils import (
InvalidQuery,
parse_query,
Expand Down Expand Up @@ -102,7 +101,7 @@ def _get_events_snuba(self, request, group, environments, query, tags, start, en
'project_id': [group.project_id],
'issue': [group.id]
},
selected_columns=SnubaEvent.selected_columns + ['tags.key', 'tags.value'],
selected_columns=SnubaEvent.selected_columns,
orderby='-timestamp',
referrer='api.group-events',
)
Expand Down
7 changes: 4 additions & 3 deletions src/sentry/api/endpoints/organization_events.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,9 @@

from sentry.api.bases import OrganizationEventsEndpointBase, OrganizationEventsError, NoProjects
from sentry.api.paginator import GenericOffsetPaginator
from sentry.api.serializers import serialize
from sentry.api.serializers.models.event import SnubaEvent
from sentry.api.serializers import serialize, SimpleEventSerializer
from sentry.api.serializers.snuba import SnubaTSResultSerializer
from sentry.models import SnubaEvent
from sentry.utils.dates import parse_stats_period
from sentry.utils.snuba import raw_query
from sentry.utils.validators import is_event_id
Expand Down Expand Up @@ -63,10 +63,11 @@ def get(self, request, organization):
**snuba_args
)

serializer = SimpleEventSerializer()
return self.paginate(
request=request,
on_results=lambda results: serialize(
[SnubaEvent(row) for row in results], request.user),
[SnubaEvent(row) for row in results], request.user, serializer),
paginator=GenericOffsetPaginator(data_fn=data_fn)
)

Expand Down
2 changes: 1 addition & 1 deletion src/sentry/api/endpoints/project_events.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ def _get_events_legacy(self, request, project):

def _get_events_snuba(self, request, project):
from sentry.api.paginator import GenericOffsetPaginator
from sentry.api.serializers.models.event import SnubaEvent
from sentry.models import SnubaEvent
from sentry.utils.snuba import raw_query

query = request.GET.get('query')
Expand Down
122 changes: 57 additions & 65 deletions src/sentry/api/serializers/models/event.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,14 @@
from semaphore import meta_with_chunks

from sentry.api.serializers import Serializer, register, serialize
from sentry.models import Event, EventError, EventAttachment, Release, UserReport
from sentry.models import (
Event,
EventError,
EventAttachment,
Release,
UserReport,
SnubaEvent
)
from sentry.search.utils import convert_user_tag_to_query
from sentry.utils.safe import get_path

Expand All @@ -28,6 +35,7 @@ def get_crash_files(events):
return rv


@register(SnubaEvent)
@register(Event)
class EventSerializer(Serializer):
_reserved_keys = frozenset(
Expand Down Expand Up @@ -70,7 +78,7 @@ def _get_entries(self, event, user, is_public=False):
)

def _get_interface_with_meta(self, event, name, is_public=False):
interface = event.interfaces.get(name)
interface = event.get_interface(name)
if not interface:
return (None, None)

Expand All @@ -96,11 +104,22 @@ def _get_tags_with_meta(self, event):
'value': kv[1],
'_meta': meta.get(kv[0]) or get_path(meta, six.text_type(i), '1') or None,
}
# TODO this should be using event.tags but there are some weird
# issues around that because event.tags re-sorts the tags and
# this function relies on them being in the original order to
# look up meta.
for i, kv in enumerate(event.data.get('tags') or ())
if kv is not None and kv[0] is not None and kv[1] is not None],
key=lambda x: x['key']
)

# Add 'query' for each tag to tell the UI what to use as query
# params for this tag.
for tag in tags:
query = convert_user_tag_to_query(tag['key'], tag['value'])
if query:
tag['query'] = query

tags_meta = {
six.text_type(i): {'value': e.pop('_meta')}
for i, e in enumerate(tags) if e.get('_meta')
Expand Down Expand Up @@ -221,6 +240,7 @@ def serialize(self, obj, attrs, user):
'id': six.text_type(obj.id),
'groupID': six.text_type(obj.group_id),
'eventID': six.text_type(obj.event_id),
'projectID': six.text_type(obj.project_id),
'size': obj.size,
'entries': attrs['entries'],
'dist': obj.dist,
Expand Down Expand Up @@ -287,78 +307,50 @@ def serialize(self, obj, attrs, user):
return result


class SnubaEvent(object):
    """
    Lightweight wrapper around a snuba result row (a dict) that represents
    an event. Exists to give the row a class we can register a serializer
    against, and to expose the row's keys as plain attributes.
    """

    # Columns that must be requested from snuba so the wrapped row can
    # stand in for a full event object.
    selected_columns = [
        'event_id',
        'project_id',
        'message',
        'title',
        'location',
        'culprit',
        'user_id',
        'username',
        'ip_address',
        'email',
        'timestamp',
    ]

    def __init__(self, kv):
        # Every required column must be present in the row; attribute
        # access below relies on the keys existing.
        missing = set(self.selected_columns) - set(kv.keys())
        assert not missing, "SnubaEvents need all of the selected_columns"
        # Bind the row dict directly as the instance dict so each column
        # is readable as an attribute (e.g. self.event_id).
        self.__dict__ = kv


@register(SnubaEvent)
class SnubaEventSerializer(Serializer):
class SimpleEventSerializer(EventSerializer):
"""
A bare-bones version of EventSerializer which uses snuba event rows as
the source data but attempts to produce a compatible (subset) of the
serialization returned by EventSerializer.
Simple event serializer that renders a basic outline of an event without
most interfaces/breadcrumbs. This can be used for basic event list queries
where we don't need the full detail. The side effect is that, if the
serialized events are actually SnubaEvents, we can render them without
needing to fetch the event bodies from nodestore.

NB it would be super easy to inadvertently add a property accessor here
that would require a nodestore lookup for a SnubaEvent serialized using
this serializer. You will only really notice you've done this when the
organization event search API gets real slow.
"""

def get_tags_dict(self, obj):
keys = getattr(obj, 'tags.key', None)
values = getattr(obj, 'tags.value', None)
if keys and values and len(keys) == len(values):
results = []
for key, value in zip(keys, values):
key = key.split('sentry:', 1)[-1]
result = {'key': key, 'value': value}
query = convert_user_tag_to_query(key, value)
if query:
result['query'] = query
results.append(result)
results.sort(key=lambda x: x['key'])
return results
return []
def get_attrs(self, item_list, user):
return {}

def serialize(self, obj, attrs, user):
result = {
tags = [{
'key': key.split('sentry:', 1)[-1],
'value': value,
} for key, value in obj.tags]
for tag in tags:
query = convert_user_tag_to_query(tag['key'], tag['value'])
if query:
tag['query'] = query

user = obj.get_interface('user')
if user is not None:
user = user.get_api_context()

return {
'id': six.text_type(obj.id),
'groupID': six.text_type(obj.group_id),
'eventID': six.text_type(obj.event_id),
'projectID': six.text_type(obj.project_id),
# XXX for 'message' this doesn't do the proper resolution of logentry
# etc. that _get_legacy_message_with_meta does.
'message': obj.message,
'title': obj.title,
'location': obj.location,
'culprit': obj.culprit,
'dateCreated': obj.timestamp,
'user': {
'id': obj.user_id,
'email': obj.email,
'username': obj.username,
'ipAddress': obj.ip_address,
},
'user': user,
'tags': tags,
'platform': obj.platform,
'dateCreated': obj.datetime,
}

tags = self.get_tags_dict(obj)
if tags:
result['tags'] = tags

return result
13 changes: 6 additions & 7 deletions src/sentry/db/models/fields/node.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@

from .gzippeddict import GzippedDictField

__all__ = ('NodeField', )
__all__ = ('NodeField', 'NodeData')

logger = logging.getLogger('sentry')

Expand Down Expand Up @@ -101,10 +101,9 @@ def __repr__(self):
return '<%s: id=%s>' % (cls_name, self.id, )

def get_ref(self, instance):
ref_func = self.field.ref_func
if not ref_func:
if not self.field or not self.field.ref_func:
return
return ref_func(instance)
return self.field.ref_func(instance)

def copy(self):
return self.data.copy()
Expand All @@ -127,18 +126,18 @@ def data(self):
return self._node_data

rv = {}
if self.field.wrapper is not None:
if self.field is not None and self.field.wrapper is not None:
rv = self.field.wrapper(rv)
return rv

def bind_data(self, data, ref=None):
self.ref = data.pop('_ref', ref)
self.ref_version = data.pop('_ref_version', None)
if self.ref_version == self.field.ref_version and ref is not None and self.ref != ref:
if self.field is not None and self.ref_version == self.field.ref_version and ref is not None and self.ref != ref:
raise NodeIntegrityFailure(
'Node reference for %s is invalid: %s != %s' % (self.id, ref, self.ref, )
)
if self.field.wrapper is not None:
if self.field is not None and self.field.wrapper is not None:
data = self.field.wrapper(data)
self._node_data = data

Expand Down
6 changes: 6 additions & 0 deletions src/sentry/db/models/manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -320,6 +320,12 @@ def get_queryset(self):
class EventManager(BaseManager):

def bind_nodes(self, object_list, *node_names):
"""
For a list of Event objects, and a property name where we might find an
(unfetched) NodeData on those objects, fetch all the data blobs for
those NodeDatas with a single multi-get command to nodestore, and bind
the returned blobs to the NodeDatas
"""
object_node_list = []
for name in node_names:
object_node_list.extend(
Expand Down
Loading