ref: Sample to_python calls 2 #12375

Merged: 11 commits, Mar 18, 2019
62 changes: 62 additions & 0 deletions .travis.yml
@@ -89,6 +89,19 @@ matrix:
before_script:
- psql -c 'create database sentry;' -U postgres

# XXX(markus): Remove after rust interfaces are done
- python: 2.7
env: TEST_SUITE=postgres DB=postgres SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION=1
services:
- memcached
- redis-server
- postgresql
install:
- python setup.py install_egg_info
- pip install -e ".[dev,tests,optional]"
before_script:
- psql -c 'create database sentry;' -U postgres

- python: 2.7
env: TEST_SUITE=mysql DB=mysql
services:
@@ -128,6 +141,31 @@ matrix:
before_script:
- psql -c 'create database sentry;' -U postgres

# XXX(markus): Remove after rust interfaces are done
- python: 2.7
env: TEST_SUITE=acceptance SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION=1
services:
- memcached
- redis-server
- postgresql
before_install:
- find "$NODE_DIR" -type d -empty -delete
- nvm install
- npm install -g "yarn@${YARN_VERSION}"
- docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:18.14.9
- docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
- docker ps -a
install:
- yarn install --pure-lockfile
- python setup.py install_egg_info
- pip install -e ".[dev,tests,optional]"
- wget -N "https://chromedriver.storage.googleapis.com/2.45/chromedriver_linux64.zip" -P ~/
- unzip ~/chromedriver_linux64.zip -d ~/
- rm ~/chromedriver_linux64.zip
- sudo install -m755 ~/chromedriver /usr/local/bin/
before_script:
- psql -c 'create database sentry;' -U postgres

- python: 2.7
env: TEST_SUITE=js
before_install:
@@ -202,6 +240,27 @@ matrix:
before_script:
- psql -c 'create database sentry;' -U postgres

# XXX(markus): Remove after rust interfaces are done
- python: 2.7
env: TEST_SUITE=snuba SENTRY_TAGSTORE=sentry.tagstore.snuba.SnubaTagStorage SENTRY_ZOOKEEPER_HOSTS=localhost:2181 SENTRY_KAFKA_HOSTS=localhost:9092 SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION=1
services:
- docker
- memcached
- redis-server
- postgresql
before_install:
- docker run -d --network host --name zookeeper -e ZOOKEEPER_CLIENT_PORT=2181 confluentinc/cp-zookeeper:4.1.0
- docker run -d --network host --name kafka -e KAFKA_ZOOKEEPER_CONNECT=localhost:2181 -e KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://localhost:9092 -e KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 confluentinc/cp-kafka:4.1.0
- docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:18.14.9
- docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
- docker ps -a
install:
- python setup.py install_egg_info
- pip install -e ".[dev,tests,optional]"
- pip install confluent-kafka
before_script:
- psql -c 'create database sentry;' -U postgres

# Deploy 'storybook' (component & style guide) - allowed to fail
- language: node_js
env: STORYBOOK_BUILD=1
@@ -227,6 +286,9 @@ matrix:
- language: node_js
env: STORYBOOK_BUILD=1

# XXX(markus): Remove after rust interfaces are done
- env: TEST_SUITE=postgres DB=postgres SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION=1

notifications:
webhooks:
urls:
2 changes: 1 addition & 1 deletion requirements-base.txt
@@ -56,7 +56,7 @@ redis>=2.10.3,<2.10.6
requests-oauthlib==0.3.3
requests[security]>=2.20.0,<2.21.0
selenium==3.11.0
semaphore>=0.4.18,<0.5.0
semaphore>=0.4.19,<0.5.0
sentry-sdk>=0.7.0
setproctitle>=1.1.7,<1.2.0
simplejson>=3.2.0,<3.9.0
1 change: 1 addition & 0 deletions src/sentry/db/models/fields/node.py
@@ -49,6 +49,7 @@ class NodeData(collections.MutableMapping):
data=None means, this is a node that needs to be fetched from nodestore.
data={...} means, this is an object that should be saved to nodestore.
"""

def __init__(self, field, id, data=None):
self.field = field
self.id = id
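The NodeData docstring above distinguishes two states; as a loose illustration of what that means for callers (the field argument and the nodestore round-trip are assumptions based on the docstring, not shown in this diff):

    # data=None: lazy reference, the payload is fetched from nodestore on first access
    lazy = NodeData(field, 'abc123', data=None)

    # data={...}: concrete payload that will be written to nodestore when saved
    eager = NodeData(field, 'abc123', data={'message': 'hello world'})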
5 changes: 3 additions & 2 deletions src/sentry/event_manager.py
@@ -343,7 +343,7 @@ def __init__(
auth=None,
key=None,
content_encoding=None,
for_store=True,
is_renormalize=False,
):
self._data = _decode_event(data, content_encoding=content_encoding)
self.version = version
@@ -352,7 +352,7 @@ def __init__(
self._user_agent = user_agent
self._auth = auth
self._key = key
self._for_store = for_store
self._is_renormalize = is_renormalize
self._normalized = False

def process_csp_report(self):
@@ -442,6 +442,7 @@ def _normalize_impl(self):
max_secs_in_future=MAX_SECS_IN_FUTURE,
max_secs_in_past=MAX_SECS_IN_PAST,
enable_trimming=True,
is_renormalize=self._is_renormalize
)

self._data = CanonicalKeyDict(
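A minimal sketch of how the renamed flag is intended to be used (the event payload is hypothetical, and normalize() is assumed to be the public wrapper around the _normalize_impl shown above; the flag is forwarded into the normalizer call there, presumably the semaphore-based one given the version bump in requirements-base.txt):

    # First-time ingestion: full normalization
    EventManager(event_data).normalize()

    # Re-normalization of already stored events: the normalizer is told it is
    # seeing data that was normalized once before
    EventManager(event_data, is_renormalize=True).normalize()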
74 changes: 16 additions & 58 deletions src/sentry/interfaces/base.py
@@ -1,13 +1,14 @@
from __future__ import absolute_import

from collections import Mapping, OrderedDict
import os

from collections import OrderedDict
import logging
import six

from django.conf import settings
from django.utils.translation import ugettext as _

from sentry.models.eventerror import EventError
from sentry.utils.canonical import get_canonical_name
from sentry.utils.html import escape
from sentry.utils.imports import import_string
@@ -18,6 +19,14 @@
logger = logging.getLogger("sentry.events")
interface_logger = logging.getLogger("sentry.interfaces")

# This flag is only effectively used for the testsuite. In production the
# return value of `sentry.models.event._should_skip_to_python` is explicitly
# passed to interfaces.
RUST_RENORMALIZED_DEFAULT = os.environ.get(
"SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION",
"false"
).lower() in ("true", "1")


def get_interface(name):
try:
@@ -34,7 +43,7 @@ def get_interface(name):
return interface


def get_interfaces(data):
def get_interfaces(data, rust_renormalized=RUST_RENORMALIZED_DEFAULT):
result = []
for key, data in six.iteritems(data):
# Skip invalid interfaces that were nulled out during normalization
@@ -46,7 +55,9 @@ def get_interfaces(data):
except ValueError:
continue

value = safe_execute(cls.to_python, data, _with_transaction=False)
value = safe_execute(cls.to_python, data,
rust_renormalized=rust_renormalized,
_with_transaction=False)
if not value:
continue

@@ -128,7 +139,7 @@ def __setattr__(self, name, value):
self._data[name] = value

@classmethod
def to_python(cls, data):
def to_python(cls, data, rust_renormalized=RUST_RENORMALIZED_DEFAULT):
"""Creates a python interface object from the given raw data.

This function can assume fully normalized and valid data. It can create
@@ -137,59 +148,6 @@ def to_python(cls, data):
"""
return cls(**data) if data is not None else None

@classmethod
def _normalize(cls, data, meta):
"""Custom interface normalization. ``data`` is guaranteed to be a
non-empty mapping. Return ``None`` for invalid data.
"""
return cls.to_python(data).to_json()

@classmethod
def normalize(cls, data, meta):
"""Normalizes the given raw data removing or replacing all invalid
attributes. If the interface is unprocessable, ``None`` is returned
instead.

Errors are written to the ``meta`` container. Use ``Meta.enter(key)`` to
obtain an instance.

TEMPORARY: The transitional default behavior is to call to_python and
catch exceptions into meta data. To migrate, override ``_normalize``.
"""

# Gracefully skip empty data. We treat ``None`` and empty objects the
# same as missing data. If there are meta errors attached already, they
# will remain in meta.
if not data:
return None

# Interface data is required to be a JSON object. Places where the
# protocol permits lists must be casted to a values wrapper first.
if not isinstance(data, Mapping):
meta.add_error(EventError.INVALID_DATA, data, {
'reason': 'expected %s' % (cls.__name__,),
})
return None

try:
data = cls._normalize(data, meta=meta)
except Exception as e:
# XXX: InterfaceValidationErrors can be thrown in the transitional
# phase while to_python is being used for normalization. All other
# exceptions indicate a programming error and need to be reported.
if not isinstance(e, InterfaceValidationError):
interface_logger.error('Discarded invalid value for interface: %s (%r)',
cls.path, data, exc_info=True)

meta.add_error(EventError.INVALID_DATA, data, {
'reason': six.text_type(e)
})
return None

# As with input data, empty interface data is coerced to None after
# normalization.
return data or None

def get_api_context(self, is_public=False):
return self.to_json()

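A rough sketch of how the new keyword travels from get_interfaces down to each interface's to_python (the event dict and interface key are invented; in production, as the comment above says, the flag comes from sentry.models.event._should_skip_to_python rather than from the environment default):

    from sentry.interfaces.base import get_interfaces

    event_data = {'logentry': {'formatted': 'something broke'}}
    # each resolved interface class receives to_python(data, rust_renormalized=True)
    interfaces = get_interfaces(event_data, rust_renormalized=True)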
31 changes: 26 additions & 5 deletions src/sentry/interfaces/breadcrumbs.py
@@ -13,7 +13,7 @@
import six

from sentry.constants import LOG_LEVELS_MAP
from sentry.interfaces.base import Interface, InterfaceValidationError, prune_empty_keys
from sentry.interfaces.base import Interface, InterfaceValidationError, prune_empty_keys, RUST_RENORMALIZED_DEFAULT
from sentry.utils.safe import get_path, trim
from sentry.utils.dates import to_timestamp, to_datetime, parse_timestamp

@@ -37,18 +37,19 @@ class Breadcrumbs(Interface):
score = 800

@classmethod
def to_python(cls, data):
def to_python(cls, data, rust_renormalized=RUST_RENORMALIZED_DEFAULT):
values = []
for index, crumb in enumerate(get_path(data, 'values', filter=True, default=())):
# TODO(ja): Handle already invalid and None breadcrumbs

try:
values.append(cls.normalize_crumb(crumb))
values.append(cls.normalize_crumb(crumb, rust_renormalized=rust_renormalized))
except Exception:
# TODO(dcramer): we dont want to discard the entirety of data
# when one breadcrumb errors, but it'd be nice if we could still
# record an error
pass
if rust_renormalized:
raise

return cls(values=values)

@@ -68,7 +69,27 @@ def to_json(self):
})

@classmethod
def normalize_crumb(cls, crumb):
def normalize_crumb(cls, crumb, rust_renormalized):
if rust_renormalized:
crumb = dict(crumb)
ts = parse_timestamp(crumb.get('timestamp'))
if ts:
crumb['timestamp'] = to_timestamp(ts)
else:
crumb['timestamp'] = None

for key in (
'type',
'level',
'message',
'category',
'event_id',
'data',
):
crumb.setdefault(key, None)

return crumb

ty = crumb.get('type') or 'default'
level = crumb.get('level')
if not isinstance(level, six.string_types) or \
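With rust_renormalized set, normalize_crumb above only coerces the timestamp and backfills the listed keys with None instead of re-validating each crumb; a hedged example of the resulting behaviour (the crumb contents are invented):

    crumb = {'message': 'clicked checkout', 'timestamp': '2019-03-18T12:00:00Z'}
    breadcrumbs = Breadcrumbs.to_python({'values': [crumb]}, rust_renormalized=True)
    # breadcrumbs.values[0] keeps the message, gets the timestamp converted to a
    # float epoch, and has 'type', 'level', 'category', 'event_id' and 'data'
    # defaulted to None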
6 changes: 4 additions & 2 deletions src/sentry/interfaces/contexts.py
@@ -13,7 +13,7 @@

from django.utils.encoding import force_text

from sentry.interfaces.base import Interface, prune_empty_keys
from sentry.interfaces.base import Interface, prune_empty_keys, RUST_RENORMALIZED_DEFAULT
from sentry.utils.contexts_normalization import normalize_os, normalize_runtime
from sentry.utils.safe import get_path, trim

@@ -176,9 +176,11 @@ class Contexts(Interface):
score = 800

@classmethod
def to_python(cls, data):
def to_python(cls, data, rust_renormalized=RUST_RENORMALIZED_DEFAULT):
rv = {}
for alias, value in six.iteritems(data):
# XXX(markus): The `None`-case should be handled in the UI and
# other consumers of this interface
if value is not None:
rv[alias] = cls.normalize_context(alias, value)
return cls(**rv)
24 changes: 14 additions & 10 deletions src/sentry/interfaces/debug_meta.py
@@ -5,7 +5,7 @@

__all__ = ('DebugMeta', )

from sentry.interfaces.base import Interface, InterfaceValidationError, prune_empty_keys
from sentry.interfaces.base import Interface, InterfaceValidationError, prune_empty_keys, RUST_RENORMALIZED_DEFAULT

from symbolic import parse_addr, normalize_debug_id

@@ -105,16 +105,20 @@ class DebugMeta(Interface):
external_type = 'debugmeta'

@classmethod
def to_python(cls, data):
def to_python(cls, data, rust_renormalized=RUST_RENORMALIZED_DEFAULT):
is_debug_build = data.get('is_debug_build', None)
if is_debug_build is not None and not isinstance(is_debug_build, bool):
raise InterfaceValidationError('Invalid value for "is_debug_build"')

images = []
for x in data.get('images', None) or ():
if x is None:
continue
images.append(cls.normalize_image(x))

if rust_renormalized:
images = data.get('images', None) or []
else:
if is_debug_build is not None and not isinstance(is_debug_build, bool):
raise InterfaceValidationError('Invalid value for "is_debug_build"')

images = []
for x in data.get('images', None) or ():
if x is None:
continue
images.append(cls.normalize_image(x))

return cls(
images=images,