Merge branch 'main' into web/cleanup/better-context-definitions
* main: (30 commits)
  web: maintenance: split tsconfig into “base” and “build” variants. (#9036)
  web: consistent style declarations internally (#9077)
  providers/oauth2: fix interactive device flow (#9076)
  website/docs: fix transports example (#9074)
  events: fix log_capture (#9075)
  web: bump the sentry group in /web with 2 updates (#9065)
  core: bump goauthentik.io/api/v3 from 3.2024022.6 to 3.2024022.7 (#9064)
  web: bump @codemirror/lang-python from 6.1.4 to 6.1.5 in /web (#9068)
  web: bump the eslint group in /web with 1 update (#9066)
  web: bump glob from 10.3.10 to 10.3.12 in /web (#9069)
  web: bump the rollup group in /web with 3 updates (#9067)
  web: bump the eslint group in /tests/wdio with 1 update (#9071)
  core: bump webauthn from 2.0.0 to 2.1.0 (#9070)
  core: bump sentry-sdk from 1.43.0 to 1.44.0 (#9073)
  core: bump requests-mock from 1.12.0 to 1.12.1 (#9072)
  web: bump API Client version (#9061)
  events: rework log messages returned from API and their rendering (#8770)
  website/docs: update airgapped config (#9049)
  website: bump @types/react from 18.2.72 to 18.2.73 in /website (#9052)
  web: bump the rollup group in /web with 3 updates (#9053)
  ...
kensternberg-authentik committed Mar 29, 2024
2 parents 39ef3c7 + 51a8670 commit 25b5a6b
Showing 53 changed files with 755 additions and 449 deletions.
29 changes: 13 additions & 16 deletions authentik/blueprints/v1/importer.py
@@ -19,8 +19,6 @@
from rest_framework.exceptions import ValidationError
from rest_framework.serializers import BaseSerializer, Serializer
from structlog.stdlib import BoundLogger, get_logger
from structlog.testing import capture_logs
from structlog.types import EventDict
from yaml import load

from authentik.blueprints.v1.common import (
@@ -42,6 +40,7 @@
from authentik.enterprise.license import LicenseKey
from authentik.enterprise.models import LicenseUsage
from authentik.enterprise.providers.rac.models import ConnectionToken
from authentik.events.logs import LogEvent, capture_logs
from authentik.events.models import SystemTask
from authentik.events.utils import cleanse_dict
from authentik.flows.models import FlowToken, Stage
@@ -161,7 +160,7 @@ def __update_pks_for_attrs(self, attrs: dict[str, Any]) -> dict[str, Any]:

def updater(value) -> Any:
if value in self.__pk_map:
self.logger.debug("updating reference in entry", value=value)
self.logger.debug("Updating reference in entry", value=value)
return self.__pk_map[value]
return value

@@ -250,7 +249,7 @@ def _validate_single(self, entry: BlueprintEntry) -> BaseSerializer | None:
model_instance = existing_models.first()
if not isinstance(model(), BaseMetaModel) and model_instance:
self.logger.debug(
"initialise serializer with instance",
"Initialise serializer with instance",
model=model,
instance=model_instance,
pk=model_instance.pk,
@@ -260,14 +259,14 @@ def _validate_single(self, entry: BlueprintEntry) -> BaseSerializer | None:
elif model_instance and entry.state == BlueprintEntryDesiredState.MUST_CREATED:
raise EntryInvalidError.from_entry(
(
f"state is set to {BlueprintEntryDesiredState.MUST_CREATED} "
f"State is set to {BlueprintEntryDesiredState.MUST_CREATED} "
"and object exists already",
),
entry,
)
else:
self.logger.debug(
"initialised new serializer instance",
"Initialised new serializer instance",
model=model,
**cleanse_dict(updated_identifiers),
)
@@ -324,7 +323,7 @@ def _apply_models(self, raise_errors=False) -> bool:
model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
except LookupError:
self.logger.warning(
"app or model does not exist", app=model_app_label, model=model_name
"App or Model does not exist", app=model_app_label, model=model_name
)
return False
# Validate each single entry
@@ -336,7 +335,7 @@ def _apply_models(self, raise_errors=False) -> bool:
if entry.get_state(self._import) == BlueprintEntryDesiredState.ABSENT:
serializer = exc.serializer
else:
self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc)
self.logger.warning(f"Entry invalid: {exc}", entry=entry, error=exc)
if raise_errors:
raise exc
return False
@@ -356,27 +355,27 @@ def _apply_models(self, raise_errors=False) -> bool:
and state == BlueprintEntryDesiredState.CREATED
):
self.logger.debug(
"instance exists, skipping",
"Instance exists, skipping",
model=model,
instance=instance,
pk=instance.pk,
)
else:
instance = serializer.save()
self.logger.debug("updated model", model=instance)
self.logger.debug("Updated model", model=instance)
if "pk" in entry.identifiers:
self.__pk_map[entry.identifiers["pk"]] = instance.pk
entry._state = BlueprintEntryState(instance)
elif state == BlueprintEntryDesiredState.ABSENT:
instance: Model | None = serializer.instance
if instance.pk:
instance.delete()
self.logger.debug("deleted model", mode=instance)
self.logger.debug("Deleted model", mode=instance)
continue
self.logger.debug("entry to delete with no instance, skipping")
self.logger.debug("Entry to delete with no instance, skipping")
return True

def validate(self, raise_validation_errors=False) -> tuple[bool, list[EventDict]]:
def validate(self, raise_validation_errors=False) -> tuple[bool, list[LogEvent]]:
"""Validate loaded blueprint export, ensure all models are allowed
and serializers have no errors"""
self.logger.debug("Starting blueprint import validation")
@@ -390,9 +389,7 @@ def validate(self, raise_validation_errors=False) -> tuple[bool, list[EventDict]
):
successful = self._apply_models(raise_errors=raise_validation_errors)
if not successful:
self.logger.debug("Blueprint validation failed")
for log in logs:
getattr(self.logger, log.get("log_level"))(**log)
self.logger.warning("Blueprint validation failed")
self.logger.debug("Finished blueprint import validation")
self._import = orig_import
return successful, logs
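
With the change above, Importer.validate() returns LogEvent objects rather than raw structlog EventDicts. A minimal sketch of consuming the new return type, assuming blueprint_yaml holds a blueprint export as a YAML string:

from authentik.blueprints.v1.importer import Importer

importer = Importer.from_string(blueprint_yaml)  # blueprint_yaml: assumed YAML export string
valid, logs = importer.validate()                # logs is now list[LogEvent], not list[EventDict]
for log in logs:
    # Each entry carries structured fields instead of a flat event dict
    print(log.timestamp, log.log_level, log.logger, log.event, log.attributes)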
16 changes: 9 additions & 7 deletions authentik/blueprints/v1/tasks.py
@@ -30,6 +30,7 @@
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE
from authentik.blueprints.v1.oci import OCI_PREFIX
from authentik.events.logs import capture_logs
from authentik.events.models import TaskStatus
from authentik.events.system_tasks import SystemTask, prefill_task
from authentik.events.utils import sanitize_dict
@@ -211,14 +212,15 @@ def apply_blueprint(self: SystemTask, instance_pk: str):
if not valid:
instance.status = BlueprintInstanceStatus.ERROR
instance.save()
self.set_status(TaskStatus.ERROR, *[x["event"] for x in logs])
return
applied = importer.apply()
if not applied:
instance.status = BlueprintInstanceStatus.ERROR
instance.save()
self.set_status(TaskStatus.ERROR, "Failed to apply")
self.set_status(TaskStatus.ERROR, *logs)
return
with capture_logs() as logs:
applied = importer.apply()
if not applied:
instance.status = BlueprintInstanceStatus.ERROR
instance.save()
self.set_status(TaskStatus.ERROR, *logs)
return
instance.status = BlueprintInstanceStatus.SUCCESSFUL
instance.last_applied_hash = file_hash
instance.last_applied = now()
7 changes: 3 additions & 4 deletions authentik/core/api/applications.py
@@ -20,15 +20,14 @@
from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger
from structlog.testing import capture_logs

from authentik.admin.api.metrics import CoordinateSerializer
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.providers import ProviderSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.models import Application, User
from authentik.events.logs import LogEventSerializer, capture_logs
from authentik.events.models import EventAction
from authentik.events.utils import sanitize_dict
from authentik.lib.utils.file import (
FilePathSerializer,
FileUploadSerializer,
@@ -182,9 +181,9 @@ def check_access(self, request: Request, slug: str) -> Response:
if request.user.is_superuser:
log_messages = []
for log in logs:
if log.get("process", "") == "PolicyProcess":
if log.attributes.get("process", "") == "PolicyProcess":
continue
log_messages.append(sanitize_dict(log))
log_messages.append(LogEventSerializer(log).data)
result.log_messages = log_messages
response = PolicyTestResultSerializer(result)
return Response(response.data)
4 changes: 2 additions & 2 deletions authentik/events/api/tasks.py
@@ -12,7 +12,6 @@
ChoiceField,
DateTimeField,
FloatField,
ListField,
SerializerMethodField,
)
from rest_framework.request import Request
@@ -21,6 +20,7 @@
from rest_framework.viewsets import ReadOnlyModelViewSet
from structlog.stdlib import get_logger

from authentik.events.logs import LogEventSerializer
from authentik.events.models import SystemTask, TaskStatus
from authentik.rbac.decorators import permission_required

@@ -39,7 +39,7 @@ class SystemTaskSerializer(ModelSerializer):
duration = FloatField(read_only=True)

status = ChoiceField(choices=[(x.value, x.name) for x in TaskStatus])
messages = ListField(child=CharField())
messages = LogEventSerializer(many=True)

def get_full_name(self, instance: SystemTask) -> str:
"""Get full name with UID"""
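
With messages switched from ListField(child=CharField()) to LogEventSerializer(many=True), each task message is rendered as a structured object instead of a bare string. A rough sketch of the shape of one serialized entry; all field values are illustrative:

# Illustrative only: one entry of SystemTaskSerializer.messages after this change
example_message = {
    "timestamp": "2024-03-29T12:00:00Z",
    "log_level": "error",
    "logger": "authentik.blueprints.v1.tasks",
    "event": "Entry invalid: ...",
    "attributes": {},
}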
82 changes: 82 additions & 0 deletions authentik/events/logs.py
@@ -0,0 +1,82 @@
from collections.abc import Generator
from contextlib import contextmanager
from dataclasses import dataclass, field
from datetime import datetime
from typing import Any

from django.utils.timezone import now
from rest_framework.fields import CharField, ChoiceField, DateTimeField, DictField
from structlog import configure, get_config
from structlog.stdlib import NAME_TO_LEVEL, ProcessorFormatter
from structlog.testing import LogCapture
from structlog.types import EventDict

from authentik.core.api.utils import PassiveSerializer
from authentik.events.utils import sanitize_dict


@dataclass()
class LogEvent:

event: str
log_level: str
logger: str
timestamp: datetime = field(default_factory=now)
attributes: dict[str, Any] = field(default_factory=dict)

@staticmethod
def from_event_dict(item: EventDict) -> "LogEvent":
event = item.pop("event")
log_level = item.pop("level").lower()
timestamp = datetime.fromisoformat(item.pop("timestamp"))
item.pop("pid", None)
# Sometimes log entries have both `level` and `log_level` set, but `level` is always set
item.pop("log_level", None)
return LogEvent(
event, log_level, item.pop("logger"), timestamp, attributes=sanitize_dict(item)
)


class LogEventSerializer(PassiveSerializer):
"""Single log message with all context logged."""

timestamp = DateTimeField()
log_level = ChoiceField(choices=tuple((x, x) for x in NAME_TO_LEVEL.keys()))
logger = CharField()
event = CharField()
attributes = DictField()

# TODO(2024.6?): This is a migration helper to return a correct API response for logs that
# have been saved in an older format (mostly just list[str] with just the messages)
def to_representation(self, instance):
if isinstance(instance, str):
instance = LogEvent(instance, "", "")
elif isinstance(instance, list):
instance = [LogEvent(x, "", "") for x in instance]
return super().to_representation(instance)


@contextmanager
def capture_logs(log_default_output=True) -> Generator[list[LogEvent], None, None]:
"""Capture log entries created"""
logs = []
cap = LogCapture()
# Modify `_Configuration.default_processors` set via `configure` but always
# keep the list instance intact to not break references held by bound
# loggers.
processors: list = get_config()["processors"]
old_processors = processors.copy()
try:
# clear processors list and use LogCapture for testing
if ProcessorFormatter.wrap_for_formatter in processors:
processors.remove(ProcessorFormatter.wrap_for_formatter)
processors.append(cap)
configure(processors=processors)
yield logs
for raw_log in cap.entries:
logs.append(LogEvent.from_event_dict(raw_log))
finally:
# remove LogCapture and restore original processors
processors.clear()
processors.extend(old_processors)
configure(processors=processors)
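
A short usage sketch of the new helpers above, assuming authentik's standard structlog configuration; the message and the foo attribute are placeholders:

from structlog.stdlib import get_logger

from authentik.events.logs import LogEventSerializer, capture_logs

LOGGER = get_logger()

with capture_logs() as logs:
    LOGGER.info("Example event", foo="bar")  # placeholder message and attribute

# Entries are appended to `logs` as LogEvent instances once the context manager exits;
# serialize them when they need to go into an API response.
serialized = [LogEventSerializer(log).data for log in logs]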
17 changes: 11 additions & 6 deletions authentik/events/system_tasks.py
@@ -9,6 +9,7 @@
from structlog.stdlib import get_logger
from tenant_schemas_celery.task import TenantTask

from authentik.events.logs import LogEvent
from authentik.events.models import Event, EventAction, TaskStatus
from authentik.events.models import SystemTask as DBSystemTask
from authentik.events.utils import sanitize_item
@@ -24,7 +25,7 @@ class SystemTask(TenantTask):
save_on_success: bool

_status: TaskStatus
_messages: list[str]
_messages: list[LogEvent]

_uid: str | None
# Precise start time from perf_counter
@@ -44,15 +45,20 @@ def set_uid(self, uid: str):
"""Set UID, so in the case of an unexpected error its saved correctly"""
self._uid = uid

def set_status(self, status: TaskStatus, *messages: str):
def set_status(self, status: TaskStatus, *messages: LogEvent):
"""Set result for current run, will overwrite previous result."""
self._status = status
self._messages = messages
self._messages = list(messages)
for idx, msg in enumerate(self._messages):
if not isinstance(msg, LogEvent):
self._messages[idx] = LogEvent(msg, logger=self.__name__, log_level="info")

def set_error(self, exception: Exception):
"""Set result to error and save exception"""
self._status = TaskStatus.ERROR
self._messages = [exception_to_string(exception)]
self._messages = [
LogEvent(exception_to_string(exception), logger=self.__name__, log_level="error")
]

def before_start(self, task_id, args, kwargs):
self._start_precise = perf_counter()
@@ -98,8 +104,7 @@ def after_return(self, status, retval, task_id, args: list[Any], kwargs: dict[st
def on_failure(self, exc, task_id, args, kwargs, einfo):
super().on_failure(exc, task_id, args, kwargs, einfo=einfo)
if not self._status:
self._status = TaskStatus.ERROR
self._messages = exception_to_string(exc)
self.set_error(exc)
DBSystemTask.objects.update_or_create(
name=self.__name__,
uid=self._uid,
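
A hedged sketch of the compatibility handling added to set_status() above, as it might be called from inside a task body; self refers to an assumed SystemTask subclass and logs to an assumed capture_logs() result:

from authentik.events.logs import LogEvent
from authentik.events.models import TaskStatus

# Inside a SystemTask-based task method (illustrative calls only):
self.set_status(TaskStatus.ERROR, "Failed to apply")  # bare strings are wrapped into LogEvent(..., log_level="info")
self.set_status(TaskStatus.ERROR, *logs)              # LogEvent entries from capture_logs() are stored as-is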
8 changes: 4 additions & 4 deletions authentik/flows/api/flows.py
@@ -7,7 +7,7 @@
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiResponse, extend_schema
from rest_framework.decorators import action
from rest_framework.fields import BooleanField, CharField, DictField, ListField, ReadOnlyField
from rest_framework.fields import BooleanField, CharField, ReadOnlyField
from rest_framework.parsers import MultiPartParser
from rest_framework.request import Request
from rest_framework.response import Response
@@ -19,7 +19,7 @@
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT, Importer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import CacheSerializer, LinkSerializer, PassiveSerializer
from authentik.events.utils import sanitize_dict
from authentik.events.logs import LogEventSerializer
from authentik.flows.api.flows_diagram import FlowDiagram, FlowDiagramSerializer
from authentik.flows.exceptions import FlowNonApplicableException
from authentik.flows.models import Flow
@@ -107,7 +107,7 @@ class Meta:
class FlowImportResultSerializer(PassiveSerializer):
"""Logs of an attempted flow import"""

logs = ListField(child=DictField(), read_only=True)
logs = LogEventSerializer(many=True, read_only=True)
success = BooleanField(read_only=True)


@@ -184,7 +184,7 @@ def import_flow(self, request: Request) -> Response:

importer = Importer.from_string(file.read().decode())
valid, logs = importer.validate()
import_response.initial_data["logs"] = [sanitize_dict(log) for log in logs]
import_response.initial_data["logs"] = [LogEventSerializer(log).data for log in logs]
import_response.initial_data["success"] = valid
import_response.is_valid()
if not valid:
4 changes: 2 additions & 2 deletions authentik/flows/planner.py
@@ -59,11 +59,11 @@ class FlowPlan:
markers: list[StageMarker] = field(default_factory=list)

def append_stage(self, stage: Stage, marker: StageMarker | None = None):
"""Append `stage` to all stages, optionally with stage marker"""
"""Append `stage` to the end of the plan, optionally with stage marker"""
return self.append(FlowStageBinding(stage=stage), marker)

def append(self, binding: FlowStageBinding, marker: StageMarker | None = None):
"""Append `stage` to all stages, optionally with stage marker"""
"""Append `stage` to the end of the plan, optionally with stage marker"""
self.bindings.append(binding)
self.markers.append(marker or StageMarker())

2 changes: 1 addition & 1 deletion authentik/flows/views/executor.py
@@ -450,7 +450,7 @@ def stage_invalid(self, error_message: str | None = None) -> HttpResponse:
return to_stage_response(self.request, challenge_view.get(self.request))

def cancel(self):
"""Cancel current execution and return a redirect"""
"""Cancel current flow execution"""
keys_to_delete = [
SESSION_KEY_APPLICATION_PRE,
SESSION_KEY_PLAN,