events: rework log messages returned from API and their rendering (#8770)
* events: initial log rework

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* add migration code

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

---------

Signed-off-by: Jens Langhammer <jens@goauthentik.io>
@@ -19,8 +19,6 @@ from guardian.models import UserObjectPermission
from rest_framework.exceptions import ValidationError
from rest_framework.serializers import BaseSerializer, Serializer
from structlog.stdlib import BoundLogger, get_logger
-from structlog.testing import capture_logs
-from structlog.types import EventDict
from yaml import load

from authentik.blueprints.v1.common import (
@@ -42,6 +40,7 @@ from authentik.core.models import (
from authentik.enterprise.license import LicenseKey
from authentik.enterprise.models import LicenseUsage
from authentik.enterprise.providers.rac.models import ConnectionToken
+from authentik.events.logs import LogEvent, capture_logs
from authentik.events.models import SystemTask
from authentik.events.utils import cleanse_dict
from authentik.flows.models import FlowToken, Stage
@@ -161,7 +160,7 @@ class Importer:

def updater(value) -> Any:
if value in self.__pk_map:
-self.logger.debug("updating reference in entry", value=value)
+self.logger.debug("Updating reference in entry", value=value)
return self.__pk_map[value]
return value

@@ -250,7 +249,7 @@ class Importer:
model_instance = existing_models.first()
if not isinstance(model(), BaseMetaModel) and model_instance:
self.logger.debug(
-"initialise serializer with instance",
+"Initialise serializer with instance",
model=model,
instance=model_instance,
pk=model_instance.pk,
@@ -260,14 +259,14 @@ class Importer:
elif model_instance and entry.state == BlueprintEntryDesiredState.MUST_CREATED:
raise EntryInvalidError.from_entry(
(
-f"state is set to {BlueprintEntryDesiredState.MUST_CREATED} "
+f"State is set to {BlueprintEntryDesiredState.MUST_CREATED} "
"and object exists already",
),
entry,
)
else:
self.logger.debug(
-"initialised new serializer instance",
+"Initialised new serializer instance",
model=model,
**cleanse_dict(updated_identifiers),
)
@@ -324,7 +323,7 @@ class Importer:
model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
except LookupError:
self.logger.warning(
-"app or model does not exist", app=model_app_label, model=model_name
+"App or Model does not exist", app=model_app_label, model=model_name
)
return False
# Validate each single entry
@@ -336,7 +335,7 @@ class Importer:
if entry.get_state(self._import) == BlueprintEntryDesiredState.ABSENT:
serializer = exc.serializer
else:
-self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc)
+self.logger.warning(f"Entry invalid: {exc}", entry=entry, error=exc)
if raise_errors:
raise exc
return False
@@ -356,14 +355,14 @@ class Importer:
and state == BlueprintEntryDesiredState.CREATED
):
self.logger.debug(
-"instance exists, skipping",
+"Instance exists, skipping",
model=model,
instance=instance,
pk=instance.pk,
)
else:
instance = serializer.save()
-self.logger.debug("updated model", model=instance)
+self.logger.debug("Updated model", model=instance)
if "pk" in entry.identifiers:
self.__pk_map[entry.identifiers["pk"]] = instance.pk
entry._state = BlueprintEntryState(instance)
@@ -371,12 +370,12 @@ class Importer:
instance: Model | None = serializer.instance
if instance.pk:
instance.delete()
-self.logger.debug("deleted model", mode=instance)
+self.logger.debug("Deleted model", mode=instance)
continue
-self.logger.debug("entry to delete with no instance, skipping")
+self.logger.debug("Entry to delete with no instance, skipping")
return True

-def validate(self, raise_validation_errors=False) -> tuple[bool, list[EventDict]]:
+def validate(self, raise_validation_errors=False) -> tuple[bool, list[LogEvent]]:
"""Validate loaded blueprint export, ensure all models are allowed
and serializers have no errors"""
self.logger.debug("Starting blueprint import validation")
@@ -390,9 +389,7 @@ class Importer:
):
successful = self._apply_models(raise_errors=raise_validation_errors)
if not successful:
-self.logger.debug("Blueprint validation failed")
-for log in logs:
-getattr(self.logger, log.get("log_level"))(**log)
+self.logger.warning("Blueprint validation failed")
self.logger.debug("Finished blueprint import validation")
self._import = orig_import
return successful, logs
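As a usage sketch (not part of the change itself): Importer.validate() now returns LogEvent objects rather than raw structlog EventDicts. This assumes only the API shown above; the blueprint content below is a placeholder.

    from authentik.blueprints.v1.importer import Importer

    importer = Importer.from_string("version: 1\nentries: []")  # placeholder blueprint
    valid, logs = importer.validate()
    for log in logs:
        # each LogEvent carries the message text, level, logger name, timestamp and attributes
        print(log.log_level, log.event, log.attributes)
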
@@ -30,6 +30,7 @@ from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata, E
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE
from authentik.blueprints.v1.oci import OCI_PREFIX
+from authentik.events.logs import capture_logs
from authentik.events.models import TaskStatus
from authentik.events.system_tasks import SystemTask, prefill_task
from authentik.events.utils import sanitize_dict
@@ -211,14 +212,15 @@ def apply_blueprint(self: SystemTask, instance_pk: str):
if not valid:
instance.status = BlueprintInstanceStatus.ERROR
instance.save()
-self.set_status(TaskStatus.ERROR, *[x["event"] for x in logs])
+self.set_status(TaskStatus.ERROR, *logs)
return
-applied = importer.apply()
-if not applied:
-instance.status = BlueprintInstanceStatus.ERROR
-instance.save()
-self.set_status(TaskStatus.ERROR, "Failed to apply")
-return
+with capture_logs() as logs:
+applied = importer.apply()
+if not applied:
+instance.status = BlueprintInstanceStatus.ERROR
+instance.save()
+self.set_status(TaskStatus.ERROR, *logs)
+return
instance.status = BlueprintInstanceStatus.SUCCESSFUL
instance.last_applied_hash = file_hash
instance.last_applied = now()

@@ -20,15 +20,14 @@ from rest_framework.response import Response
from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger
-from structlog.testing import capture_logs

from authentik.admin.api.metrics import CoordinateSerializer
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.providers import ProviderSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.models import Application, User
+from authentik.events.logs import LogEventSerializer, capture_logs
from authentik.events.models import EventAction
-from authentik.events.utils import sanitize_dict
from authentik.lib.utils.file import (
FilePathSerializer,
FileUploadSerializer,
@@ -182,9 +181,9 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
if request.user.is_superuser:
log_messages = []
for log in logs:
-if log.get("process", "") == "PolicyProcess":
+if log.attributes.get("process", "") == "PolicyProcess":
continue
-log_messages.append(sanitize_dict(log))
+log_messages.append(LogEventSerializer(log).data)
result.log_messages = log_messages
response = PolicyTestResultSerializer(result)
return Response(response.data)

@@ -12,7 +12,6 @@ from rest_framework.fields import (
ChoiceField,
DateTimeField,
FloatField,
-ListField,
SerializerMethodField,
)
from rest_framework.request import Request
@@ -21,6 +20,7 @@ from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import ReadOnlyModelViewSet
from structlog.stdlib import get_logger

+from authentik.events.logs import LogEventSerializer
from authentik.events.models import SystemTask, TaskStatus
from authentik.rbac.decorators import permission_required

@@ -39,7 +39,7 @@ class SystemTaskSerializer(ModelSerializer):
duration = FloatField(read_only=True)

status = ChoiceField(choices=[(x.value, x.name) for x in TaskStatus])
-messages = ListField(child=CharField())
+messages = LogEventSerializer(many=True)

def get_full_name(self, instance: SystemTask) -> str:
"""Get full name with UID"""

authentik/events/logs.py (new file, 81 lines)
@@ -0,0 +1,81 @@
from collections.abc import Generator
from contextlib import contextmanager
from dataclasses import dataclass, field
from datetime import datetime
from typing import Any

from django.utils.timezone import now
from rest_framework.fields import CharField, ChoiceField, DateTimeField, DictField
from structlog import configure, get_config
from structlog.stdlib import NAME_TO_LEVEL, ProcessorFormatter
from structlog.testing import LogCapture
from structlog.types import EventDict

from authentik.core.api.utils import PassiveSerializer
from authentik.events.utils import sanitize_dict


@dataclass()
class LogEvent:

    event: str
    log_level: str
    logger: str
    timestamp: datetime = field(default_factory=now)
    attributes: dict[str, Any] = field(default_factory=dict)

    @staticmethod
    def from_event_dict(item: EventDict) -> "LogEvent":
        event = item.pop("event")
        log_level = item.pop("level").lower()
        timestamp = datetime.fromisoformat(item.pop("timestamp"))
        item.pop("pid", None)
        # Sometimes log entries have both `level` and `log_level` set, but `level` is always set
        item.pop("log_level", None)
        return LogEvent(
            event, log_level, item.pop("logger"), timestamp, attributes=sanitize_dict(item)
        )


class LogEventSerializer(PassiveSerializer):
    """Single log message with all context logged."""

    timestamp = DateTimeField()
    log_level = ChoiceField(choices=tuple((x, x) for x in NAME_TO_LEVEL.keys()))
    logger = CharField()
    event = CharField()
    attributes = DictField()

    # TODO(2024.6?): This is a migration helper to return a correct API response for logs that
    # have been saved in an older format (mostly just list[str] with just the messages)
    def to_representation(self, instance):
        if isinstance(instance, str):
            instance = LogEvent(instance, "", "")
        elif isinstance(instance, list):
            instance = [LogEvent(x, "", "") for x in instance]
        return super().to_representation(instance)


@contextmanager
def capture_logs(log_default_output=True) -> Generator[list[LogEvent], None, None]:
    """Capture log entries created"""
    logs = []
    cap = LogCapture()
    # Modify `_Configuration.default_processors` set via `configure` but always
    # keep the list instance intact to not break references held by bound
    # loggers.
    processors: list = get_config()["processors"]
    old_processors = processors.copy()
    try:
        # clear processors list and use LogCapture for testing
        processors.remove(ProcessorFormatter.wrap_for_formatter)
        processors.append(cap)
        configure(processors=processors)
        yield logs
        for raw_log in cap.entries:
            logs.append(LogEvent.from_event_dict(raw_log))
    finally:
        # remove LogCapture and restore original processors
        processors.clear()
        processors.extend(old_processors)
        configure(processors=processors)

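A minimal sketch of how the helper above is meant to be consumed, assuming only the API defined in this file (the logger call and its attributes are placeholders):

    from structlog.stdlib import get_logger

    from authentik.events.logs import LogEvent, capture_logs

    LOGGER = get_logger()

    with capture_logs() as logs:
        LOGGER.warning("Something happened", some_attribute="value")  # placeholder log call
    # once the context manager exits, `logs` holds LogEvent instances built via from_event_dict()
    for log in logs:
        assert isinstance(log, LogEvent)
        print(log.timestamp, log.log_level, log.logger, log.event, log.attributes)
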
@@ -9,6 +9,7 @@ from django.utils.translation import gettext_lazy as _
from structlog.stdlib import get_logger
from tenant_schemas_celery.task import TenantTask

+from authentik.events.logs import LogEvent
from authentik.events.models import Event, EventAction, TaskStatus
from authentik.events.models import SystemTask as DBSystemTask
from authentik.events.utils import sanitize_item
@@ -24,7 +25,7 @@ class SystemTask(TenantTask):
save_on_success: bool

_status: TaskStatus
-_messages: list[str]
+_messages: list[LogEvent]

_uid: str | None
# Precise start time from perf_counter
@@ -44,15 +45,20 @@ class SystemTask(TenantTask):
"""Set UID, so in the case of an unexpected error its saved correctly"""
self._uid = uid

-def set_status(self, status: TaskStatus, *messages: str):
+def set_status(self, status: TaskStatus, *messages: LogEvent):
"""Set result for current run, will overwrite previous result."""
self._status = status
-self._messages = messages
+self._messages = list(messages)
+for idx, msg in enumerate(self._messages):
+if not isinstance(msg, LogEvent):
+self._messages[idx] = LogEvent(msg, logger=self.__name__, log_level="info")

def set_error(self, exception: Exception):
"""Set result to error and save exception"""
self._status = TaskStatus.ERROR
-self._messages = [exception_to_string(exception)]
+self._messages = [
+LogEvent(exception_to_string(exception), logger=self.__name__, log_level="error")
+]

def before_start(self, task_id, args, kwargs):
self._start_precise = perf_counter()
@@ -98,8 +104,7 @@ class SystemTask(TenantTask):
def on_failure(self, exc, task_id, args, kwargs, einfo):
super().on_failure(exc, task_id, args, kwargs, einfo=einfo)
if not self._status:
-self._status = TaskStatus.ERROR
-self._messages = exception_to_string(exc)
+self.set_error(exc)
DBSystemTask.objects.update_or_create(
name=self.__name__,
uid=self._uid,

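A sketch of how a task reports messages under the new signature, assuming the SystemTask API above and authentik's usual Celery task registration (the decorator import path and the message text are assumptions for illustration); plain strings are still accepted and coerced into LogEvent with log_level "info":

    from authentik.events.models import TaskStatus
    from authentik.events.system_tasks import SystemTask, prefill_task
    from authentik.root.celery import CELERY_APP  # assumed import path for the Celery app


    @CELERY_APP.task(bind=True, base=SystemTask)
    @prefill_task
    def example_task(self: SystemTask):
        # strings passed here are wrapped by set_status() into LogEvent(..., log_level="info")
        self.set_status(TaskStatus.SUCCESSFUL, "Everything is fine")  # placeholder message
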
@@ -7,7 +7,7 @@ from django.utils.translation import gettext as _
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiResponse, extend_schema
from rest_framework.decorators import action
-from rest_framework.fields import BooleanField, CharField, DictField, ListField, ReadOnlyField
+from rest_framework.fields import BooleanField, CharField, ReadOnlyField
from rest_framework.parsers import MultiPartParser
from rest_framework.request import Request
from rest_framework.response import Response
@@ -19,7 +19,7 @@ from authentik.blueprints.v1.exporter import FlowExporter
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT, Importer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import CacheSerializer, LinkSerializer, PassiveSerializer
-from authentik.events.utils import sanitize_dict
+from authentik.events.logs import LogEventSerializer
from authentik.flows.api.flows_diagram import FlowDiagram, FlowDiagramSerializer
from authentik.flows.exceptions import FlowNonApplicableException
from authentik.flows.models import Flow
@@ -107,7 +107,7 @@ class FlowSetSerializer(FlowSerializer):
class FlowImportResultSerializer(PassiveSerializer):
"""Logs of an attempted flow import"""

-logs = ListField(child=DictField(), read_only=True)
+logs = LogEventSerializer(many=True, read_only=True)
success = BooleanField(read_only=True)


@@ -184,7 +184,7 @@ class FlowViewSet(UsedByMixin, ModelViewSet):

importer = Importer.from_string(file.read().decode())
valid, logs = importer.validate()
-import_response.initial_data["logs"] = [sanitize_dict(log) for log in logs]
+import_response.initial_data["logs"] = [LogEventSerializer(log).data for log in logs]
import_response.initial_data["success"] = valid
import_response.is_valid()
if not valid:

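For context, a small sketch of what LogEventSerializer(log).data produces for the logs field above, assuming the serializer from authentik/events/logs.py (the logger name and values shown are illustrative):

    from authentik.events.logs import LogEvent, LogEventSerializer

    log = LogEvent(
        event="Starting blueprint import validation",
        log_level="debug",
        logger="authentik.blueprints.v1.importer",  # illustrative logger name
    )
    print(LogEventSerializer(log).data)
    # roughly: {"timestamp": "2024-...", "log_level": "debug",
    #           "logger": "authentik.blueprints.v1.importer",
    #           "event": "Starting blueprint import validation", "attributes": {}}
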
@@ -3,9 +3,9 @@
from dataclasses import dataclass

from structlog.stdlib import get_logger
-from structlog.testing import capture_logs

from authentik import __version__, get_build_hash
+from authentik.events.logs import LogEvent, capture_logs
from authentik.lib.config import CONFIG
from authentik.lib.sentry import SentryIgnoredException
from authentik.outposts.models import (
@@ -63,21 +63,21 @@ class BaseController:
"""Called by scheduled task to reconcile deployment/service/etc"""
raise NotImplementedError

-def up_with_logs(self) -> list[str]:
+def up_with_logs(self) -> list[LogEvent]:
"""Call .up() but capture all log output and return it."""
with capture_logs() as logs:
self.up()
-return [x["event"] for x in logs]
+return logs

def down(self):
"""Handler to delete everything we've created"""
raise NotImplementedError

-def down_with_logs(self) -> list[str]:
+def down_with_logs(self) -> list[LogEvent]:
"""Call .down() but capture all log output and return it."""
with capture_logs() as logs:
self.down()
-return [x["event"] for x in logs]
+return logs

def __enter__(self):
return self

@@ -9,10 +9,10 @@ from kubernetes.client.exceptions import OpenApiException
from kubernetes.config.config_exception import ConfigException
from kubernetes.config.incluster_config import load_incluster_config
from kubernetes.config.kube_config import load_kube_config_from_dict
-from structlog.testing import capture_logs
from urllib3.exceptions import HTTPError
from yaml import dump_all

+from authentik.events.logs import LogEvent, capture_logs
from authentik.outposts.controllers.base import BaseClient, BaseController, ControllerException
from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler
from authentik.outposts.controllers.k8s.deployment import DeploymentReconciler
@@ -91,7 +91,7 @@ class KubernetesController(BaseController):
except (OpenApiException, HTTPError, ServiceConnectionInvalid) as exc:
raise ControllerException(str(exc)) from exc

-def up_with_logs(self) -> list[str]:
+def up_with_logs(self) -> list[LogEvent]:
try:
all_logs = []
for reconcile_key in self.reconcile_order:
@@ -104,7 +104,9 @@ class KubernetesController(BaseController):
continue
reconciler = reconciler_cls(self)
reconciler.up()
-all_logs += [f"{reconcile_key.title()}: {x['event']}" for x in logs]
+for log in logs:
+log.logger = reconcile_key.title()
+all_logs.extend(logs)
return all_logs
except (OpenApiException, HTTPError, ServiceConnectionInvalid) as exc:
raise ControllerException(str(exc)) from exc
@@ -122,7 +124,7 @@ class KubernetesController(BaseController):
except (OpenApiException, HTTPError, ServiceConnectionInvalid) as exc:
raise ControllerException(str(exc)) from exc

-def down_with_logs(self) -> list[str]:
+def down_with_logs(self) -> list[LogEvent]:
try:
all_logs = []
for reconcile_key in self.reconcile_order:
@@ -135,7 +137,9 @@ class KubernetesController(BaseController):
continue
reconciler = reconciler_cls(self)
reconciler.down()
-all_logs += [f"{reconcile_key.title()}: {x['event']}" for x in logs]
+for log in logs:
+log.logger = reconcile_key.title()
+all_logs.extend(logs)
return all_logs
except (OpenApiException, HTTPError, ServiceConnectionInvalid) as exc:
raise ControllerException(str(exc)) from exc

@@ -149,10 +149,8 @@ def outpost_controller(
if not controller_type:
return
with controller_type(outpost, outpost.service_connection) as controller:
-logs = getattr(controller, f"{action}_with_logs")()
LOGGER.debug("---------------Outpost Controller logs starting----------------")
-for log in logs:
-LOGGER.debug(log)
+logs = getattr(controller, f"{action}_with_logs")()
LOGGER.debug("-----------------Outpost Controller logs end-------------------")
except (ControllerException, ServiceConnectionInvalid) as exc:
self.set_error(exc)

@@ -1,10 +1,11 @@
"""Serializer for policy execution"""

-from rest_framework.fields import BooleanField, CharField, DictField, ListField
+from rest_framework.fields import BooleanField, CharField, ListField
from rest_framework.relations import PrimaryKeyRelatedField

from authentik.core.api.utils import JSONDictField, PassiveSerializer
from authentik.core.models import User
+from authentik.events.logs import LogEventSerializer


class PolicyTestSerializer(PassiveSerializer):
@@ -19,4 +20,4 @@ class PolicyTestResultSerializer(PassiveSerializer):

passing = BooleanField()
messages = ListField(child=CharField(), read_only=True)
-log_messages = ListField(child=DictField(), read_only=True)
+log_messages = LogEventSerializer(many=True, read_only=True)

@@ -11,12 +11,11 @@ from rest_framework.response import Response
from rest_framework.serializers import ModelSerializer, SerializerMethodField
from rest_framework.viewsets import GenericViewSet
from structlog.stdlib import get_logger
-from structlog.testing import capture_logs

from authentik.core.api.applications import user_app_cache_key
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import CacheSerializer, MetaNameSerializer, TypeCreateSerializer
-from authentik.events.utils import sanitize_dict
+from authentik.events.logs import LogEventSerializer, capture_logs
from authentik.lib.utils.reflection import all_subclasses
from authentik.policies.api.exec import PolicyTestResultSerializer, PolicyTestSerializer
from authentik.policies.models import Policy, PolicyBinding
@@ -166,9 +165,9 @@ class PolicyViewSet(
result = proc.execute()
log_messages = []
for log in logs:
-if log.get("process", "") == "PolicyProcess":
+if log.attributes.get("process", "") == "PolicyProcess":
continue
-log_messages.append(sanitize_dict(log))
+log_messages.append(LogEventSerializer(log).data)
result.log_messages = log_messages
response = PolicyTestResultSerializer(result)
return Response(response.data)

@@ -13,6 +13,7 @@ from authentik.events.context_processors.base import get_context_processors

if TYPE_CHECKING:
from authentik.core.models import User
+from authentik.events.logs import LogEvent
from authentik.policies.models import PolicyBinding

LOGGER = get_logger()
@@ -74,7 +75,7 @@ class PolicyResult:
source_binding: PolicyBinding | None
source_results: list[PolicyResult] | None

-log_messages: list[dict] | None
+log_messages: list[LogEvent] | None

def __init__(self, passing: bool, *messages: str):
self.passing = passing

schema.yml (50 lines changed)
@@ -33782,8 +33782,7 @@ components:
logs:
type: array
items:
-type: object
-additionalProperties: {}
+$ref: '#/components/schemas/LogEvent'
readOnly: true
success:
type: boolean
@@ -35515,6 +35514,48 @@ components:
type: string
required:
- link
+LogEvent:
+type: object
+description: Single log message with all context logged.
+properties:
+timestamp:
+type: string
+format: date-time
+log_level:
+$ref: '#/components/schemas/LogLevelEnum'
+logger:
+type: string
+event:
+type: string
+attributes:
+type: object
+additionalProperties: {}
+required:
+- attributes
+- event
+- log_level
+- logger
+- timestamp
+LogLevelEnum:
+enum:
+- critical
+- exception
+- error
+- warn
+- warning
+- info
+- debug
+- notset
+type: string
+description: |-
+* `critical` - critical
+* `exception` - exception
+* `error` - error
+* `warn` - warn
+* `warning` - warning
+* `info` - info
+* `debug` - debug
+* `notset` - notset
LoginChallengeTypes:
oneOf:
- $ref: '#/components/schemas/RedirectChallenge'
@@ -41309,8 +41350,7 @@ components:
log_messages:
type: array
items:
-type: object
-additionalProperties: {}
+$ref: '#/components/schemas/LogEvent'
readOnly: true
required:
- log_messages
@@ -44324,7 +44364,7 @@ components:
messages:
type: array
items:
-type: string
+$ref: '#/components/schemas/LogEvent'
required:
- description
- duration

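For reference, one entry of a logs / log_messages / messages array conforming to the LogEvent schema above could look like the following (values are illustrative only):

    # Illustrative only: an object matching the LogEvent schema
    example_log_event = {
        "timestamp": "2024-03-06T12:00:00.000000Z",
        "log_level": "warning",
        "logger": "authentik.outposts.controllers.kubernetes",  # illustrative logger name
        "event": "Failed to apply",
        "attributes": {"process": "MainProcess"},
    }
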
@@ -26,12 +26,6 @@ class TestProxyKubernetes(TestCase):
outpost_connection_discovery()
self.controller = None

-def tearDown(self) -> None:
-if self.controller:
-for log in self.controller.down_with_logs():
-LOGGER.info(log)
-return super().tearDown()
-
@pytest.mark.timeout(120)
def test_kubernetes_controller_static(self):
"""Test Kubernetes Controller"""

@@ -1,5 +1,6 @@
import { DEFAULT_CONFIG } from "@goauthentik/common/api/config";
import "@goauthentik/components/ak-status-label";
+import "@goauthentik/elements/events/LogViewer";
import { Form } from "@goauthentik/elements/forms/Form";
import "@goauthentik/elements/forms/HorizontalFormElement";
import "@goauthentik/elements/forms/SearchSelect";
@@ -83,28 +84,7 @@ export class ApplicationCheckAccessForm extends Form<{ forUser: number }> {
<div class="pf-c-form__group-label">
<div class="c-form__horizontal-group">
<dl class="pf-c-description-list pf-m-horizontal">
-${(this.result?.logMessages || []).length > 0
-? this.result?.logMessages?.map((m) => {
-return html`<div class="pf-c-description-list__group">
-<dt class="pf-c-description-list__term">
-<span class="pf-c-description-list__text"
->${m.log_level}</span
->
-</dt>
-<dd class="pf-c-description-list__description">
-<div class="pf-c-description-list__text">
-${m.event}
-</div>
-</dd>
-</div>`;
-})
-: html`<div class="pf-c-description-list__group">
-<dt class="pf-c-description-list__term">
-<span class="pf-c-description-list__text"
->${msg("No log messages.")}</span
->
-</dt>
-</div>`}
+<ak-log-viewer .logs=${this.result?.logMessages}></ak-log-viewer>
</dl>
</div>
</div>

@@ -1,6 +1,7 @@
import { DEFAULT_CONFIG } from "@goauthentik/common/api/config";
import { SentryIgnoredError } from "@goauthentik/common/errors";
import "@goauthentik/components/ak-status-label";
+import "@goauthentik/elements/events/LogViewer";
import { Form } from "@goauthentik/elements/forms/Form";
import "@goauthentik/elements/forms/HorizontalFormElement";

@@ -55,28 +56,7 @@ export class FlowImportForm extends Form<Flow> {
<div class="pf-c-form__group-label">
<div class="c-form__horizontal-group">
<dl class="pf-c-description-list pf-m-horizontal">
-${(this.result?.logs || []).length > 0
-? this.result?.logs?.map((m) => {
-return html`<div class="pf-c-description-list__group">
-<dt class="pf-c-description-list__term">
-<span class="pf-c-description-list__text"
->${m.log_level}</span
->
-</dt>
-<dd class="pf-c-description-list__description">
-<div class="pf-c-description-list__text">
-${m.event}
-</div>
-</dd>
-</div>`;
-})
-: html`<div class="pf-c-description-list__group">
-<dt class="pf-c-description-list__term">
-<span class="pf-c-description-list__text"
->${msg("No log messages.")}</span
->
-</dt>
-</div>`}
+<ak-log-viewer .logs=${this.result?.logs}></ak-log-viewer>
</dl>
</div>
</div>

@@ -3,6 +3,7 @@ import { first } from "@goauthentik/common/utils";
import "@goauthentik/components/ak-status-label";
import "@goauthentik/elements/CodeMirror";
import { CodeMirrorMode } from "@goauthentik/elements/CodeMirror";
+import "@goauthentik/elements/events/LogViewer";
import { Form } from "@goauthentik/elements/forms/Form";
import "@goauthentik/elements/forms/HorizontalFormElement";
import "@goauthentik/elements/forms/SearchSelect";
@@ -85,28 +86,7 @@ export class PolicyTestForm extends Form<PolicyTestRequest> {
<div class="pf-c-form__group-label">
<div class="c-form__horizontal-group">
<dl class="pf-c-description-list pf-m-horizontal">
-${(this.result?.logMessages || []).length > 0
-? this.result?.logMessages?.map((m) => {
-return html`<div class="pf-c-description-list__group">
-<dt class="pf-c-description-list__term">
-<span class="pf-c-description-list__text"
->${m.log_level}</span
->
-</dt>
-<dd class="pf-c-description-list__description">
-<div class="pf-c-description-list__text">
-${m.event}
-</div>
-</dd>
-</div>`;
-})
-: html`<div class="pf-c-description-list__group">
-<dt class="pf-c-description-list__term">
-<span class="pf-c-description-list__text"
->${msg("No log messages.")}</span
->
-</dt>
-</div>`}
+<ak-log-viewer .logs=${this.result?.logMessages}></ak-log-viewer>
</dl>
</div>
</div>

@@ -5,6 +5,7 @@ import { getRelativeTime } from "@goauthentik/common/utils";
import { PFColor } from "@goauthentik/elements/Label";
import "@goauthentik/elements/buttons/ActionButton";
import "@goauthentik/elements/buttons/SpinnerButton";
+import "@goauthentik/elements/events/LogViewer";
import { PaginatedResponse } from "@goauthentik/elements/table/Table";
import { TableColumn } from "@goauthentik/elements/table/Table";
import { TablePage } from "@goauthentik/elements/table/TablePage";
@@ -95,9 +96,7 @@ export class SystemTaskListPage extends TablePage<SystemTask> {
</dt>
<dd class="pf-c-description-list__description">
<div class="pf-c-description-list__text">
-${item.messages.map((m) => {
-return html`<li>${m}</li>`;
-})}
+<ak-log-viewer .logs=${item?.messages}></ak-log-viewer>
</div>
</dd>
</div>

web/src/elements/events/LogViewer.ts (new file, 114 lines)
@@ -0,0 +1,114 @@
import { getRelativeTime } from "@goauthentik/common/utils";
import "@goauthentik/components/ak-status-label";
import "@goauthentik/elements/EmptyState";
import { PaginatedResponse, Table, TableColumn } from "@goauthentik/elements/table/Table";

import { msg } from "@lit/localize";
import { CSSResult, TemplateResult, html } from "lit";
import { customElement, property } from "lit/decorators.js";

import PFDescriptionList from "@patternfly/patternfly/components/DescriptionList/description-list.css";

import { LogEvent, LogLevelEnum } from "@goauthentik/api";

@customElement("ak-log-viewer")
export class LogViewer extends Table<LogEvent> {
    @property({ attribute: false })
    logs?: LogEvent[] = [];

    expandable = true;
    paginated = false;

    static get styles(): CSSResult[] {
        return super.styles.concat(PFDescriptionList);
    }

    async apiEndpoint(_page: number): Promise<PaginatedResponse<LogEvent>> {
        return {
            pagination: {
                next: 0,
                previous: 0,
                count: 1,
                current: 1,
                totalPages: 1,
                startIndex: 1,
                endIndex: 1,
            },
            results: this.logs || [],
        };
    }

    renderEmpty(): TemplateResult {
        return super.renderEmpty(
            html`<ak-empty-state header=${msg("No log messages.")}> </ak-empty-state>`,
        );
    }

    renderExpanded(item: LogEvent): TemplateResult {
        return html`<td role="cell" colspan="4">
            <div class="pf-c-table__expandable-row-content">
                <dl class="pf-c-description-list pf-m-horizontal">
                    <div class="pf-c-description-list__group">
                        <dt class="pf-c-description-list__term">
                            <span class="pf-c-description-list__text">${msg("Timestamp")}</span>
                        </dt>
                        <dd class="pf-c-description-list__description">
                            <div class="pf-c-description-list__text">
                                ${item.timestamp.toLocaleString()}
                            </div>
                        </dd>
                    </div>
                    <div class="pf-c-description-list__group">
                        <dt class="pf-c-description-list__term">
                            <span class="pf-c-description-list__text">${msg("Attributes")}</span>
                        </dt>
                        <dd class="pf-c-description-list__description">
                            <div class="pf-c-description-list__text">
                                <pre>${JSON.stringify(item.attributes, null, 4)}</pre>
                            </div>
                        </dd>
                    </div>
                </dl>
            </div>
        </td>`;
    }

    renderToolbarContainer(): TemplateResult {
        return html``;
    }

    columns(): TableColumn[] {
        return [
            new TableColumn(msg("Time")),
            new TableColumn(msg("Level")),
            new TableColumn(msg("Event")),
            new TableColumn(msg("Logger")),
        ];
    }

    statusForItem(item: LogEvent): string {
        switch (item.logLevel) {
            case LogLevelEnum.Critical:
            case LogLevelEnum.Error:
            case LogLevelEnum.Exception:
                return "error";
            case LogLevelEnum.Warn:
            case LogLevelEnum.Warning:
                return "warning";
            default:
                return "info";
        }
    }

    row(item: LogEvent): TemplateResult[] {
        return [
            html`${getRelativeTime(item.timestamp)}`,
            html`<ak-status-label
                type=${this.statusForItem(item)}
                bad-label=${item.logLevel}
            ></ak-status-label>`,
            html`${item.event}`,
            html`${item.logger}`,
        ];
    }
}