Compare commits

..

9 Commits

SHA1        Message                Signed-off-by                            Date
6e1cf8a23c  slight refactors       Jens Langhammer <jens@goauthentik.io>    2025-06-09 00:48:28 +02:00
fde6120e67  fix                    Jens Langhammer <jens@goauthentik.io>    2025-06-08 15:52:24 +02:00
dabd812071  fix more               Jens Langhammer <jens@goauthentik.io>    2025-06-08 03:03:48 +02:00
db92da4cb8  fix em actually        Jens Langhammer <jens@goauthentik.io>    2025-06-08 02:51:27 +02:00
81c23fff98  found the first bug    Jens Langhammer <jens@goauthentik.io>    2025-06-08 02:25:47 +02:00
54b5774a15  better                 Jens Langhammer <jens@goauthentik.io>    2025-06-08 02:16:03 +02:00
ba4650a088  less hardcoded names   Jens Langhammer <jens@goauthentik.io>    2025-06-08 02:07:29 +02:00
b7d2c5188b  improve                Jens Langhammer <jens@goauthentik.io>    2025-06-08 02:07:23 +02:00
dd8e71df7d  initial optimisation   Jens Langhammer <jens@goauthentik.io>    2025-06-08 01:24:12 +02:00
365 changed files with 12287 additions and 15772 deletions

View File

@ -1,16 +1,16 @@
[bumpversion]
current_version = 2025.6.2
current_version = 2025.6.0
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
serialize =
serialize =
{major}.{minor}.{patch}-{rc_t}{rc_n}
{major}.{minor}.{patch}
message = release: {new_version}
tag_name = version/{new_version}
[bumpversion:part:rc_t]
values =
values =
rc
final
optional_value = final
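For reference, the parse and serialize settings above round-trip both release formats; a quick sanity check of the pattern, reading the escaped \\d as a plain regex \d and treating this purely as an illustrative sketch:

import re

# The bumpversion parse pattern from the config above.
VERSION_RE = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?"
)

# The plain "{major}.{minor}.{patch}" serialize form...
print(VERSION_RE.match("2025.6.2").groupdict())
# {'major': '2025', 'minor': '6', 'patch': '2', 'rc_t': None, 'rc_n': None}

# ...and the "{major}.{minor}.{patch}-{rc_t}{rc_n}" release-candidate form.
print(VERSION_RE.match("2025.6.2-rc1").groupdict())
# {'major': '2025', 'minor': '6', 'patch': '2', 'rc_t': 'rc', 'rc_n': '1'}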

View File

@ -11,4 +11,3 @@ blueprints/local
!gen-ts-api/node_modules
!gen-ts-api/dist/**
!gen-go-api/
.venv

View File

@ -7,9 +7,6 @@ charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[*.toml]
indent_size = 2
[*.html]
indent_size = 2

View File

@ -100,13 +100,6 @@ updates:
goauthentik:
patterns:
- "@goauthentik/*"
eslint:
patterns:
- "@eslint/*"
- "@typescript-eslint/*"
- "eslint-*"
- "eslint"
- "typescript-eslint"
- package-ecosystem: npm
directory: "/lifecycle/aws"
schedule:

View File

@ -202,7 +202,7 @@ jobs:
uses: actions/cache@v4
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
- name: prepare web ui
if: steps.cache-web.outputs.cache-hit != 'true'
working-directory: web

View File

@ -41,27 +41,6 @@ jobs:
- name: test
working-directory: website/
run: npm test
build:
runs-on: ubuntu-latest
name: ${{ matrix.job }}
strategy:
fail-fast: false
matrix:
job:
- build
- build:integrations
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: website/package.json
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/
run: npm ci
- name: build
working-directory: website/
run: npm run ${{ matrix.job }}
build-container:
runs-on: ubuntu-latest
permissions:
@ -115,7 +94,6 @@ jobs:
needs:
- lint
- test
- build
- build-container
runs-on: ubuntu-latest
steps:

View File

@ -75,9 +75,9 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
/bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
# Stage 4: Download uv
FROM ghcr.io/astral-sh/uv:0.7.13 AS uv
FROM ghcr.io/astral-sh/uv:0.7.11 AS uv
# Stage 5: Base python image
FROM ghcr.io/goauthentik/fips-python:3.13.5-slim-bookworm-fips AS python-base
FROM ghcr.io/goauthentik/fips-python:3.13.3-slim-bookworm-fips AS python-base
ENV VENV_PATH="/ak-root/.venv" \
PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \

View File

@ -94,7 +94,7 @@ gen-build: ## Extract the schema from the database
AUTHENTIK_DEBUG=true \
AUTHENTIK_TENANTS__ENABLED=true \
AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
uv run ak make_blueprint_schema --file blueprints/schema.json
uv run ak make_blueprint_schema > blueprints/schema.json
AUTHENTIK_DEBUG=true \
AUTHENTIK_TENANTS__ENABLED=true \
AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \

View File

@ -2,7 +2,7 @@
from os import environ
__version__ = "2025.6.2"
__version__ = "2025.6.0"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

View File

@ -0,0 +1,79 @@
"""authentik administration metrics"""
from datetime import timedelta
from django.db.models.functions import ExtractHour
from drf_spectacular.utils import extend_schema, extend_schema_field
from guardian.shortcuts import get_objects_for_user
from rest_framework.fields import IntegerField, SerializerMethodField
from rest_framework.permissions import IsAuthenticated
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from authentik.core.api.utils import PassiveSerializer
from authentik.events.models import EventAction
class CoordinateSerializer(PassiveSerializer):
"""Coordinates for diagrams"""
x_cord = IntegerField(read_only=True)
y_cord = IntegerField(read_only=True)
class LoginMetricsSerializer(PassiveSerializer):
"""Login Metrics per 1h"""
logins = SerializerMethodField()
logins_failed = SerializerMethodField()
authorizations = SerializerMethodField()
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins(self, _):
"""Get successful logins per 8 hours for the last 7 days"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.LOGIN
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins_failed(self, _):
"""Get failed logins per 8 hours for the last 7 days"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.LOGIN_FAILED
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_authorizations(self, _):
"""Get successful authorizations per 8 hours for the last 7 days"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.AUTHORIZE_APPLICATION
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
)
class AdministrationMetricsViewSet(APIView):
"""Login Metrics per 1h"""
permission_classes = [IsAuthenticated]
@extend_schema(responses={200: LoginMetricsSerializer(many=False)})
def get(self, request: Request) -> Response:
"""Login Metrics per 1h"""
serializer = LoginMetricsSerializer(True)
serializer.context["user"] = request.user
return Response(serializer.data)
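This view is registered at admin/metrics/ (see the urls.py hunk further down) and exercised by the new test below; a minimal smoke-test sketch, assuming a superuser instance named admin is available:

from django.urls import reverse
from rest_framework.test import APIClient

client = APIClient()
client.force_authenticate(user=admin)  # `admin` is a placeholder superuser
response = client.get(reverse("authentik_api:admin_metrics"))
assert response.status_code == 200
# Three series of {"x_cord": <epoch milliseconds>, "y_cord": <count>} points.
data = response.json()
print(data["logins"], data["logins_failed"], data["authorizations"])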

View File

@ -36,6 +36,11 @@ class TestAdminAPI(TestCase):
body = loads(response.content)
self.assertEqual(len(body), 0)
def test_metrics(self):
"""Test metrics API"""
response = self.client.get(reverse("authentik_api:admin_metrics"))
self.assertEqual(response.status_code, 200)
def test_apps(self):
"""Test apps API"""
response = self.client.get(reverse("authentik_api:apps-list"))

View File

@ -3,6 +3,7 @@
from django.urls import path
from authentik.admin.api.meta import AppsViewSet, ModelViewSet
from authentik.admin.api.metrics import AdministrationMetricsViewSet
from authentik.admin.api.system import SystemView
from authentik.admin.api.version import VersionView
from authentik.admin.api.version_history import VersionHistoryViewSet
@ -11,6 +12,11 @@ from authentik.admin.api.workers import WorkerView
api_urlpatterns = [
("admin/apps", AppsViewSet, "apps"),
("admin/models", ModelViewSet, "models"),
path(
"admin/metrics/",
AdministrationMetricsViewSet.as_view(),
name="admin_metrics",
),
path("admin/version/", VersionView.as_view(), name="admin_version"),
("admin/version/history", VersionHistoryViewSet, "version_history"),
path("admin/workers/", WorkerView.as_view(), name="admin_workers"),

View File

@ -72,33 +72,20 @@ class Command(BaseCommand):
"additionalProperties": True,
},
"entries": {
"anyOf": [
{
"type": "array",
"items": {"$ref": "#/$defs/blueprint_entry"},
},
{
"type": "object",
"additionalProperties": {
"type": "array",
"items": {"$ref": "#/$defs/blueprint_entry"},
},
},
],
"type": "array",
"items": {
"oneOf": [],
},
},
},
"$defs": {"blueprint_entry": {"oneOf": []}},
"$defs": {},
}
def add_arguments(self, parser):
parser.add_argument("--file", type=str)
@no_translations
def handle(self, *args, file: str, **options):
def handle(self, *args, **options):
"""Generate JSON Schema for blueprints"""
self.build()
with open(file, "w") as _schema:
_schema.write(dumps(self.schema, indent=4, default=Command.json_default))
self.stdout.write(dumps(self.schema, indent=4, default=Command.json_default))
@staticmethod
def json_default(value: Any) -> Any:
@ -125,7 +112,7 @@ class Command(BaseCommand):
}
)
model_path = f"{model._meta.app_label}.{model._meta.model_name}"
self.schema["$defs"]["blueprint_entry"]["oneOf"].append(
self.schema["properties"]["entries"]["items"]["oneOf"].append(
self.template_entry(model_path, model, serializer)
)
@ -147,7 +134,7 @@ class Command(BaseCommand):
"id": {"type": "string"},
"state": {
"type": "string",
"enum": sorted([s.value for s in BlueprintEntryDesiredState]),
"enum": [s.value for s in BlueprintEntryDesiredState],
"default": "present",
},
"conditions": {"type": "array", "items": {"type": "boolean"}},
@ -218,7 +205,7 @@ class Command(BaseCommand):
"type": "object",
"required": ["permission"],
"properties": {
"permission": {"type": "string", "enum": sorted(perms)},
"permission": {"type": "string", "enum": perms},
"user": {"type": "integer"},
"role": {"type": "string"},
},
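Both shapes of the command can also be invoked from Python through Django's call_command; a rough sketch of the two styles shown in this hunk and in the Makefile hunk earlier (the --file flag only exists in the variant whose handle() accepts it):

from io import StringIO

from django.core.management import call_command

# Variant with the --file argument: the command writes the schema file itself.
call_command("make_blueprint_schema", file="blueprints/schema.json")

# Variant that prints to stdout; the Makefile redirects this with `>`.
out = StringIO()
call_command("make_blueprint_schema", stdout=out)
with open("blueprints/schema.json", "w") as schema:
    schema.write(out.getvalue())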

View File

@ -1,11 +1,10 @@
version: 1
entries:
foo:
- identifiers:
name: "%(id)s"
slug: "%(id)s"
model: authentik_flows.flow
state: present
attrs:
designation: stage_configuration
title: foo
- identifiers:
name: "%(id)s"
slug: "%(id)s"
model: authentik_flows.flow
state: present
attrs:
designation: stage_configuration
title: foo

View File

@ -191,18 +191,11 @@ class Blueprint:
"""Dataclass used for a full export"""
version: int = field(default=1)
entries: list[BlueprintEntry] | dict[str, list[BlueprintEntry]] = field(default_factory=list)
entries: list[BlueprintEntry] = field(default_factory=list)
context: dict = field(default_factory=dict)
metadata: BlueprintMetadata | None = field(default=None)
def iter_entries(self) -> Iterable[BlueprintEntry]:
if isinstance(self.entries, dict):
for _section, entries in self.entries.items():
yield from entries
else:
yield from self.entries
class YAMLTag:
"""Base class for all YAML Tags"""
@ -233,7 +226,7 @@ class KeyOf(YAMLTag):
self.id_from = node.value
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
for _entry in blueprint.iter_entries():
for _entry in blueprint.entries:
if _entry.id == self.id_from and _entry._state.instance:
# Special handling for PolicyBindingModels, as they'll have a different PK
# which is used when creating policy bindings

View File

@ -384,7 +384,7 @@ class Importer:
def _apply_models(self, raise_errors=False) -> bool:
"""Apply (create/update) models yaml"""
self.__pk_map = {}
for entry in self._import.iter_entries():
for entry in self._import.entries:
model_app_label, model_name = entry.get_model(self._import).split(".")
try:
model: type[SerializerModel] = registry.get_model(model_app_label, model_name)

View File

@ -47,7 +47,7 @@ class MetaModelRegistry:
models = apps.get_models()
for _, value in self.models.items():
models.append(value)
return sorted(models, key=str)
return models
def get_model(self, app_label: str, model_id: str) -> type[Model]:
"""Get model checks if any virtual models are registered, and falls back

View File

@ -148,14 +148,3 @@ class TestBrands(APITestCase):
"default_locale": "",
},
)
def test_custom_css(self):
"""Test custom_css"""
brand = create_test_brand()
brand.branding_custom_css = """* {
font-family: "Foo bar";
}"""
brand.save()
res = self.client.get(reverse("authentik_core:if-user"))
self.assertEqual(res.status_code, 200)
self.assertIn(brand.branding_custom_css, res.content.decode())

View File

@ -5,8 +5,6 @@ from typing import Any
from django.db.models import F, Q
from django.db.models import Value as V
from django.http.request import HttpRequest
from django.utils.html import _json_script_escapes
from django.utils.safestring import mark_safe
from authentik import get_full_version
from authentik.brands.models import Brand
@ -34,13 +32,8 @@ def context_processor(request: HttpRequest) -> dict[str, Any]:
"""Context Processor that injects brand object into every template"""
brand = getattr(request, "brand", DEFAULT_BRAND)
tenant = getattr(request, "tenant", Tenant())
# similarly to `json_script` we escape everything HTML-related, however django
# only directly exposes this as a function that also wraps it in a <script> tag
# which we dont want for CSS
brand_css = mark_safe(str(brand.branding_custom_css).translate(_json_script_escapes)) # nosec
return {
"brand": brand,
"brand_css": brand_css,
"footer_links": tenant.footer_links,
"html_meta": {**get_http_meta()},
"version": get_full_version(),

View File

@ -2,9 +2,11 @@
from collections.abc import Iterator
from copy import copy
from datetime import timedelta
from django.core.cache import cache
from django.db.models import QuerySet
from django.db.models.functions import ExtractHour
from django.shortcuts import get_object_or_404
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
@ -18,6 +20,7 @@ from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger
from authentik.admin.api.metrics import CoordinateSerializer
from authentik.api.pagination import Pagination
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.providers import ProviderSerializer
@ -25,6 +28,7 @@ from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.core.models import Application, User
from authentik.events.logs import LogEventSerializer, capture_logs
from authentik.events.models import EventAction
from authentik.lib.utils.file import (
FilePathSerializer,
FileUploadSerializer,
@ -149,10 +153,10 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
return applications
def _filter_applications_with_launch_url(
self, pagined_apps: Iterator[Application]
self, paginated_apps: Iterator[Application]
) -> list[Application]:
applications = []
for app in pagined_apps:
for app in paginated_apps:
if app.get_launch_url():
applications.append(app)
return applications
@ -317,3 +321,18 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
"""Set application icon (as URL)"""
app: Application = self.get_object()
return set_file_url(request, app, "meta_icon")
@permission_required("authentik_core.view_application", ["authentik_events.view_event"])
@extend_schema(responses={200: CoordinateSerializer(many=True)})
@action(detail=True, pagination_class=None, filter_backends=[])
def metrics(self, request: Request, slug: str):
"""Metrics for application logins"""
app = self.get_object()
return Response(
get_objects_for_user(request.user, "authentik_events.view_event").filter(
action=EventAction.AUTHORIZE_APPLICATION,
context__authorized_application__pk=app.pk.hex,
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
)

View File

@ -6,6 +6,7 @@ from typing import Any
from django.contrib.auth import update_session_auth_hash
from django.contrib.auth.models import Permission
from django.db.models.functions import ExtractHour
from django.db.transaction import atomic
from django.db.utils import IntegrityError
from django.urls import reverse_lazy
@ -51,6 +52,7 @@ from rest_framework.validators import UniqueValidator
from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger
from authentik.admin.api.metrics import CoordinateSerializer
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.brands.models import Brand
from authentik.core.api.used_by import UsedByMixin
@ -315,6 +317,53 @@ class SessionUserSerializer(PassiveSerializer):
original = UserSelfSerializer(required=False)
class UserMetricsSerializer(PassiveSerializer):
"""User Metrics"""
logins = SerializerMethodField()
logins_failed = SerializerMethodField()
authorizations = SerializerMethodField()
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins(self, _):
"""Get successful logins per 8 hours for the last 7 days"""
user = self.context["user"]
request = self.context["request"]
return (
get_objects_for_user(request.user, "authentik_events.view_event").filter(
action=EventAction.LOGIN, user__pk=user.pk
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins_failed(self, _):
"""Get failed logins per 8 hours for the last 7 days"""
user = self.context["user"]
request = self.context["request"]
return (
get_objects_for_user(request.user, "authentik_events.view_event").filter(
action=EventAction.LOGIN_FAILED, context__username=user.username
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_authorizations(self, _):
"""Get failed logins per 8 hours for the last 7 days"""
user = self.context["user"]
request = self.context["request"]
return (
get_objects_for_user(request.user, "authentik_events.view_event").filter(
action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
)
class UsersFilter(FilterSet):
"""Filter for users"""
@ -386,23 +435,8 @@ class UserViewSet(UsedByMixin, ModelViewSet):
queryset = User.objects.none()
ordering = ["username"]
serializer_class = UserSerializer
filterset_class = UsersFilter
search_fields = ["username", "name", "is_active", "email", "uuid", "attributes"]
def get_ql_fields(self):
from djangoql.schema import BoolField, StrField
from authentik.enterprise.search.fields import ChoiceSearchField, JSONSearchField
return [
StrField(User, "username"),
StrField(User, "name"),
StrField(User, "email"),
StrField(User, "path"),
BoolField(User, "is_active", nullable=True),
ChoiceSearchField(User, "type"),
JSONSearchField(User, "attributes"),
]
filterset_class = UsersFilter
def get_queryset(self):
base_qs = User.objects.all().exclude_anonymous()
@ -573,6 +607,17 @@ class UserViewSet(UsedByMixin, ModelViewSet):
update_session_auth_hash(self.request, user)
return Response(status=204)
@permission_required("authentik_core.view_user", ["authentik_events.view_event"])
@extend_schema(responses={200: UserMetricsSerializer(many=False)})
@action(detail=True, pagination_class=None, filter_backends=[])
def metrics(self, request: Request, pk: int) -> Response:
"""User metrics per 1h"""
user: User = self.get_object()
serializer = UserMetricsSerializer(instance={})
serializer.context["user"] = user
serializer.context["request"] = request
return Response(serializer.data)
@permission_required("authentik_core.reset_user_password")
@extend_schema(
responses={

View File

@ -18,7 +18,7 @@ from django.http import HttpRequest
from django.utils.functional import SimpleLazyObject, cached_property
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
from django_cte import CTE, with_cte
from django_cte import CTEQuerySet, With
from guardian.conf import settings
from guardian.mixins import GuardianUserMixin
from model_utils.managers import InheritanceManager
@ -136,7 +136,7 @@ class AttributesMixin(models.Model):
return instance, False
class GroupQuerySet(QuerySet):
class GroupQuerySet(CTEQuerySet):
def with_children_recursive(self):
"""Recursively get all groups that have the current queryset as parents
or are indirectly related."""
@ -165,9 +165,9 @@ class GroupQuerySet(QuerySet):
)
# Build the recursive query, see above
cte = CTE.recursive(make_cte)
cte = With.recursive(make_cte)
# Return the result, as a usable queryset for Group.
return with_cte(cte, select=cte.join(Group, group_uuid=cte.col.group_uuid))
return cte.join(Group, group_uuid=cte.col.group_uuid).with_cte(cte)
class Group(SerializerModel, AttributesMixin):
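Both django-cte spellings produce the same recursive expansion; a minimal usage sketch of with_children_recursive(), assuming Group's default manager is built from GroupQuerySet (that wiring sits outside this hunk) and using a placeholder group name:

from authentik.core.models import Group

# Start from one group and pull in every group reachable through the
# parent relation, including the starting group itself.
root = Group.objects.get(name="employees")  # placeholder name
descendants = Group.objects.filter(pk=root.pk).with_children_recursive()
print([group.name for group in descendants])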

View File

@ -16,7 +16,7 @@
{% block head_before %}
{% endblock %}
<link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}">
<style>{{ brand_css }}</style>
<style>{{ brand.branding_custom_css }}</style>
<script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script>
<script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script>
{% block head %}

View File

@ -114,7 +114,6 @@ class TestApplicationsAPI(APITestCase):
self.assertJSONEqual(
response.content.decode(),
{
"autocomplete": {},
"pagination": {
"next": 0,
"previous": 0,
@ -168,7 +167,6 @@ class TestApplicationsAPI(APITestCase):
self.assertJSONEqual(
response.content.decode(),
{
"autocomplete": {},
"pagination": {
"next": 0,
"previous": 0,

View File

@ -81,6 +81,22 @@ class TestUsersAPI(APITestCase):
response = self.client.get(reverse("authentik_api:user-list"), {"include_groups": "true"})
self.assertEqual(response.status_code, 200)
def test_metrics(self):
"""Test user's metrics"""
self.client.force_login(self.admin)
response = self.client.get(
reverse("authentik_api:user-metrics", kwargs={"pk": self.user.pk})
)
self.assertEqual(response.status_code, 200)
def test_metrics_denied(self):
"""Test user's metrics (non-superuser)"""
self.client.force_login(self.user)
response = self.client.get(
reverse("authentik_api:user-metrics", kwargs={"pk": self.user.pk})
)
self.assertEqual(response.status_code, 403)
def test_recovery_no_flow(self):
"""Test user recovery link (no recovery flow set)"""
self.client.force_login(self.admin)

View File

@ -119,17 +119,17 @@ class TestTrimPasswordHistory(TestCase):
[
UserPasswordHistory(
user=self.user,
old_password="hunter1", # nosec
old_password="hunter1", # nosec B106
created_at=_now - timedelta(days=3),
),
UserPasswordHistory(
user=self.user,
old_password="hunter2", # nosec
old_password="hunter2", # nosec B106
created_at=_now - timedelta(days=2),
),
UserPasswordHistory(
user=self.user,
old_password="hunter3", # nosec
old_password="hunter3", # nosec B106
created_at=_now,
),
]

View File

@ -1,12 +0,0 @@
"""Enterprise app config"""
from authentik.enterprise.apps import EnterpriseConfig
class AuthentikEnterpriseSearchConfig(EnterpriseConfig):
"""Enterprise app config"""
name = "authentik.enterprise.search"
label = "authentik_search"
verbose_name = "authentik Enterprise.Search"
default = True

View File

@ -1,128 +0,0 @@
"""DjangoQL search"""
from collections import OrderedDict, defaultdict
from collections.abc import Generator
from django.db import connection
from django.db.models import Model, Q
from djangoql.compat import text_type
from djangoql.schema import StrField
class JSONSearchField(StrField):
"""JSON field for DjangoQL"""
model: Model
def __init__(self, model=None, name=None, nullable=None, suggest_nested=True):
# Set this in the constructor to not clobber the type variable
self.type = "relation"
self.suggest_nested = suggest_nested
super().__init__(model, name, nullable)
def get_lookup(self, path, operator, value):
search = "__".join(path)
op, invert = self.get_operator(operator)
q = Q(**{f"{search}{op}": self.get_lookup_value(value)})
return ~q if invert else q
def json_field_keys(self) -> Generator[tuple[str]]:
with connection.cursor() as cursor:
cursor.execute(
f"""
WITH RECURSIVE "{self.name}_keys" AS (
SELECT
ARRAY[jsonb_object_keys("{self.name}")] AS key_path_array,
"{self.name}" -> jsonb_object_keys("{self.name}") AS value
FROM {self.model._meta.db_table}
WHERE "{self.name}" IS NOT NULL
AND jsonb_typeof("{self.name}") = 'object'
UNION ALL
SELECT
ck.key_path_array || jsonb_object_keys(ck.value),
ck.value -> jsonb_object_keys(ck.value) AS value
FROM "{self.name}_keys" ck
WHERE jsonb_typeof(ck.value) = 'object'
),
unique_paths AS (
SELECT DISTINCT key_path_array
FROM "{self.name}_keys"
)
SELECT key_path_array FROM unique_paths;
""" # nosec
)
return (x[0] for x in cursor.fetchall())
def get_nested_options(self) -> OrderedDict:
"""Get keys of all nested objects to show autocomplete"""
if not self.suggest_nested:
return OrderedDict()
base_model_name = f"{self.model._meta.app_label}.{self.model._meta.model_name}_{self.name}"
def recursive_function(parts: list[str], parent_parts: list[str] | None = None):
if not parent_parts:
parent_parts = []
path = parts.pop(0)
parent_parts.append(path)
relation_key = "_".join(parent_parts)
if len(parts) > 1:
out_dict = {
relation_key: {
parts[0]: {
"type": "relation",
"relation": f"{relation_key}_{parts[0]}",
}
}
}
child_paths = recursive_function(parts.copy(), parent_parts.copy())
child_paths.update(out_dict)
return child_paths
else:
return {relation_key: {parts[0]: {}}}
relation_structure = defaultdict(dict)
for relations in self.json_field_keys():
result = recursive_function([base_model_name] + relations)
for relation_key, value in result.items():
for sub_relation_key, sub_value in value.items():
if not relation_structure[relation_key].get(sub_relation_key, None):
relation_structure[relation_key][sub_relation_key] = sub_value
else:
relation_structure[relation_key][sub_relation_key].update(sub_value)
final_dict = defaultdict(dict)
for key, value in relation_structure.items():
for sub_key, sub_value in value.items():
if not sub_value:
final_dict[key][sub_key] = {
"type": "str",
"nullable": True,
}
else:
final_dict[key][sub_key] = sub_value
return OrderedDict(final_dict)
def relation(self) -> str:
return f"{self.model._meta.app_label}.{self.model._meta.model_name}_{self.name}"
class ChoiceSearchField(StrField):
def __init__(self, model=None, name=None, nullable=None):
super().__init__(model, name, nullable, suggest_options=True)
def get_options(self, search):
result = []
choices = self._field_choices()
if choices:
search = search.lower()
for c in choices:
choice = text_type(c[0])
if search in choice.lower():
result.append(choice)
return result

View File

@ -1,53 +0,0 @@
from rest_framework.response import Response
from authentik.api.pagination import Pagination
from authentik.enterprise.search.ql import AUTOCOMPLETE_COMPONENT_NAME, QLSearch
class AutocompletePagination(Pagination):
def paginate_queryset(self, queryset, request, view=None):
self.view = view
return super().paginate_queryset(queryset, request, view)
def get_autocomplete(self):
schema = QLSearch().get_schema(self.request, self.view)
introspections = {}
if hasattr(self.view, "get_ql_fields"):
from authentik.enterprise.search.schema import AKQLSchemaSerializer
introspections = AKQLSchemaSerializer().serialize(
schema(self.page.paginator.object_list.model)
)
return introspections
def get_paginated_response(self, data):
previous_page_number = 0
if self.page.has_previous():
previous_page_number = self.page.previous_page_number()
next_page_number = 0
if self.page.has_next():
next_page_number = self.page.next_page_number()
return Response(
{
"pagination": {
"next": next_page_number,
"previous": previous_page_number,
"count": self.page.paginator.count,
"current": self.page.number,
"total_pages": self.page.paginator.num_pages,
"start_index": self.page.start_index(),
"end_index": self.page.end_index(),
},
"results": data,
"autocomplete": self.get_autocomplete(),
}
)
def get_paginated_response_schema(self, schema):
final_schema = super().get_paginated_response_schema(schema)
final_schema["properties"]["autocomplete"] = {
"$ref": f"#/components/schemas/{AUTOCOMPLETE_COMPONENT_NAME}"
}
final_schema["required"].append("autocomplete")
return final_schema

View File

@ -1,78 +0,0 @@
"""DjangoQL search"""
from django.apps import apps
from django.db.models import QuerySet
from djangoql.ast import Name
from djangoql.exceptions import DjangoQLError
from djangoql.queryset import apply_search
from djangoql.schema import DjangoQLSchema
from rest_framework.filters import SearchFilter
from rest_framework.request import Request
from structlog.stdlib import get_logger
from authentik.enterprise.search.fields import JSONSearchField
LOGGER = get_logger()
AUTOCOMPLETE_COMPONENT_NAME = "Autocomplete"
AUTOCOMPLETE_SCHEMA = {
"type": "object",
"additionalProperties": {},
}
class BaseSchema(DjangoQLSchema):
"""Base Schema which deals with JSON Fields"""
def resolve_name(self, name: Name):
model = self.model_label(self.current_model)
root_field = name.parts[0]
field = self.models[model].get(root_field)
# If the query goes into a JSON field, return the root
# field as the JSON field will do the rest
if isinstance(field, JSONSearchField):
# This is a workaround; build_filter will remove the right-most
# entry in the path as that is intended to be the same as the field
# however for JSON that is not the case
if name.parts[-1] != root_field:
name.parts.append(root_field)
return field
return super().resolve_name(name)
class QLSearch(SearchFilter):
"""rest_framework search filter which uses DjangoQL"""
@property
def enabled(self):
return apps.get_app_config("authentik_enterprise").enabled()
def get_search_terms(self, request) -> str:
"""
Search terms are set by a ?search=... query parameter,
and may be comma and/or whitespace delimited.
"""
params = request.query_params.get(self.search_param, "")
params = params.replace("\x00", "") # strip null characters
return params
def get_schema(self, request: Request, view) -> BaseSchema:
ql_fields = []
if hasattr(view, "get_ql_fields"):
ql_fields = view.get_ql_fields()
class InlineSchema(BaseSchema):
def get_fields(self, model):
return ql_fields or []
return InlineSchema
def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
search_query = self.get_search_terms(request)
schema = self.get_schema(request, view)
if len(search_query) == 0 or not self.enabled:
return super().filter_queryset(request, queryset, view)
try:
return apply_search(queryset, search_query, schema=schema)
except DjangoQLError as exc:
LOGGER.debug("Failed to parse search expression", exc=exc)
return super().filter_queryset(request, queryset, view)

View File

@ -1,29 +0,0 @@
from djangoql.serializers import DjangoQLSchemaSerializer
from drf_spectacular.generators import SchemaGenerator
from authentik.api.schema import create_component
from authentik.enterprise.search.fields import JSONSearchField
from authentik.enterprise.search.ql import AUTOCOMPLETE_COMPONENT_NAME, AUTOCOMPLETE_SCHEMA
class AKQLSchemaSerializer(DjangoQLSchemaSerializer):
def serialize(self, schema):
serialization = super().serialize(schema)
for _, fields in schema.models.items():
for _, field in fields.items():
if not isinstance(field, JSONSearchField):
continue
serialization["models"].update(field.get_nested_options())
return serialization
def serialize_field(self, field):
result = super().serialize_field(field)
if isinstance(field, JSONSearchField):
result["relation"] = field.relation()
return result
def postprocess_schema_search_autocomplete(result, generator: SchemaGenerator, **kwargs):
create_component(generator, AUTOCOMPLETE_COMPONENT_NAME, AUTOCOMPLETE_SCHEMA)
return result

View File

@ -1,17 +0,0 @@
SPECTACULAR_SETTINGS = {
"POSTPROCESSING_HOOKS": [
"authentik.api.schema.postprocess_schema_responses",
"authentik.enterprise.search.schema.postprocess_schema_search_autocomplete",
"drf_spectacular.hooks.postprocess_schema_enums",
],
}
REST_FRAMEWORK = {
"DEFAULT_PAGINATION_CLASS": "authentik.enterprise.search.pagination.AutocompletePagination",
"DEFAULT_FILTER_BACKENDS": [
"authentik.enterprise.search.ql.QLSearch",
"authentik.rbac.filters.ObjectFilter",
"django_filters.rest_framework.DjangoFilterBackend",
"rest_framework.filters.OrderingFilter",
],
}

View File

@ -1,78 +0,0 @@
from json import loads
from unittest.mock import PropertyMock, patch
from urllib.parse import urlencode
from django.urls import reverse
from rest_framework.test import APITestCase
from authentik.core.tests.utils import create_test_admin_user
@patch(
"authentik.enterprise.audit.middleware.EnterpriseAuditMiddleware.enabled",
PropertyMock(return_value=True),
)
class QLTest(APITestCase):
def setUp(self):
self.user = create_test_admin_user()
# ensure we have more than 1 user
create_test_admin_user()
def test_search(self):
"""Test simple search query"""
self.client.force_login(self.user)
query = f'username = "{self.user.username}"'
res = self.client.get(
reverse(
"authentik_api:user-list",
)
+ f"?{urlencode({"search": query})}"
)
self.assertEqual(res.status_code, 200)
content = loads(res.content)
self.assertEqual(content["pagination"]["count"], 1)
self.assertEqual(content["results"][0]["username"], self.user.username)
def test_no_search(self):
"""Ensure works with no search query"""
self.client.force_login(self.user)
res = self.client.get(
reverse(
"authentik_api:user-list",
)
)
self.assertEqual(res.status_code, 200)
content = loads(res.content)
self.assertNotEqual(content["pagination"]["count"], 1)
def test_search_no_ql(self):
"""Test simple search query (no QL)"""
self.client.force_login(self.user)
res = self.client.get(
reverse(
"authentik_api:user-list",
)
+ f"?{urlencode({"search": self.user.username})}"
)
self.assertEqual(res.status_code, 200)
content = loads(res.content)
self.assertGreaterEqual(content["pagination"]["count"], 1)
self.assertEqual(content["results"][0]["username"], self.user.username)
def test_search_json(self):
"""Test search query with a JSON attribute"""
self.user.attributes = {"foo": {"bar": "baz"}}
self.user.save()
self.client.force_login(self.user)
query = 'attributes.foo.bar = "baz"'
res = self.client.get(
reverse(
"authentik_api:user-list",
)
+ f"?{urlencode({"search": query})}"
)
self.assertEqual(res.status_code, 200)
content = loads(res.content)
self.assertEqual(content["pagination"]["count"], 1)
self.assertEqual(content["results"][0]["username"], self.user.username)

View File

@ -18,7 +18,6 @@ TENANT_APPS = [
"authentik.enterprise.providers.google_workspace",
"authentik.enterprise.providers.microsoft_entra",
"authentik.enterprise.providers.ssf",
"authentik.enterprise.search",
"authentik.enterprise.stages.authenticator_endpoint_gdtc",
"authentik.enterprise.stages.mtls",
"authentik.enterprise.stages.source",

View File

@ -1,36 +1,28 @@
"""Events API Views"""
from datetime import timedelta
from json import loads
import django_filters
from django.db.models import Count, ExpressionWrapper, F, QuerySet
from django.db.models import DateTimeField as DjangoDateTimeField
from django.db.models.aggregates import Count
from django.db.models.fields.json import KeyTextTransform, KeyTransform
from django.db.models.functions import TruncHour
from django.db.models.functions import ExtractDay, ExtractHour
from django.db.models.query_utils import Q
from django.utils.timezone import now
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, extend_schema
from guardian.shortcuts import get_objects_for_user
from rest_framework.decorators import action
from rest_framework.fields import ChoiceField, DateTimeField, DictField, IntegerField
from rest_framework.fields import DictField, IntegerField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from authentik.admin.api.metrics import CoordinateSerializer
from authentik.core.api.object_types import TypeCreateSerializer
from authentik.core.api.utils import ModelSerializer, PassiveSerializer
from authentik.events.models import Event, EventAction
class EventVolumeSerializer(PassiveSerializer):
"""Count of events of action created on day"""
action = ChoiceField(choices=EventAction.choices)
time = DateTimeField()
count = IntegerField()
class EventSerializer(ModelSerializer):
"""Event Serializer"""
@ -61,7 +53,7 @@ class EventsFilter(django_filters.FilterSet):
"""Filter for events"""
username = django_filters.CharFilter(
field_name="user", label="Username", method="filter_username"
field_name="user", lookup_expr="username", label="Username"
)
context_model_pk = django_filters.CharFilter(
field_name="context",
@ -86,19 +78,12 @@ class EventsFilter(django_filters.FilterSet):
field_name="action",
lookup_expr="icontains",
)
actions = django_filters.MultipleChoiceFilter(
field_name="action",
choices=EventAction.choices,
)
brand_name = django_filters.CharFilter(
field_name="brand",
lookup_expr="name",
label="Brand name",
)
def filter_username(self, queryset, name, value):
return queryset.filter(Q(user__username=value) | Q(context__username=value))
def filter_context_model_pk(self, queryset, name, value):
"""Because we store the PK as UUID.hex,
we need to remove the dashes that a client may send. We can't use a
@ -132,22 +117,6 @@ class EventViewSet(ModelViewSet):
]
filterset_class = EventsFilter
def get_ql_fields(self):
from djangoql.schema import DateTimeField, StrField
from authentik.enterprise.search.fields import ChoiceSearchField, JSONSearchField
return [
ChoiceSearchField(Event, "action"),
StrField(Event, "event_uuid"),
StrField(Event, "app", suggest_options=True),
StrField(Event, "client_ip"),
JSONSearchField(Event, "user", suggest_nested=False),
JSONSearchField(Event, "brand", suggest_nested=False),
JSONSearchField(Event, "context", suggest_nested=False),
DateTimeField(Event, "created", suggest_options=True),
]
@extend_schema(
methods=["GET"],
responses={200: EventTopPerUserSerializer(many=True)},
@ -187,37 +156,45 @@ class EventViewSet(ModelViewSet):
return Response(EventTopPerUserSerializer(instance=events, many=True).data)
@extend_schema(
responses={200: EventVolumeSerializer(many=True)},
parameters=[
OpenApiParameter(
"history_days",
type=OpenApiTypes.NUMBER,
location=OpenApiParameter.QUERY,
required=False,
default=7,
),
],
responses={200: CoordinateSerializer(many=True)},
)
@action(detail=False, methods=["GET"], pagination_class=None)
def volume(self, request: Request) -> Response:
"""Get event volume for specified filters and timeframe"""
queryset: QuerySet[Event] = self.filter_queryset(self.get_queryset())
delta = timedelta(days=7)
time_delta = request.query_params.get("history_days", 7)
if time_delta:
delta = timedelta(days=min(int(time_delta), 60))
queryset = self.filter_queryset(self.get_queryset())
return Response(queryset.get_events_per(timedelta(days=7), ExtractHour, 7 * 3))
@extend_schema(
responses={200: CoordinateSerializer(many=True)},
filters=[],
parameters=[
OpenApiParameter(
"action",
type=OpenApiTypes.STR,
location=OpenApiParameter.QUERY,
required=False,
),
OpenApiParameter(
"query",
type=OpenApiTypes.STR,
location=OpenApiParameter.QUERY,
required=False,
),
],
)
@action(detail=False, methods=["GET"], pagination_class=None)
def per_month(self, request: Request):
"""Get the count of events per month"""
filtered_action = request.query_params.get("action", EventAction.LOGIN)
try:
query = loads(request.query_params.get("query", "{}"))
except ValueError:
return Response(status=400)
return Response(
queryset.filter(created__gte=now() - delta)
.annotate(hour=TruncHour("created"))
.annotate(
time=ExpressionWrapper(
F("hour") - (F("hour__hour") % 6) * timedelta(hours=1),
output_field=DjangoDateTimeField(),
)
)
.values("time", "action")
.annotate(count=Count("pk"))
.order_by("time", "action")
get_objects_for_user(request.user, "authentik_events.view_event")
.filter(action=filtered_action)
.filter(**query)
.get_events_per(timedelta(weeks=4), ExtractDay, 30)
)
@extend_schema(responses={200: TypeCreateSerializer(many=True)})

View File

@ -11,7 +11,7 @@ from authentik.events.models import NotificationRule
class NotificationRuleSerializer(ModelSerializer):
"""NotificationRule Serializer"""
destination_group_obj = GroupSerializer(read_only=True, source="destination_group")
group_obj = GroupSerializer(read_only=True, source="group")
class Meta:
model = NotificationRule
@ -20,9 +20,8 @@ class NotificationRuleSerializer(ModelSerializer):
"name",
"transports",
"severity",
"destination_group",
"destination_group_obj",
"destination_event_user",
"group",
"group_obj",
]
@ -31,6 +30,6 @@ class NotificationRuleViewSet(UsedByMixin, ModelViewSet):
queryset = NotificationRule.objects.all()
serializer_class = NotificationRuleSerializer
filterset_fields = ["name", "severity", "destination_group__name"]
filterset_fields = ["name", "severity", "group__name"]
ordering = ["name"]
search_fields = ["name", "destination_group__name"]
search_fields = ["name", "group__name"]

View File

@ -1,26 +0,0 @@
# Generated by Django 5.1.11 on 2025-06-16 23:21
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_events", "0009_remove_notificationtransport_webhook_mapping_and_more"),
]
operations = [
migrations.RenameField(
model_name="notificationrule",
old_name="group",
new_name="destination_group",
),
migrations.AddField(
model_name="notificationrule",
name="destination_event_user",
field=models.BooleanField(
default=False,
help_text="When enabled, notification will be sent to user the user that triggered the event.When destination_group is configured, notification is sent to both.",
),
),
]

View File

@ -1,16 +1,21 @@
"""authentik events models"""
from collections.abc import Generator
import time
from collections import Counter
from datetime import timedelta
from difflib import get_close_matches
from functools import lru_cache
from inspect import currentframe
from smtplib import SMTPException
from typing import Any
from uuid import uuid4
from django.apps import apps
from django.db import connection, models
from django.db.models import Count, ExpressionWrapper, F
from django.db.models.fields import DurationField
from django.db.models.functions import Extract
from django.db.models.manager import Manager
from django.db.models.query import QuerySet
from django.http import HttpRequest
from django.http.request import QueryDict
from django.utils.timezone import now
@ -119,6 +124,60 @@ class EventAction(models.TextChoices):
CUSTOM_PREFIX = "custom_"
class EventQuerySet(QuerySet):
"""Custom events query set with helper functions"""
def get_events_per(
self,
time_since: timedelta,
extract: Extract,
data_points: int,
) -> list[dict[str, int]]:
"""Get event count by hour in the last day, fill with zeros"""
_now = now()
max_since = timedelta(days=60)
# Allow maximum of 60 days to limit load
if time_since.total_seconds() > max_since.total_seconds():
time_since = max_since
date_from = _now - time_since
result = (
self.filter(created__gte=date_from)
.annotate(age=ExpressionWrapper(_now - F("created"), output_field=DurationField()))
.annotate(age_interval=extract("age"))
.values("age_interval")
.annotate(count=Count("pk"))
.order_by("age_interval")
)
data = Counter({int(d["age_interval"]): d["count"] for d in result})
results = []
interval_delta = time_since / data_points
for interval in range(1, -data_points, -1):
results.append(
{
"x_cord": time.mktime((_now + (interval_delta * interval)).timetuple()) * 1000,
"y_cord": data[interval * -1],
}
)
return results
class EventManager(Manager):
"""Custom helper methods for Events"""
def get_queryset(self) -> QuerySet:
"""use custom queryset"""
return EventQuerySet(self.model, using=self._db)
def get_events_per(
self,
time_since: timedelta,
extract: Extract,
data_points: int,
) -> list[dict[str, int]]:
"""Wrap method from queryset"""
return self.get_queryset().get_events_per(time_since, extract, data_points)
class Event(SerializerModel, ExpiringModel):
"""An individual Audit/Metrics/Notification/Error Event"""
@ -134,6 +193,8 @@ class Event(SerializerModel, ExpiringModel):
# Shadow the expires attribute from ExpiringModel to override the default duration
expires = models.DateTimeField(default=default_event_duration)
objects = EventManager()
@staticmethod
def _get_app_from_request(request: HttpRequest) -> str:
if not isinstance(request, HttpRequest):
@ -549,7 +610,7 @@ class NotificationRule(SerializerModel, PolicyBindingModel):
default=NotificationSeverity.NOTICE,
help_text=_("Controls which severity level the created notifications will have."),
)
destination_group = models.ForeignKey(
group = models.ForeignKey(
Group,
help_text=_(
"Define which group of users this notification should be sent and shown to. "
@ -559,19 +620,6 @@ class NotificationRule(SerializerModel, PolicyBindingModel):
blank=True,
on_delete=models.SET_NULL,
)
destination_event_user = models.BooleanField(
default=False,
help_text=_(
"When enabled, notification will be sent to user the user that triggered the event."
"When destination_group is configured, notification is sent to both."
),
)
def destination_users(self, event: Event) -> Generator[User, Any]:
if self.destination_event_user and event.user.get("pk"):
yield User(pk=event.user.get("pk"))
if self.destination_group:
yield from self.destination_group.users.all()
@property
def serializer(self) -> type[Serializer]:
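The get_events_per() helper added near the top of this file's diff is what the admin metrics serializers, the user metrics serializer and the events API all call; a short sketch of invoking it directly, mirroring those call sites:

from datetime import timedelta

from django.db.models.functions import ExtractDay, ExtractHour

from authentik.events.models import Event, EventAction

# Successful logins over the last 7 days, 3 data points per day,
# exactly how LoginMetricsSerializer uses the helper.
weekly = Event.objects.filter(action=EventAction.LOGIN).get_events_per(
    timedelta(days=7), ExtractHour, 7 * 3
)

# Roughly a month of daily buckets, as the per_month action does.
monthly = Event.objects.filter(action=EventAction.LOGIN).get_events_per(
    timedelta(weeks=4), ExtractDay, 30
)

# Every entry is {"x_cord": <epoch milliseconds>, "y_cord": <event count>}.
print(weekly[0], monthly[-1])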

View File

@ -68,10 +68,14 @@ def event_trigger_handler(event_uuid: str, trigger_name: str):
if not result.passing:
return
if not trigger.group:
LOGGER.debug("e(trigger): trigger has no group", trigger=trigger)
return
LOGGER.debug("e(trigger): event trigger matched", trigger=trigger)
# Create the notification objects
for transport in trigger.transports.all():
for user in trigger.destination_users(event):
for user in trigger.group.users.all():
LOGGER.debug("created notification")
notification_transport.apply_async(
args=[

View File

@ -6,7 +6,6 @@ from django.urls import reverse
from rest_framework.test import APITestCase
from authentik.core.models import Group, User
from authentik.core.tests.utils import create_test_user
from authentik.events.models import (
Event,
EventAction,
@ -35,7 +34,7 @@ class TestEventsNotifications(APITestCase):
def test_trigger_empty(self):
"""Test trigger without any policies attached"""
transport = NotificationTransport.objects.create(name=generate_id())
trigger = NotificationRule.objects.create(name=generate_id(), destination_group=self.group)
trigger = NotificationRule.objects.create(name=generate_id(), group=self.group)
trigger.transports.add(transport)
trigger.save()
@ -47,7 +46,7 @@ class TestEventsNotifications(APITestCase):
def test_trigger_single(self):
"""Test simple transport triggering"""
transport = NotificationTransport.objects.create(name=generate_id())
trigger = NotificationRule.objects.create(name=generate_id(), destination_group=self.group)
trigger = NotificationRule.objects.create(name=generate_id(), group=self.group)
trigger.transports.add(transport)
trigger.save()
matcher = EventMatcherPolicy.objects.create(
@ -60,25 +59,6 @@ class TestEventsNotifications(APITestCase):
Event.new(EventAction.CUSTOM_PREFIX).save()
self.assertEqual(execute_mock.call_count, 1)
def test_trigger_event_user(self):
"""Test trigger with event user"""
user = create_test_user()
transport = NotificationTransport.objects.create(name=generate_id())
trigger = NotificationRule.objects.create(name=generate_id(), destination_event_user=True)
trigger.transports.add(transport)
trigger.save()
matcher = EventMatcherPolicy.objects.create(
name="matcher", action=EventAction.CUSTOM_PREFIX
)
PolicyBinding.objects.create(target=trigger, policy=matcher, order=0)
execute_mock = MagicMock()
with patch("authentik.events.models.NotificationTransport.send", execute_mock):
Event.new(EventAction.CUSTOM_PREFIX).set_user(user).save()
self.assertEqual(execute_mock.call_count, 1)
notification: Notification = execute_mock.call_args[0][0]
self.assertEqual(notification.user, user)
def test_trigger_no_group(self):
"""Test trigger without group"""
trigger = NotificationRule.objects.create(name=generate_id())
@ -96,7 +76,7 @@ class TestEventsNotifications(APITestCase):
"""Test Policy error which would cause recursion"""
transport = NotificationTransport.objects.create(name=generate_id())
NotificationRule.objects.filter(name__startswith="default").delete()
trigger = NotificationRule.objects.create(name=generate_id(), destination_group=self.group)
trigger = NotificationRule.objects.create(name=generate_id(), group=self.group)
trigger.transports.add(transport)
trigger.save()
matcher = EventMatcherPolicy.objects.create(
@ -119,7 +99,7 @@ class TestEventsNotifications(APITestCase):
transport = NotificationTransport.objects.create(name=generate_id(), send_once=True)
NotificationRule.objects.filter(name__startswith="default").delete()
trigger = NotificationRule.objects.create(name=generate_id(), destination_group=self.group)
trigger = NotificationRule.objects.create(name=generate_id(), group=self.group)
trigger.transports.add(transport)
trigger.save()
matcher = EventMatcherPolicy.objects.create(
@ -143,7 +123,7 @@ class TestEventsNotifications(APITestCase):
name=generate_id(), webhook_mapping_body=mapping, mode=TransportMode.LOCAL
)
NotificationRule.objects.filter(name__startswith="default").delete()
trigger = NotificationRule.objects.create(name=generate_id(), destination_group=self.group)
trigger = NotificationRule.objects.create(name=generate_id(), group=self.group)
trigger.transports.add(transport)
matcher = EventMatcherPolicy.objects.create(
name="matcher", action=EventAction.CUSTOM_PREFIX

View File

@ -37,9 +37,6 @@ class WebsocketMessageInstruction(IntEnum):
# Provider specific message
PROVIDER_SPECIFIC = 3
# Session ended
SESSION_END = 4
@dataclass(slots=True)
class WebsocketMessage:
@ -148,14 +145,6 @@ class OutpostConsumer(JsonWebsocketConsumer):
asdict(WebsocketMessage(instruction=WebsocketMessageInstruction.TRIGGER_UPDATE))
)
def event_session_end(self, event):
"""Event handler which is called when a session is ended"""
self.send_json(
asdict(
WebsocketMessage(instruction=WebsocketMessageInstruction.SESSION_END, args=event)
)
)
def event_provider_specific(self, event):
"""Event handler which can be called by provider-specific
implementations to send specific messages to the outpost"""

View File

@ -1,24 +1,17 @@
"""authentik outpost signals"""
from django.contrib.auth.signals import user_logged_out
from django.core.cache import cache
from django.db.models import Model
from django.db.models.signals import m2m_changed, post_save, pre_delete, pre_save
from django.dispatch import receiver
from django.http import HttpRequest
from structlog.stdlib import get_logger
from authentik.brands.models import Brand
from authentik.core.models import AuthenticatedSession, Provider, User
from authentik.core.models import Provider
from authentik.crypto.models import CertificateKeyPair
from authentik.lib.utils.reflection import class_to_path
from authentik.outposts.models import Outpost, OutpostServiceConnection
from authentik.outposts.tasks import (
CACHE_KEY_OUTPOST_DOWN,
outpost_controller,
outpost_post_save,
outpost_session_end,
)
from authentik.outposts.tasks import CACHE_KEY_OUTPOST_DOWN, outpost_controller, outpost_post_save
LOGGER = get_logger()
UPDATE_TRIGGERING_MODELS = (
@ -80,17 +73,3 @@ def pre_delete_cleanup(sender, instance: Outpost, **_):
instance.user.delete()
cache.set(CACHE_KEY_OUTPOST_DOWN % instance.pk.hex, instance)
outpost_controller.delay(instance.pk.hex, action="down", from_cache=True)
@receiver(user_logged_out)
def logout_revoke_direct(sender: type[User], request: HttpRequest, **_):
"""Catch logout by direct logout and forward to providers"""
if not request.session or not request.session.session_key:
return
outpost_session_end.delay(request.session.session_key)
@receiver(pre_delete, sender=AuthenticatedSession)
def logout_revoke(sender: type[AuthenticatedSession], instance: AuthenticatedSession, **_):
"""Catch logout by expiring sessions being deleted"""
outpost_session_end.delay(instance.session.session_key)

View File

@ -1,6 +1,5 @@
"""outpost tasks"""
from hashlib import sha256
from os import R_OK, access
from pathlib import Path
from socket import gethostname
@ -50,11 +49,6 @@ LOGGER = get_logger()
CACHE_KEY_OUTPOST_DOWN = "goauthentik.io/outposts/teardown/%s"
def hash_session_key(session_key: str) -> str:
"""Hash the session key for sending session end signals"""
return sha256(session_key.encode("ascii")).hexdigest()
def controller_for_outpost(outpost: Outpost) -> type[BaseController] | None:
"""Get a controller for the outpost, when a service connection is defined"""
if not outpost.service_connection:
@ -295,20 +289,3 @@ def outpost_connection_discovery(self: SystemTask):
url=unix_socket_path,
)
self.set_status(TaskStatus.SUCCESSFUL, *messages)
@CELERY_APP.task()
def outpost_session_end(session_id: str):
"""Update outpost instances connected to a single outpost"""
layer = get_channel_layer()
hashed_session_id = hash_session_key(session_id)
for outpost in Outpost.objects.all():
LOGGER.info("Sending session end signal to outpost", outpost=outpost)
group = OUTPOST_GROUP % {"outpost_pk": str(outpost.pk)}
async_to_sync(layer.group_send)(
group,
{
"type": "event.session.end",
"session_id": hashed_session_id,
},
)

View File

@ -1,9 +1,11 @@
"""Websocket tests"""
from dataclasses import asdict
from unittest.mock import patch
from channels.routing import URLRouter
from channels.testing import WebsocketCommunicator
from django.contrib.contenttypes.models import ContentType
from django.test import TransactionTestCase
from authentik import __version__
@ -14,6 +16,12 @@ from authentik.providers.proxy.models import ProxyProvider
from authentik.root import websocket
def patched__get_ct_cached(app_label, codename):
"""Caches `ContentType` instances like its `QuerySet` does."""
return ContentType.objects.get(app_label=app_label, permission__codename=codename)
@patch("guardian.shortcuts._get_ct_cached", patched__get_ct_cached)
class TestOutpostWS(TransactionTestCase):
"""Websocket tests"""

View File

@ -1,11 +1,11 @@
"""authentik policy engine"""
from collections.abc import Iterator
from collections.abc import Iterable
from multiprocessing import Pipe, current_process
from multiprocessing.connection import Connection
from time import perf_counter
from django.core.cache import cache
from django.db.models import Count, Q, QuerySet
from django.http import HttpRequest
from sentry_sdk import start_span
from sentry_sdk.tracing import Span
@ -67,14 +67,11 @@ class PolicyEngine:
self.__processes: list[PolicyProcessInfo] = []
self.use_cache = True
self.__expected_result_count = 0
self.__static_result: PolicyResult | None = None
def iterate_bindings(self) -> Iterator[PolicyBinding]:
def bindings(self) -> QuerySet[PolicyBinding] | Iterable[PolicyBinding]:
"""Make sure all Policies are their respective classes"""
return (
PolicyBinding.objects.filter(target=self.__pbm, enabled=True)
.order_by("order")
.iterator()
)
return PolicyBinding.objects.filter(target=self.__pbm, enabled=True).order_by("order")
def _check_policy_type(self, binding: PolicyBinding):
"""Check policy type, make sure it's not the root class as that has no logic implemented"""
@ -84,30 +81,66 @@ class PolicyEngine:
def _check_cache(self, binding: PolicyBinding):
if not self.use_cache:
return False
before = perf_counter()
key = cache_key(binding, self.request)
cached_policy = cache.get(key, None)
duration = max(perf_counter() - before, 0)
if not cached_policy:
return False
self.logger.debug(
"P_ENG: Taking result from cache",
binding=binding,
cache_key=key,
request=self.request,
)
HIST_POLICIES_EXECUTION_TIME.labels(
# It's a bit silly to time this, but
with HIST_POLICIES_EXECUTION_TIME.labels(
binding_order=binding.order,
binding_target_type=binding.target_type,
binding_target_name=binding.target_name,
object_pk=str(self.request.obj.pk),
object_type=class_to_path(self.request.obj.__class__),
mode="cache_retrieve",
).observe(duration)
# It's a bit silly to time this, but
).time():
key = cache_key(binding, self.request)
cached_policy = cache.get(key, None)
if not cached_policy:
return False
self.logger.debug(
"P_ENG: Taking result from cache",
binding=binding,
cache_key=key,
request=self.request,
)
self.__cached_policies.append(cached_policy)
return True
def compute_static_bindings(self, bindings: QuerySet[PolicyBinding]):
"""Check static bindings if possible"""
aggrs = {
"total": Count(
"pk", filter=Q(Q(group__isnull=False) | Q(user__isnull=False), policy=None)
),
}
if self.request.user.pk:
all_groups = self.request.user.all_groups()
aggrs["passing"] = Count(
"pk",
filter=Q(
Q(
Q(user=self.request.user) | Q(group__in=all_groups),
negate=False,
)
| Q(
Q(~Q(user=self.request.user), user__isnull=False)
| Q(~Q(group__in=all_groups), group__isnull=False),
negate=True,
),
enabled=True,
),
)
matched_bindings = bindings.aggregate(**aggrs)
passing = False
if matched_bindings["total"] == 0 and matched_bindings.get("passing", 0) == 0:
# If we didn't find any static bindings, do nothing
return
self.logger.debug("P_ENG: Found static bindings", **matched_bindings)
if matched_bindings.get("passing", 0) > 0:
# Any passing static binding -> passing
passing = True
elif matched_bindings["total"] > 0 and matched_bindings.get("passing", 0) < 1:
# No matching static bindings but at least one is configured -> not passing
passing = False
self.__static_result = PolicyResult(passing)
def build(self) -> "PolicyEngine":
"""Build wrapper which monitors performance"""
with (
@ -123,7 +156,12 @@ class PolicyEngine:
span: Span
span.set_data("pbm", self.__pbm)
span.set_data("request", self.request)
for binding in self.iterate_bindings():
bindings = self.bindings()
policy_bindings = bindings
if isinstance(bindings, QuerySet):
self.compute_static_bindings(bindings)
policy_bindings = [x for x in bindings if x.policy]
for binding in policy_bindings:
self.__expected_result_count += 1
self._check_policy_type(binding)
@ -153,10 +191,13 @@ class PolicyEngine:
@property
def result(self) -> PolicyResult:
"""Get policy-checking result"""
self.__processes.sort(key=lambda x: x.binding.order)
process_results: list[PolicyResult] = [x.result for x in self.__processes if x.result]
all_results = list(process_results + self.__cached_policies)
if len(all_results) < self.__expected_result_count: # pragma: no cover
raise AssertionError("Got fewer results than policies")
if self.__static_result:
all_results.append(self.__static_result)
# No results, no policies attached -> passing
if len(all_results) == 0:
return PolicyResult(self.empty_result)
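
For reference, the static-binding shortcut above relies on Django's filtered aggregation, which lets both counters be computed in a single query. A minimal sketch of that pattern, assuming a simplified binding model with nullable user, group and policy foreign keys (not the exact authentik models):

from django.db.models import Count, Q

def static_binding_counts(bindings, user, groups):
    # One SELECT with two conditional counts: all static (policy-less) bindings,
    # and the subset that matches the current user directly or via a group.
    return bindings.aggregate(
        total=Count("pk", filter=Q(policy=None) & (Q(user__isnull=False) | Q(group__isnull=False))),
        passing=Count("pk", filter=Q(policy=None) & (Q(user=user) | Q(group__in=groups))),
    )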

View File

@ -1,9 +1,12 @@
"""policy engine tests"""
from django.core.cache import cache
from django.db import connections
from django.test import TestCase
from django.test.utils import CaptureQueriesContext
from authentik.core.tests.utils import create_test_admin_user
from authentik.core.models import Group
from authentik.core.tests.utils import create_test_user
from authentik.lib.generators import generate_id
from authentik.policies.dummy.models import DummyPolicy
from authentik.policies.engine import PolicyEngine
@ -19,7 +22,7 @@ class TestPolicyEngine(TestCase):
def setUp(self):
clear_policy_cache()
self.user = create_test_admin_user()
self.user = create_test_user()
self.policy_false = DummyPolicy.objects.create(
name=generate_id(), result=False, wait_min=0, wait_max=1
)
@ -127,3 +130,43 @@ class TestPolicyEngine(TestCase):
self.assertEqual(len(cache.keys(f"{CACHE_PREFIX}{binding.policy_binding_uuid.hex}*")), 1)
self.assertEqual(engine.build().passing, False)
self.assertEqual(len(cache.keys(f"{CACHE_PREFIX}{binding.policy_binding_uuid.hex}*")), 1)
def test_engine_static_bindings(self):
"""Test static bindings"""
group_a = Group.objects.create(name=generate_id())
group_b = Group.objects.create(name=generate_id())
group_b.users.add(self.user)
user = create_test_user()
for case in [
{
"message": "Group, not member",
"binding_args": {"group": group_a},
"passing": False,
},
{
"message": "Group, member",
"binding_args": {"group": group_b},
"passing": True,
},
{
"message": "User, other",
"binding_args": {"user": user},
"passing": False,
},
{
"message": "User, same",
"binding_args": {"user": self.user},
"passing": True,
},
]:
with self.subTest():
pbm = PolicyBindingModel.objects.create()
for x in range(1000):
PolicyBinding.objects.create(target=pbm, order=x, **case["binding_args"])
engine = PolicyEngine(pbm, self.user)
engine.use_cache = False
with CaptureQueriesContext(connections["default"]) as ctx:
engine.build()
self.assertLess(ctx.final_queries, 1000)
self.assertEqual(engine.result.passing, case["passing"])
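
The query-count assertion in this test uses Django's CaptureQueriesContext; the same check can be written standalone roughly like this (a sketch, not part of the test suite):

from django.db import connections
from django.test.utils import CaptureQueriesContext

with CaptureQueriesContext(connections["default"]) as ctx:
    engine.build()  # `engine` set up as in the test above
# captured_queries also holds the raw SQL, which helps when the count regresses
assert len(ctx.captured_queries) < 1000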

View File

@ -29,13 +29,12 @@ class TestPolicyProcess(TestCase):
def setUp(self):
clear_policy_cache()
self.factory = RequestFactory()
self.user = User.objects.create_user(username="policyuser")
self.user = User.objects.create_user(username=generate_id())
def test_group_passing(self):
"""Test binding to group"""
group = Group.objects.create(name="test-group")
group = Group.objects.create(name=generate_id())
group.users.add(self.user)
group.save()
binding = PolicyBinding(group=group)
request = PolicyRequest(self.user)
@ -44,8 +43,7 @@ class TestPolicyProcess(TestCase):
def test_group_negative(self):
"""Test binding to group"""
group = Group.objects.create(name="test-group")
group.save()
group = Group.objects.create(name=generate_id())
binding = PolicyBinding(group=group)
request = PolicyRequest(self.user)
@ -115,8 +113,10 @@ class TestPolicyProcess(TestCase):
def test_exception(self):
"""Test policy execution"""
policy = Policy.objects.create(name="test-execution")
binding = PolicyBinding(policy=policy, target=Application.objects.create(name="test"))
policy = Policy.objects.create(name=generate_id())
binding = PolicyBinding(
policy=policy, target=Application.objects.create(name=generate_id())
)
request = PolicyRequest(self.user)
response = PolicyProcess(binding, request, None).execute()
@ -125,13 +125,15 @@ class TestPolicyProcess(TestCase):
def test_execution_logging(self):
"""Test policy execution creates event"""
policy = DummyPolicy.objects.create(
name="test-execution-logging",
name=generate_id(),
result=False,
wait_min=0,
wait_max=1,
execution_logging=True,
)
binding = PolicyBinding(policy=policy, target=Application.objects.create(name="test"))
binding = PolicyBinding(
policy=policy, target=Application.objects.create(name=generate_id())
)
http_request = self.factory.get(reverse("authentik_api:user-impersonate-end"))
http_request.user = self.user
@ -186,13 +188,15 @@ class TestPolicyProcess(TestCase):
def test_execution_logging_anonymous(self):
"""Test policy execution creates event with anonymous user"""
policy = DummyPolicy.objects.create(
name="test-execution-logging-anon",
name=generate_id(),
result=False,
wait_min=0,
wait_max=1,
execution_logging=True,
)
binding = PolicyBinding(policy=policy, target=Application.objects.create(name="test"))
binding = PolicyBinding(
policy=policy, target=Application.objects.create(name=generate_id())
)
user = AnonymousUser()
@ -219,9 +223,9 @@ class TestPolicyProcess(TestCase):
def test_raises(self):
"""Test policy that raises error"""
policy_raises = ExpressionPolicy.objects.create(name="raises", expression="{{ 0/0 }}")
policy_raises = ExpressionPolicy.objects.create(name=generate_id(), expression="{{ 0/0 }}")
binding = PolicyBinding(
policy=policy_raises, target=Application.objects.create(name="test")
policy=policy_raises, target=Application.objects.create(name=generate_id())
)
request = PolicyRequest(self.user)

View File

@ -387,7 +387,8 @@ class TestAuthorize(OAuthTestCase):
self.assertEqual(
response.url,
(
f"http://localhost#id_token={provider.encode(token.id_token.to_dict())}"
f"http://localhost#access_token={token.token}"
f"&id_token={provider.encode(token.id_token.to_dict())}"
f"&token_type={TOKEN_TYPE}"
f"&expires_in={int(expires)}&state={state}"
),
@ -562,6 +563,7 @@ class TestAuthorize(OAuthTestCase):
"url": "http://localhost",
"title": f"Redirecting to {app.name}...",
"attrs": {
"access_token": token.token,
"id_token": provider.encode(token.id_token.to_dict()),
"token_type": TOKEN_TYPE,
"expires_in": "3600",

View File

@ -150,12 +150,12 @@ class OAuthAuthorizationParams:
self.check_redirect_uri()
self.check_grant()
self.check_scope(github_compat)
self.check_nonce()
self.check_code_challenge()
if self.request:
raise AuthorizeError(
self.redirect_uri, "request_not_supported", self.grant_type, self.state
)
self.check_nonce()
self.check_code_challenge()
def check_grant(self):
"""Check grant"""
@ -630,6 +630,7 @@ class OAuthFulfillmentStage(StageView):
if self.params.response_type in [
ResponseTypes.ID_TOKEN_TOKEN,
ResponseTypes.CODE_ID_TOKEN_TOKEN,
ResponseTypes.ID_TOKEN,
ResponseTypes.CODE_TOKEN,
]:
query_fragment["access_token"] = token.token
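
For the hybrid and implicit response types above, the tokens are returned in the URL fragment rather than the query string. A hypothetical helper showing how such a redirect URL is assembled (illustrative only, not authentik's implementation):

from urllib.parse import urlencode, urlsplit, urlunsplit

def add_fragment(redirect_uri: str, **params: str) -> str:
    """Return redirect_uri with params encoded into the URL fragment."""
    scheme, netloc, path, query, _ = urlsplit(redirect_uri)
    return urlunsplit((scheme, netloc, path, query, urlencode(params)))

# add_fragment("http://localhost", access_token="...", token_type="Bearer",
#              expires_in="3600", state="xyz")
# -> "http://localhost#access_token=...&token_type=Bearer&expires_in=3600&state=xyz"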

View File

@ -0,0 +1,23 @@
"""Proxy provider signals"""
from django.contrib.auth.signals import user_logged_out
from django.db.models.signals import pre_delete
from django.dispatch import receiver
from django.http import HttpRequest
from authentik.core.models import AuthenticatedSession, User
from authentik.providers.proxy.tasks import proxy_on_logout
@receiver(user_logged_out)
def logout_proxy_revoke_direct(sender: type[User], request: HttpRequest, **_):
"""Catch logout by direct logout and forward to proxy providers"""
if not request.session or not request.session.session_key:
return
proxy_on_logout.delay(request.session.session_key)
@receiver(pre_delete, sender=AuthenticatedSession)
def logout_proxy_revoke(sender: type[AuthenticatedSession], instance: AuthenticatedSession, **_):
"""Catch logout by expiring sessions being deleted"""
proxy_on_logout.delay(instance.session.session_key)

View File

@ -0,0 +1,26 @@
"""proxy provider tasks"""
from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer
from authentik.outposts.consumer import OUTPOST_GROUP
from authentik.outposts.models import Outpost, OutpostType
from authentik.providers.oauth2.id_token import hash_session_key
from authentik.root.celery import CELERY_APP
@CELERY_APP.task()
def proxy_on_logout(session_id: str):
"""Update outpost instances connected to a single outpost"""
layer = get_channel_layer()
hashed_session_id = hash_session_key(session_id)
for outpost in Outpost.objects.filter(type=OutpostType.PROXY):
group = OUTPOST_GROUP % {"outpost_pk": str(outpost.pk)}
async_to_sync(layer.group_send)(
group,
{
"type": "event.provider.specific",
"sub_type": "logout",
"session_id": hashed_session_id,
},
)
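
On the other end of the channel layer, a Channels consumer in the outpost group handles a message of type "event.provider.specific" with a method named after the type (dots replaced by underscores). A hypothetical sketch of such a handler, not the actual authentik consumer:

from channels.generic.websocket import AsyncJsonWebsocketConsumer

class OutpostConsumer(AsyncJsonWebsocketConsumer):
    async def event_provider_specific(self, event: dict):
        # Relay the provider-specific instruction (e.g. sub_type="logout") to the outpost.
        await self.send_json({
            "instruction": 3,  # provider-specific event kind
            "args": {k: v for k, v in event.items() if k != "type"},
        })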

View File

@ -49,7 +49,6 @@ class TestEndpointsAPI(APITestCase):
self.assertJSONEqual(
response.content.decode(),
{
"autocomplete": {},
"pagination": {
"next": 0,
"previous": 0,
@ -102,7 +101,6 @@ class TestEndpointsAPI(APITestCase):
self.assertJSONEqual(
response.content.decode(),
{
"autocomplete": {},
"pagination": {
"next": 0,
"previous": 0,

View File

@ -20,9 +20,6 @@ from authentik.lib.utils.time import timedelta_from_string
from authentik.policies.engine import PolicyEngine
from authentik.policies.views import PolicyAccessView
from authentik.providers.rac.models import ConnectionToken, Endpoint, RACProvider
from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT
PLAN_CONNECTION_SETTINGS = "connection_settings"
class RACStartView(PolicyAccessView):
@ -112,15 +109,10 @@ class RACFinalStage(RedirectStage):
return super().dispatch(request, *args, **kwargs)
def get_challenge(self, *args, **kwargs) -> RedirectChallenge:
settings = self.executor.plan.context.get(PLAN_CONNECTION_SETTINGS)
if not settings:
settings = self.executor.plan.context.get(PLAN_CONTEXT_PROMPT, {}).get(
PLAN_CONNECTION_SETTINGS
)
token = ConnectionToken.objects.create(
provider=self.provider,
endpoint=self.endpoint,
settings=settings or {},
settings=self.executor.plan.context.get("connection_settings", {}),
session=self.request.session["authenticatedsession"],
expires=now() + timedelta_from_string(self.provider.connection_expiry),
expiring=True,

View File

@ -44,7 +44,6 @@ class TestRBACRoleAPI(APITestCase):
self.assertJSONEqual(
res.content.decode(),
{
"autocomplete": {},
"pagination": {
"next": 0,
"previous": 0,

View File

@ -46,7 +46,6 @@ class TestRBACUserAPI(APITestCase):
self.assertJSONEqual(
res.content.decode(),
{
"autocomplete": {},
"pagination": {
"next": 0,
"previous": 0,

View File

@ -38,7 +38,6 @@ class TestAPIPerms(APITestCase):
self.assertJSONEqual(
res.content.decode(),
{
"autocomplete": {},
"pagination": {
"next": 0,
"previous": 0,
@ -74,7 +73,6 @@ class TestAPIPerms(APITestCase):
self.assertJSONEqual(
res.content.decode(),
{
"autocomplete": {},
"pagination": {
"next": 0,
"previous": 0,

View File

@ -1,29 +1,12 @@
"""test decorators api"""
from django.urls import reverse
from guardian.shortcuts import assign_perm
from rest_framework.decorators import action
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.test import APITestCase
from rest_framework.viewsets import ModelViewSet
from authentik.core.models import Application
from authentik.core.tests.utils import create_test_user
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import get_request
from authentik.rbac.decorators import permission_required
class MVS(ModelViewSet):
queryset = Application.objects.all()
lookup_field = "slug"
@permission_required("authentik_core.view_application", ["authentik_events.view_event"])
@action(detail=True, pagination_class=None, filter_backends=[])
def test(self, request: Request, slug: str):
self.get_object()
return Response(status=200)
class TestAPIDecorators(APITestCase):
@ -35,33 +18,41 @@ class TestAPIDecorators(APITestCase):
def test_obj_perm_denied(self):
"""Test object perm denied"""
request = get_request("", user=self.user)
self.client.force_login(self.user)
app = Application.objects.create(name=generate_id(), slug=generate_id())
response = MVS.as_view({"get": "test"})(request, slug=app.slug)
response = self.client.get(
reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
)
self.assertEqual(response.status_code, 403)
def test_obj_perm_global(self):
"""Test object perm successful (global)"""
assign_perm("authentik_core.view_application", self.user)
assign_perm("authentik_events.view_event", self.user)
self.client.force_login(self.user)
app = Application.objects.create(name=generate_id(), slug=generate_id())
request = get_request("", user=self.user)
response = MVS.as_view({"get": "test"})(request, slug=app.slug)
self.assertEqual(response.status_code, 200, response.data)
response = self.client.get(
reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
)
self.assertEqual(response.status_code, 200)
def test_obj_perm_scoped(self):
"""Test object perm successful (scoped)"""
assign_perm("authentik_events.view_event", self.user)
app = Application.objects.create(name=generate_id(), slug=generate_id())
assign_perm("authentik_core.view_application", self.user, app)
request = get_request("", user=self.user)
response = MVS.as_view({"get": "test"})(request, slug=app.slug)
self.client.force_login(self.user)
response = self.client.get(
reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
)
self.assertEqual(response.status_code, 200)
def test_other_perm_denied(self):
"""Test other perm denied"""
self.client.force_login(self.user)
app = Application.objects.create(name=generate_id(), slug=generate_id())
assign_perm("authentik_core.view_application", self.user, app)
request = get_request("", user=self.user)
response = MVS.as_view({"get": "test"})(request, slug=app.slug)
response = self.client.get(
reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
)
self.assertEqual(response.status_code, 403)
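
These tests exercise the difference between guardian's global and object-level permissions; the distinction is only the optional third argument to assign_perm:

from guardian.shortcuts import assign_perm

# Global permission: user may view any Application
assign_perm("authentik_core.view_application", user)

# Object-level permission: user may view only this one Application
assign_perm("authentik_core.view_application", user, app)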

View File

@ -9,14 +9,13 @@ https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
import django
from channels.routing import ProtocolTypeRouter, URLRouter
from defusedxml import defuse_stdlib
from django.core.asgi import get_asgi_application
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
from authentik.root.setup import setup
# DJANGO_SETTINGS_MODULE is set in gunicorn.conf.py
setup()
defuse_stdlib()
django.setup()

View File

@ -446,8 +446,6 @@ _DISALLOWED_ITEMS = [
"MIDDLEWARE",
"AUTHENTICATION_BACKENDS",
"CELERY",
"SPECTACULAR_SETTINGS",
"REST_FRAMEWORK",
]
SILENCED_SYSTEM_CHECKS = [
@ -470,8 +468,6 @@ def _update_settings(app_path: str):
TENANT_APPS.extend(getattr(settings_module, "TENANT_APPS", []))
MIDDLEWARE.extend(getattr(settings_module, "MIDDLEWARE", []))
AUTHENTICATION_BACKENDS.extend(getattr(settings_module, "AUTHENTICATION_BACKENDS", []))
SPECTACULAR_SETTINGS.update(getattr(settings_module, "SPECTACULAR_SETTINGS", {}))
REST_FRAMEWORK.update(getattr(settings_module, "REST_FRAMEWORK", {}))
CELERY["beat_schedule"].update(getattr(settings_module, "CELERY_BEAT_SCHEDULE", {}))
for _attr in dir(settings_module):
if not _attr.startswith("__") and _attr not in _DISALLOWED_ITEMS:

View File

@ -1,26 +0,0 @@
import os
import warnings
from cryptography.hazmat.backends.openssl.backend import backend
from defusedxml import defuse_stdlib
from authentik.lib.config import CONFIG
def setup():
warnings.filterwarnings("ignore", "SelectableGroups dict interface")
warnings.filterwarnings(
"ignore",
"defusedxml.lxml is no longer supported and will be removed in a future release.",
)
warnings.filterwarnings(
"ignore",
"defusedxml.cElementTree is deprecated, import from defusedxml.ElementTree instead.",
)
defuse_stdlib()
if CONFIG.get_bool("compliance.fips.enabled", False):
backend._enable_fips()
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "authentik.root.settings")

View File

@ -3,44 +3,25 @@
import os
from argparse import ArgumentParser
from unittest import TestCase
from unittest.mock import patch
import pytest
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.test.runner import DiscoverRunner
from structlog.stdlib import get_logger
from authentik.lib.config import CONFIG
from authentik.lib.sentry import sentry_init
from authentik.root.signals import post_startup, pre_startup, startup
from tests.e2e.utils import get_docker_tag
# globally set maxDiff to none to show full assert error
TestCase.maxDiff = None
def get_docker_tag() -> str:
"""Get docker-tag based off of CI variables"""
env_pr_branch = "GITHUB_HEAD_REF"
default_branch = "GITHUB_REF"
branch_name = os.environ.get(default_branch, "main")
if os.environ.get(env_pr_branch, "") != "":
branch_name = os.environ[env_pr_branch]
branch_name = branch_name.replace("refs/heads/", "").replace("/", "-")
return f"gh-{branch_name}"
def patched__get_ct_cached(app_label, codename):
"""Caches `ContentType` instances like its `QuerySet` does."""
return ContentType.objects.get(app_label=app_label, permission__codename=codename)
class PytestTestRunner(DiscoverRunner): # pragma: no cover
"""Runs pytest to discover and run tests."""
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.logger = get_logger().bind(runner="pytest")
self.args = []
if self.failfast:
@ -53,33 +34,22 @@ class PytestTestRunner(DiscoverRunner): # pragma: no cover
if kwargs.get("no_capture", False):
self.args.append("--capture=no")
self._setup_test_environment()
def _setup_test_environment(self):
"""Configure test environment settings"""
settings.TEST = True
settings.CELERY["task_always_eager"] = True
# Test-specific configuration
test_config = {
"events.context_processors.geoip": "tests/GeoLite2-City-Test.mmdb",
"events.context_processors.asn": "tests/GeoLite2-ASN-Test.mmdb",
"blueprints_dir": "./blueprints",
"outposts.container_image_base": f"ghcr.io/goauthentik/dev-%(type)s:{get_docker_tag()}",
"tenants.enabled": False,
"outposts.disable_embedded_outpost": False,
"error_reporting.sample_rate": 0,
"error_reporting.environment": "testing",
"error_reporting.send_pii": True,
}
for key, value in test_config.items():
CONFIG.set(key, value)
CONFIG.set("events.context_processors.geoip", "tests/GeoLite2-City-Test.mmdb")
CONFIG.set("events.context_processors.asn", "tests/GeoLite2-ASN-Test.mmdb")
CONFIG.set("blueprints_dir", "./blueprints")
CONFIG.set(
"outposts.container_image_base",
f"ghcr.io/goauthentik/dev-%(type)s:{get_docker_tag()}",
)
CONFIG.set("tenants.enabled", False)
CONFIG.set("outposts.disable_embedded_outpost", False)
CONFIG.set("error_reporting.sample_rate", 0)
CONFIG.set("error_reporting.environment", "testing")
CONFIG.set("error_reporting.send_pii", True)
sentry_init()
self.logger.debug("Test environment configured")
# Send startup signals
pre_startup.send(sender=self, mode="test")
startup.send(sender=self, mode="test")
post_startup.send(sender=self, mode="test")
@ -102,21 +72,7 @@ class PytestTestRunner(DiscoverRunner): # pragma: no cover
help="Disable any capturing of stdout/stderr during tests.",
)
def _validate_test_label(self, label: str) -> bool:
"""Validate test label format"""
if not label:
return False
# Check for invalid characters, but allow forward slashes and colons
# for paths and pytest markers
invalid_chars = set('\\*?"<>|')
if any(c in label for c in invalid_chars):
self.logger.error("Invalid characters in test label", label=label)
return False
return True
def run_tests(self, test_labels: list[str], extra_tests=None, **kwargs):
def run_tests(self, test_labels, extra_tests=None, **kwargs):
"""Run pytest and return the exitcode.
It translates some of Django's test command options to pytest's.
@ -126,17 +82,10 @@ class PytestTestRunner(DiscoverRunner): # pragma: no cover
The extra_tests argument has been deprecated since Django 5.x
It is kept for compatibility with PyCharm's Django test runner.
"""
if not test_labels:
self.logger.error("No test files specified")
return 1
for label in test_labels:
if not self._validate_test_label(label):
return 1
valid_label_found = False
label_as_path = os.path.abspath(label)
# File path has been specified
if os.path.exists(label_as_path):
self.args.append(label_as_path)
@ -144,31 +93,24 @@ class PytestTestRunner(DiscoverRunner): # pragma: no cover
elif "::" in label:
self.args.append(label)
valid_label_found = True
# Convert dotted module path to file_path::class::method
else:
# Check if the label is a dotted module path
path_pieces = label.split(".")
# Check whether only class or class and method are specified
for i in range(-1, -3, -1):
try:
path = os.path.join(*path_pieces[:i]) + ".py"
if os.path.exists(path):
if i < -1:
path_method = path + "::" + "::".join(path_pieces[i:])
self.args.append(path_method)
else:
self.args.append(path)
valid_label_found = True
break
except (TypeError, IndexError):
continue
path = os.path.join(*path_pieces[:i]) + ".py"
label_as_path = os.path.abspath(path)
if os.path.exists(label_as_path):
path_method = label_as_path + "::" + "::".join(path_pieces[i:])
self.args.append(path_method)
valid_label_found = True
break
if not valid_label_found:
self.logger.error("Test file not found", label=label)
return 1
raise RuntimeError(
f"One of the test labels: {label!r}, "
f"is not supported. Use a dotted module name or "
f"path instead."
)
self.logger.info("Running tests", test_files=self.args)
with patch("guardian.shortcuts._get_ct_cached", patched__get_ct_cached):
try:
return pytest.main(self.args)
except Exception as e:
self.logger.error("Error running tests", error=str(e), test_files=self.args)
return 1
return pytest.main(self.args)
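
The label translation described in the docstring (Django-style dotted paths to pytest node IDs) can be sketched independently like this; a simplified illustration, assuming labels are resolved relative to the working directory:

import os

def dotted_to_pytest(label: str) -> str | None:
    """Turn e.g. 'authentik.policies.tests.test_engine.TestPolicyEngine.test_x'
    into 'authentik/policies/tests/test_engine.py::TestPolicyEngine::test_x'."""
    pieces = label.split(".")
    for split in range(len(pieces), 0, -1):
        path = os.path.join(*pieces[:split]) + ".py"
        if os.path.exists(path):
            selectors = pieces[split:]
            return path if not selectors else path + "::" + "::".join(selectors)
    return None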

File diff suppressed because one or more lines are too long

View File

@ -100,11 +100,9 @@ def send_mail(
# Because we use the Message-ID as UID for the task, manually assign it
message_object.extra_headers["Message-ID"] = message_id
# Add the logo if it is used in the email body (we can't add it in the
# previous message since MIMEImage can't be converted to json)
body = get_email_body(message_object)
if "cid:logo" in body:
message_object.attach(logo_data())
# Add the logo (we can't add it in the previous message since MIMEImage
# can't be converted to json)
message_object.attach(logo_data())
if (
message_object.to

View File

@ -96,7 +96,7 @@
<table width="100%" style="background-color: #FFFFFF; border-spacing: 0; margin-top: 15px;">
<tr height="80">
<td align="center" style="padding: 20px 0;">
<img src="{% block logo_url %}cid:logo{% endblock %}" border="0=" alt="authentik logo" class="flexibleImage logo">
<img src="{% block logo_url %}cid:logo.png{% endblock %}" border="0=" alt="authentik logo" class="flexibleImage logo">
</td>
</tr>
{% block content %}

View File

@ -19,8 +19,7 @@ def logo_data() -> MIMEImage:
path = Path("web/dist/assets/icons/icon_left_brand.png")
with open(path, "rb") as _logo_file:
logo = MIMEImage(_logo_file.read())
logo.add_header("Content-ID", "<logo>")
logo.add_header("Content-Disposition", "inline", filename="logo.png")
logo.add_header("Content-ID", "logo.png")
return logo
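
Both sides of this change hinge on how inline images are referenced from HTML mail: the Content-ID header carries the identifier in angle brackets, while the template refers to it with a cid: URL without them. A minimal standalone sketch of that pairing, using the stdlib directly (illustrative only):

from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

msg = MIMEMultipart("related")
msg.attach(MIMEText('<img src="cid:logo" alt="authentik logo">', "html"))

with open("web/dist/assets/icons/icon_left_brand.png", "rb") as f:
    logo = MIMEImage(f.read())
logo.add_header("Content-ID", "<logo>")  # angle brackets here, none in the cid: reference
logo.add_header("Content-Disposition", "inline", filename="logo.png")
msg.attach(logo)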

View File

@ -1,6 +1,6 @@
"""Prompt Stage Logic"""
from collections.abc import Callable, Iterator
from collections.abc import Callable
from email.policy import Policy
from types import MethodType
from typing import Any
@ -190,7 +190,7 @@ class ListPolicyEngine(PolicyEngine):
self.__list = policies
self.use_cache = False
def iterate_bindings(self) -> Iterator[PolicyBinding]:
def bindings(self):
for policy in self.__list:
yield PolicyBinding(
policy=policy,

File diff suppressed because it is too large

View File

@ -31,7 +31,7 @@ services:
volumes:
- redis:/data
server:
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.6.2}
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.6.0}
restart: unless-stopped
command: server
environment:
@ -55,7 +55,7 @@ services:
redis:
condition: service_healthy
worker:
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.6.2}
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.6.0}
restart: unless-stopped
command: worker
environment:

go.mod
View File

@ -4,7 +4,6 @@ go 1.24.0
require (
beryju.io/ldap v0.1.0
github.com/avast/retry-go/v4 v4.6.1
github.com/coreos/go-oidc/v3 v3.14.1
github.com/getsentry/sentry-go v0.33.0
github.com/go-http-utils/etag v0.0.0-20161124023236-513ea8f21eb1
@ -18,21 +17,21 @@ require (
github.com/gorilla/sessions v1.4.0
github.com/gorilla/websocket v1.5.3
github.com/grafana/pyroscope-go v1.2.2
github.com/jellydator/ttlcache/v3 v3.4.0
github.com/jellydator/ttlcache/v3 v3.3.0
github.com/mitchellh/mapstructure v1.5.0
github.com/nmcclain/asn1-ber v0.0.0-20170104154839-2661553a0484
github.com/pires/go-proxyproto v0.8.1
github.com/prometheus/client_golang v1.22.0
github.com/redis/go-redis/v9 v9.10.0
github.com/redis/go-redis/v9 v9.9.0
github.com/sethvargo/go-envconfig v1.3.0
github.com/sirupsen/logrus v1.9.3
github.com/spf13/cobra v1.9.1
github.com/stretchr/testify v1.10.0
github.com/wwt/guac v1.3.2
goauthentik.io/api/v3 v3.2025062.3
goauthentik.io/api/v3 v3.2025060.1
golang.org/x/exp v0.0.0-20230210204819-062eb4c674ab
golang.org/x/oauth2 v0.30.0
golang.org/x/sync v0.15.0
golang.org/x/sync v0.14.0
gopkg.in/yaml.v2 v2.4.0
layeh.com/radius v0.0.0-20210819152912-ad72663a72ab
)

go.sum
View File

@ -41,8 +41,6 @@ github.com/alexbrainman/sspi v0.0.0-20231016080023-1a75b4708caa h1:LHTHcTQiSGT7V
github.com/alexbrainman/sspi v0.0.0-20231016080023-1a75b4708caa/go.mod h1:cEWa1LVoE5KvSD9ONXsZrj0z6KqySlCCNKHlLzbqAt4=
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so=
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw=
github.com/avast/retry-go/v4 v4.6.1 h1:VkOLRubHdisGrHnTu89g08aQEWEgRU7LVEop3GbIcMk=
github.com/avast/retry-go/v4 v4.6.1/go.mod h1:V6oF8njAwxJ5gRo1Q7Cxab24xs5NCWZBeaHHBklR8mA=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs=
@ -203,8 +201,8 @@ github.com/jcmturner/gokrb5/v8 v8.4.4 h1:x1Sv4HaTpepFkXbt2IkL29DXRf8sOfZXo8eRKh6
github.com/jcmturner/gokrb5/v8 v8.4.4/go.mod h1:1btQEpgT6k+unzCwX1KdWMEwPPkkgBtP+F6aCACiMrs=
github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY=
github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc=
github.com/jellydator/ttlcache/v3 v3.4.0 h1:YS4P125qQS0tNhtL6aeYkheEaB/m8HCqdMMP4mnWdTY=
github.com/jellydator/ttlcache/v3 v3.4.0/go.mod h1:Hw9EgjymziQD3yGsQdf1FqFdpp7YjFMd4Srg5EJlgD4=
github.com/jellydator/ttlcache/v3 v3.3.0 h1:BdoC9cE81qXfrxeb9eoJi9dWrdhSuwXMAnHTbnBm4Wc=
github.com/jellydator/ttlcache/v3 v3.3.0/go.mod h1:bj2/e0l4jRnQdrnSTaGTsh4GSXvMjQcy41i7th0GVGw=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
@ -251,8 +249,8 @@ github.com/prometheus/common v0.62.0 h1:xasJaQlnWAeyHdUBeGjXmutelfJHWMRr+Fg4QszZ
github.com/prometheus/common v0.62.0/go.mod h1:vyBcEuLSvWos9B1+CyL7JZ2up+uFzXhkqml0W5zIY1I=
github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc=
github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk=
github.com/redis/go-redis/v9 v9.10.0 h1:FxwK3eV8p/CQa0Ch276C7u2d0eNC9kCmAYQ7mCXCzVs=
github.com/redis/go-redis/v9 v9.10.0/go.mod h1:huWgSWd8mW6+m0VPhJjSSQ+d6Nh1VICQ6Q5lHuCH/Iw=
github.com/redis/go-redis/v9 v9.9.0 h1:URbPQ4xVQSQhZ27WMQVmZSo3uT3pL+4IdHVcYq2nVfM=
github.com/redis/go-redis/v9 v9.9.0/go.mod h1:huWgSWd8mW6+m0VPhJjSSQ+d6Nh1VICQ6Q5lHuCH/Iw=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
@ -298,8 +296,8 @@ go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y
go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
goauthentik.io/api/v3 v3.2025062.3 h1:syBOKigaHyX/8Rwmh9kOSF+TzsxOzmP5i7rsFwbemzA=
goauthentik.io/api/v3 v3.2025062.3/go.mod h1:zz+mEZg8rY/7eEjkMGWJ2DnGqk+zqxuybGCGrR2O4Kw=
goauthentik.io/api/v3 v3.2025060.1 h1:H/TDuroJlQicuxrWEnLcO3lzQaHuR28xrUb1L2362Vo=
goauthentik.io/api/v3 v3.2025060.1/go.mod h1:zz+mEZg8rY/7eEjkMGWJ2DnGqk+zqxuybGCGrR2O4Kw=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
@ -384,8 +382,8 @@ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8=
golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sync v0.14.0 h1:woo0S4Yywslg6hp4eUFjTVOyKt0RookbpAHG4c1HmhQ=
golang.org/x/sync v0.14.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=

View File

@ -33,4 +33,4 @@ func UserAgent() string {
return fmt.Sprintf("authentik@%s", FullVersion())
}
const VERSION = "2025.6.2"
const VERSION = "2025.6.0"

View File

@ -13,7 +13,6 @@ import (
"syscall"
"time"
"github.com/avast/retry-go/v4"
"github.com/getsentry/sentry-go"
"github.com/google/uuid"
"github.com/gorilla/websocket"
@ -26,6 +25,8 @@ import (
"goauthentik.io/internal/utils/web"
)
type WSHandler func(ctx context.Context, args map[string]interface{})
const ConfigLogLevel = "log_level"
// APIController main controller which connects to the authentik api via http and ws
@ -42,11 +43,12 @@ type APIController struct {
reloadOffset time.Duration
eventConn *websocket.Conn
lastWsReconnect time.Time
wsIsReconnecting bool
eventHandlers []EventHandler
refreshHandlers []func()
wsConn *websocket.Conn
lastWsReconnect time.Time
wsIsReconnecting bool
wsBackoffMultiplier int
wsHandlers []WSHandler
refreshHandlers []func()
instanceUUID uuid.UUID
}
@ -81,19 +83,20 @@ func NewAPIController(akURL url.URL, token string) *APIController {
// Because we don't know the outpost UUID, we simply do a list and pick the first
// The service account this token belongs to should only have access to a single outpost
outposts, _ := retry.DoWithData[*api.PaginatedOutpostList](
func() (*api.PaginatedOutpostList, error) {
outposts, _, err := apiClient.OutpostsApi.OutpostsInstancesList(context.Background()).Execute()
return outposts, err
},
retry.Attempts(0),
retry.Delay(time.Second*3),
retry.OnRetry(func(attempt uint, err error) {
log.WithError(err).Error("Failed to fetch outpost configuration, retrying in 3 seconds")
}),
)
var outposts *api.PaginatedOutpostList
var err error
for {
outposts, _, err = apiClient.OutpostsApi.OutpostsInstancesList(context.Background()).Execute()
if err == nil {
break
}
log.WithError(err).Error("Failed to fetch outpost configuration, retrying in 3 seconds")
time.Sleep(time.Second * 3)
}
if len(outposts.Results) < 1 {
log.Panic("No outposts found with given token, ensure the given token corresponds to an authentik Outpost")
panic("No outposts found with given token, ensure the given token corresponds to an authentik Outpost")
}
outpost := outposts.Results[0]
@ -116,16 +119,17 @@ func NewAPIController(akURL url.URL, token string) *APIController {
token: token,
logger: log,
reloadOffset: time.Duration(rand.Intn(10)) * time.Second,
instanceUUID: uuid.New(),
Outpost: outpost,
eventHandlers: []EventHandler{},
refreshHandlers: make([]func(), 0),
reloadOffset: time.Duration(rand.Intn(10)) * time.Second,
instanceUUID: uuid.New(),
Outpost: outpost,
wsHandlers: []WSHandler{},
wsBackoffMultiplier: 1,
refreshHandlers: make([]func(), 0),
}
ac.logger.WithField("offset", ac.reloadOffset.String()).Debug("HA Reload offset")
err = ac.initEvent(akURL, outpost.Pk)
err = ac.initWS(akURL, outpost.Pk)
if err != nil {
go ac.recentEvents()
go ac.reconnectWS()
}
ac.configureRefreshSignal()
return ac
@ -196,7 +200,7 @@ func (a *APIController) OnRefresh() error {
return err
}
func (a *APIController) getEventPingArgs() map[string]interface{} {
func (a *APIController) getWebsocketPingArgs() map[string]interface{} {
args := map[string]interface{}{
"version": constants.VERSION,
"buildHash": constants.BUILD(""),
@ -222,12 +226,12 @@ func (a *APIController) StartBackgroundTasks() error {
"build": constants.BUILD(""),
}).Set(1)
go func() {
a.logger.Debug("Starting Event Handler...")
a.startEventHandler()
a.logger.Debug("Starting WS Handler...")
a.startWSHandler()
}()
go func() {
a.logger.Debug("Starting Event health notifier...")
a.startEventHealth()
a.logger.Debug("Starting WS Health notifier...")
a.startWSHealth()
}()
go func() {
a.logger.Debug("Starting Interval updater...")

View File

@ -1,37 +0,0 @@
package ak
import (
"context"
"github.com/mitchellh/mapstructure"
)
type EventKind int
const (
// Code used to acknowledge a previous message
EventKindAck EventKind = 0
// Code used to send a healthcheck keepalive
EventKindHello EventKind = 1
// Code received to trigger a config update
EventKindTriggerUpdate EventKind = 2
// Code received to trigger some provider specific function
EventKindProviderSpecific EventKind = 3
// Code received to identify the end of a session
EventKindSessionEnd EventKind = 4
)
type EventHandler func(ctx context.Context, msg Event) error
type Event struct {
Instruction EventKind `json:"instruction"`
Args interface{} `json:"args"`
}
func (wm Event) ArgsAs(out interface{}) error {
return mapstructure.Decode(wm.Args, out)
}
type EventArgsSessionEnd struct {
SessionID string `mapstructure:"session_id"`
}

View File

@ -11,7 +11,6 @@ import (
"strings"
"time"
"github.com/avast/retry-go/v4"
"github.com/gorilla/websocket"
"github.com/prometheus/client_golang/prometheus"
"goauthentik.io/internal/config"
@ -31,7 +30,7 @@ func (ac *APIController) getWebsocketURL(akURL url.URL, outpostUUID string, quer
return wsUrl
}
func (ac *APIController) initEvent(akURL url.URL, outpostUUID string) error {
func (ac *APIController) initWS(akURL url.URL, outpostUUID string) error {
query := akURL.Query()
query.Set("instance_uuid", ac.instanceUUID.String())
@ -58,19 +57,19 @@ func (ac *APIController) initEvent(akURL url.URL, outpostUUID string) error {
return err
}
ac.eventConn = ws
ac.wsConn = ws
// Send hello message with our version
msg := Event{
Instruction: EventKindHello,
Args: ac.getEventPingArgs(),
msg := websocketMessage{
Instruction: WebsocketInstructionHello,
Args: ac.getWebsocketPingArgs(),
}
err = ws.WriteJSON(msg)
if err != nil {
ac.logger.WithField("logger", "authentik.outpost.events").WithError(err).Warning("Failed to hello to authentik")
ac.logger.WithField("logger", "authentik.outpost.ak-ws").WithError(err).Warning("Failed to hello to authentik")
return err
}
ac.lastWsReconnect = time.Now()
ac.logger.WithField("logger", "authentik.outpost.events").WithField("outpost", outpostUUID).Info("Successfully connected websocket")
ac.logger.WithField("logger", "authentik.outpost.ak-ws").WithField("outpost", outpostUUID).Info("Successfully connected websocket")
return nil
}
@ -78,19 +77,19 @@ func (ac *APIController) initEvent(akURL url.URL, outpostUUID string) error {
func (ac *APIController) Shutdown() {
// Cleanly close the connection by sending a close message and then
// waiting (with timeout) for the server to close the connection.
err := ac.eventConn.WriteMessage(websocket.CloseMessage, websocket.FormatCloseMessage(websocket.CloseNormalClosure, ""))
err := ac.wsConn.WriteMessage(websocket.CloseMessage, websocket.FormatCloseMessage(websocket.CloseNormalClosure, ""))
if err != nil {
ac.logger.WithError(err).Warning("failed to write close message")
return
}
err = ac.eventConn.Close()
err = ac.wsConn.Close()
if err != nil {
ac.logger.WithError(err).Warning("failed to close websocket")
}
ac.logger.Info("finished shutdown")
}
func (ac *APIController) recentEvents() {
func (ac *APIController) reconnectWS() {
if ac.wsIsReconnecting {
return
}
@ -101,47 +100,46 @@ func (ac *APIController) recentEvents() {
Path: strings.ReplaceAll(ac.Client.GetConfig().Servers[0].URL, "api/v3", ""),
}
attempt := 1
_ = retry.Do(
func() error {
q := u.Query()
q.Set("attempt", strconv.Itoa(attempt))
u.RawQuery = q.Encode()
err := ac.initEvent(u, ac.Outpost.Pk)
attempt += 1
if err != nil {
return err
for {
q := u.Query()
q.Set("attempt", strconv.Itoa(attempt))
u.RawQuery = q.Encode()
err := ac.initWS(u, ac.Outpost.Pk)
attempt += 1
if err != nil {
ac.logger.Infof("waiting %d seconds to reconnect", ac.wsBackoffMultiplier)
time.Sleep(time.Duration(ac.wsBackoffMultiplier) * time.Second)
ac.wsBackoffMultiplier = ac.wsBackoffMultiplier * 2
// Limit to 300 seconds (5m)
if ac.wsBackoffMultiplier >= 300 {
ac.wsBackoffMultiplier = 300
}
} else {
ac.wsIsReconnecting = false
return nil
},
retry.Delay(1*time.Second),
retry.MaxDelay(5*time.Minute),
retry.DelayType(retry.BackOffDelay),
retry.Attempts(0),
retry.OnRetry(func(attempt uint, err error) {
ac.logger.Infof("waiting %d seconds to reconnect", attempt)
}),
)
ac.wsBackoffMultiplier = 1
return
}
}
}
func (ac *APIController) startEventHandler() {
logger := ac.logger.WithField("loop", "event-handler")
func (ac *APIController) startWSHandler() {
logger := ac.logger.WithField("loop", "ws-handler")
for {
var wsMsg Event
if ac.eventConn == nil {
go ac.recentEvents()
var wsMsg websocketMessage
if ac.wsConn == nil {
go ac.reconnectWS()
time.Sleep(time.Second * 5)
continue
}
err := ac.eventConn.ReadJSON(&wsMsg)
err := ac.wsConn.ReadJSON(&wsMsg)
if err != nil {
ConnectionStatus.With(prometheus.Labels{
"outpost_name": ac.Outpost.Name,
"outpost_type": ac.Server.Type(),
"uuid": ac.instanceUUID.String(),
}).Set(0)
logger.WithError(err).Warning("event read error")
go ac.recentEvents()
logger.WithError(err).Warning("ws read error")
go ac.reconnectWS()
time.Sleep(time.Second * 5)
continue
}
@ -151,8 +149,7 @@ func (ac *APIController) startEventHandler() {
"uuid": ac.instanceUUID.String(),
}).Set(1)
switch wsMsg.Instruction {
case EventKindAck:
case EventKindTriggerUpdate:
case WebsocketInstructionTriggerUpdate:
time.Sleep(ac.reloadOffset)
logger.Debug("Got update trigger...")
err := ac.OnRefresh()
@ -167,33 +164,30 @@ func (ac *APIController) startEventHandler() {
"build": constants.BUILD(""),
}).SetToCurrentTime()
}
default:
for _, h := range ac.eventHandlers {
err := h(context.Background(), wsMsg)
if err != nil {
ac.logger.WithError(err).Warning("failed to run event handler")
}
case WebsocketInstructionProviderSpecific:
for _, h := range ac.wsHandlers {
h(context.Background(), wsMsg.Args)
}
}
}
}
func (ac *APIController) startEventHealth() {
func (ac *APIController) startWSHealth() {
ticker := time.NewTicker(time.Second * 10)
for ; true; <-ticker.C {
if ac.eventConn == nil {
go ac.recentEvents()
if ac.wsConn == nil {
go ac.reconnectWS()
time.Sleep(time.Second * 5)
continue
}
err := ac.SendEventHello(map[string]interface{}{})
err := ac.SendWSHello(map[string]interface{}{})
if err != nil {
ac.logger.WithField("loop", "event-health").WithError(err).Warning("event write error")
go ac.recentEvents()
ac.logger.WithField("loop", "ws-health").WithError(err).Warning("ws write error")
go ac.reconnectWS()
time.Sleep(time.Second * 5)
continue
} else {
ac.logger.WithField("loop", "event-health").Trace("hello'd")
ac.logger.WithField("loop", "ws-health").Trace("hello'd")
ConnectionStatus.With(prometheus.Labels{
"outpost_name": ac.Outpost.Name,
"outpost_type": ac.Server.Type(),
@ -236,19 +230,19 @@ func (ac *APIController) startIntervalUpdater() {
}
}
func (a *APIController) AddEventHandler(handler EventHandler) {
a.eventHandlers = append(a.eventHandlers, handler)
func (a *APIController) AddWSHandler(handler WSHandler) {
a.wsHandlers = append(a.wsHandlers, handler)
}
func (a *APIController) SendEventHello(args map[string]interface{}) error {
allArgs := a.getEventPingArgs()
func (a *APIController) SendWSHello(args map[string]interface{}) error {
allArgs := a.getWebsocketPingArgs()
for key, value := range args {
allArgs[key] = value
}
aliveMsg := Event{
Instruction: EventKindHello,
aliveMsg := websocketMessage{
Instruction: WebsocketInstructionHello,
Args: allArgs,
}
err := a.eventConn.WriteJSON(aliveMsg)
err := a.wsConn.WriteJSON(aliveMsg)
return err
}
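
The reconnect logic in this file backs off exponentially, capped at five minutes in both variants (a per-controller multiplier in one, retry-go's BackOffDelay with MaxDelay in the other). The outpost is Go, but the pattern is language-agnostic; a rough Python sketch for comparison:

import random
import time

def reconnect(connect, max_delay: float = 300.0) -> None:
    """Call connect() until it succeeds, doubling the wait (with jitter) up to max_delay."""
    delay = 1.0
    while True:
        try:
            connect()
            return
        except Exception:
            time.sleep(delay + random.uniform(0, 1))
            delay = min(delay * 2, max_delay)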

View File

@ -0,0 +1,19 @@
package ak
type websocketInstruction int
const (
// WebsocketInstructionAck Code used to acknowledge a previous message
WebsocketInstructionAck websocketInstruction = 0
// WebsocketInstructionHello Code used to send a healthcheck keepalive
WebsocketInstructionHello websocketInstruction = 1
// WebsocketInstructionTriggerUpdate Code received to trigger a config update
WebsocketInstructionTriggerUpdate websocketInstruction = 2
// WebsocketInstructionProviderSpecific Code received to trigger some provider specific function
WebsocketInstructionProviderSpecific websocketInstruction = 3
)
type websocketMessage struct {
Instruction websocketInstruction `json:"instruction"`
Args map[string]interface{} `json:"args"`
}

View File

@ -15,7 +15,7 @@ func URLMustParse(u string) *url.URL {
return ur
}
func TestEventWebsocketURL(t *testing.T) {
func TestWebsocketURL(t *testing.T) {
u := URLMustParse("http://localhost:9000?foo=bar")
uuid := "23470845-7263-4fe3-bd79-ec1d7bf77d77"
ac := &APIController{}
@ -23,7 +23,7 @@ func TestEventWebsocketURL(t *testing.T) {
assert.Equal(t, "ws://localhost:9000/ws/outpost/23470845-7263-4fe3-bd79-ec1d7bf77d77/?foo=bar", nu.String())
}
func TestEventWebsocketURL_Query(t *testing.T) {
func TestWebsocketURL_Query(t *testing.T) {
u := URLMustParse("http://localhost:9000?foo=bar")
uuid := "23470845-7263-4fe3-bd79-ec1d7bf77d77"
ac := &APIController{}
@ -33,7 +33,7 @@ func TestEventWebsocketURL_Query(t *testing.T) {
assert.Equal(t, "ws://localhost:9000/ws/outpost/23470845-7263-4fe3-bd79-ec1d7bf77d77/?bar=baz&foo=bar", nu.String())
}
func TestEventWebsocketURL_Subpath(t *testing.T) {
func TestWebsocketURL_Subpath(t *testing.T) {
u := URLMustParse("http://localhost:9000/foo/bar/")
uuid := "23470845-7263-4fe3-bd79-ec1d7bf77d77"
ac := &APIController{}

View File

@ -55,10 +55,11 @@ func MockAK(outpost api.Outpost, globalConfig api.Config) *APIController {
token: token,
logger: log,
reloadOffset: time.Duration(rand.Intn(10)) * time.Second,
instanceUUID: uuid.New(),
Outpost: outpost,
refreshHandlers: make([]func(), 0),
reloadOffset: time.Duration(rand.Intn(10)) * time.Second,
instanceUUID: uuid.New(),
Outpost: outpost,
wsBackoffMultiplier: 1,
refreshHandlers: make([]func(), 0),
}
ac.logger.WithField("offset", ac.reloadOffset.String()).Debug("HA Reload offset")
return ac

View File

@ -127,7 +127,7 @@ func (fe *FlowExecutor) getAnswer(stage StageComponent) string {
return ""
}
func (fe *FlowExecutor) SessionCookie() *http.Cookie {
func (fe *FlowExecutor) GetSession() *http.Cookie {
return fe.session
}

View File

@ -1,19 +0,0 @@
package flow
import "github.com/golang-jwt/jwt/v5"
type SessionCookieClaims struct {
jwt.Claims
SessionID string `json:"sid"`
Authenticated bool `json:"authenticated"`
}
func (fe *FlowExecutor) Session() *jwt.Token {
sc := fe.SessionCookie()
if sc == nil {
return nil
}
t, _, _ := jwt.NewParser().ParseUnverified(sc.Value, &SessionCookieClaims{})
return t
}

View File

@ -38,14 +38,7 @@ func (ls *LDAPServer) Bind(bindDN string, bindPW string, conn net.Conn) (ldap.LD
username, err := instance.binder.GetUsername(bindDN)
if err == nil {
selectedApp = instance.GetAppSlug()
c, err := instance.binder.Bind(username, req)
if c == ldap.LDAPResultSuccess {
f := instance.GetFlags(req.BindDN)
ls.connectionsSync.Lock()
ls.connections[f.SessionID()] = conn
ls.connectionsSync.Unlock()
}
return c, err
return instance.binder.Bind(username, req)
} else {
req.Log().WithError(err).Debug("Username not for instance")
}

View File

@ -27,9 +27,8 @@ func (db *DirectBinder) Bind(username string, req *bind.Request) (ldap.LDAPResul
passed, err := fe.Execute()
flags := flags.UserFlags{
Session: fe.SessionCookie(),
SessionJWT: fe.Session(),
UserPk: flags.InvalidUserPK,
Session: fe.GetSession(),
UserPk: flags.InvalidUserPK,
}
// only set flags if we don't have flags for this DN yet
// as flags are only checked during the bind, we can remember whether a certain DN

View File

@ -1,20 +0,0 @@
package ldap
import "net"
func (ls *LDAPServer) Close(dn string, conn net.Conn) error {
ls.connectionsSync.Lock()
defer ls.connectionsSync.Unlock()
key := ""
for k, c := range ls.connections {
if c == conn {
key = k
break
}
}
if key == "" {
return nil
}
delete(ls.connections, key)
return nil
}

View File

@ -1,30 +1,16 @@
package flags
import (
"crypto/sha256"
"encoding/hex"
"net/http"
"github.com/golang-jwt/jwt/v5"
"goauthentik.io/api/v3"
"goauthentik.io/internal/outpost/flow"
)
const InvalidUserPK = -1
type UserFlags struct {
UserInfo *api.User
UserPk int32
CanSearch bool
Session *http.Cookie
SessionJWT *jwt.Token
}
func (uf UserFlags) SessionID() string {
if uf.SessionJWT == nil {
return ""
}
h := sha256.New()
h.Write([]byte(uf.SessionJWT.Claims.(*flow.SessionCookieClaims).SessionID))
return hex.EncodeToString(h.Sum(nil))
UserInfo *api.User
UserPk int32
CanSearch bool
Session *http.Cookie
}

View File

@ -18,26 +18,21 @@ import (
)
type LDAPServer struct {
s *ldap.Server
log *log.Entry
ac *ak.APIController
cs *ak.CryptoStore
defaultCert *tls.Certificate
providers []*ProviderInstance
connections map[string]net.Conn
connectionsSync sync.Mutex
s *ldap.Server
log *log.Entry
ac *ak.APIController
cs *ak.CryptoStore
defaultCert *tls.Certificate
providers []*ProviderInstance
}
func NewServer(ac *ak.APIController) ak.Outpost {
ls := &LDAPServer{
log: log.WithField("logger", "authentik.outpost.ldap"),
ac: ac,
cs: ak.NewCryptoStore(ac.Client.CryptoApi),
providers: []*ProviderInstance{},
connections: map[string]net.Conn{},
connectionsSync: sync.Mutex{},
log: log.WithField("logger", "authentik.outpost.ldap"),
ac: ac,
cs: ak.NewCryptoStore(ac.Client.CryptoApi),
providers: []*ProviderInstance{},
}
ac.AddEventHandler(ls.handleWSSessionEnd)
s := ldap.NewServer()
s.EnforceLDAP = true
@ -55,7 +50,6 @@ func NewServer(ac *ak.APIController) ak.Outpost {
s.BindFunc("", ls)
s.UnbindFunc("", ls)
s.SearchFunc("", ls)
s.CloseFunc("", ls)
return ls
}
@ -123,23 +117,3 @@ func (ls *LDAPServer) TimerFlowCacheExpiry(ctx context.Context) {
p.binder.TimerFlowCacheExpiry(ctx)
}
}
func (ls *LDAPServer) handleWSSessionEnd(ctx context.Context, msg ak.Event) error {
if msg.Instruction != ak.EventKindSessionEnd {
return nil
}
mmsg := ak.EventArgsSessionEnd{}
err := msg.ArgsAs(&mmsg)
if err != nil {
return err
}
ls.connectionsSync.Lock()
defer ls.connectionsSync.Unlock()
ls.log.Info("Disconnecting session due to session end event")
conn, ok := ls.connections[mmsg.SessionID]
if !ok {
return nil
}
delete(ls.connections, mmsg.SessionID)
return conn.Close()
}
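
One side of this diff keeps a map of live LDAP connections keyed by a hashed session ID so a session-end event can force-close them. A rough Python sketch of that bookkeeping, with hypothetical names (the outpost itself is Go):

import socket
import threading

class ConnectionRegistry:
    """Track open connections by hashed session ID and close them on logout."""
    def __init__(self) -> None:
        self._lock = threading.Lock()
        self._conns: dict[str, socket.socket] = {}

    def register(self, session_id: str, conn: socket.socket) -> None:
        with self._lock:
            self._conns[session_id] = conn

    def end_session(self, session_id: str) -> None:
        with self._lock:
            conn = self._conns.pop(session_id, None)
        if conn is not None:
            conn.close()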

View File

@ -44,40 +44,38 @@ func (ds *DirectSearcher) SearchSubschema(req *search.Request) (ldap.ServerSearc
{
Name: "attributeTypes",
Values: []string{
"( 0.9.2342.19200300.100.1.1 NAME 'uid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.3 NAME 'mail' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.41 NAME 'mobile' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.102 NAME 'memberOf' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.2.13 NAME 'displayName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.131 NAME 'co' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.141 NAME 'department' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.146 NAME 'company' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1 NAME 'name' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.221 NAME 'sAMAccountName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.261 NAME 'division' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.44 NAME 'homeDirectory' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.750 NAME 'groupType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.782 NAME 'objectCategory' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.0 NAME 'uidNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.1 NAME 'gidNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.12 NAME 'memberUid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 2.5.18.1 NAME 'createTimestamp' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION )",
"( 2.5.18.2 NAME 'modifyTimestamp' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION )",
"( 2.5.21.2 NAME 'dITContentRules' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' NO-USER-MODIFICATION )",
"( 2.5.21.5 NAME 'attributeTypes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' NO-USER-MODIFICATION )",
"( 2.5.21.6 NAME 'objectClasses' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' NO-USER-MODIFICATION )",
"( 2.5.4.0 NAME 'objectClass' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' NO-USER-MODIFICATION )",
"( 2.5.4.4 NAME 'sn' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.3 NAME 'cn' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.6 NAME 'c' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.7 NAME 'l' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.10 NAME 'o' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 2.5.4.11 NAME 'ou' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 2.5.4.12 NAME 'title' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.13 NAME 'description' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 2.5.4.20 NAME 'telephoneNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.3 NAME 'cn' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.31 NAME 'member' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 2.5.4.4 NAME 'sn' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.42 NAME 'givenName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.6 NAME 'c' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.7 NAME 'l' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.21.2 NAME 'dITContentRules' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' NO-USER-MODIFICATION )",
"( 2.5.21.5 NAME 'attributeTypes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' NO-USER-MODIFICATION )",
"( 2.5.21.6 NAME 'objectClasses' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' NO-USER-MODIFICATION )",
"( 0.9.2342.19200300.100.1.1 NAME 'uid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.3 NAME 'mail' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.41 NAME 'mobile' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.13 NAME 'displayName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.146 NAME 'company' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.102 NAME 'memberOf' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.2.131 NAME 'co' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.141 NAME 'department' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1 NAME 'name' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.44 NAME 'homeDirectory' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.221 NAME 'sAMAccountName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.261 NAME 'division' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.750 NAME 'groupType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.782 NAME 'objectCategory' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.0 NAME 'uidNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.1 NAME 'gidNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.12 NAME 'memberUid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
// Custom attributes
// Temporarily use 1.3.6.1.4.1.26027.1.1 as a base

View File

@ -53,14 +53,6 @@ func NewRequest(bindDN string, searchReq ldap.SearchRequest, conn net.Conn) (*Re
if err != nil && len(searchReq.Filter) > 0 {
l.WithError(err).WithField("objectClass", filterOC).Warning("invalid filter object class")
}
// Handle comma-separated attributes
normalizedAttributes := normalizeAttributes(searchReq.Attributes)
if len(normalizedAttributes) != len(searchReq.Attributes) {
// Create a copy of the search request with normalized attributes
searchReq.Attributes = normalizedAttributes
}
return &Request{
SearchRequest: searchReq,
BindDN: bindDN,
@ -72,31 +64,6 @@ func NewRequest(bindDN string, searchReq ldap.SearchRequest, conn net.Conn) (*Re
}, span
}
// normalizeAttributes handles the case where attributes might be passed as comma-separated strings
// rather than as individual array elements
func normalizeAttributes(attributes []string) []string {
if len(attributes) == 0 {
return attributes
}
result := make([]string, 0, len(attributes))
for _, attr := range attributes {
if strings.Contains(attr, ",") {
// Split comma-separated attributes and add them individually
parts := strings.Split(attr, ",")
for _, part := range parts {
part = strings.TrimSpace(part)
if part != "" {
result = append(result, part)
}
}
} else {
result = append(result, attr)
}
}
return result
}
func (r *Request) Context() context.Context {
return r.ctx
}

View File

@ -1,98 +0,0 @@
package search
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestNormalizeAttributes(t *testing.T) {
tests := []struct {
name string
input []string
expectedOutput []string
}{
{
name: "Empty input",
input: []string{},
expectedOutput: []string{},
},
{
name: "No commas",
input: []string{"uid", "cn", "sn"},
expectedOutput: []string{"uid", "cn", "sn"},
},
{
name: "Single comma-separated string",
input: []string{"uid,cn,sn"},
expectedOutput: []string{"uid", "cn", "sn"},
},
{
name: "Mixed input",
input: []string{"uid,cn", "sn"},
expectedOutput: []string{"uid", "cn", "sn"},
},
{
name: "With spaces",
input: []string{"uid, cn, sn"},
expectedOutput: []string{"uid", "cn", "sn"},
},
{
name: "Empty parts",
input: []string{"uid,, cn"},
expectedOutput: []string{"uid", "cn"},
},
{
name: "Single element",
input: []string{"uid"},
expectedOutput: []string{"uid"},
},
{
name: "Only commas",
input: []string{",,,"},
expectedOutput: []string{},
},
{
name: "Multiple comma-separated attributes",
input: []string{"uid,cn", "sn,mail", "givenName"},
expectedOutput: []string{"uid", "cn", "sn", "mail", "givenName"},
},
{
name: "Case preservation",
input: []string{"uid,CN,sAMAccountName"},
expectedOutput: []string{"uid", "CN", "sAMAccountName"},
},
{
name: "Leading and trailing spaces",
input: []string{" uid , cn , sn "},
expectedOutput: []string{"uid", "cn", "sn"},
},
{
name: "Real-world LDAP attribute examples",
input: []string{"objectClass,memberOf,mail", "sAMAccountName,userPrincipalName"},
expectedOutput: []string{"objectClass", "memberOf", "mail", "sAMAccountName", "userPrincipalName"},
},
{
name: "Jira-style attribute format",
input: []string{"uid,cn,sn"},
expectedOutput: []string{"uid", "cn", "sn"},
},
{
name: "Single string with single attribute",
input: []string{"cn"},
expectedOutput: []string{"cn"},
},
{
name: "Mix of standard and operational attributes",
input: []string{"uid,+", "createTimestamp"},
expectedOutput: []string{"uid", "+", "createTimestamp"},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := normalizeAttributes(tt.input)
assert.Equal(t, tt.expectedOutput, result)
})
}
}

View File

@ -5,7 +5,6 @@ import (
"crypto/sha256"
"crypto/tls"
"encoding/gob"
"encoding/hex"
"fmt"
"html/template"
"net/http"
@ -119,8 +118,8 @@ func NewApplication(p api.ProxyOutpostConfig, c *http.Client, server Server, old
mux := mux.NewRouter()
// Save cookie name, based on hashed client ID
hs := sha256.Sum256([]byte(*p.ClientId))
bs := hex.EncodeToString(hs[:])
h := sha256.New()
bs := string(h.Sum([]byte(*p.ClientId)))
sessionName := fmt.Sprintf("authentik_proxy_%s", bs[:8])
// When HOST_BROWSER is set, use that as Host header for token requests to make the issuer match
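
A note on the two cookie-name derivations shown above: sha256.Sum256 digests its input, while hash.Hash.Sum(b) appends the digest of whatever was previously written (here: nothing) to b, so the second variant prefixes the cookie name with raw client-ID bytes rather than a hash. A minimal sketch contrasting the two, with an illustrative client ID:

package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

func main() {
	clientID := "example-client-id" // illustrative value, not a real client ID

	// Variant 1: digest the client ID, then hex-encode the 32-byte sum.
	sum := sha256.Sum256([]byte(clientID))
	fmt.Println(hex.EncodeToString(sum[:])[:8]) // first 8 hex chars of the digest

	// Variant 2: Hash.Sum(b) appends the digest of the data written so far
	// (nothing here) to b — it does not hash b itself.
	h := sha256.New()
	fmt.Println(string(h.Sum([]byte(clientID)))[:8]) // first 8 bytes of the raw client ID
}

The RADIUS hunk further below shows the same distinction with SHA-512.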

View File

@ -66,7 +66,7 @@ func NewProxyServer(ac *ak.APIController) ak.Outpost {
globalMux.PathPrefix("/outpost.goauthentik.io/static").HandlerFunc(s.HandleStatic)
globalMux.Path("/outpost.goauthentik.io/ping").HandlerFunc(sentryutils.SentryNoSample(s.HandlePing))
rootMux.PathPrefix("/").HandlerFunc(s.Handle)
ac.AddEventHandler(s.handleWSMessage)
ac.AddWSHandler(s.handleWSMessage)
return s
}

View File

@ -3,27 +3,48 @@ package proxyv2
import (
"context"
"goauthentik.io/internal/outpost/ak"
"github.com/mitchellh/mapstructure"
"goauthentik.io/internal/outpost/proxyv2/application"
)
func (ps *ProxyServer) handleWSMessage(ctx context.Context, msg ak.Event) error {
if msg.Instruction != ak.EventKindSessionEnd {
return nil
}
mmsg := ak.EventArgsSessionEnd{}
err := msg.ArgsAs(&mmsg)
if err != nil {
return err
}
for _, p := range ps.apps {
ps.log.WithField("provider", p.Host).Debug("Logging out")
err := p.Logout(ctx, func(c application.Claims) bool {
return c.Sid == mmsg.SessionID
})
if err != nil {
ps.log.WithField("provider", p.Host).WithError(err).Warning("failed to logout")
}
}
return nil
type WSProviderSubType string
const (
WSProviderSubTypeLogout WSProviderSubType = "logout"
)
type WSProviderMsg struct {
SubType WSProviderSubType `mapstructure:"sub_type"`
SessionID string `mapstructure:"session_id"`
}
func ParseWSProvider(args map[string]interface{}) (*WSProviderMsg, error) {
msg := &WSProviderMsg{}
err := mapstructure.Decode(args, &msg)
if err != nil {
return nil, err
}
return msg, nil
}
func (ps *ProxyServer) handleWSMessage(ctx context.Context, args map[string]interface{}) {
msg, err := ParseWSProvider(args)
if err != nil {
ps.log.WithError(err).Warning("invalid provider-specific ws message")
return
}
switch msg.SubType {
case WSProviderSubTypeLogout:
for _, p := range ps.apps {
ps.log.WithField("provider", p.Host).Debug("Logging out")
err := p.Logout(ctx, func(c application.Claims) bool {
return c.Sid == msg.SessionID
})
if err != nil {
ps.log.WithField("provider", p.Host).WithError(err).Warning("failed to logout")
}
}
default:
ps.log.WithField("sub_type", msg.SubType).Warning("invalid sub_type")
}
}
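
A minimal sketch of how the mapstructure-based decoding in ParseWSProvider above behaves, using illustrative payload values; the struct here is simplified (SubType as a plain string) but the field tags follow the hunk:

package main

import (
	"fmt"

	"github.com/mitchellh/mapstructure"
)

type WSProviderMsg struct {
	SubType   string `mapstructure:"sub_type"`
	SessionID string `mapstructure:"session_id"`
}

func main() {
	// Illustrative payload shape; real messages arrive over the outpost websocket.
	args := map[string]interface{}{
		"sub_type":   "logout",
		"session_id": "abc123",
	}
	msg := &WSProviderMsg{}
	if err := mapstructure.Decode(args, msg); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", msg) // &{SubType:logout SessionID:abc123}
}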

View File

@ -6,6 +6,7 @@ import (
"strconv"
"sync"
"github.com/mitchellh/mapstructure"
log "github.com/sirupsen/logrus"
"github.com/wwt/guac"
@ -29,7 +30,7 @@ func NewServer(ac *ak.APIController) ak.Outpost {
connm: sync.RWMutex{},
conns: map[string]connection.Connection{},
}
ac.AddEventHandler(rs.wsHandler)
ac.AddWSHandler(rs.wsHandler)
return rs
}
@ -51,14 +52,12 @@ func parseIntOrZero(input string) int {
return x
}
func (rs *RACServer) wsHandler(ctx context.Context, msg ak.Event) error {
if msg.Instruction != ak.EventKindProviderSpecific {
return nil
}
func (rs *RACServer) wsHandler(ctx context.Context, args map[string]interface{}) {
wsm := WSMessage{}
err := msg.ArgsAs(&wsm)
err := mapstructure.Decode(args, &wsm)
if err != nil {
return err
rs.log.WithError(err).Warning("invalid ws message")
return
}
config := guac.NewGuacamoleConfiguration()
config.Protocol = wsm.Protocol
@ -72,23 +71,23 @@ func (rs *RACServer) wsHandler(ctx context.Context, msg ak.Event) error {
}
cc, err := connection.NewConnection(rs.ac, wsm.DestChannelID, config)
if err != nil {
return err
rs.log.WithError(err).Warning("failed to setup connection")
return
}
cc.OnError = func(err error) {
rs.connm.Lock()
delete(rs.conns, wsm.ConnID)
_ = rs.ac.SendEventHello(map[string]interface{}{
_ = rs.ac.SendWSHello(map[string]interface{}{
"active_connections": len(rs.conns),
})
rs.connm.Unlock()
}
rs.connm.Lock()
rs.conns[wsm.ConnID] = *cc
_ = rs.ac.SendEventHello(map[string]interface{}{
_ = rs.ac.SendWSHello(map[string]interface{}{
"active_connections": len(rs.conns),
})
rs.connm.Unlock()
return nil
}
func (rs *RACServer) Start() error {

View File

@ -2,7 +2,6 @@ package radius
import (
"crypto/sha512"
"encoding/hex"
"time"
"github.com/getsentry/sentry-go"
@ -69,9 +68,7 @@ func (rs *RadiusServer) ServeRADIUS(w radius.ResponseWriter, r *radius.Request)
}
}
if pi == nil {
hs := sha512.Sum512([]byte(r.Secret))
bs := hex.EncodeToString(hs[:])
nr.Log().WithField("hashed_secret", bs).Warning("No provider found")
nr.Log().WithField("hashed_secret", string(sha512.New().Sum(r.Secret))).Warning("No provider found")
_ = w.Write(r.Response(radius.CodeAccessReject))
return
}
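
The RADIUS hunk above shows the same pitfall as the proxy cookie hunk earlier, this time with SHA-512: sha512.Sum512 digests the shared secret before it is logged, whereas sha512.New().Sum(r.Secret) appends an empty-message digest to the raw secret. A short sketch with an illustrative secret:

package main

import (
	"crypto/sha512"
	"encoding/hex"
	"fmt"
)

func main() {
	secret := []byte("radius-shared-secret") // illustrative value

	// Digest the secret, then hex-encode it before logging.
	sum := sha512.Sum512(secret)
	fmt.Println(hex.EncodeToString(sum[:]))

	// By contrast, sha512.New().Sum(secret) returns secret with the digest of
	// an empty message appended, so the raw secret bytes would end up in the log.
}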

View File

@ -9,7 +9,7 @@
"version": "0.0.0",
"license": "MIT",
"devDependencies": {
"aws-cdk": "^2.1018.1",
"aws-cdk": "^2.1017.1",
"cross-env": "^7.0.3"
},
"engines": {
@ -17,16 +17,16 @@
}
},
"node_modules/aws-cdk": {
"version": "2.1018.1",
"resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.1018.1.tgz",
"integrity": "sha512-kFPRox5kSm+ktJ451o0ng9rD+60p5Kt1CZIWw8kXnvqbsxN2xv6qbmyWSXw7sGVXVwqrRKVj+71/JeDr+LMAZw==",
"version": "2.1017.1",
"resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.1017.1.tgz",
"integrity": "sha512-KtDdkMhfVjDeexjpMrVoSlz2mTYI5BE/KotvJ7iFbZy1G0nkpW1ImZ54TdBefeeFmZ+8DAjU3I6nUFtymyOI1A==",
"dev": true,
"license": "Apache-2.0",
"bin": {
"cdk": "bin/cdk"
},
"engines": {
"node": ">= 18.0.0"
"node": ">= 14.15.0"
},
"optionalDependencies": {
"fsevents": "2.3.2"

View File

@ -10,7 +10,7 @@
"node": ">=20"
},
"devDependencies": {
"aws-cdk": "^2.1018.1",
"aws-cdk": "^2.1017.1",
"cross-env": "^7.0.3"
}
}

View File

@ -26,7 +26,7 @@ Parameters:
Description: authentik Docker image
AuthentikVersion:
Type: String
Default: 2025.6.2
Default: 2025.6.0
Description: authentik Docker image tag
AuthentikServerCPU:
Type: Number

View File

@ -7,6 +7,8 @@ from pathlib import Path
from tempfile import gettempdir
from typing import TYPE_CHECKING
from cryptography.hazmat.backends.openssl.backend import backend
from defusedxml import defuse_stdlib
from prometheus_client.values import MultiProcessValue
from authentik import get_full_version
@ -16,7 +18,6 @@ from authentik.lib.logging import get_logger_config
from authentik.lib.utils.http import get_http_session
from authentik.lib.utils.reflection import get_env
from authentik.root.install_id import get_install_id_raw
from authentik.root.setup import setup
from lifecycle.migrate import run_migrations
from lifecycle.wait_for_db import wait_for_db
from lifecycle.worker import DjangoUvicornWorker
@ -27,7 +28,10 @@ if TYPE_CHECKING:
from authentik.root.asgi import AuthentikAsgi
setup()
defuse_stdlib()
if CONFIG.get_bool("compliance.fips.enabled", False):
backend._enable_fips()
wait_for_db()

View File

@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2025-06-19 00:10+0000\n"
"POT-Creation-Date: 2025-06-04 00:12+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
@ -763,12 +763,6 @@ msgid ""
"If left empty, Notification won't ben sent."
msgstr ""
#: authentik/events/models.py
msgid ""
"When enabled, notification will be sent to user the user that triggered the "
"event.When destination_group is configured, notification is sent to both."
msgstr ""
#: authentik/events/models.py
msgid "Notification Rule"
msgstr ""

View File

@ -6,18 +6,18 @@
# Translators:
# jcamat, 2022
# Angel, 2024
# Iamanaws, 2024
# Marcelo Elizeche Landó, 2025
# Jens L. <jens@goauthentik.io>, 2025
# Iamanaws, 2025
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2025-06-04 00:12+0000\n"
"POT-Creation-Date: 2025-05-28 11:25+0000\n"
"PO-Revision-Date: 2022-09-26 16:47+0000\n"
"Last-Translator: Iamanaws, 2025\n"
"Last-Translator: Jens L. <jens@goauthentik.io>, 2025\n"
"Language-Team: Spanish (https://app.transifex.com/authentik/teams/119923/es/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
@ -111,7 +111,7 @@ msgstr "Certificado Web usado por el servidor web Core de authentik"
#: authentik/brands/models.py
msgid "Certificates used for client authentication."
msgstr "Certificados utilizados para la autenticación del cliente."
msgstr ""
#: authentik/brands/models.py
msgid "Brand"
@ -131,7 +131,7 @@ msgstr "Descripción adicional no disponible."
#: authentik/core/api/groups.py
msgid "Cannot set group as parent of itself."
msgstr "No se puede establecer un grupo como su propio padre."
msgstr "No se puede establecer el grupo como padre de sí mismo."
#: authentik/core/api/providers.py
msgid ""
@ -183,11 +183,11 @@ msgstr "Remueve usuario del grupo"
#: authentik/core/models.py
msgid "Enable superuser status"
msgstr "Habilitar el estado de superusuario"
msgstr "Habiliar estado de \"superusuario\""
#: authentik/core/models.py
msgid "Disable superuser status"
msgstr "Deshabilitar el estado de superusuario"
msgstr "Deshabiliar estado de \"superusuario\""
#: authentik/core/models.py
msgid "User's display name."
@ -241,7 +241,7 @@ msgstr "Flujo utilizado al autorizar a este proveedor."
#: authentik/core/models.py
msgid "Flow used ending the session from a provider."
msgstr "Flujo utilizado para finalizar la sesión desde un proveedor."
msgstr "Flujo usado para terminar la sesión de un proveedor."
#: authentik/core/models.py
msgid ""
@ -273,11 +273,11 @@ msgstr "Aplicaciones"
#: authentik/core/models.py
msgid "Application Entitlement"
msgstr "Derecho de Aplicación"
msgstr ""
#: authentik/core/models.py
msgid "Application Entitlements"
msgstr "Derechos de Aplicación"
msgstr ""
#: authentik/core/models.py
msgid "Use the source-specific identifier"
@ -288,9 +288,9 @@ msgid ""
"Link to a user with identical email address. Can have security implications "
"when a source doesn't validate email addresses."
msgstr ""
"Enlace a un usuario con la misma dirección de correo electrónico. Puede "
"tener implicaciones de seguridad cuando una fuente no valida las direcciones"
" de correo electrónico."
"Apunta a un usuario con una dirección de correo electrónico idéntica. Puede "
"tener implicaciones de seguridad cuando una fuente no valida la dirección de"
" correo electrónico."
#: authentik/core/models.py
msgid ""
@ -305,8 +305,8 @@ msgid ""
"Link to a user with identical username. Can have security implications when "
"a username is used with another source."
msgstr ""
"Enlace a un usuario con el mismo nombre de usuario. Puede tener "
"implicaciones de seguridad cuando un nombre de usuario se utiliza con otra "
"Enlace a un usuario con un nombre de usuario idéntico. Puede tener "
"implicaciones de seguridad cuando se usa un nombre de usuario con otra "
"fuente."
#: authentik/core/models.py
@ -322,8 +322,8 @@ msgid ""
"Link to a group with identical name. Can have security implications when a "
"group name is used with another source."
msgstr ""
"Enlace a un grupo con el mismo nombre. Puede tener implicaciones de "
"seguridad cuando un nombre de grupo se utiliza con otra fuente."
"Enlace a un grupo con un nombre idéntico. Puede tener implicaciones de "
"seguridad cuando se utiliza un nombre de grupo con otra fuente."
#: authentik/core/models.py
msgid "Use the group name, but deny enrollment when the name already exists."
@ -385,7 +385,7 @@ msgstr "Asignaciones de Propiedades"
#: authentik/core/models.py
msgid "session data"
msgstr "datos de sesión"
msgstr ""
#: authentik/core/models.py
msgid "Session"
@ -424,7 +424,7 @@ msgstr "¡Autenticado exitosamente con {source}!"
#: authentik/core/sources/flow_manager.py
#, python-brace-format
msgid "Successfully linked {source}!"
msgstr "¡{source} enlazado correctamente!"
msgstr "¡{source} vinculado exitosamente!"
#: authentik/core/sources/flow_manager.py
msgid "Source is not configured for enrollment."
@ -476,11 +476,11 @@ msgstr ""
#: authentik/crypto/models.py
msgid "Certificate-Key Pair"
msgstr "Par Certificado-Clave"
msgstr "Par de claves de certificado"
#: authentik/crypto/models.py
msgid "Certificate-Key Pairs"
msgstr "Pares Certificado-Clave"
msgstr "Pares de claves de certificado"
#: authentik/enterprise/api.py
msgid "Enterprise is required to create/update this object."
@ -511,7 +511,7 @@ msgstr ""
#: authentik/enterprise/policies/unique_password/models.py
msgid "Number of passwords to check against."
msgstr "Número de contraseñas contra las que verificar."
msgstr ""
#: authentik/enterprise/policies/unique_password/models.py
#: authentik/policies/password/models.py
@ -521,20 +521,18 @@ msgstr "La contraseña no se ha establecido en contexto"
#: authentik/enterprise/policies/unique_password/models.py
msgid "This password has been used previously. Please choose a different one."
msgstr ""
"Esta contraseña se ha utilizado anteriormente. Por favor, elija una "
"diferente."
#: authentik/enterprise/policies/unique_password/models.py
msgid "Password Uniqueness Policy"
msgstr "Política de Unicidad de Contraseñas"
msgstr ""
#: authentik/enterprise/policies/unique_password/models.py
msgid "Password Uniqueness Policies"
msgstr "Políticas de Unicidad de Contraseñas"
msgstr ""
#: authentik/enterprise/policies/unique_password/models.py
msgid "User Password History"
msgstr "Historial de Contraseñas del Usuario"
msgstr ""
#: authentik/enterprise/policy.py
msgid "Enterprise required to access this feature."
@ -619,39 +617,39 @@ msgstr "Clave de firma"
#: authentik/enterprise/providers/ssf/models.py
msgid "Key used to sign the SSF Events."
msgstr "Clave utilizada para firmar los eventos SSF."
msgstr ""
#: authentik/enterprise/providers/ssf/models.py
msgid "Shared Signals Framework Provider"
msgstr "Proveedor del Marco de Señales Compartidas"
msgstr ""
#: authentik/enterprise/providers/ssf/models.py
msgid "Shared Signals Framework Providers"
msgstr "Proveedores del Marco de Señales Compartidas"
msgstr ""
#: authentik/enterprise/providers/ssf/models.py
msgid "Add stream to SSF provider"
msgstr "Agregar flujo de datos al proveedor SSF"
msgstr ""
#: authentik/enterprise/providers/ssf/models.py
msgid "SSF Stream"
msgstr "Flujo de Datos SSF"
msgstr ""
#: authentik/enterprise/providers/ssf/models.py
msgid "SSF Streams"
msgstr "Flujos de Datos SSF"
msgstr ""
#: authentik/enterprise/providers/ssf/models.py
msgid "SSF Stream Event"
msgstr "Evento de Flujo de Datos SSF"
msgstr ""
#: authentik/enterprise/providers/ssf/models.py
msgid "SSF Stream Events"
msgstr "Eventos de Flujos de Datos SSF"
msgstr ""
#: authentik/enterprise/providers/ssf/tasks.py
msgid "Failed to send request"
msgstr "Error al enviar la solicitud"
msgstr "Falló envio de petición"
#: authentik/enterprise/stages/authenticator_endpoint_gdtc/models.py
msgid "Endpoint Authenticator Google Device Trust Connector Stage"
@ -683,29 +681,26 @@ msgid ""
"option has a higher priority than the `client_certificate` option on "
"`Brand`."
msgstr ""
"Configura las autoridades certificadoras para validar el certificado. Esta "
"opción tiene una prioridad mayor que la opción `client_certificate` en "
"`Brand`."
#: authentik/enterprise/stages/mtls/models.py
msgid "Mutual TLS Stage"
msgstr "Etapa de TLS mutuo"
msgstr ""
#: authentik/enterprise/stages/mtls/models.py
msgid "Mutual TLS Stages"
msgstr "Etapas de TLS mutuo"
msgstr ""
#: authentik/enterprise/stages/mtls/models.py
msgid "Permissions to pass Certificates for outposts."
msgstr "Permisos para pasar Certificados a los puestos avanzados."
msgstr ""
#: authentik/enterprise/stages/mtls/stage.py
msgid "Certificate required but no certificate was given."
msgstr "Se requiere certificado, pero no se proporcionó ninguno."
msgstr ""
#: authentik/enterprise/stages/mtls/stage.py
msgid "No user found for certificate."
msgstr "No se encontró usuario para el certificado."
msgstr ""
#: authentik/enterprise/stages/source/models.py
msgid ""
@ -758,16 +753,12 @@ msgid ""
"Customize the body of the request. Mapping should return data that is JSON-"
"serializable."
msgstr ""
"Personaliza el cuerpo de la solicitud. El mapeo debe devolver datos que sean"
" serializables en JSON."
#: authentik/events/models.py
msgid ""
"Configure additional headers to be sent. Mapping should return a dictionary "
"of key-value pairs"
msgstr ""
"Configura encabezados adicionales para enviar. El mapeo debe devolver un "
"diccionario de pares clave-valor"
#: authentik/events/models.py
msgid ""
@ -795,7 +786,7 @@ msgstr "Transporte de notificaciones"
#: authentik/events/models.py
msgid "Notification Transports"
msgstr "Medios de Notificación"
msgstr "Transportes de notificación"
#: authentik/events/models.py
msgid "Notice"
@ -822,9 +813,9 @@ msgid ""
"Select which transports should be used to notify the user. If none are "
"selected, the notification will only be shown in the authentik UI."
msgstr ""
"Selecciona qué medios se deben usar para notificar al usuario. Si no se "
"selecciona ninguno, la notificación solo se mostrará en la interfaz de "
"authentik."
"Seleccione qué transportes se deben usar para notificar al usuario. Si no se"
" selecciona ninguno, la notificación solo se mostrará en la interfaz de "
"usuario de authentik."
#: authentik/events/models.py
msgid "Controls which severity level the created notifications will have."
@ -996,7 +987,7 @@ msgstr "Evalúa políticas durante el proceso de planeación del Flujo."
#: authentik/flows/models.py
msgid "Evaluate policies when the Stage is presented to the user."
msgstr "Evaluar las políticas cuando la Etapa se presenta al usuario."
msgstr ""
#: authentik/flows/models.py
msgid ""
@ -1043,8 +1034,6 @@ msgid ""
"When enabled, provider will not modify or create objects in the remote "
"system."
msgstr ""
"Cuando está habilitado, el proveedor no modificará ni creará objetos en el "
"sistema remoto."
#: authentik/lib/sync/outgoing/tasks.py
msgid "Starting full provider sync"
@ -1052,21 +1041,20 @@ msgstr "Iniciando sincronización completa de proveedor"
#: authentik/lib/sync/outgoing/tasks.py
msgid "Syncing users"
msgstr "Sincronizando usuarios"
msgstr ""
#: authentik/lib/sync/outgoing/tasks.py
msgid "Syncing groups"
msgstr "Sincronizando grupos"
msgstr ""
#: authentik/lib/sync/outgoing/tasks.py
#, python-brace-format
msgid "Syncing page {page} of {object_type}"
msgstr "Sincronizando página {page} de {object_type}"
msgid "Syncing page {page} of groups"
msgstr "Sincronizando página {page} de grupos"
#: authentik/lib/sync/outgoing/tasks.py
msgid "Dropping mutating request due to dry run"
msgstr ""
"Descartando solicitud de mutación debido a ejecución en modo de simulación"
#: authentik/lib/sync/outgoing/tasks.py
#, python-brace-format
@ -1245,7 +1233,7 @@ msgstr ""
#: authentik/policies/expiry/models.py
msgid "Password has expired."
msgstr "La contraseña ha expirado."
msgstr "La contraseña ha caducado."
#: authentik/policies/expiry/models.py
msgid "Password Expiry Policy"
@ -1283,7 +1271,7 @@ msgstr "La IP del cliente no está en un país permitido."
#: authentik/policies/geoip/models.py
msgid "Distance from previous authentication is larger than threshold."
msgstr "La distancia desde la autenticación anterior es mayor que el umbral."
msgstr "La distancia desde la autenticación previa es mayor que el límite."
#: authentik/policies/geoip/models.py
msgid "Distance is further than possible."
@ -1332,7 +1320,7 @@ msgstr "Vinculación de Políticas"
#: authentik/policies/models.py
msgid "Policy Bindings"
msgstr "Vinculaciones de Políticas"
msgstr "Vinculaciones de políticas"
#: authentik/policies/models.py
msgid ""
@ -1606,11 +1594,11 @@ msgstr "ES256 (Encriptación Asimétrica)"
#: authentik/providers/oauth2/models.py
msgid "ES384 (Asymmetric Encryption)"
msgstr "ES384 (Encriptación Asimétrica)"
msgstr ""
#: authentik/providers/oauth2/models.py
msgid "ES512 (Asymmetric Encryption)"
msgstr "ES512 (Encriptación Asimétrica)"
msgstr ""
#: authentik/providers/oauth2/models.py
msgid "Scope used by the client"
@ -1825,7 +1813,7 @@ msgstr "Valida Certificados SSL de servidores de origen"
#: authentik/providers/proxy/models.py
msgid "Internal host SSL Validation"
msgstr "Validación SSL del host interno"
msgstr "Validación SSL de host interno"
#: authentik/providers/proxy/models.py
msgid ""
@ -2039,7 +2027,7 @@ msgstr ""
#: authentik/providers/saml/models.py
msgid "AuthnContextClassRef Property Mapping"
msgstr "Asignación de Propiedades de AuthnContextClassRef"
msgstr ""
#: authentik/providers/saml/models.py
msgid ""
@ -2047,9 +2035,6 @@ msgid ""
"empty, the AuthnContextClassRef will be set based on which authentication "
"methods the user used to authenticate."
msgstr ""
"Configura cómo se creará el valor de AuthnContextClassRef. Si se deja vacío,"
" el AuthnContextClassRef se establecerá según los métodos de autenticación "
"que el usuario haya utilizado para autenticarse."
#: authentik/providers/saml/models.py
msgid ""
@ -2199,11 +2184,11 @@ msgstr "Predeterminado"
#: authentik/providers/scim/models.py
msgid "AWS"
msgstr "AWS"
msgstr ""
#: authentik/providers/scim/models.py
msgid "Slack"
msgstr "Slack"
msgstr ""
#: authentik/providers/scim/models.py
msgid "Base URL to SCIM requests, usually ends in /v2"
@ -2215,13 +2200,11 @@ msgstr "Token de Autenticación"
#: authentik/providers/scim/models.py
msgid "SCIM Compatibility Mode"
msgstr "Modo de Compatibilidad SCIM"
msgstr ""
#: authentik/providers/scim/models.py
msgid "Alter authentik behavior for vendor-specific SCIM implementations."
msgstr ""
"Modificar el comportamiento de authentik para implementaciones SCIM "
"específicas de proveedores."
#: authentik/providers/scim/models.py
msgid "SCIM Provider"
@ -2249,7 +2232,7 @@ msgstr "Roles"
#: authentik/rbac/models.py
msgid "Initial Permissions"
msgstr "Permisos Iniciales"
msgstr ""
#: authentik/rbac/models.py
msgid "System permission"
@ -2287,7 +2270,7 @@ msgstr ""
#: authentik/recovery/views.py
msgid "Used recovery-link to authenticate."
msgstr "Se utilizó un enlace de recuperación para autenticarse."
msgstr "Se usó el enlace de recuperación para autenticarse."
#: authentik/sources/kerberos/models.py
msgid "Kerberos realm"
@ -2299,7 +2282,7 @@ msgstr "krb5.conf personalizado a usar. Usa el del sistema por defecto."
#: authentik/sources/kerberos/models.py
msgid "KAdmin server type"
msgstr "Tipo de servidor KAdmin"
msgstr ""
#: authentik/sources/kerberos/models.py
msgid "Sync users from Kerberos into authentik"
@ -2307,24 +2290,23 @@ msgstr "Sincronizar usuarios desde Kerberos hacia Authentik"
#: authentik/sources/kerberos/models.py
msgid "When a user changes their password, sync it back to Kerberos"
msgstr ""
"Cuando un usuario cambie su contraseña, sincronizarla de vuelta a Kerberos."
msgstr "Cuando un usuario cambia su contraseña, sincronizarlo hacia Kerberos"
#: authentik/sources/kerberos/models.py
msgid "Principal to authenticate to kadmin for sync."
msgstr "Principal para autenticarse en kadmin para la sincronización."
msgstr "Principal para autenticarse como kadmin para la sincronización."
#: authentik/sources/kerberos/models.py
msgid "Password to authenticate to kadmin for sync"
msgstr "Contraseña para autenticarse en kadmin para la sincronización"
msgstr "Contraseña para autenticarse como kadmin para la sincronización"
#: authentik/sources/kerberos/models.py
msgid ""
"Keytab to authenticate to kadmin for sync. Must be base64-encoded or in the "
"form TYPE:residual"
msgstr ""
"Keytab para autenticarse en kadmin para la sincronización. Debe estar "
"codificado en base64 o en el formato TIPO:residuo"
"Keytab para autenticarse como kadmin para la sincronización. Debe estar "
"codificado en base64 o en el formato TIPO:residual"
#: authentik/sources/kerberos/models.py
msgid ""
@ -2340,7 +2322,7 @@ msgid ""
"HTTP@hostname"
msgstr ""
"Forzar el uso de un nombre de servidor específico para SPNEGO. Debe estar en"
" el formato HTTP@nombre_de_host"
" el formato HTTP@nombredelservidor"
#: authentik/sources/kerberos/models.py
msgid "SPNEGO keytab base64-encoded or path to keytab in the form FILE:path"
@ -2357,8 +2339,8 @@ msgid ""
"If enabled, the authentik-stored password will be updated upon login with "
"the Kerberos password backend"
msgstr ""
"Si está habilitado, la contraseña almacenada en authentik se actualizará al "
"iniciar sesión con el backend de contraseñas de Kerberos."
"Si está habilitado, la contraseña almacenada por authentik se actualizada "
"al iniciar sesión con el backend de contraseñas Kerberos"
#: authentik/sources/kerberos/models.py
msgid "Kerberos Source"
@ -2406,7 +2388,7 @@ msgid ""
msgstr ""
"\n"
" Asegúrate de que tienes entradas válidas\n"
" (obtenibles mediante kinit) \n"
" (se obtienen a través de kinit) \n"
" y de haber configurado correctamente el navegador.\n"
" Por favor, contacta a tu administrador.\n"
" "
@ -2471,10 +2453,6 @@ msgstr "DN de grupo de adición"
msgid "Consider Objects matching this filter to be Users."
msgstr "Considere que los objetos que coinciden con este filtro son usuarios."
#: authentik/sources/ldap/models.py
msgid "Attribute which matches the value of `group_membership_field`."
msgstr "Atributo que coincide con el valor de `group_membership_field`."
#: authentik/sources/ldap/models.py
msgid "Field which contains members of a group."
msgstr "Campo que contiene los miembros de un grupo."
@ -2507,17 +2485,12 @@ msgid ""
"attribute. This allows nested group resolution on systems like FreeIPA and "
"Active Directory"
msgstr ""
"Buscar la pertenencia a grupos basándose en un atributo del usuario en lugar"
" de un atributo del grupo. Esto permite la resolución de grupos anidados en "
"sistemas como FreeIPA y Active Directory"
#: authentik/sources/ldap/models.py
msgid ""
"Delete authentik users and groups which were previously supplied by this "
"source, but are now missing from it."
msgstr ""
"Eliminar usuarios y grupos de authentik que fueron proporcionados "
"previamente por esta fuente, pero que ahora están ausentes."
#: authentik/sources/ldap/models.py
msgid "LDAP Source"
@ -2539,24 +2512,22 @@ msgstr "Asignaciones de Propiedades de Fuente de LDAP"
msgid ""
"Unique ID used while checking if this object still exists in the directory."
msgstr ""
"ID único utilizado para verificar si este objeto aún existe en el "
"directorio."
#: authentik/sources/ldap/models.py
msgid "User LDAP Source Connection"
msgstr "Conexión de Fuente LDAP de Usuario"
msgstr ""
#: authentik/sources/ldap/models.py
msgid "User LDAP Source Connections"
msgstr "Conexiones de Fuente LDAP de Usuario"
msgstr ""
#: authentik/sources/ldap/models.py
msgid "Group LDAP Source Connection"
msgstr "Conexión de Fuente LDAP de Grupo"
msgstr ""
#: authentik/sources/ldap/models.py
msgid "Group LDAP Source Connections"
msgstr "Conexiones de Fuente LDAP de Grupo"
msgstr ""
#: authentik/sources/ldap/signals.py
msgid "Password does not match Active Directory Complexity."
@ -2568,11 +2539,11 @@ msgstr "No se recibió ningún token."
#: authentik/sources/oauth/models.py
msgid "HTTP Basic Authentication"
msgstr "Autenticación Básica HTTP"
msgstr ""
#: authentik/sources/oauth/models.py
msgid "Include the client ID and secret as request parameters"
msgstr "Incluir el ID de cliente y el secreto como parámetros de la solicitud"
msgstr ""
#: authentik/sources/oauth/models.py
msgid "Request Token URL"
@ -2619,8 +2590,6 @@ msgid ""
"How to perform authentication during an authorization_code token request "
"flow"
msgstr ""
"Cómo realizar la autenticación durante un flujo de solicitud de token con "
"authorization_code"
#: authentik/sources/oauth/models.py
msgid "OAuth Source"
@ -2938,7 +2907,7 @@ msgstr "Conexiones de Fuente de SAML de Grupo"
#: authentik/sources/saml/views.py
#, python-brace-format
msgid "Continue to {source_name}"
msgstr "Continuar a {source_name}"
msgstr ""
#: authentik/sources/scim/models.py
msgid "SCIM Source"
@ -2974,7 +2943,7 @@ msgstr "Dispositivos Duo"
#: authentik/stages/authenticator_email/models.py
msgid "Email OTP"
msgstr "OTP por Correo Electrónico"
msgstr ""
#: authentik/stages/authenticator_email/models.py
#: authentik/stages/email/models.py
@ -2995,11 +2964,11 @@ msgstr ""
#: authentik/stages/authenticator_email/models.py
msgid "Email Authenticator Setup Stage"
msgstr "Etapa de Configuración del Autenticador de Correo Electrónico"
msgstr ""
#: authentik/stages/authenticator_email/models.py
msgid "Email Authenticator Setup Stages"
msgstr "Etapas de Configuración del Autenticador de Correo Electrónico"
msgstr ""
#: authentik/stages/authenticator_email/models.py
#: authentik/stages/authenticator_email/stage.py
@ -3010,11 +2979,11 @@ msgstr ""
#: authentik/stages/authenticator_email/models.py
msgid "Email Device"
msgstr "Dispositivo de correo electrónico"
msgstr "Dispositivo de Email"
#: authentik/stages/authenticator_email/models.py
msgid "Email Devices"
msgstr "Dispositivos de correo electrónico"
msgstr "Dispositivos de Email"
#: authentik/stages/authenticator_email/stage.py
#: authentik/stages/authenticator_sms/stage.py
@ -3024,7 +2993,7 @@ msgstr "El código no coincide"
#: authentik/stages/authenticator_email/stage.py
msgid "Invalid email"
msgstr "Correo electrónico inválido"
msgstr "Email Inválido"
#: authentik/stages/authenticator_email/templates/email/email_otp.html
#: authentik/stages/email/templates/email/password_reset.html
@ -3044,9 +3013,6 @@ msgid ""
" Email MFA code.\n"
" "
msgstr ""
"\n"
" Código MFA por correo electrónico.\n"
" "
#: authentik/stages/authenticator_email/templates/email/email_otp.html
#, python-format
@ -3056,8 +3022,7 @@ msgid ""
" "
msgstr ""
"\n"
" Si no solicitaste este código, por favor ignora este correo. El código anterior es válido por %(expires)s.\n"
" "
"Si no solicitaste este código, por favor ignora este correo. El código anterior es válido por %(expires)s."
#: authentik/stages/authenticator_email/templates/email/email_otp.txt
#: authentik/stages/email/templates/email/password_reset.txt
@ -3070,8 +3035,6 @@ msgid ""
"\n"
"Email MFA code\n"
msgstr ""
"\n"
"Código MFA por correo electrónico\n"
#: authentik/stages/authenticator_email/templates/email/email_otp.txt
#, python-format
@ -3313,8 +3276,8 @@ msgstr "No se pudo validar el token"
msgid ""
"Offset after which consent expires. (Format: hours=1;minutes=2;seconds=3)."
msgstr ""
"Desfase después del cual expira el consentimiento. (Formato: "
"hours=1;minutes=2;seconds=3)."
"Compensación después de la cual caduca el consentimiento. (Formato: horas = "
"1; minutos = 2; segundos = 3)."
#: authentik/stages/consent/models.py
msgid "Consent Stage"
@ -3334,7 +3297,7 @@ msgstr "Consentimientos del usuario"
#: authentik/stages/consent/stage.py
msgid "Invalid consent token, re-showing prompt"
msgstr "Token de consentimiento inválido, mostrando el aviso nuevamente"
msgstr ""
#: authentik/stages/deny/models.py
msgid "Deny Stage"
@ -3354,11 +3317,11 @@ msgstr "Etapas ficticias"
#: authentik/stages/email/flow.py
msgid "Continue to confirm this email address."
msgstr "Continúa para confirmar esta dirección de correo electrónico."
msgstr ""
#: authentik/stages/email/flow.py
msgid "Link was already used, please request a new link."
msgstr "El enlace ya fue utilizado, por favor, solícita uno nuevo."
msgstr ""
#: authentik/stages/email/models.py
msgid "Password Reset"
@ -3482,8 +3445,7 @@ msgid ""
" "
msgstr ""
"\n"
" Si no solicitaste un cambio de contraseña, por favor ignora este correo. El enlace anterior es válido por %(expires)s.\n"
" "
"Si no solicitaste un cambio de contraseña, por favor ignora este correo. El enlace anterior es válido por %(expires)s."
#: authentik/stages/email/templates/email/password_reset.txt
msgid ""
@ -3567,26 +3529,24 @@ msgid ""
"Show the user the 'Remember me on this device' toggle, allowing repeat users"
" to skip straight to entering their password."
msgstr ""
"Mostrar al usuario la opción \"Recordarme en este dispositivo\", permitiendo"
" que los usuarios recurrentes pasen directamente a ingresar su contraseña."
#: authentik/stages/identification/models.py
msgid "Optional enrollment flow, which is linked at the bottom of the page."
msgstr ""
"Flujo de inscripción opcional, que se enlaza en la parte inferior de la "
"página."
"Flujo de inscripción opcional, que está vinculado en la parte inferior de la"
" página."
#: authentik/stages/identification/models.py
msgid "Optional recovery flow, which is linked at the bottom of the page."
msgstr ""
"Flujo de recuperación opcional, que se enlaza en la parte inferior de la "
"página."
"Flujo de recuperación opcional, que está vinculado en la parte inferior de "
"la página."
#: authentik/stages/identification/models.py
msgid "Optional passwordless flow, which is linked at the bottom of the page."
msgstr ""
"Flujo opcional sin contraseña, que se enlaza en la parte inferior de la "
"página."
"Flujo sin contraseña opcional, el cual está vinculado en la parte inferior "
"de la página."
#: authentik/stages/identification/models.py
msgid "Specify which sources should be shown."
@ -3820,11 +3780,11 @@ msgstr "Las contraseñas no coinciden."
#: authentik/stages/redirect/api.py
msgid "Target URL should be present when mode is Static."
msgstr "La URL de destino debe estar presente cuando el modo es Estático."
msgstr ""
#: authentik/stages/redirect/api.py
msgid "Target Flow should be present when mode is Flow."
msgstr "El Flujo de Destino debe estar presente cuando el modo es Flujo."
msgstr ""
#: authentik/stages/redirect/models.py
msgid "Redirect Stage"
@ -3881,6 +3841,10 @@ msgstr "Etapas de inicio de"
msgid "No Pending user to login."
msgstr "Ningún usuario pendiente para iniciar sesión."
#: authentik/stages/user_login/stage.py
msgid "Successfully logged in!"
msgstr "¡Se ha iniciado sesión correctamente!"
#: authentik/stages/user_logout/models.py
msgid "User Logout Stage"
msgstr "Etapa de cierre de sesión del usuario"
@ -3956,12 +3920,10 @@ msgstr ""
#: authentik/tenants/models.py
msgid "Reputation cannot decrease lower than this value. Zero or negative."
msgstr ""
"La reputación no puede disminuir por debajo de este valor. Cero o negativo."
#: authentik/tenants/models.py
msgid "Reputation cannot increase higher than this value. Zero or positive."
msgstr ""
"La reputación no puede aumentar por encima de este valor. Cero o positivo."
#: authentik/tenants/models.py
msgid "The option configures the footer links on the flow executor pages."
@ -3984,8 +3946,8 @@ msgstr "Personificación habilitada/deshabilitada globalmente."
#: authentik/tenants/models.py
msgid "Require administrators to provide a reason for impersonating a user."
msgstr ""
"Requerir que los administradores proporcionen una razón para personificar a "
"un usuario."
"Requerir a los administradores proporcionar una razón para suplantar un "
"usuario."
#: authentik/tenants/models.py
msgid "Default token duration"
@ -3997,7 +3959,7 @@ msgstr "Longitud predeterminada del token"
#: authentik/tenants/models.py
msgid "Tenant"
msgstr "Inquilino"
msgstr "inquilino"
#: authentik/tenants/models.py
msgid "Tenants"

Binary file not shown.

Some files were not shown because too many files have changed in this diff.