Compare commits

1 Commit

Author SHA1 Message Date
ee6b8b596e web/NPM Workspaces: Prep SFE package. 2025-05-20 02:53:50 +02:00
462 changed files with 11013 additions and 31787 deletions

View File

@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2025.6.3
+current_version = 2025.4.1
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
@@ -21,8 +21,6 @@ optional_value = final
 [bumpversion:file:package.json]
-[bumpversion:file:package-lock.json]
 [bumpversion:file:docker-compose.yml]
 [bumpversion:file:schema.yml]
@@ -33,4 +31,6 @@ optional_value = final
 [bumpversion:file:internal/constants/constants.go]
+[bumpversion:file:web/src/common/constants.ts]
 [bumpversion:file:lifecycle/aws/template.yaml]

View File

@@ -36,7 +36,7 @@ runs:
 with:
 go-version-file: "go.mod"
 - name: Setup docker cache
-uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7
+uses: ScribeMD/docker-cache@0.5.0
 with:
 key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }}
 - name: Setup dependencies

View File

@@ -23,13 +23,7 @@ updates:
 - package-ecosystem: npm
 directories:
 - "/web"
-- "/web/packages/sfe"
-- "/web/packages/core"
-- "/web/packages/esbuild-plugin-live-reload"
-- "/packages/prettier-config"
-- "/packages/tsconfig"
-- "/packages/docusaurus-config"
-- "/packages/eslint-config"
+- "/web/sfe"
 schedule:
 interval: daily
 time: "04:00"
@@ -74,9 +68,6 @@ updates:
 wdio:
 patterns:
 - "@wdio/*"
-goauthentik:
-patterns:
-- "@goauthentik/*"
 - package-ecosystem: npm
 directory: "/website"
 schedule:
@@ -97,9 +88,6 @@ updates:
 - "swc-*"
 - "lightningcss*"
 - "@rspack/binding*"
-goauthentik:
-patterns:
-- "@goauthentik/*"
 - package-ecosystem: npm
 directory: "/lifecycle/aws"
 schedule:

View File

@@ -62,7 +62,6 @@ jobs:
 psql:
 - 15-alpine
 - 16-alpine
-- 17-alpine
 run_id: [1, 2, 3, 4, 5]
 steps:
 - uses: actions/checkout@v4
@@ -117,7 +116,6 @@ jobs:
 psql:
 - 15-alpine
 - 16-alpine
-- 17-alpine
 run_id: [1, 2, 3, 4, 5]
 steps:
 - uses: actions/checkout@v4
@@ -202,7 +200,7 @@ jobs:
 uses: actions/cache@v4
 with:
 path: web/dist
-key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
+key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
 - name: prepare web ui
 if: steps.cache-web.outputs.cache-hit != 'true'
 working-directory: web

View File

@@ -49,7 +49,6 @@ jobs:
 matrix:
 job:
 - build
-- build:integrations
 steps:
 - uses: actions/checkout@v4
 - uses: actions/setup-node@v4

View File

@@ -2,7 +2,7 @@ name: "CodeQL"
 on:
 push:
-branches: [main, next, version*]
+branches: [main, "*", next, version*]
 pull_request:
 branches: [main]
 schedule:

View File

@@ -7,7 +7,6 @@ on:
 - packages/eslint-config/**
 - packages/prettier-config/**
 - packages/tsconfig/**
-- web/packages/esbuild-plugin-live-reload/**
 workflow_dispatch:
 jobs:
 publish:
@@ -17,28 +16,27 @@ jobs:
 fail-fast: false
 matrix:
 package:
-- packages/docusaurus-config
-- packages/eslint-config
-- packages/prettier-config
-- packages/tsconfig
-- web/packages/esbuild-plugin-live-reload
+- docusaurus-config
+- eslint-config
+- prettier-config
+- tsconfig
 steps:
 - uses: actions/checkout@v4
 with:
 fetch-depth: 2
 - uses: actions/setup-node@v4
 with:
-node-version-file: ${{ matrix.package }}/package.json
+node-version-file: packages/${{ matrix.package }}/package.json
 registry-url: "https://registry.npmjs.org"
 - name: Get changed files
 id: changed-files
 uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c
 with:
 files: |
-${{ matrix.package }}/package.json
+packages/${{ matrix.package }}/package.json
 - name: Publish package
 if: steps.changed-files.outputs.any_changed == 'true'
-working-directory: ${{ matrix.package }}
+working-directory: packages/${{ matrix.package}}
 run: |
 npm ci
 npm run build

View File

@@ -15,7 +15,6 @@ jobs:
 runs-on: ubuntu-latest
 if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}}
 steps:
-- uses: actions/checkout@v4
 - id: generate_token
 uses: tibdex/github-app-token@v2
 with:
@@ -32,7 +31,7 @@ jobs:
 env:
 GH_TOKEN: ${{ steps.generate_token.outputs.token }}
 run: |
-gh pr edit ${{ github.event.pull_request.number }} -t "translate: ${{ steps.title.outputs.title }}" --add-label dependencies
+gh pr edit -t "translate: ${{ steps.title.outputs.title }}" --add-label dependencies
 - uses: peter-evans/enable-pull-request-automerge@v3
 with:
 token: ${{ steps.generate_token.outputs.token }}

View File

@@ -1,7 +1,7 @@
 # syntax=docker/dockerfile:1
 # Stage 1: Build website
-FROM --platform=${BUILDPLATFORM} docker.io/library/node:24 AS website-builder
+FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS website-builder
 ENV NODE_ENV=production
@@ -20,7 +20,7 @@ COPY ./SECURITY.md /work/
 RUN npm run build-bundled
 # Stage 2: Build webui
-FROM --platform=${BUILDPLATFORM} docker.io/library/node:24 AS web-builder
+FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS web-builder
 ARG GIT_BUILD_HASH
 ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
@@ -94,9 +94,9 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
 /bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
 # Stage 5: Download uv
-FROM ghcr.io/astral-sh/uv:0.7.8 AS uv
+FROM ghcr.io/astral-sh/uv:0.7.5 AS uv
 # Stage 6: Base python image
-FROM ghcr.io/goauthentik/fips-python:3.13.5-slim-bookworm-fips AS python-base
+FROM ghcr.io/goauthentik/fips-python:3.13.3-slim-bookworm-fips AS python-base
 ENV VENV_PATH="/ak-root/.venv" \
 PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \

View File

@@ -1,6 +1,6 @@
 .PHONY: gen dev-reset all clean test web website
-SHELL := /usr/bin/env bash
+SHELL := /bin/bash
 .SHELLFLAGS += ${SHELLFLAGS} -e -o pipefail
 PWD = $(shell pwd)
 UID = $(shell id -u)
@@ -86,10 +86,6 @@ dev-create-db:
 dev-reset: dev-drop-db dev-create-db migrate ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state.
-update-test-mmdb: ## Update test GeoIP and ASN Databases
-curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-ASN-Test.mmdb -o ${PWD}/tests/GeoLite2-ASN-Test.mmdb
-curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-City-Test.mmdb -o ${PWD}/tests/GeoLite2-City-Test.mmdb
 #########################
 ## API Schema
 #########################

View File

@@ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni
 | Version | Supported |
 | --------- | --------- |
+| 2025.2.x | ✅ |
 | 2025.4.x | ✅ |
-| 2025.6.x | ✅ |
 ## Reporting a Vulnerability

View File

@@ -2,7 +2,7 @@
 from os import environ
-__version__ = "2025.6.3"
+__version__ = "2025.4.1"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

View File

@@ -148,14 +148,3 @@ class TestBrands(APITestCase):
 "default_locale": "",
 },
 )
-def test_custom_css(self):
-"""Test custom_css"""
-brand = create_test_brand()
-brand.branding_custom_css = """* {
-font-family: "Foo bar";
-}"""
-brand.save()
-res = self.client.get(reverse("authentik_core:if-user"))
-self.assertEqual(res.status_code, 200)
-self.assertIn(brand.branding_custom_css, res.content.decode())

View File

@@ -5,8 +5,6 @@ from typing import Any
 from django.db.models import F, Q
 from django.db.models import Value as V
 from django.http.request import HttpRequest
-from django.utils.html import _json_script_escapes
-from django.utils.safestring import mark_safe
 from authentik import get_full_version
 from authentik.brands.models import Brand
@@ -34,13 +32,8 @@ def context_processor(request: HttpRequest) -> dict[str, Any]:
 """Context Processor that injects brand object into every template"""
 brand = getattr(request, "brand", DEFAULT_BRAND)
 tenant = getattr(request, "tenant", Tenant())
-# similarly to `json_script` we escape everything HTML-related, however django
-# only directly exposes this as a function that also wraps it in a <script> tag
-# which we dont want for CSS
-brand_css = mark_safe(str(brand.branding_custom_css).translate(_json_script_escapes)) # nosec
 return {
 "brand": brand,
-"brand_css": brand_css,
 "footer_links": tenant.footer_links,
 "html_meta": {**get_http_meta()},
 "version": get_full_version(),

View File

@@ -84,7 +84,6 @@ from authentik.flows.views.executor import QS_KEY_TOKEN
 from authentik.lib.avatars import get_avatar
 from authentik.rbac.decorators import permission_required
 from authentik.rbac.models import get_permission_choices
-from authentik.stages.email.flow import pickle_flow_token_for_email
 from authentik.stages.email.models import EmailStage
 from authentik.stages.email.tasks import send_mails
 from authentik.stages.email.utils import TemplateEmailMessage
@@ -452,7 +451,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
 def list(self, request, *args, **kwargs):
 return super().list(request, *args, **kwargs)
-def _create_recovery_link(self, for_email=False) -> tuple[str, Token]:
+def _create_recovery_link(self) -> tuple[str, Token]:
 """Create a recovery link (when the current brand has a recovery flow set),
 that can either be shown to an admin or sent to the user directly"""
 brand: Brand = self.request._request.brand
@@ -474,16 +473,12 @@ class UserViewSet(UsedByMixin, ModelViewSet):
 raise ValidationError(
 {"non_field_errors": "Recovery flow not applicable to user"}
 ) from None
-_plan = FlowToken.pickle(plan)
-if for_email:
-_plan = pickle_flow_token_for_email(plan)
 token, __ = FlowToken.objects.update_or_create(
 identifier=f"{user.uid}-password-reset",
 defaults={
 "user": user,
 "flow": flow,
-"_plan": _plan,
-"revoke_on_execution": not for_email,
+"_plan": FlowToken.pickle(plan),
 },
 )
 querystring = urlencode({QS_KEY_TOKEN: token.key})
@@ -653,7 +648,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
 if for_user.email == "":
 LOGGER.debug("User doesn't have an email address")
 raise ValidationError({"non_field_errors": "User does not have an email address set."})
-link, token = self._create_recovery_link(for_email=True)
+link, token = self._create_recovery_link()
 # Lookup the email stage to assure the current user can access it
 stages = get_objects_for_user(
 request.user, "authentik_stages_email.view_emailstage"

View File

@@ -79,7 +79,6 @@ def _migrate_session(
 AuthenticatedSession.objects.using(db_alias).create(
 session=session,
 user=old_auth_session.user,
-uuid=old_auth_session.uuid,
 )

View File

@@ -1,81 +1,10 @@
 # Generated by Django 5.1.9 on 2025-05-14 11:15
-from django.apps.registry import Apps, apps as global_apps
+from django.apps.registry import Apps
 from django.db import migrations
-from django.contrib.contenttypes.management import create_contenttypes
-from django.contrib.auth.management import create_permissions
 from django.db.backends.base.schema import BaseDatabaseSchemaEditor
-def migrate_authenticated_session_permissions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
-"""Migrate permissions from OldAuthenticatedSession to AuthenticatedSession"""
-db_alias = schema_editor.connection.alias
-# `apps` here is just an instance of `django.db.migrations.state.AppConfigStub`, we need the
-# real config for creating permissions and content types
-authentik_core_config = global_apps.get_app_config("authentik_core")
-# These are only ran by django after all migrations, but we need them right now.
-# `global_apps` is needed,
-create_permissions(authentik_core_config, using=db_alias, verbosity=1)
-create_contenttypes(authentik_core_config, using=db_alias, verbosity=1)
-# But from now on, this is just a regular migration, so use `apps`
-Permission = apps.get_model("auth", "Permission")
-ContentType = apps.get_model("contenttypes", "ContentType")
-try:
-old_ct = ContentType.objects.using(db_alias).get(
-app_label="authentik_core", model="oldauthenticatedsession"
-)
-new_ct = ContentType.objects.using(db_alias).get(
-app_label="authentik_core", model="authenticatedsession"
-)
-except ContentType.DoesNotExist:
-# This should exist at this point, but if not, let's cut our losses
-return
-# Get all permissions for the old content type
-old_perms = Permission.objects.using(db_alias).filter(content_type=old_ct)
-# Create equivalent permissions for the new content type
-for old_perm in old_perms:
-new_perm = (
-Permission.objects.using(db_alias)
-.filter(
-content_type=new_ct,
-codename=old_perm.codename,
-)
-.first()
-)
-if not new_perm:
-# This should exist at this point, but if not, let's cut our losses
-continue
-# Global user permissions
-User = apps.get_model("authentik_core", "User")
-User.user_permissions.through.objects.using(db_alias).filter(
-permission=old_perm
-).all().update(permission=new_perm)
-# Global role permissions
-DjangoGroup = apps.get_model("auth", "Group")
-DjangoGroup.permissions.through.objects.using(db_alias).filter(
-permission=old_perm
-).all().update(permission=new_perm)
-# Object user permissions
-UserObjectPermission = apps.get_model("guardian", "UserObjectPermission")
-UserObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update(
-permission=new_perm, content_type=new_ct
-)
-# Object role permissions
-GroupObjectPermission = apps.get_model("guardian", "GroupObjectPermission")
-GroupObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update(
-permission=new_perm, content_type=new_ct
-)
 def remove_old_authenticated_session_content_type(
 apps: Apps, schema_editor: BaseDatabaseSchemaEditor
 ):
@@ -92,12 +21,7 @@ class Migration(migrations.Migration):
 ]
 operations = [
-migrations.RunPython(
-code=migrate_authenticated_session_permissions,
-reverse_code=migrations.RunPython.noop,
-),
 migrations.RunPython(
 code=remove_old_authenticated_session_content_type,
-reverse_code=migrations.RunPython.noop,
 ),
 ]

View File

@@ -16,7 +16,7 @@
 {% block head_before %}
 {% endblock %}
 <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}">
-<style>{{ brand_css }}</style>
+<style>{{ brand.branding_custom_css }}</style>
 <script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script>
 <script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script>
 {% block head %}

View File

@@ -3,14 +3,7 @@ from urllib.parse import unquote_plus
 from cryptography.exceptions import InvalidSignature
 from cryptography.hazmat.primitives import hashes
-from cryptography.x509 import (
-Certificate,
-NameOID,
-ObjectIdentifier,
-UnsupportedGeneralNameType,
-load_pem_x509_certificate,
-)
-from cryptography.x509.verification import PolicyBuilder, Store, VerificationError
+from cryptography.x509 import Certificate, NameOID, ObjectIdentifier, load_pem_x509_certificate
 from django.utils.translation import gettext_lazy as _
 from authentik.brands.models import Brand
@@ -109,22 +102,16 @@ class MTLSStageView(ChallengeStageView):
 return None
 def validate_cert(self, authorities: list[CertificateKeyPair], certs: list[Certificate]):
-authorities_cert = [x.certificate for x in authorities]
 for _cert in certs:
-try:
-PolicyBuilder().store(Store(authorities_cert)).build_client_verifier().verify(
-_cert, []
-)
-return _cert
-except (
-InvalidSignature,
-TypeError,
-ValueError,
-VerificationError,
-UnsupportedGeneralNameType,
-) as exc:
-self.logger.warning("Discarding invalid certificate", cert=_cert, exc=exc)
-continue
+for ca in authorities:
+try:
+_cert.verify_directly_issued_by(ca.certificate)
+return _cert
+except (InvalidSignature, TypeError, ValueError) as exc:
+self.logger.warning(
+"Discarding cert not issued by authority", cert=_cert, authority=ca, exc=exc
+)
+continue
 return None
 def check_if_user(self, cert: Certificate):
@@ -154,9 +141,7 @@ class MTLSStageView(ChallengeStageView):
 "subject": cert.subject.rfc4514_string(),
 "issuer": cert.issuer.rfc4514_string(),
 "fingerprint_sha256": hexlify(cert.fingerprint(hashes.SHA256()), ":").decode("utf-8"),
-"fingerprint_sha1": hexlify(cert.fingerprint(hashes.SHA1()), ":").decode( # nosec
-"utf-8"
-),
+"fingerprint_sha1": hexlify(cert.fingerprint(hashes.SHA256()), ":").decode("utf-8"),
 }
 def auth_user(self, user: User, cert: Certificate):

View File

@@ -1,31 +1,30 @@
 -----BEGIN CERTIFICATE-----
-MIIFWTCCA0GgAwIBAgIUDEnKCSmIXG/akySGes7bhOGrN/8wDQYJKoZIhvcNAQEL
+MIIFOzCCAyOgAwIBAgIUbnIMy+Ewi5RvK7OBDxWMCk7wi08wDQYJKoZIhvcNAQEL
 BQAwRjEaMBgGA1UEAwwRYXV0aGVudGlrIFRlc3QgQ0ExEjAQBgNVBAoMCWF1dGhl
-bnRpazEUMBIGA1UECwwLU2VsZi1zaWduZWQwHhcNMjUwNTE5MTIzODQ2WhcNMjYw
-NTE1MTIzODQ2WjARMQ8wDQYDVQQDDAZjbGllbnQwggIiMA0GCSqGSIb3DQEBAQUA
-A4ICDwAwggIKAoICAQCkPkS1V6l0gj0ulxMznkxkgrw4p9Tjd8teSsGZt02A2Eo6
-7D8FbJ7pp3d5fYW/TWuEKVBLWTID6rijW5EGcdgTM5Jxf/QR+aZTEK6umQxUd4yO
-mOtp+xVS3KlcsSej2dFpeE5h5VkZizHpvh5xkoAP8W5VtQLOVF0hIeumHnJmaeLj
-+mhK9PBFpO7k9SFrYYhd/uLrYbIdANihbIO2Q74rNEJHewhFNM7oNSjjEWzRd/7S
-qNdQij9JGrVG7u8YJJscEQHqyHMYFVCEMjxmsge5BO6Vx5OWmUE3wXPzb5TbyTS4
-+yg88g9rYTUXrzz+poCyKpaur45qBsdw35lJ8nq69VJj2xJLGQDwoTgGSXRuPciC
-3OilQI+Ma+j8qQGJxJ8WJxISlf1cuhp+V4ZUd1lawlM5hAXyXmHRlH4pun4y+g7O
-O34+fE3pK25JjVCicMT/rC2A/sb95j/fHTzzJpbB70U0I50maTcIsOkyw6aiF//E
-0ShTDz14x22SCMolUc6hxTDZvBB6yrcJHd7d9CCnFH2Sgo13QrtNJ/atXgm13HGh
-wBzRwK38XUGl/J4pJaxAupTVCPriStUM3m0EYHNelRRUE91pbyeGT0rvOuv00uLw
-Rj7K7hJZR8avTKWmKrVBVpq+gSojGW1DwBS0NiDNkZs0d/IjB1wkzczEgdZjXwID
-AQABo3QwcjAfBgNVHSMEGDAWgBTa+Ns6QzqlNvnTGszkouQQtZnVJDAdBgNVHSUE
-FjAUBggrBgEFBQcDAgYIKwYBBQUHAwEwEQYDVR0RBAowCIIGY2xpZW50MB0GA1Ud
-DgQWBBT1xg5sXkypRBwvCxBuyfoanaiZ5jANBgkqhkiG9w0BAQsFAAOCAgEAvUAz
-YwIjxY/0KHZDU8owdILVqKChzfLcy9OHNPyEI3TSOI8X6gNtBO+HE6r8aWGcC9vw
-zzeIsNQ3UEjvRWi2r+vUVbiPTbFdZboNDSZv6ZmGHxwd85VsjXRGoXV6koCT/9zi
-9/lCM1DwqwYSwBphMJdRVFRUMluSYk1oHflGeA18xgGuts4eFivJwhabGm1AdVVQ
-/CYvqCuTxd/DCzWZBdyxYpDru64i/kyeJCt1pThKEFDWmpumFdBI4CxJ0OhxVSGp
-dOXzK+Y6ULepxCvi6/OpSog52jQ6PnNd1ghiYtq7yO1T4GQz65M1vtHHVvQ3gfBE
-AuKYQp6io7ypitRx+LpjsBQenyP4FFGfrq7pm90nLluOBOArfSdF0N+CP2wo/YFV
-9BGf89OtvRi3BXCm2NXkE/Sc4We26tY8x7xNLOmNs8YOT0O3r/EQ690W9GIwRMx0
-m0r/RXWn5V3o4Jib9r8eH9NzaDstD8g9dECcGfM4fHoM/DAGFaRrNcjMsS1APP3L
-jp7+BfBSXtrz9V6rVJ3CBLXlLK0AuSm7bqd1MJsGA9uMLpsVZIUA+KawcmPGdPU+
-NxdpBCtzyurQSUyaTLtVqSeP35gMAwaNzUDph8Uh+vHz+kRwgXS19OQvTaud5LJu
-nQe4JNS+u5e2VDEBWUxt8NTpu6eShDN0iIEHtxA=
+bnRpazEUMBIGA1UECwwLU2VsZi1zaWduZWQwHhcNMjUwNDI3MTgzMTE3WhcNMjYw
+NDIzMTgzMTE3WjARMQ8wDQYDVQQDDAZjbGllbnQwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQCdV+GEa7+7ito1i/z637OZW+0azv1kuF2aDwSzv+FJd+4L
+6hCroRbVYTUFS3I3YwanOOZfau64xH0+pFM5Js8aREG68eqKBayx8vT27hyAOFhd
+giEVmSQJfla4ogvPie1rJ0HVOL7CiR72HDPQvz+9k1iDX3xQ/4sdAb3XurN13e+M
+Gtavhjiyqxmoo/H4WRd8BhD/BZQFWtaxWODDY8aKk5R7omw6Xf7aRv1BlHdE4Ucy
+Wozvpsj2Kz0l61rRUhiMlE0D9dpijgaRYFB+M7R2casH3CdhGQbBHTRiqBkZa6iq
+SDkTiTwNJQQJov8yPTsR+9P8OOuV6QN+DGm/FXJJFaPnsHw/HDy7EAbA1PcdbSyK
+XvJ8nVjdNhCEGbLGVSwAQLO+78hChVIN5YH+QSrP84YBSxKZYArnf4z2e9drqAN3
+KmC26TkaUzkXnndnxOXBEIOSmyCdD4Dutg1XPE/bs8rA6rVGIR3pKXbCr29Z8hZn
+Cn9jbxwDwTX865ljR1Oc3dnIeCWa9AS/uHaSMdGlbGbDrt4Bj/nyyfu8xc034K/0
+uPh3hF3FLWNAomRVZCvtuh/v7IEIQEgUbvQMWBhZJ8hu3HdtV8V9TIAryVKzEzGy
+Q72UHuQyK0njRDTmA/T+jn7P8GWOuf9eNdzd0gH0gcEuhCZFxPPRvUAeDuC7DQID
+AQABo1YwVDAdBgNVHSUEFjAUBggrBgEFBQcDAgYIKwYBBQUHAwEwFAYDVR0RAQH/
+BAowCIIGY2xpZW50MB0GA1UdDgQWBBQ5KZwTD8+4CqLnbM/cBSXg8XeLXTANBgkq
+hkiG9w0BAQsFAAOCAgEABDkb3iyEOl1xKq1wxyRzf2L8qfPXAQw71FxMbgHArM+a
+e44wJGO3mZgPH0trOaJ+tuN6erB5YbZfsoX+xFacwskj9pKyb4QJLr/ENmJZRgyL
+wp5P6PB6IUJhvryvy/GxrG938YGmFtYQ+ikeJw5PWhB6218C1aZ9hsi94wZ1Zzrc
+Ry0q0D4QvIEZ0X2HL1Harc7gerE3VqhgQ7EWyImM+lCRtNDduwDQnZauwhr2r6cW
+XG4VTe1RCNsDA0xinXQE2Xf9voCd0Zf6wOOXJseQtrXpf+tG4N13cy5heF5ihed1
+hDxSeki0KjTM+18kVVfVm4fzxf1Zg0gm54UlzWceIWh9EtnWMUV08H0D1M9YNmW8
+hWTupk7M+jAw8Y+suHOe6/RLi0+fb9NSJpIpq4GqJ5UF2kerXHX0SvuAavoXyB0j
+CQrUXkRScEKOO2KAbVExSG56Ff7Ee8cRUAQ6rLC5pQRACq/R0sa6RcUsFPXul3Yv
+vbO2rTuArAUPkNVFknwkndheN4lOslRd1If02HunZETmsnal6p+nmuMWt2pQ2fDA
+vIguG54FyQ1T1IbF/QhfTEY62CQAebcgutnqqJHt9qe7Jr6ev57hMrJDEjotSzkY
+OhOVrcYqgLldr1nBqNVlIK/4VrDaWH8H5dNJ72gA9aMNVH4/bSTJhuO7cJkLnHw=
 -----END CERTIFICATE-----

View File

@@ -5,12 +5,7 @@ from django.urls import reverse
 from guardian.shortcuts import assign_perm
 from authentik.core.models import User
-from authentik.core.tests.utils import (
-create_test_brand,
-create_test_cert,
-create_test_flow,
-create_test_user,
-)
+from authentik.core.tests.utils import create_test_brand, create_test_flow, create_test_user
 from authentik.crypto.models import CertificateKeyPair
 from authentik.enterprise.stages.mtls.models import (
 CertAttributes,
@@ -132,18 +127,6 @@ class MTLSStageTests(FlowTestCase):
 self.assertEqual(res.status_code, 200)
 self.assertStageResponse(res, self.flow, component="ak-stage-access-denied")
-def test_invalid_cert(self):
-"""Test invalid certificate"""
-cert = create_test_cert()
-with self.assertFlowFinishes() as plan:
-res = self.client.get(
-reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
-headers={"X-Forwarded-TLS-Client-Cert": quote_plus(cert.certificate_data)},
-)
-self.assertEqual(res.status_code, 200)
-self.assertStageResponse(res, self.flow, component="ak-stage-access-denied")
-self.assertNotIn(PLAN_CONTEXT_PENDING_USER, plan().context)
 def test_auth_no_user(self):
 """Test auth with no user"""
 User.objects.filter(username="client").delete()
@@ -217,12 +200,14 @@ class MTLSStageTests(FlowTestCase):
 self.assertEqual(
 plan().context[PLAN_CONTEXT_CERTIFICATE],
 {
-"fingerprint_sha1": "52:39:ca:1e:3a:1f:78:3a:9f:26:3b:c2:84:99:48:68:99:99:81:8a",
+"fingerprint_sha1": (
+"08:d4:a4:79:25:ca:c3:51:28:88:bb:30:c2:96:c3:44:5a:eb:18:07:84:ca:b4:75:27:74:61:19:8a:6a:af:fc"
+),
 "fingerprint_sha256": (
-"c1:07:8b:7c:e9:02:57:87:1e:92:e5:81:83:21:bc:92:c7:47:65:e3:97:fb:05:97:6f:36:9e:b5:31:77:98:b7"
+"08:d4:a4:79:25:ca:c3:51:28:88:bb:30:c2:96:c3:44:5a:eb:18:07:84:ca:b4:75:27:74:61:19:8a:6a:af:fc"
 ),
 "issuer": "OU=Self-signed,O=authentik,CN=authentik Test CA",
-"serial_number": "70153443448884702681996102271549704759327537151",
+"serial_number": "630532384467334865093173111400266136879266564943",
 "subject": "CN=client",
 },
 )

View File

@@ -15,13 +15,13 @@ class MMDBContextProcessor(EventContextProcessor):
 self.reader: Reader | None = None
 self._last_mtime: float = 0.0
 self.logger = get_logger()
-self.load()
+self.open()
 def path(self) -> str | None:
 """Get the path to the MMDB file to load"""
 raise NotImplementedError
-def load(self):
+def open(self):
 """Get GeoIP Reader, if configured, otherwise none"""
 path = self.path()
 if path == "" or not path:
@@ -44,7 +44,7 @@ class MMDBContextProcessor(EventContextProcessor):
 diff = self._last_mtime < mtime
 if diff > 0:
 self.logger.info("Found new MMDB Database, reopening", diff=diff, path=path)
-self.load()
+self.open()
 except OSError as exc:
 self.logger.warning("Failed to check MMDB age", exc=exc)

View File

@@ -1,18 +0,0 @@
-# Generated by Django 5.1.9 on 2025-05-27 12:52
-from django.db import migrations, models
-class Migration(migrations.Migration):
-dependencies = [
-("authentik_flows", "0027_auto_20231028_1424"),
-]
-operations = [
-migrations.AddField(
-model_name="flowtoken",
-name="revoke_on_execution",
-field=models.BooleanField(default=True),
-),
-]

View File

@@ -303,10 +303,9 @@ class FlowToken(Token):
 flow = models.ForeignKey(Flow, on_delete=models.CASCADE)
 _plan = models.TextField()
-revoke_on_execution = models.BooleanField(default=True)
 @staticmethod
-def pickle(plan: "FlowPlan") -> str:
+def pickle(plan) -> str:
 """Pickle into string"""
 data = dumps(plan)
 return b64encode(data).decode()

View File

@@ -99,10 +99,9 @@ class ChallengeStageView(StageView):
 self.logger.debug("Got StageInvalidException", exc=exc)
 return self.executor.stage_invalid()
 if not challenge.is_valid():
-self.logger.error(
+self.logger.warning(
 "f(ch): Invalid challenge",
 errors=challenge.errors,
-challenge=challenge.data,
 )
 return HttpChallengeResponse(challenge)

View File

@@ -146,8 +146,7 @@ class FlowExecutorView(APIView):
 except (AttributeError, EOFError, ImportError, IndexError) as exc:
 LOGGER.warning("f(exec): Failed to restore token plan", exc=exc)
 finally:
-if token.revoke_on_execution:
-token.delete()
+token.delete()
 if not isinstance(plan, FlowPlan):
 return None
 plan.context[PLAN_CONTEXT_IS_RESTORED] = token

View File

@@ -81,6 +81,7 @@ debugger: false
 log_level: info
+session_storage: cache
 sessions:
 unauthenticated_age: days=1

View File

@@ -1,7 +1,6 @@
 from collections.abc import Callable
 from dataclasses import asdict
-from celery import group
 from celery.exceptions import Retry
 from celery.result import allow_join_result
 from django.core.paginator import Paginator
@@ -83,41 +82,21 @@ class SyncTasks:
 self.logger.debug("Failed to acquire sync lock, skipping", provider=provider.name)
 return
 try:
-messages.append(_("Syncing users"))
-user_results = (
-group(
-[
-sync_objects.signature(
-args=(class_to_path(User), page, provider_pk),
-time_limit=PAGE_TIMEOUT,
-soft_time_limit=PAGE_TIMEOUT,
-)
-for page in users_paginator.page_range
-]
-)
-.apply_async()
-.get()
-)
-for result in user_results:
-for msg in result:
+for page in users_paginator.page_range:
+messages.append(_("Syncing page {page} of users".format(page=page)))
+for msg in sync_objects.apply_async(
+args=(class_to_path(User), page, provider_pk),
+time_limit=PAGE_TIMEOUT,
+soft_time_limit=PAGE_TIMEOUT,
+).get():
 messages.append(LogEvent(**msg))
-messages.append(_("Syncing groups"))
-group_results = (
-group(
-[
-sync_objects.signature(
-args=(class_to_path(Group), page, provider_pk),
-time_limit=PAGE_TIMEOUT,
-soft_time_limit=PAGE_TIMEOUT,
-)
-for page in groups_paginator.page_range
-]
-)
-.apply_async()
-.get()
-)
-for result in group_results:
-for msg in result:
+for page in groups_paginator.page_range:
+messages.append(_("Syncing page {page} of groups".format(page=page)))
+for msg in sync_objects.apply_async(
+args=(class_to_path(Group), page, provider_pk),
+time_limit=PAGE_TIMEOUT,
+soft_time_limit=PAGE_TIMEOUT,
+).get():
 messages.append(LogEvent(**msg))
 except TransientSyncException as exc:
 self.logger.warning("transient sync exception", exc=exc)
@@ -130,7 +109,7 @@ class SyncTasks:
 def sync_objects(
 self, object_type: str, page: int, provider_pk: int, override_dry_run=False, **filter
 ):
-_object_type: type[Model] = path_to_class(object_type)
+_object_type = path_to_class(object_type)
 self.logger = get_logger().bind(
 provider_type=class_to_path(self._provider_model),
 provider_pk=provider_pk,
@@ -153,19 +132,6 @@ class SyncTasks:
 self.logger.debug("starting discover")
 client.discover()
 self.logger.debug("starting sync for page", page=page)
-messages.append(
-asdict(
-LogEvent(
-_(
-"Syncing page {page} of {object_type}".format(
-page=page, object_type=_object_type._meta.verbose_name_plural
-)
-),
-log_level="info",
-logger=f"{provider._meta.verbose_name}@{object_type}",
-)
-)
-)
 for obj in paginator.page(page).object_list:
 obj: Model
 try:

View File

@@ -38,7 +38,6 @@ class TestOutpostWS(TransactionTestCase):
 )
 connected, _ = await communicator.connect()
 self.assertFalse(connected)
-await communicator.disconnect()
 async def test_auth_valid(self):
 """Test auth with token"""
@@ -49,7+48,6 @@ class TestOutpostWS(TransactionTestCase):
 )
 connected, _ = await communicator.connect()
 self.assertTrue(connected)
-await communicator.disconnect()
 async def test_send(self):
 """Test sending of Hello"""

View File

@@ -7,8 +7,10 @@ from django.db import migrations
 def migrate_search_group(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
+from authentik.core.models import User
 from django.apps import apps as real_apps
 from django.contrib.auth.management import create_permissions
+from guardian.shortcuts import UserObjectPermission
 db_alias = schema_editor.connection.alias

View File

@@ -50,4 +50,3 @@ AMR_PASSWORD = "pwd" # nosec
 AMR_MFA = "mfa"
 AMR_OTP = "otp"
 AMR_WEBAUTHN = "user"
-AMR_SMART_CARD = "sc"

View File

@@ -16,7 +16,6 @@ from authentik.providers.oauth2.constants import (
 ACR_AUTHENTIK_DEFAULT,
 AMR_MFA,
 AMR_PASSWORD,
-AMR_SMART_CARD,
 AMR_WEBAUTHN,
 )
 from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS
@@ -140,10 +139,9 @@ class IDToken:
 amr.append(AMR_PASSWORD)
 if method == "auth_webauthn_pwl":
 amr.append(AMR_WEBAUTHN)
-if "certificate" in method_args:
-amr.append(AMR_SMART_CARD)
 if "mfa_devices" in method_args:
-amr.append(AMR_MFA)
+if len(amr) > 0:
+amr.append(AMR_MFA)
 if amr:
 id_token.amr = amr

View File

@@ -47,8 +47,6 @@ class IngressReconciler(KubernetesObjectReconciler[V1Ingress]):
 def reconcile(self, current: V1Ingress, reference: V1Ingress):
 super().reconcile(current, reference)
 self._check_annotations(current, reference)
-if current.spec.ingress_class_name != reference.spec.ingress_class_name:
-raise NeedsUpdate()
 # Create a list of all expected host and tls hosts
 expected_hosts = []
 expected_hosts_tls = []

View File

@@ -66,10 +66,7 @@ class RACClientConsumer(AsyncWebsocketConsumer):
 def init_outpost_connection(self):
 """Initialize guac connection settings"""
 self.token = (
-ConnectionToken.filter_not_expired(
-token=self.scope["url_route"]["kwargs"]["token"],
-session__session__session_key=self.scope["session"].session_key,
-)
+ConnectionToken.filter_not_expired(token=self.scope["url_route"]["kwargs"]["token"])
 .select_related("endpoint", "provider", "session", "session__user")
 .first()
 )

View File

@@ -166,6 +166,7 @@ class ConnectionToken(ExpiringModel):
 always_merger.merge(settings, default_settings)
 always_merger.merge(settings, self.endpoint.provider.settings)
 always_merger.merge(settings, self.endpoint.settings)
+always_merger.merge(settings, self.settings)
 def mapping_evaluator(mappings: QuerySet):
 for mapping in mappings:
@@ -190,7 +191,6 @@ class ConnectionToken(ExpiringModel):
 mapping_evaluator(
 RACPropertyMapping.objects.filter(endpoint__in=[self.endpoint]).order_by("name")
 )
-always_merger.merge(settings, self.settings)
 settings["drive-path"] = f"/tmp/connection/{self.token}" # nosec
 settings["create-drive-path"] = "true"

View File

@@ -90,6 +90,23 @@ class TestModels(TransactionTestCase):
 "resize-method": "display-update",
 },
 )
+# Set settings in token
+token.settings = {
+"level": "token",
+}
+token.save()
+self.assertEqual(
+token.get_settings(),
+{
+"hostname": self.endpoint.host.split(":")[0],
+"port": "1324",
+"client-name": f"authentik - {self.user}",
+"drive-path": path,
+"create-drive-path": "true",
+"level": "token",
+"resize-method": "display-update",
+},
+)
 # Set settings in property mapping (provider)
 mapping = RACPropertyMapping.objects.create(
 name=generate_id(),
@@ -134,22 +151,3 @@ class TestModels(TransactionTestCase):
 "resize-method": "display-update",
 },
 )
-# Set settings in token
-token.settings = {
-"level": "token",
-}
-token.save()
-self.assertEqual(
-token.get_settings(),
-{
-"hostname": self.endpoint.host.split(":")[0],
-"port": "1324",
-"client-name": f"authentik - {self.user}",
-"drive-path": path,
-"create-drive-path": "true",
-"foo": "true",
-"bar": "6",
-"resize-method": "display-update",
-"level": "token",
-},
-)

View File

@@ -87,22 +87,3 @@ class TestRACViews(APITestCase):
 )
 body = loads(flow_response.content)
 self.assertEqual(body["component"], "ak-stage-access-denied")
-def test_different_session(self):
-"""Test request"""
-self.client.force_login(self.user)
-response = self.client.get(
-reverse(
-"authentik_providers_rac:start",
-kwargs={"app": self.app.slug, "endpoint": str(self.endpoint.pk)},
-)
-)
-self.assertEqual(response.status_code, 302)
-flow_response = self.client.get(
-reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug})
-)
-body = loads(flow_response.content)
-next_url = body["to"]
-self.client.logout()
-final_response = self.client.get(next_url)
-self.assertEqual(final_response.url, reverse("authentik_core:if-user"))

View File

@@ -20,9 +20,6 @@ from authentik.lib.utils.time import timedelta_from_string
 from authentik.policies.engine import PolicyEngine
 from authentik.policies.views import PolicyAccessView
 from authentik.providers.rac.models import ConnectionToken, Endpoint, RACProvider
-from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT
-PLAN_CONNECTION_SETTINGS = "connection_settings"
 class RACStartView(PolicyAccessView):
@@ -68,10 +65,7 @@ class RACInterface(InterfaceView):
 def dispatch(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
 # Early sanity check to ensure token still exists
-token = ConnectionToken.filter_not_expired(
-token=self.kwargs["token"],
-session__session__session_key=request.session.session_key,
-).first()
+token = ConnectionToken.filter_not_expired(token=self.kwargs["token"]).first()
 if not token:
 return redirect("authentik_core:if-user")
 self.token = token
@@ -115,15 +109,10 @@ class RACFinalStage(RedirectStage):
 return super().dispatch(request, *args, **kwargs)
 def get_challenge(self, *args, **kwargs) -> RedirectChallenge:
-settings = self.executor.plan.context.get(PLAN_CONNECTION_SETTINGS)
-if not settings:
-settings = self.executor.plan.context.get(PLAN_CONTEXT_PROMPT, {}).get(
-PLAN_CONNECTION_SETTINGS
-)
 token = ConnectionToken.objects.create(
 provider=self.provider,
 endpoint=self.endpoint,
-settings=settings or {},
+settings=self.executor.plan.context.get("connection_settings", {}),
 session=self.request.session["authenticatedsession"],
 expires=now() + timedelta_from_string(self.provider.connection_expiry),
 expiring=True,

View File

@@ -47,16 +47,15 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):
 def to_schema(self, obj: Group, connection: SCIMProviderGroup) -> SCIMGroupSchema:
 """Convert authentik user into SCIM"""
-raw_scim_group = super().to_schema(obj, connection)
+raw_scim_group = super().to_schema(
+obj,
+connection,
+schemas=(SCIM_GROUP_SCHEMA,),
+)
 try:
 scim_group = SCIMGroupSchema.model_validate(delete_none_values(raw_scim_group))
 except ValidationError as exc:
 raise StopSync(exc, obj) from exc
-if SCIM_GROUP_SCHEMA not in scim_group.schemas:
-scim_group.schemas.insert(0, SCIM_GROUP_SCHEMA)
-# As this might be unset, we need to tell pydantic it's set so ensure the schemas
-# are included, even if its just the defaults
-scim_group.schemas = list(scim_group.schemas)
 if not scim_group.externalId:
 scim_group.externalId = str(obj.pk)

View File

@@ -31,16 +31,15 @@ class SCIMUserClient(SCIMClient[User, SCIMProviderUser, SCIMUserSchema]):
 def to_schema(self, obj: User, connection: SCIMProviderUser) -> SCIMUserSchema:
 """Convert authentik user into SCIM"""
-raw_scim_user = super().to_schema(obj, connection)
+raw_scim_user = super().to_schema(
+obj,
+connection,
+schemas=(SCIM_USER_SCHEMA,),
+)
 try:
 scim_user = SCIMUserSchema.model_validate(delete_none_values(raw_scim_user))
 except ValidationError as exc:
 raise StopSync(exc, obj) from exc
-if SCIM_USER_SCHEMA not in scim_user.schemas:
-scim_user.schemas.insert(0, SCIM_USER_SCHEMA)
-# As this might be unset, we need to tell pydantic it's set so ensure the schemas
-# are included, even if its just the defaults
-scim_user.schemas = list(scim_user.schemas)
 if not scim_user.externalId:
 scim_user.externalId = str(obj.uid)
 return scim_user

View File

@@ -91,57 +91,6 @@ class SCIMUserTests(TestCase):
 },
 )
-@Mocker()
-def test_user_create_custom_schema(self, mock: Mocker):
-"""Test user creation with custom schema"""
-schema = SCIMMapping.objects.create(
-name="custom_schema",
-expression="""return {"schemas": ["foo"]}""",
-)
-self.provider.property_mappings.add(schema)
-scim_id = generate_id()
-mock.get(
-"https://localhost/ServiceProviderConfig",
-json={},
-)
-mock.post(
-"https://localhost/Users",
-json={
-"id": scim_id,
-},
-)
-uid = generate_id()
-user = User.objects.create(
-username=uid,
-name=f"{uid} {uid}",
-email=f"{uid}@goauthentik.io",
-)
-self.assertEqual(mock.call_count, 2)
-self.assertEqual(mock.request_history[0].method, "GET")
-self.assertEqual(mock.request_history[1].method, "POST")
-self.assertJSONEqual(
-mock.request_history[1].body,
-{
-"schemas": ["urn:ietf:params:scim:schemas:core:2.0:User", "foo"],
-"active": True,
-"emails": [
-{
-"primary": True,
-"type": "other",
-"value": f"{uid}@goauthentik.io",
-}
-],
-"externalId": user.uid,
-"name": {
-"familyName": uid,
-"formatted": f"{uid} {uid}",
-"givenName": uid,
-},
-"displayName": f"{uid} {uid}",
-"userName": uid,
-},
-)
 @Mocker()
 def test_user_create_different_provider_same_id(self, mock: Mocker):
 """Test user creation with multiple providers that happen
@@ -435,7 +384,7 @@ class SCIMUserTests(TestCase):
 self.assertIn(request.method, SAFE_METHODS)
 task = SystemTask.objects.filter(uid=slugify(self.provider.name)).first()
 self.assertIsNotNone(task)
-drop_msg = task.messages[3]
+drop_msg = task.messages[2]
 self.assertEqual(drop_msg["event"], "Dropping mutating request due to dry run")
 self.assertIsNotNone(drop_msg["attributes"]["url"])
 self.assertIsNotNone(drop_msg["attributes"]["body"])

View File

@@ -132,7 +132,7 @@ TENANT_CREATION_FAKES_MIGRATIONS = True
 TENANT_BASE_SCHEMA = "template"
 PUBLIC_SCHEMA_NAME = CONFIG.get("postgresql.default_schema")
-GUARDIAN_MONKEY_PATCH_USER = False
+GUARDIAN_MONKEY_PATCH = False
 SPECTACULAR_SETTINGS = {
 "TITLE": "authentik",
@@ -424,7 +424,7 @@ else:
 "BACKEND": "authentik.root.storages.FileStorage",
 "OPTIONS": {
 "location": Path(CONFIG.get("storage.media.file.path")),
-"base_url": CONFIG.get("web.path", "/") + "media/",
+"base_url": "/media/",
 },
 }
 # Compatibility for apps not supporting top-level STORAGES

View File

@@ -3,46 +3,25 @@
 import os
 from argparse import ArgumentParser
 from unittest import TestCase
-from unittest.mock import patch
 import pytest
 from django.conf import settings
-from django.contrib.contenttypes.models import ContentType
 from django.test.runner import DiscoverRunner
-from structlog.stdlib import get_logger
-from authentik.events.context_processors.asn import ASN_CONTEXT_PROCESSOR
-from authentik.events.context_processors.geoip import GEOIP_CONTEXT_PROCESSOR
 from authentik.lib.config import CONFIG
 from authentik.lib.sentry import sentry_init
 from authentik.root.signals import post_startup, pre_startup, startup
+from tests.e2e.utils import get_docker_tag
 # globally set maxDiff to none to show full assert error
 TestCase.maxDiff = None
-def get_docker_tag() -> str:
-"""Get docker-tag based off of CI variables"""
-env_pr_branch = "GITHUB_HEAD_REF"
-default_branch = "GITHUB_REF"
-branch_name = os.environ.get(default_branch, "main")
-if os.environ.get(env_pr_branch, "") != "":
-branch_name = os.environ[env_pr_branch]
-branch_name = branch_name.replace("refs/heads/", "").replace("/", "-")
-return f"gh-{branch_name}"
-def patched__get_ct_cached(app_label, codename):
-"""Caches `ContentType` instances like its `QuerySet` does."""
-return ContentType.objects.get(app_label=app_label, permission__codename=codename)
 class PytestTestRunner(DiscoverRunner): # pragma: no cover
 """Runs pytest to discover and run tests."""
 def __init__(self, **kwargs):
 super().__init__(**kwargs)
-self.logger = get_logger().bind(runner="pytest")
 self.args = []
 if self.failfast:
@@ -52,8 +31,6 @@ class PytestTestRunner(DiscoverRunner): # pragma: no cover
 if kwargs.get("randomly_seed", None):
 self.args.append(f"--randomly-seed={kwargs['randomly_seed']}")
-if kwargs.get("no_capture", False):
-self.args.append("--capture=no")
 settings.TEST = True
 settings.CELERY["task_always_eager"] = True
@@ -69,10 +46,6 @@ class PytestTestRunner(DiscoverRunner): # pragma: no cover
 CONFIG.set("error_reporting.sample_rate", 0)
 CONFIG.set("error_reporting.environment", "testing")
 CONFIG.set("error_reporting.send_pii", True)
-ASN_CONTEXT_PROCESSOR.load()
-GEOIP_CONTEXT_PROCESSOR.load()
 sentry_init()
 pre_startup.send(sender=self, mode="test")
@@ -91,11 +64,6 @@ class PytestTestRunner(DiscoverRunner): # pragma: no cover
 "Default behaviour: use random.Random().getrandbits(32), so the seed is"
 "different on each run.",
 )
-parser.add_argument(
-"--no-capture",
-action="store_true",
-help="Disable any capturing of stdout/stderr during tests.",
-)
 def run_tests(self, test_labels, extra_tests=None, **kwargs):
 """Run pytest and return the exitcode.
@@ -138,10 +106,4 @@ class PytestTestRunner(DiscoverRunner): # pragma: no cover
 f"path instead."
 )
-self.logger.info("Running tests", test_files=self.args)
-with patch("guardian.shortcuts._get_ct_cached", patched__get_ct_cached):
-try:
-return pytest.main(self.args)
-except Exception as e:
-self.logger.error("Error running tests", error=str(e), test_files=self.args)
-return 1
+return pytest.main(self.args)

View File

@@ -103,7 +103,6 @@ class LDAPSourceSerializer(SourceSerializer):
 "user_object_filter",
 "group_object_filter",
 "group_membership_field",
-"user_membership_attribute",
 "object_uniqueness_field",
 "password_login_update_internal_password",
 "sync_users",
@@ -112,7 +111,6 @@ class LDAPSourceSerializer(SourceSerializer):
 "sync_parent_group",
 "connectivity",
 "lookup_groups_from_user",
-"delete_not_found_objects",
 ]
 extra_kwargs = {"bind_password": {"write_only": True}}
@@ -140,7 +138,6 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
 "user_object_filter",
 "group_object_filter",
 "group_membership_field",
-"user_membership_attribute",
 "object_uniqueness_field",
 "password_login_update_internal_password",
 "sync_users",
@@ -150,7 +147,6 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
 "user_property_mappings",
 "group_property_mappings",
 "lookup_groups_from_user",
-"delete_not_found_objects",
 ]
 search_fields = ["name", "slug"]
 ordering = ["name"]

View File

@@ -1,48 +0,0 @@
-# Generated by Django 5.1.9 on 2025-05-28 08:15
-from django.db import migrations, models
-class Migration(migrations.Migration):
-dependencies = [
-("authentik_core", "0048_delete_oldauthenticatedsession_content_type"),
-("authentik_sources_ldap", "0008_groupldapsourceconnection_userldapsourceconnection"),
-]
-operations = [
-migrations.AddField(
-model_name="groupldapsourceconnection",
-name="validated_by",
-field=models.UUIDField(
-blank=True,
-help_text="Unique ID used while checking if this object still exists in the directory.",
-null=True,
-),
-),
-migrations.AddField(
-model_name="ldapsource",
-name="delete_not_found_objects",
-field=models.BooleanField(
-default=False,
-help_text="Delete authentik users and groups which were previously supplied by this source, but are now missing from it.",
-),
-),
-migrations.AddField(
-model_name="userldapsourceconnection",
-name="validated_by",
-field=models.UUIDField(
-blank=True,
-help_text="Unique ID used while checking if this object still exists in the directory.",
-null=True,
-),
-),
-migrations.AddIndex(
-model_name="groupldapsourceconnection",
-index=models.Index(fields=["validated_by"], name="authentik_s_validat_b70447_idx"),
-),
-migrations.AddIndex(
-model_name="userldapsourceconnection",
-index=models.Index(fields=["validated_by"], name="authentik_s_validat_ff2ebc_idx"),
-),
-]

View File

@ -1,32 +0,0 @@
# Generated by Django 5.1.9 on 2025-05-29 11:22
from django.apps.registry import Apps
from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
def set_user_membership_attribute(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
LDAPSource = apps.get_model("authentik_sources_ldap", "LDAPSource")
db_alias = schema_editor.connection.alias
LDAPSource.objects.using(db_alias).filter(group_membership_field="memberUid").all().update(
user_membership_attribute="ldap_uniq"
)
class Migration(migrations.Migration):
dependencies = [
("authentik_sources_ldap", "0009_groupldapsourceconnection_validated_by_and_more"),
]
operations = [
migrations.AddField(
model_name="ldapsource",
name="user_membership_attribute",
field=models.TextField(
default="distinguishedName",
help_text="Attribute which matches the value of `group_membership_field`.",
),
),
migrations.RunPython(set_user_membership_attribute, migrations.RunPython.noop),
]

View File

@ -100,10 +100,6 @@ class LDAPSource(Source):
default="(objectClass=person)", default="(objectClass=person)",
help_text=_("Consider Objects matching this filter to be Users."), help_text=_("Consider Objects matching this filter to be Users."),
) )
user_membership_attribute = models.TextField(
default=LDAP_DISTINGUISHED_NAME,
help_text=_("Attribute which matches the value of `group_membership_field`."),
)
group_membership_field = models.TextField( group_membership_field = models.TextField(
default="member", help_text=_("Field which contains members of a group.") default="member", help_text=_("Field which contains members of a group.")
) )
@ -141,14 +137,6 @@ class LDAPSource(Source):
), ),
) )
delete_not_found_objects = models.BooleanField(
default=False,
help_text=_(
"Delete authentik users and groups which were previously supplied by this source, "
"but are now missing from it."
),
)
@property @property
def component(self) -> str: def component(self) -> str:
return "ak-source-ldap-form" return "ak-source-ldap-form"
@ -333,12 +321,6 @@ class LDAPSourcePropertyMapping(PropertyMapping):
class UserLDAPSourceConnection(UserSourceConnection): class UserLDAPSourceConnection(UserSourceConnection):
validated_by = models.UUIDField(
null=True,
blank=True,
help_text=_("Unique ID used while checking if this object still exists in the directory."),
)
@property @property
def serializer(self) -> type[Serializer]: def serializer(self) -> type[Serializer]:
from authentik.sources.ldap.api import ( from authentik.sources.ldap.api import (
@ -350,18 +332,9 @@ class UserLDAPSourceConnection(UserSourceConnection):
class Meta: class Meta:
verbose_name = _("User LDAP Source Connection") verbose_name = _("User LDAP Source Connection")
verbose_name_plural = _("User LDAP Source Connections") verbose_name_plural = _("User LDAP Source Connections")
indexes = [
models.Index(fields=["validated_by"]),
]
class GroupLDAPSourceConnection(GroupSourceConnection): class GroupLDAPSourceConnection(GroupSourceConnection):
validated_by = models.UUIDField(
null=True,
blank=True,
help_text=_("Unique ID used while checking if this object still exists in the directory."),
)
@property @property
def serializer(self) -> type[Serializer]: def serializer(self) -> type[Serializer]:
from authentik.sources.ldap.api import ( from authentik.sources.ldap.api import (
@ -373,6 +346,3 @@ class GroupLDAPSourceConnection(GroupSourceConnection):
class Meta: class Meta:
verbose_name = _("Group LDAP Source Connection") verbose_name = _("Group LDAP Source Connection")
verbose_name_plural = _("Group LDAP Source Connections") verbose_name_plural = _("Group LDAP Source Connections")
indexes = [
models.Index(fields=["validated_by"]),
]

View File

@ -9,7 +9,7 @@ from structlog.stdlib import BoundLogger, get_logger
from authentik.core.sources.mapper import SourceMapper from authentik.core.sources.mapper import SourceMapper
from authentik.lib.config import CONFIG from authentik.lib.config import CONFIG
from authentik.lib.sync.mapper import PropertyMappingManager from authentik.lib.sync.mapper import PropertyMappingManager
from authentik.sources.ldap.models import LDAPSource, flatten from authentik.sources.ldap.models import LDAPSource
class BaseLDAPSynchronizer: class BaseLDAPSynchronizer:
@ -77,16 +77,6 @@ class BaseLDAPSynchronizer:
"""Get objects from LDAP, implemented in subclass""" """Get objects from LDAP, implemented in subclass"""
raise NotImplementedError() raise NotImplementedError()
def get_attributes(self, object):
if "attributes" not in object:
return
return object.get("attributes", {})
def get_identifier(self, attributes: dict):
if not attributes.get(self._source.object_uniqueness_field):
return
return flatten(attributes[self._source.object_uniqueness_field])
def search_paginator( # noqa: PLR0913 def search_paginator( # noqa: PLR0913
self, self,
search_base, search_base,

View File

@ -1,61 +0,0 @@
from collections.abc import Generator
from itertools import batched
from uuid import uuid4
from ldap3 import SUBTREE
from authentik.core.models import Group
from authentik.sources.ldap.models import GroupLDAPSourceConnection
from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer
from authentik.sources.ldap.sync.forward_delete_users import DELETE_CHUNK_SIZE, UPDATE_CHUNK_SIZE
class GroupLDAPForwardDeletion(BaseLDAPSynchronizer):
"""Delete LDAP Groups from authentik"""
@staticmethod
def name() -> str:
return "group_deletions"
def get_objects(self, **kwargs) -> Generator:
if not self._source.sync_groups or not self._source.delete_not_found_objects:
self.message("Group syncing is disabled for this Source")
return iter(())
uuid = uuid4()
groups = self._source.connection().extend.standard.paged_search(
search_base=self.base_dn_groups,
search_filter=self._source.group_object_filter,
search_scope=SUBTREE,
attributes=[self._source.object_uniqueness_field],
generator=True,
**kwargs,
)
for batch in batched(groups, UPDATE_CHUNK_SIZE, strict=False):
identifiers = []
for group in batch:
if not (attributes := self.get_attributes(group)):
continue
if identifier := self.get_identifier(attributes):
identifiers.append(identifier)
GroupLDAPSourceConnection.objects.filter(identifier__in=identifiers).update(
validated_by=uuid
)
return batched(
GroupLDAPSourceConnection.objects.filter(source=self._source)
.exclude(validated_by=uuid)
.values_list("group", flat=True)
.iterator(chunk_size=DELETE_CHUNK_SIZE),
DELETE_CHUNK_SIZE,
strict=False,
)
def sync(self, group_pks: tuple) -> int:
"""Delete authentik groups"""
if not self._source.sync_groups or not self._source.delete_not_found_objects:
self.message("Group syncing is disabled for this Source")
return -1
self._logger.debug("Deleting groups", group_pks=group_pks)
_, deleted_per_type = Group.objects.filter(pk__in=group_pks).delete()
return deleted_per_type.get(Group._meta.label, 0)

View File

@ -1,63 +0,0 @@
from collections.abc import Generator
from itertools import batched
from uuid import uuid4
from ldap3 import SUBTREE
from authentik.core.models import User
from authentik.sources.ldap.models import UserLDAPSourceConnection
from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer
UPDATE_CHUNK_SIZE = 10_000
DELETE_CHUNK_SIZE = 50
class UserLDAPForwardDeletion(BaseLDAPSynchronizer):
"""Delete LDAP Users from authentik"""
@staticmethod
def name() -> str:
return "user_deletions"
def get_objects(self, **kwargs) -> Generator:
if not self._source.sync_users or not self._source.delete_not_found_objects:
self.message("User syncing is disabled for this Source")
return iter(())
uuid = uuid4()
users = self._source.connection().extend.standard.paged_search(
search_base=self.base_dn_users,
search_filter=self._source.user_object_filter,
search_scope=SUBTREE,
attributes=[self._source.object_uniqueness_field],
generator=True,
**kwargs,
)
for batch in batched(users, UPDATE_CHUNK_SIZE, strict=False):
identifiers = []
for user in batch:
if not (attributes := self.get_attributes(user)):
continue
if identifier := self.get_identifier(attributes):
identifiers.append(identifier)
UserLDAPSourceConnection.objects.filter(identifier__in=identifiers).update(
validated_by=uuid
)
return batched(
UserLDAPSourceConnection.objects.filter(source=self._source)
.exclude(validated_by=uuid)
.values_list("user", flat=True)
.iterator(chunk_size=DELETE_CHUNK_SIZE),
DELETE_CHUNK_SIZE,
strict=False,
)
def sync(self, user_pks: tuple) -> int:
"""Delete authentik users"""
if not self._source.sync_users or not self._source.delete_not_found_objects:
self.message("User syncing is disabled for this Source")
return -1
self._logger.debug("Deleting users", user_pks=user_pks)
_, deleted_per_type = User.objects.filter(pk__in=user_pks).delete()
return deleted_per_type.get(User._meta.label, 0)
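The synchronizer above (present on only one side of this comparison) follows a mark-and-sweep pattern: every source connection whose object is still returned by the directory search gets stamped with a per-run UUID, and whatever remains unstamped afterwards is deleted in fixed-size chunks. A minimal standalone sketch of that pattern, with model and field names assumed rather than taken from the project's API:

from itertools import batched  # Python 3.12+
from uuid import uuid4

def sweep_missing(connections, present_identifiers, chunk_size=50):
    """Stamp connections still present in the directory, then yield the rest in chunks."""
    run_id = uuid4()
    # Mark everything the directory still returned during this run
    connections.filter(identifier__in=present_identifiers).update(validated_by=run_id)
    # Anything not marked this run is treated as gone from the source
    stale_pks = (
        connections.exclude(validated_by=run_id)
        .values_list("pk", flat=True)
        .iterator(chunk_size=chunk_size)
    )
    return batched(stale_pks, chunk_size)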

View File

@ -58,16 +58,18 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer):
return -1 return -1
group_count = 0 group_count = 0
for group in page_data: for group in page_data:
if (attributes := self.get_attributes(group)) is None: if "attributes" not in group:
continue continue
attributes = group.get("attributes", {})
group_dn = flatten(flatten(group.get("entryDN", group.get("dn")))) group_dn = flatten(flatten(group.get("entryDN", group.get("dn"))))
if not (uniq := self.get_identifier(attributes)): if not attributes.get(self._source.object_uniqueness_field):
self.message( self.message(
f"Uniqueness field not found/not set in attributes: '{group_dn}'", f"Uniqueness field not found/not set in attributes: '{group_dn}'",
attributes=attributes.keys(), attributes=attributes.keys(),
dn=group_dn, dn=group_dn,
) )
continue continue
uniq = flatten(attributes[self._source.object_uniqueness_field])
try: try:
defaults = { defaults = {
k: flatten(v) k: flatten(v)

View File

@ -63,19 +63,25 @@ class MembershipLDAPSynchronizer(BaseLDAPSynchronizer):
group_member_dn = group_member.get("dn", {}) group_member_dn = group_member.get("dn", {})
members.append(group_member_dn) members.append(group_member_dn)
else: else:
if (attributes := self.get_attributes(group)) is None: if "attributes" not in group:
continue continue
members = attributes.get(self._source.group_membership_field, []) members = group.get("attributes", {}).get(self._source.group_membership_field, [])
ak_group = self.get_group(group) ak_group = self.get_group(group)
if not ak_group: if not ak_group:
continue continue
membership_mapping_attribute = LDAP_DISTINGUISHED_NAME
if self._source.group_membership_field == "memberUid":
# If memberships are based on the posixGroup's 'memberUid'
# attribute we use the RDN instead of the FDN to lookup members.
membership_mapping_attribute = LDAP_UNIQUENESS
users = User.objects.filter( users = User.objects.filter(
Q(**{f"attributes__{self._source.user_membership_attribute}__in": members}) Q(**{f"attributes__{membership_mapping_attribute}__in": members})
| Q( | Q(
**{ **{
f"attributes__{self._source.user_membership_attribute}__isnull": True, f"attributes__{membership_mapping_attribute}__isnull": True,
"ak_groups__in": [ak_group], "ak_groups__in": [ak_group],
} }
) )
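Both sides of this hunk resolve group members by matching an LDAP-supplied value against a user attribute stored in the JSON attributes field; they differ only in whether that attribute is derived from the membership field or read from a configurable setting. A rough, self-contained sketch of the lookup, with names assumed for illustration:

from django.db.models import Q

def members_to_users(user_queryset, ak_group, members, membership_attribute="distinguishedName"):
    """Match LDAP member values against a JSON attribute, keeping users whose
    attribute is unset but who are already in the group."""
    return user_queryset.filter(
        Q(**{f"attributes__{membership_attribute}__in": members})
        | Q(
            **{
                f"attributes__{membership_attribute}__isnull": True,
                "ak_groups__in": [ak_group],
            }
        )
    )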

View File

@ -60,16 +60,18 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer):
return -1 return -1
user_count = 0 user_count = 0
for user in page_data: for user in page_data:
if (attributes := self.get_attributes(user)) is None: if "attributes" not in user:
continue continue
attributes = user.get("attributes", {})
user_dn = flatten(user.get("entryDN", user.get("dn"))) user_dn = flatten(user.get("entryDN", user.get("dn")))
if not (uniq := self.get_identifier(attributes)): if not attributes.get(self._source.object_uniqueness_field):
self.message( self.message(
f"Uniqueness field not found/not set in attributes: '{user_dn}'", f"Uniqueness field not found/not set in attributes: '{user_dn}'",
attributes=attributes.keys(), attributes=attributes.keys(),
dn=user_dn, dn=user_dn,
) )
continue continue
uniq = flatten(attributes[self._source.object_uniqueness_field])
try: try:
defaults = { defaults = {
k: flatten(v) k: flatten(v)

View File

@ -17,8 +17,6 @@ from authentik.lib.utils.reflection import class_to_path, path_to_class
from authentik.root.celery import CELERY_APP from authentik.root.celery import CELERY_APP
from authentik.sources.ldap.models import LDAPSource from authentik.sources.ldap.models import LDAPSource
from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer
from authentik.sources.ldap.sync.forward_delete_groups import GroupLDAPForwardDeletion
from authentik.sources.ldap.sync.forward_delete_users import UserLDAPForwardDeletion
from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer
from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer
from authentik.sources.ldap.sync.users import UserLDAPSynchronizer from authentik.sources.ldap.sync.users import UserLDAPSynchronizer
@ -54,11 +52,11 @@ def ldap_connectivity_check(pk: str | None = None):
@CELERY_APP.task( @CELERY_APP.task(
# We take the configured hours timeout time by 3.5 as we run user and # We take the configured hours timeout time by 2.5 as we run user and
# group in parallel and then membership, then deletions, so 3x is to cover the serial tasks, # group in parallel and then membership, so 2x is to cover the serial tasks,
# and 0.5x on top of that to give some more leeway # and 0.5x on top of that to give some more leeway
soft_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 3.5, soft_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 2.5,
task_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 3.5, task_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 2.5,
) )
def ldap_sync_single(source_pk: str): def ldap_sync_single(source_pk: str):
"""Sync a single source""" """Sync a single source"""
@ -71,31 +69,18 @@ def ldap_sync_single(source_pk: str):
return return
# Delete all sync tasks from the cache # Delete all sync tasks from the cache
DBSystemTask.objects.filter(name="ldap_sync", uid__startswith=source.slug).delete() DBSystemTask.objects.filter(name="ldap_sync", uid__startswith=source.slug).delete()
task = chain(
# The order of these operations needs to be preserved as each depends on the previous one(s) # User and group sync can happen at once, they have no dependencies on each other
# 1. User and group sync can happen simultaneously group(
# 2. Membership sync needs to run afterwards ldap_sync_paginator(source, UserLDAPSynchronizer)
# 3. Finally, user and group deletions can happen simultaneously + ldap_sync_paginator(source, GroupLDAPSynchronizer),
user_group_sync = ldap_sync_paginator(source, UserLDAPSynchronizer) + ldap_sync_paginator( ),
source, GroupLDAPSynchronizer # Membership sync needs to run afterwards
group(
ldap_sync_paginator(source, MembershipLDAPSynchronizer),
),
) )
membership_sync = ldap_sync_paginator(source, MembershipLDAPSynchronizer) task()
user_group_deletion = ldap_sync_paginator(
source, UserLDAPForwardDeletion
) + ldap_sync_paginator(source, GroupLDAPForwardDeletion)
# Celery is buggy with empty groups, so we are careful only to add non-empty groups.
# See https://github.com/celery/celery/issues/9772
task_groups = []
if user_group_sync:
task_groups.append(group(user_group_sync))
if membership_sync:
task_groups.append(group(membership_sync))
if user_group_deletion:
task_groups.append(group(user_group_deletion))
all_tasks = chain(task_groups)
all_tasks()
def ldap_sync_paginator(source: LDAPSource, sync: type[BaseLDAPSynchronizer]) -> list: def ldap_sync_paginator(source: LDAPSource, sync: type[BaseLDAPSynchronizer]) -> list:
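One side of this hunk builds the sync pipeline by wrapping each stage in a Celery group only when that stage actually has tasks, since chaining an empty group can misbehave (see the linked celery issue). A hedged sketch of that guard, with stage names assumed:

from celery import chain, group

def build_sync_pipeline(user_group_sync, membership_sync, deletions):
    """Chain the stages in order, skipping any stage with no tasks."""
    stages = []
    for tasks in (user_group_sync, membership_sync, deletions):
        if tasks:  # never wrap an empty list in group()
            stages.append(group(tasks))
    return chain(stages)

# Usage sketch: build_sync_pipeline(users + groups, memberships, stale_deletions)()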

View File

@ -2,33 +2,6 @@
from ldap3 import MOCK_SYNC, OFFLINE_SLAPD_2_4, Connection, Server from ldap3 import MOCK_SYNC, OFFLINE_SLAPD_2_4, Connection, Server
# The mock modifies these in place, so we have to define them per string
user_in_slapd_dn = "cn=user_in_slapd_cn,ou=users,dc=goauthentik,dc=io"
user_in_slapd_cn = "user_in_slapd_cn"
user_in_slapd_uid = "user_in_slapd_uid"
user_in_slapd_object_class = "person"
user_in_slapd = {
"dn": user_in_slapd_dn,
"attributes": {
"cn": user_in_slapd_cn,
"uid": user_in_slapd_uid,
"objectClass": user_in_slapd_object_class,
},
}
group_in_slapd_dn = "cn=user_in_slapd_cn,ou=groups,dc=goauthentik,dc=io"
group_in_slapd_cn = "group_in_slapd_cn"
group_in_slapd_uid = "group_in_slapd_uid"
group_in_slapd_object_class = "groupOfNames"
group_in_slapd = {
"dn": group_in_slapd_dn,
"attributes": {
"cn": group_in_slapd_cn,
"uid": group_in_slapd_uid,
"objectClass": group_in_slapd_object_class,
"member": [user_in_slapd["dn"]],
},
}
def mock_slapd_connection(password: str) -> Connection: def mock_slapd_connection(password: str) -> Connection:
"""Create mock SLAPD connection""" """Create mock SLAPD connection"""
@ -123,14 +96,5 @@ def mock_slapd_connection(password: str) -> Connection:
"objectClass": "posixAccount", "objectClass": "posixAccount",
}, },
) )
# Known user and group
connection.strategy.add_entry(
user_in_slapd["dn"],
user_in_slapd["attributes"],
)
connection.strategy.add_entry(
group_in_slapd["dn"],
group_in_slapd["attributes"],
)
connection.bind() connection.bind()
return connection return connection

View File

@ -13,26 +13,14 @@ from authentik.events.system_tasks import TaskStatus
from authentik.lib.generators import generate_id, generate_key from authentik.lib.generators import generate_id, generate_key
from authentik.lib.sync.outgoing.exceptions import StopSync from authentik.lib.sync.outgoing.exceptions import StopSync
from authentik.lib.utils.reflection import class_to_path from authentik.lib.utils.reflection import class_to_path
from authentik.sources.ldap.models import ( from authentik.sources.ldap.models import LDAPSource, LDAPSourcePropertyMapping
GroupLDAPSourceConnection,
LDAPSource,
LDAPSourcePropertyMapping,
UserLDAPSourceConnection,
)
from authentik.sources.ldap.sync.forward_delete_users import DELETE_CHUNK_SIZE
from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer
from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer
from authentik.sources.ldap.sync.users import UserLDAPSynchronizer from authentik.sources.ldap.sync.users import UserLDAPSynchronizer
from authentik.sources.ldap.tasks import ldap_sync, ldap_sync_all from authentik.sources.ldap.tasks import ldap_sync, ldap_sync_all
from authentik.sources.ldap.tests.mock_ad import mock_ad_connection from authentik.sources.ldap.tests.mock_ad import mock_ad_connection
from authentik.sources.ldap.tests.mock_freeipa import mock_freeipa_connection from authentik.sources.ldap.tests.mock_freeipa import mock_freeipa_connection
from authentik.sources.ldap.tests.mock_slapd import ( from authentik.sources.ldap.tests.mock_slapd import mock_slapd_connection
group_in_slapd_cn,
group_in_slapd_uid,
mock_slapd_connection,
user_in_slapd_cn,
user_in_slapd_uid,
)
LDAP_PASSWORD = generate_key() LDAP_PASSWORD = generate_key()
@ -269,56 +257,12 @@ class LDAPSyncTests(TestCase):
self.source.group_membership_field = "memberUid" self.source.group_membership_field = "memberUid"
self.source.user_object_filter = "(objectClass=posixAccount)" self.source.user_object_filter = "(objectClass=posixAccount)"
self.source.group_object_filter = "(objectClass=posixGroup)" self.source.group_object_filter = "(objectClass=posixGroup)"
self.source.user_membership_attribute = "uid"
self.source.user_property_mappings.set( self.source.user_property_mappings.set(
[
*LDAPSourcePropertyMapping.objects.filter(
Q(managed__startswith="goauthentik.io/sources/ldap/default")
| Q(managed__startswith="goauthentik.io/sources/ldap/openldap")
).all(),
LDAPSourcePropertyMapping.objects.create(
name="name",
expression='return {"attributes": {"uid": list_flatten(ldap.get("uid"))}}',
),
]
)
self.source.group_property_mappings.set(
LDAPSourcePropertyMapping.objects.filter( LDAPSourcePropertyMapping.objects.filter(
managed="goauthentik.io/sources/ldap/openldap-cn" Q(managed__startswith="goauthentik.io/sources/ldap/default")
| Q(managed__startswith="goauthentik.io/sources/ldap/openldap")
) )
) )
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
self.source.save()
user_sync = UserLDAPSynchronizer(self.source)
user_sync.sync_full()
group_sync = GroupLDAPSynchronizer(self.source)
group_sync.sync_full()
membership_sync = MembershipLDAPSynchronizer(self.source)
membership_sync.sync_full()
# Test if membership mapping based on memberUid works.
posix_group = Group.objects.filter(name="group-posix").first()
self.assertTrue(posix_group.users.filter(name="user-posix").exists())
def test_sync_groups_openldap_posix_group_nonstandard_membership_attribute(self):
"""Test posix group sync"""
self.source.object_uniqueness_field = "cn"
self.source.group_membership_field = "memberUid"
self.source.user_object_filter = "(objectClass=posixAccount)"
self.source.group_object_filter = "(objectClass=posixGroup)"
self.source.user_membership_attribute = "cn"
self.source.user_property_mappings.set(
[
*LDAPSourcePropertyMapping.objects.filter(
Q(managed__startswith="goauthentik.io/sources/ldap/default")
| Q(managed__startswith="goauthentik.io/sources/ldap/openldap")
).all(),
LDAPSourcePropertyMapping.objects.create(
name="name",
expression='return {"attributes": {"cn": list_flatten(ldap.get("cn"))}}',
),
]
)
self.source.group_property_mappings.set( self.source.group_property_mappings.set(
LDAPSourcePropertyMapping.objects.filter( LDAPSourcePropertyMapping.objects.filter(
managed="goauthentik.io/sources/ldap/openldap-cn" managed="goauthentik.io/sources/ldap/openldap-cn"
@ -364,160 +308,3 @@ class LDAPSyncTests(TestCase):
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync_all.delay().get() ldap_sync_all.delay().get()
def test_user_deletion(self):
"""Test user deletion"""
user = User.objects.create_user(username="not-in-the-source")
UserLDAPSourceConnection.objects.create(
user=user, source=self.source, identifier="not-in-the-source"
)
self.source.object_uniqueness_field = "uid"
self.source.group_object_filter = "(objectClass=groupOfNames)"
self.source.delete_not_found_objects = True
self.source.save()
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync_all.delay().get()
self.assertFalse(User.objects.filter(username="not-in-the-source").exists())
def test_user_deletion_still_in_source(self):
"""Test that user is not deleted if it's still in the source"""
username = user_in_slapd_cn
identifier = user_in_slapd_uid
user = User.objects.create_user(username=username)
UserLDAPSourceConnection.objects.create(
user=user, source=self.source, identifier=identifier
)
self.source.object_uniqueness_field = "uid"
self.source.group_object_filter = "(objectClass=groupOfNames)"
self.source.delete_not_found_objects = True
self.source.save()
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync_all.delay().get()
self.assertTrue(User.objects.filter(username=username).exists())
def test_user_deletion_no_sync(self):
"""Test that user is not deleted if sync_users is False"""
user = User.objects.create_user(username="not-in-the-source")
UserLDAPSourceConnection.objects.create(
user=user, source=self.source, identifier="not-in-the-source"
)
self.source.object_uniqueness_field = "uid"
self.source.group_object_filter = "(objectClass=groupOfNames)"
self.source.delete_not_found_objects = True
self.source.sync_users = False
self.source.save()
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync_all.delay().get()
self.assertTrue(User.objects.filter(username="not-in-the-source").exists())
def test_user_deletion_no_delete(self):
"""Test that user is not deleted if delete_not_found_objects is False"""
user = User.objects.create_user(username="not-in-the-source")
UserLDAPSourceConnection.objects.create(
user=user, source=self.source, identifier="not-in-the-source"
)
self.source.object_uniqueness_field = "uid"
self.source.group_object_filter = "(objectClass=groupOfNames)"
self.source.save()
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync_all.delay().get()
self.assertTrue(User.objects.filter(username="not-in-the-source").exists())
def test_group_deletion(self):
"""Test group deletion"""
group = Group.objects.create(name="not-in-the-source")
GroupLDAPSourceConnection.objects.create(
group=group, source=self.source, identifier="not-in-the-source"
)
self.source.object_uniqueness_field = "uid"
self.source.group_object_filter = "(objectClass=groupOfNames)"
self.source.delete_not_found_objects = True
self.source.save()
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync_all.delay().get()
self.assertFalse(Group.objects.filter(name="not-in-the-source").exists())
def test_group_deletion_still_in_source(self):
"""Test that group is not deleted if it's still in the source"""
groupname = group_in_slapd_cn
identifier = group_in_slapd_uid
group = Group.objects.create(name=groupname)
GroupLDAPSourceConnection.objects.create(
group=group, source=self.source, identifier=identifier
)
self.source.object_uniqueness_field = "uid"
self.source.group_object_filter = "(objectClass=groupOfNames)"
self.source.delete_not_found_objects = True
self.source.save()
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync_all.delay().get()
self.assertTrue(Group.objects.filter(name=groupname).exists())
def test_group_deletion_no_sync(self):
"""Test that group is not deleted if sync_groups is False"""
group = Group.objects.create(name="not-in-the-source")
GroupLDAPSourceConnection.objects.create(
group=group, source=self.source, identifier="not-in-the-source"
)
self.source.object_uniqueness_field = "uid"
self.source.group_object_filter = "(objectClass=groupOfNames)"
self.source.delete_not_found_objects = True
self.source.sync_groups = False
self.source.save()
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync_all.delay().get()
self.assertTrue(Group.objects.filter(name="not-in-the-source").exists())
def test_group_deletion_no_delete(self):
"""Test that group is not deleted if delete_not_found_objects is False"""
group = Group.objects.create(name="not-in-the-source")
GroupLDAPSourceConnection.objects.create(
group=group, source=self.source, identifier="not-in-the-source"
)
self.source.object_uniqueness_field = "uid"
self.source.group_object_filter = "(objectClass=groupOfNames)"
self.source.save()
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync_all.delay().get()
self.assertTrue(Group.objects.filter(name="not-in-the-source").exists())
def test_batch_deletion(self):
"""Test batch deletion"""
BATCH_SIZE = DELETE_CHUNK_SIZE + 1
for i in range(BATCH_SIZE):
user = User.objects.create_user(username=f"not-in-the-source-{i}")
group = Group.objects.create(name=f"not-in-the-source-{i}")
group.users.add(user)
UserLDAPSourceConnection.objects.create(
user=user, source=self.source, identifier=f"not-in-the-source-{i}-user"
)
GroupLDAPSourceConnection.objects.create(
group=group, source=self.source, identifier=f"not-in-the-source-{i}-group"
)
self.source.object_uniqueness_field = "uid"
self.source.group_object_filter = "(objectClass=groupOfNames)"
self.source.delete_not_found_objects = True
self.source.save()
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync_all.delay().get()
self.assertFalse(User.objects.filter(username__startswith="not-in-the-source").exists())
self.assertFalse(Group.objects.filter(name__startswith="not-in-the-source").exists())

View File

@ -9,7 +9,6 @@ from django.http.response import HttpResponseBadRequest
from django.shortcuts import get_object_or_404, redirect from django.shortcuts import get_object_or_404, redirect
from django.utils.decorators import method_decorator from django.utils.decorators import method_decorator
from django.utils.http import urlencode from django.utils.http import urlencode
from django.utils.translation import gettext as _
from django.views import View from django.views import View
from django.views.decorators.csrf import csrf_exempt from django.views.decorators.csrf import csrf_exempt
from structlog.stdlib import get_logger from structlog.stdlib import get_logger
@ -129,9 +128,7 @@ class InitiateView(View):
# otherwise we default to POST_AUTO, with direct redirect # otherwise we default to POST_AUTO, with direct redirect
if source.binding_type == SAMLBindingTypes.POST: if source.binding_type == SAMLBindingTypes.POST:
injected_stages.append(in_memory_stage(ConsentStageView)) injected_stages.append(in_memory_stage(ConsentStageView))
plan_kwargs[PLAN_CONTEXT_CONSENT_HEADER] = _( plan_kwargs[PLAN_CONTEXT_CONSENT_HEADER] = f"Continue to {source.name}"
"Continue to {source_name}".format(source_name=source.name)
)
injected_stages.append(in_memory_stage(AutosubmitStageView)) injected_stages.append(in_memory_stage(AutosubmitStageView))
return self.handle_login_flow( return self.handle_login_flow(
source, source,

View File

@ -97,8 +97,7 @@ class GroupsView(SCIMObjectView):
self.logger.warning("Invalid group member", exc=exc) self.logger.warning("Invalid group member", exc=exc)
continue continue
query |= Q(uuid=member.value) query |= Q(uuid=member.value)
if query: group.users.set(User.objects.filter(query))
group.users.set(User.objects.filter(query))
if not connection: if not connection:
connection, _ = SCIMSourceGroup.objects.get_or_create( connection, _ = SCIMSourceGroup.objects.get_or_create(
source=self.source, source=self.source,

View File

@ -151,7 +151,9 @@ class AuthenticatorValidateStageWebAuthnTests(FlowTestCase):
webauthn_user_verification=UserVerification.PREFERRED, webauthn_user_verification=UserVerification.PREFERRED,
) )
stage.webauthn_allowed_device_types.set( stage.webauthn_allowed_device_types.set(
WebAuthnDeviceType.objects.filter(description="YubiKey 5 Series") WebAuthnDeviceType.objects.filter(
description="Android Authenticator with SafetyNet Attestation"
)
) )
session = self.client.session session = self.client.session
plan = FlowPlan(flow_pk=flow.pk.hex) plan = FlowPlan(flow_pk=flow.pk.hex)
@ -337,7 +339,9 @@ class AuthenticatorValidateStageWebAuthnTests(FlowTestCase):
device_classes=[DeviceClasses.WEBAUTHN], device_classes=[DeviceClasses.WEBAUTHN],
) )
stage.webauthn_allowed_device_types.set( stage.webauthn_allowed_device_types.set(
WebAuthnDeviceType.objects.filter(description="YubiKey 5 Series") WebAuthnDeviceType.objects.filter(
description="Android Authenticator with SafetyNet Attestation"
)
) )
session = self.client.session session = self.client.session
plan = FlowPlan(flow_pk=flow.pk.hex) plan = FlowPlan(flow_pk=flow.pk.hex)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -141,7 +141,9 @@ class TestAuthenticatorWebAuthnStage(FlowTestCase):
"""Test registration with restricted devices (fail)""" """Test registration with restricted devices (fail)"""
webauthn_mds_import.delay(force=True).get() webauthn_mds_import.delay(force=True).get()
self.stage.device_type_restrictions.set( self.stage.device_type_restrictions.set(
WebAuthnDeviceType.objects.filter(description="YubiKey 5 Series") WebAuthnDeviceType.objects.filter(
description="Android Authenticator with SafetyNet Attestation"
)
) )
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]) plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])

View File

@ -4,8 +4,6 @@ from uuid import uuid4
from django.http import HttpRequest, HttpResponse from django.http import HttpRequest, HttpResponse
from django.utils.timezone import now from django.utils.timezone import now
from django.utils.translation import gettext as _
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField from rest_framework.fields import CharField
from authentik.core.api.utils import PassiveSerializer from authentik.core.api.utils import PassiveSerializer
@ -49,11 +47,6 @@ class ConsentChallengeResponse(ChallengeResponse):
component = CharField(default="ak-stage-consent") component = CharField(default="ak-stage-consent")
token = CharField(required=True) token = CharField(required=True)
def validate_token(self, token: str):
if token != self.stage.executor.request.session[SESSION_KEY_CONSENT_TOKEN]:
raise ValidationError(_("Invalid consent token, re-showing prompt"))
return token
class ConsentStageView(ChallengeStageView): class ConsentStageView(ChallengeStageView):
"""Simple consent checker.""" """Simple consent checker."""
@ -127,6 +120,9 @@ class ConsentStageView(ChallengeStageView):
return super().get(request, *args, **kwargs) return super().get(request, *args, **kwargs)
def challenge_valid(self, response: ChallengeResponse) -> HttpResponse: def challenge_valid(self, response: ChallengeResponse) -> HttpResponse:
if response.data["token"] != self.request.session[SESSION_KEY_CONSENT_TOKEN]:
self.logger.info("Invalid consent token, re-showing prompt")
return self.get(self.request)
if self.should_always_prompt(): if self.should_always_prompt():
return self.executor.stage_ok() return self.executor.stage_ok()
current_stage: ConsentStage = self.executor.current_stage current_stage: ConsentStage = self.executor.current_stage
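One side of this hunk validates the consent token in a DRF field-level validator and surfaces a ValidationError, while the other re-renders the prompt from challenge_valid. A small sketch of the field-validator approach, independent of the project's classes and with the expected token passed in explicitly as an assumption:

from rest_framework import serializers

class ConsentResponse(serializers.Serializer):
    token = serializers.CharField(required=True)

    def __init__(self, *args, expected_token: str = "", **kwargs):
        super().__init__(*args, **kwargs)
        self._expected_token = expected_token

    def validate_token(self, token: str) -> str:
        # DRF calls validate_<field> automatically during is_valid()
        if token != self._expected_token:
            raise serializers.ValidationError("Invalid consent token, re-showing prompt")
        return token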

View File

@ -17,7 +17,6 @@ from authentik.flows.views.executor import SESSION_KEY_PLAN
from authentik.lib.generators import generate_id from authentik.lib.generators import generate_id
from authentik.stages.consent.models import ConsentMode, ConsentStage, UserConsent from authentik.stages.consent.models import ConsentMode, ConsentStage, UserConsent
from authentik.stages.consent.stage import ( from authentik.stages.consent.stage import (
PLAN_CONTEXT_CONSENT_HEADER,
PLAN_CONTEXT_CONSENT_PERMISSIONS, PLAN_CONTEXT_CONSENT_PERMISSIONS,
SESSION_KEY_CONSENT_TOKEN, SESSION_KEY_CONSENT_TOKEN,
) )
@ -34,40 +33,6 @@ class TestConsentStage(FlowTestCase):
slug=generate_id(), slug=generate_id(),
) )
def test_mismatched_token(self):
"""Test incorrect token"""
flow = create_test_flow(FlowDesignation.AUTHENTICATION)
stage = ConsentStage.objects.create(name=generate_id(), mode=ConsentMode.ALWAYS_REQUIRE)
binding = FlowStageBinding.objects.create(target=flow, stage=stage, order=2)
plan = FlowPlan(flow_pk=flow.pk.hex, bindings=[binding], markers=[StageMarker()])
session = self.client.session
session[SESSION_KEY_PLAN] = plan
session.save()
response = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
)
self.assertEqual(response.status_code, 200)
session = self.client.session
response = self.client.post(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
{
"token": generate_id(),
},
)
self.assertEqual(response.status_code, 200)
self.assertStageResponse(
response,
flow,
component="ak-stage-consent",
response_errors={
"token": [{"string": "Invalid consent token, re-showing prompt", "code": "invalid"}]
},
)
self.assertFalse(UserConsent.objects.filter(user=self.user).exists())
def test_always_required(self): def test_always_required(self):
"""Test always required consent""" """Test always required consent"""
flow = create_test_flow(FlowDesignation.AUTHENTICATION) flow = create_test_flow(FlowDesignation.AUTHENTICATION)
@ -193,7 +158,6 @@ class TestConsentStage(FlowTestCase):
context={ context={
PLAN_CONTEXT_APPLICATION: self.application, PLAN_CONTEXT_APPLICATION: self.application,
PLAN_CONTEXT_CONSENT_PERMISSIONS: [PermissionDict(id="foo", name="foo-desc")], PLAN_CONTEXT_CONSENT_PERMISSIONS: [PermissionDict(id="foo", name="foo-desc")],
PLAN_CONTEXT_CONSENT_HEADER: "test header",
}, },
) )
session = self.client.session session = self.client.session

View File

@ -1,38 +0,0 @@
from base64 import b64encode
from copy import deepcopy
from pickle import dumps # nosec
from django.utils.translation import gettext as _
from authentik.flows.models import FlowToken, in_memory_stage
from authentik.flows.planner import PLAN_CONTEXT_IS_RESTORED, FlowPlan
from authentik.stages.consent.stage import PLAN_CONTEXT_CONSENT_HEADER, ConsentStageView
def pickle_flow_token_for_email(plan: FlowPlan):
"""Insert a consent stage into the flow plan and pickle it for a FlowToken,
to be sent via Email. This is to prevent automated email scanners, which sometimes
open links in emails in a full browser from breaking the link."""
plan_copy = deepcopy(plan)
plan_copy.insert_stage(in_memory_stage(EmailTokenRevocationConsentStageView), index=0)
plan_copy.context[PLAN_CONTEXT_CONSENT_HEADER] = _("Continue to confirm this email address.")
data = dumps(plan_copy)
return b64encode(data).decode()
class EmailTokenRevocationConsentStageView(ConsentStageView):
def get(self, request, *args, **kwargs):
token: FlowToken = self.executor.plan.context[PLAN_CONTEXT_IS_RESTORED]
try:
token.refresh_from_db()
except FlowToken.DoesNotExist:
return self.executor.stage_invalid(
_("Link was already used, please request a new link.")
)
return super().get(request, *args, **kwargs)
def challenge_valid(self, response):
token: FlowToken = self.executor.plan.context[PLAN_CONTEXT_IS_RESTORED]
token.delete()
return super().challenge_valid(response)

View File

@ -23,7 +23,6 @@ from authentik.flows.stage import ChallengeStageView
from authentik.flows.views.executor import QS_KEY_TOKEN, QS_QUERY from authentik.flows.views.executor import QS_KEY_TOKEN, QS_QUERY
from authentik.lib.utils.errors import exception_to_string from authentik.lib.utils.errors import exception_to_string
from authentik.lib.utils.time import timedelta_from_string from authentik.lib.utils.time import timedelta_from_string
from authentik.stages.email.flow import pickle_flow_token_for_email
from authentik.stages.email.models import EmailStage from authentik.stages.email.models import EmailStage
from authentik.stages.email.tasks import send_mails from authentik.stages.email.tasks import send_mails
from authentik.stages.email.utils import TemplateEmailMessage from authentik.stages.email.utils import TemplateEmailMessage
@ -87,8 +86,7 @@ class EmailStageView(ChallengeStageView):
user=pending_user, user=pending_user,
identifier=identifier, identifier=identifier,
flow=self.executor.flow, flow=self.executor.flow,
_plan=pickle_flow_token_for_email(self.executor.plan), _plan=FlowToken.pickle(self.executor.plan),
revoke_on_execution=False,
) )
token = tokens.first() token = tokens.first()
# Check if token is expired and rotate key if so # Check if token is expired and rotate key if so

View File

@ -100,11 +100,9 @@ def send_mail(
# Because we use the Message-ID as UID for the task, manually assign it # Because we use the Message-ID as UID for the task, manually assign it
message_object.extra_headers["Message-ID"] = message_id message_object.extra_headers["Message-ID"] = message_id
# Add the logo if it is used in the email body (we can't add it in the # Add the logo (we can't add it in the previous message since MIMEImage
# previous message since MIMEImage can't be converted to json) # can't be converted to json)
body = get_email_body(message_object) message_object.attach(logo_data())
if "cid:logo" in body:
message_object.attach(logo_data())
if ( if (
message_object.to message_object.to

View File

@ -96,7 +96,7 @@
<table width="100%" style="background-color: #FFFFFF; border-spacing: 0; margin-top: 15px;"> <table width="100%" style="background-color: #FFFFFF; border-spacing: 0; margin-top: 15px;">
<tr height="80"> <tr height="80">
<td align="center" style="padding: 20px 0;"> <td align="center" style="padding: 20px 0;">
<img src="{% block logo_url %}cid:logo{% endblock %}" border="0=" alt="authentik logo" class="flexibleImage logo"> <img src="{% block logo_url %}cid:logo.png{% endblock %}" border="0=" alt="authentik logo" class="flexibleImage logo">
</td> </td>
</tr> </tr>
{% block content %} {% block content %}

View File

@ -174,5 +174,5 @@ class TestEmailStageSending(FlowTestCase):
response = self.client.post(url) response = self.client.post(url)
response = self.client.post(url) response = self.client.post(url)
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
self.assertGreaterEqual(len(mail.outbox), 1) self.assertTrue(len(mail.outbox) >= 1)
self.assertEqual(mail.outbox[0].subject, "authentik") self.assertEqual(mail.outbox[0].subject, "authentik")

View File

@ -17,7 +17,6 @@ from authentik.flows.tests import FlowTestCase
from authentik.flows.views.executor import QS_KEY_TOKEN, SESSION_KEY_PLAN, FlowExecutorView from authentik.flows.views.executor import QS_KEY_TOKEN, SESSION_KEY_PLAN, FlowExecutorView
from authentik.lib.config import CONFIG from authentik.lib.config import CONFIG
from authentik.lib.generators import generate_id from authentik.lib.generators import generate_id
from authentik.stages.consent.stage import SESSION_KEY_CONSENT_TOKEN
from authentik.stages.email.models import EmailStage from authentik.stages.email.models import EmailStage
from authentik.stages.email.stage import PLAN_CONTEXT_EMAIL_OVERRIDE, EmailStageView from authentik.stages.email.stage import PLAN_CONTEXT_EMAIL_OVERRIDE, EmailStageView
@ -161,17 +160,6 @@ class TestEmailStage(FlowTestCase):
kwargs={"flow_slug": self.flow.slug}, kwargs={"flow_slug": self.flow.slug},
) )
) )
self.assertStageResponse(response, self.flow, component="ak-stage-consent")
response = self.client.post(
reverse(
"authentik_api:flow-executor",
kwargs={"flow_slug": self.flow.slug},
),
data={
"token": self.client.session[SESSION_KEY_CONSENT_TOKEN],
},
follow=True,
)
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
self.assertStageRedirects(response, reverse("authentik_core:root-redirect")) self.assertStageRedirects(response, reverse("authentik_core:root-redirect"))
@ -194,7 +182,6 @@ class TestEmailStage(FlowTestCase):
# Set flow token user to a different user # Set flow token user to a different user
token: FlowToken = FlowToken.objects.get(user=self.user) token: FlowToken = FlowToken.objects.get(user=self.user)
token.user = create_test_admin_user() token.user = create_test_admin_user()
token.revoke_on_execution = True
token.save() token.save()
with patch("authentik.flows.views.executor.FlowExecutorView.cancel", MagicMock()): with patch("authentik.flows.views.executor.FlowExecutorView.cancel", MagicMock()):

View File

@ -19,8 +19,7 @@ def logo_data() -> MIMEImage:
path = Path("web/dist/assets/icons/icon_left_brand.png") path = Path("web/dist/assets/icons/icon_left_brand.png")
with open(path, "rb") as _logo_file: with open(path, "rb") as _logo_file:
logo = MIMEImage(_logo_file.read()) logo = MIMEImage(_logo_file.read())
logo.add_header("Content-ID", "<logo>") logo.add_header("Content-ID", "logo.png")
logo.add_header("Content-Disposition", "inline", filename="logo.png")
return logo return logo
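The hunk above switches between a bare Content-ID of "logo.png" and a bracketed "<logo>" with an inline Content-Disposition header; the bracketed form is what lets an HTML body reference the attachment as cid:logo. A small generic sketch of that MIME pattern, with the file path and body as placeholders:

from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

message = MIMEMultipart("related")
message.attach(MIMEText('<img src="cid:logo" alt="logo">', "html"))
with open("logo.png", "rb") as handle:  # placeholder path
    logo = MIMEImage(handle.read())
# Angle brackets are required for the cid: reference to resolve in most clients
logo.add_header("Content-ID", "<logo>")
logo.add_header("Content-Disposition", "inline", filename="logo.png")
message.attach(logo)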

View File

@ -101,9 +101,9 @@ class BoundSessionMiddleware(SessionMiddleware):
SESSION_KEY_BINDING_GEO, GeoIPBinding.NO_BINDING SESSION_KEY_BINDING_GEO, GeoIPBinding.NO_BINDING
) )
if configured_binding_net != NetworkBinding.NO_BINDING: if configured_binding_net != NetworkBinding.NO_BINDING:
BoundSessionMiddleware.recheck_session_net(configured_binding_net, last_ip, new_ip) self.recheck_session_net(configured_binding_net, last_ip, new_ip)
if configured_binding_geo != GeoIPBinding.NO_BINDING: if configured_binding_geo != GeoIPBinding.NO_BINDING:
BoundSessionMiddleware.recheck_session_geo(configured_binding_geo, last_ip, new_ip) self.recheck_session_geo(configured_binding_geo, last_ip, new_ip)
# If we got to this point without any error being raised, we need to # If we got to this point without any error being raised, we need to
# update the last saved IP to the current one # update the last saved IP to the current one
if SESSION_KEY_BINDING_NET in request.session or SESSION_KEY_BINDING_GEO in request.session: if SESSION_KEY_BINDING_NET in request.session or SESSION_KEY_BINDING_GEO in request.session:
@ -111,8 +111,7 @@ class BoundSessionMiddleware(SessionMiddleware):
# (== basically requires the user to be logged in) # (== basically requires the user to be logged in)
request.session[request.session.model.Keys.LAST_IP] = new_ip request.session[request.session.model.Keys.LAST_IP] = new_ip
@staticmethod def recheck_session_net(self, binding: NetworkBinding, last_ip: str, new_ip: str):
def recheck_session_net(binding: NetworkBinding, last_ip: str, new_ip: str):
"""Check network/ASN binding""" """Check network/ASN binding"""
last_asn = ASN_CONTEXT_PROCESSOR.asn(last_ip) last_asn = ASN_CONTEXT_PROCESSOR.asn(last_ip)
new_asn = ASN_CONTEXT_PROCESSOR.asn(new_ip) new_asn = ASN_CONTEXT_PROCESSOR.asn(new_ip)
@ -159,8 +158,7 @@ class BoundSessionMiddleware(SessionMiddleware):
new_ip, new_ip,
) )
@staticmethod def recheck_session_geo(self, binding: GeoIPBinding, last_ip: str, new_ip: str):
def recheck_session_geo(binding: GeoIPBinding, last_ip: str, new_ip: str):
"""Check GeoIP binding""" """Check GeoIP binding"""
last_geo = GEOIP_CONTEXT_PROCESSOR.city(last_ip) last_geo = GEOIP_CONTEXT_PROCESSOR.city(last_ip)
new_geo = GEOIP_CONTEXT_PROCESSOR.city(new_ip) new_geo = GEOIP_CONTEXT_PROCESSOR.city(new_ip)
@ -181,8 +179,8 @@ class BoundSessionMiddleware(SessionMiddleware):
if last_geo.continent != new_geo.continent: if last_geo.continent != new_geo.continent:
raise SessionBindingBroken( raise SessionBindingBroken(
"geoip.continent", "geoip.continent",
last_geo.continent.to_dict(), last_geo.continent,
new_geo.continent.to_dict(), new_geo.continent,
last_ip, last_ip,
new_ip, new_ip,
) )
@ -194,8 +192,8 @@ class BoundSessionMiddleware(SessionMiddleware):
if last_geo.country != new_geo.country: if last_geo.country != new_geo.country:
raise SessionBindingBroken( raise SessionBindingBroken(
"geoip.country", "geoip.country",
last_geo.country.to_dict(), last_geo.country,
new_geo.country.to_dict(), new_geo.country,
last_ip, last_ip,
new_ip, new_ip,
) )
@ -204,8 +202,8 @@ class BoundSessionMiddleware(SessionMiddleware):
if last_geo.city != new_geo.city: if last_geo.city != new_geo.city:
raise SessionBindingBroken( raise SessionBindingBroken(
"geoip.city", "geoip.city",
last_geo.city.to_dict(), last_geo.city,
new_geo.city.to_dict(), new_geo.city,
last_ip, last_ip,
new_ip, new_ip,
) )

View File

@ -11,7 +11,7 @@ from rest_framework.fields import BooleanField, CharField
from authentik.core.models import Session, User from authentik.core.models import Session, User
from authentik.events.middleware import audit_ignore from authentik.events.middleware import audit_ignore
from authentik.flows.challenge import ChallengeResponse, WithUserInfoChallenge from authentik.flows.challenge import ChallengeResponse, WithUserInfoChallenge
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, PLAN_CONTEXT_SOURCE
from authentik.flows.stage import ChallengeStageView from authentik.flows.stage import ChallengeStageView
from authentik.lib.utils.time import timedelta_from_string from authentik.lib.utils.time import timedelta_from_string
from authentik.root.middleware import ClientIPMiddleware from authentik.root.middleware import ClientIPMiddleware
@ -108,6 +108,10 @@ class UserLoginStageView(ChallengeStageView):
flow_slug=self.executor.flow.slug, flow_slug=self.executor.flow.slug,
session_duration=delta, session_duration=delta,
) )
# Only show success message if we don't have a source in the flow
# as sources show their own success messages
if not self.executor.plan.context.get(PLAN_CONTEXT_SOURCE, None):
messages.success(self.request, _("Successfully logged in!"))
if self.executor.current_stage.terminate_other_sessions: if self.executor.current_stage.terminate_other_sessions:
Session.objects.filter( Session.objects.filter(
authenticatedsession__user=user, authenticatedsession__user=user,

View File

@ -3,7 +3,6 @@
from time import sleep from time import sleep
from unittest.mock import patch from unittest.mock import patch
from django.http import HttpRequest
from django.urls import reverse from django.urls import reverse
from django.utils.timezone import now from django.utils.timezone import now
@ -18,12 +17,7 @@ from authentik.flows.views.executor import SESSION_KEY_PLAN
from authentik.lib.generators import generate_id from authentik.lib.generators import generate_id
from authentik.lib.utils.time import timedelta_from_string from authentik.lib.utils.time import timedelta_from_string
from authentik.root.middleware import ClientIPMiddleware from authentik.root.middleware import ClientIPMiddleware
from authentik.stages.user_login.middleware import ( from authentik.stages.user_login.models import UserLoginStage
BoundSessionMiddleware,
SessionBindingBroken,
logout_extra,
)
from authentik.stages.user_login.models import GeoIPBinding, NetworkBinding, UserLoginStage
class TestUserLoginStage(FlowTestCase): class TestUserLoginStage(FlowTestCase):
@ -198,52 +192,3 @@ class TestUserLoginStage(FlowTestCase):
self.assertStageRedirects(response, reverse("authentik_core:root-redirect")) self.assertStageRedirects(response, reverse("authentik_core:root-redirect"))
response = self.client.get(reverse("authentik_api:application-list")) response = self.client.get(reverse("authentik_api:application-list"))
self.assertEqual(response.status_code, 403) self.assertEqual(response.status_code, 403)
def test_binding_net_break_log(self):
"""Test logout_extra with exception"""
# IPs from https://github.com/maxmind/MaxMind-DB/blob/main/source-data/GeoLite2-ASN-Test.json
for args, expect in [
[[NetworkBinding.BIND_ASN, "8.8.8.8", "8.8.8.8"], ["network.missing"]],
[[NetworkBinding.BIND_ASN, "1.0.0.1", "1.128.0.1"], ["network.asn"]],
[
[NetworkBinding.BIND_ASN_NETWORK, "12.81.96.1", "12.81.128.1"],
["network.asn_network"],
],
[[NetworkBinding.BIND_ASN_NETWORK_IP, "1.0.0.1", "1.0.0.2"], ["network.ip"]],
]:
with self.subTest(args[0]):
with self.assertRaises(SessionBindingBroken) as cm:
BoundSessionMiddleware.recheck_session_net(*args)
self.assertEqual(cm.exception.reason, expect[0])
# Ensure the request can be logged without throwing errors
self.client.force_login(self.user)
request = HttpRequest()
request.session = self.client.session
request.user = self.user
logout_extra(request, cm.exception)
def test_binding_geo_break_log(self):
"""Test logout_extra with exception"""
# IPs from https://github.com/maxmind/MaxMind-DB/blob/main/source-data/GeoLite2-City-Test.json
for args, expect in [
[[GeoIPBinding.BIND_CONTINENT, "8.8.8.8", "8.8.8.8"], ["geoip.missing"]],
[[GeoIPBinding.BIND_CONTINENT, "2.125.160.216", "67.43.156.1"], ["geoip.continent"]],
[
[GeoIPBinding.BIND_CONTINENT_COUNTRY, "81.2.69.142", "89.160.20.112"],
["geoip.country"],
],
[
[GeoIPBinding.BIND_CONTINENT_COUNTRY_CITY, "2.125.160.216", "81.2.69.142"],
["geoip.city"],
],
]:
with self.subTest(args[0]):
with self.assertRaises(SessionBindingBroken) as cm:
BoundSessionMiddleware.recheck_session_geo(*args)
self.assertEqual(cm.exception.reason, expect[0])
# Ensure the request can be logged without throwing errors
self.client.force_login(self.user)
request = HttpRequest()
request.session = self.client.session
request.user = self.user
logout_extra(request, cm.exception)

View File

@ -2,7 +2,7 @@
"$schema": "http://json-schema.org/draft-07/schema", "$schema": "http://json-schema.org/draft-07/schema",
"$id": "https://goauthentik.io/blueprints/schema.json", "$id": "https://goauthentik.io/blueprints/schema.json",
"type": "object", "type": "object",
"title": "authentik 2025.6.3 Blueprint schema", "title": "authentik 2025.4.1 Blueprint schema",
"required": [ "required": [
"version", "version",
"entries" "entries"
@ -8147,12 +8147,6 @@
"title": "Group membership field", "title": "Group membership field",
"description": "Field which contains members of a group." "description": "Field which contains members of a group."
}, },
"user_membership_attribute": {
"type": "string",
"minLength": 1,
"title": "User membership attribute",
"description": "Attribute which matches the value of `group_membership_field`."
},
"object_uniqueness_field": { "object_uniqueness_field": {
"type": "string", "type": "string",
"minLength": 1, "minLength": 1,
@ -8186,11 +8180,6 @@
"type": "boolean", "type": "boolean",
"title": "Lookup groups from user", "title": "Lookup groups from user",
"description": "Lookup group membership based on a user attribute instead of a group attribute. This allows nested group resolution on systems like FreeIPA and Active Directory" "description": "Lookup group membership based on a user attribute instead of a group attribute. This allows nested group resolution on systems like FreeIPA and Active Directory"
},
"delete_not_found_objects": {
"type": "boolean",
"title": "Delete not found objects",
"description": "Delete authentik users and groups which were previously supplied by this source, but are now missing from it."
} }
}, },
"required": [] "required": []

View File

@ -31,7 +31,7 @@ services:
volumes: volumes:
- redis:/data - redis:/data
server: server:
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.6.3} image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.4.1}
restart: unless-stopped restart: unless-stopped
command: server command: server
environment: environment:
@ -55,7 +55,7 @@ services:
redis: redis:
condition: service_healthy condition: service_healthy
worker: worker:
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.6.3} image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.4.1}
restart: unless-stopped restart: unless-stopped
command: worker command: worker
environment: environment:

go.mod (2 changed lines)
View File

@ -27,7 +27,7 @@ require (
github.com/spf13/cobra v1.9.1 github.com/spf13/cobra v1.9.1
github.com/stretchr/testify v1.10.0 github.com/stretchr/testify v1.10.0
github.com/wwt/guac v1.3.2 github.com/wwt/guac v1.3.2
goauthentik.io/api/v3 v3.2025041.4 goauthentik.io/api/v3 v3.2025041.2
golang.org/x/exp v0.0.0-20230210204819-062eb4c674ab golang.org/x/exp v0.0.0-20230210204819-062eb4c674ab
golang.org/x/oauth2 v0.30.0 golang.org/x/oauth2 v0.30.0
golang.org/x/sync v0.14.0 golang.org/x/sync v0.14.0

go.sum (4 changed lines)
View File

@@ -290,8 +290,8 @@ go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y
 go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
 go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
 go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
-goauthentik.io/api/v3 v3.2025041.4 h1:cGqzWYnUHrWDoaXWDpIL/kWnX9sFrIhkYDye0P0OEAo=
-goauthentik.io/api/v3 v3.2025041.4/go.mod h1:zz+mEZg8rY/7eEjkMGWJ2DnGqk+zqxuybGCGrR2O4Kw=
+goauthentik.io/api/v3 v3.2025041.2 h1:vFYYnhcDcxL95RczZwhzt3i4LptFXMvIRN+vgf8sQYg=
+goauthentik.io/api/v3 v3.2025041.2/go.mod h1:zz+mEZg8rY/7eEjkMGWJ2DnGqk+zqxuybGCGrR2O4Kw=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=


@@ -33,4 +33,4 @@ func UserAgent() string {
    return fmt.Sprintf("authentik@%s", FullVersion())
 }
-const VERSION = "2025.6.3"
+const VERSION = "2025.4.1"


@@ -28,18 +28,16 @@ func NewSessionBinder(si server.LDAPServerInstance, oldBinder bind.Binder) *Sess
        si:  si,
        log: log.WithField("logger", "authentik.outpost.ldap.binder.session"),
    }
-   if oldBinder != nil {
-       if oldSb, ok := oldBinder.(*SessionBinder); ok {
-           sb.DirectBinder = oldSb.DirectBinder
-           sb.sessions = oldSb.sessions
-           sb.log.Debug("re-initialised session binder")
-           return sb
-       }
+   if oldSb, ok := oldBinder.(*SessionBinder); ok {
+       sb.DirectBinder = oldSb.DirectBinder
+       sb.sessions = oldSb.sessions
+       sb.log.Debug("re-initialised session binder")
+   } else {
+       sb.sessions = ttlcache.New(ttlcache.WithDisableTouchOnHit[Credentials, ldap.LDAPResultCode]())
+       sb.DirectBinder = *direct.NewDirectBinder(si)
+       go sb.sessions.Start()
+       sb.log.Debug("initialised session binder")
    }
-   sb.sessions = ttlcache.New(ttlcache.WithDisableTouchOnHit[Credentials, ldap.LDAPResultCode]())
-   sb.DirectBinder = *direct.NewDirectBinder(si)
-   go sb.sessions.Start()
-   sb.log.Debug("initialised session binder")
    return sb
 }
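A self-contained sketch of the two constructor shapes this hunk moves between: an explicit nil guard with an early return on the `-` side, versus a single type-assertion if/else on the `+` side. The sketch follows the guarded shape; types here are simplified stand-ins (a plain map instead of the outpost's TTL cache), so only the control flow is the point.

```go
package main

import "fmt"

type Binder interface{ Bind(user string) bool }

// SessionBinder caches bind results; the cache should survive a refresh
// when the previous binder is handed to the constructor.
type SessionBinder struct {
	sessions map[string]bool
}

func (s *SessionBinder) Bind(user string) bool { return s.sessions[user] }

// NewSessionBinder mirrors the guarded variant: check for nil first, reuse
// the old cache on a type match and return early, otherwise fall through to
// fresh initialisation. The other variant folds the fresh path into an else.
func NewSessionBinder(old Binder) *SessionBinder {
	sb := &SessionBinder{}
	if old != nil {
		if prev, ok := old.(*SessionBinder); ok {
			sb.sessions = prev.sessions // re-initialise: keep cached results
			return sb
		}
	}
	sb.sessions = map[string]bool{} // first initialisation
	return sb
}

func main() {
	first := NewSessionBinder(nil)
	first.sessions["alice"] = true
	second := NewSessionBinder(first) // cache carried across a refresh
	fmt.Println(second.Bind("alice")) // true
}
```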


@@ -16,7 +16,6 @@ import (
    memorybind "goauthentik.io/internal/outpost/ldap/bind/memory"
    "goauthentik.io/internal/outpost/ldap/constants"
    "goauthentik.io/internal/outpost/ldap/flags"
-   "goauthentik.io/internal/outpost/ldap/search"
    directsearch "goauthentik.io/internal/outpost/ldap/search/direct"
    memorysearch "goauthentik.io/internal/outpost/ldap/search/memory"
 )
@@ -86,11 +85,7 @@ func (ls *LDAPServer) Refresh() error {
            providers[idx].certUUID = *kp
        }
        if *provider.SearchMode.Ptr() == api.LDAPAPIACCESSMODE_CACHED {
-           var oldSearcher search.Searcher
-           if existing != nil {
-               oldSearcher = existing.searcher
-           }
-           providers[idx].searcher = memorysearch.NewMemorySearcher(providers[idx], oldSearcher)
+           providers[idx].searcher = memorysearch.NewMemorySearcher(providers[idx])
        } else if *provider.SearchMode.Ptr() == api.LDAPAPIACCESSMODE_DIRECT {
            providers[idx].searcher = directsearch.NewDirectSearcher(providers[idx])
        }


@@ -31,26 +31,13 @@ type MemorySearcher struct {
    groups []api.Group
 }
-func NewMemorySearcher(si server.LDAPServerInstance, existing search.Searcher) *MemorySearcher {
+func NewMemorySearcher(si server.LDAPServerInstance) *MemorySearcher {
    ms := &MemorySearcher{
        si:  si,
        log: log.WithField("logger", "authentik.outpost.ldap.searcher.memory"),
        ds:  direct.NewDirectSearcher(si),
    }
-   if existing != nil {
-       if ems, ok := existing.(*MemorySearcher); ok {
-           ems.si = si
-           ems.fetch()
-           ems.log.Debug("re-initialised memory searcher")
-           return ems
-       }
-   }
-   ms.fetch()
    ms.log.Debug("initialised memory searcher")
-   return ms
-}
-func (ms *MemorySearcher) fetch() {
    // Error is not handled here, we get an empty/truncated list and the error is logged
    users, _ := ak.Paginator(ms.si.GetAPIClient().CoreApi.CoreUsersList(context.TODO()).IncludeGroups(true), ak.PaginatorOptions{
        PageSize: 100,
@@ -62,6 +49,7 @@ func (ms *MemorySearcher) fetch() {
        Logger: ms.log,
    })
    ms.groups = groups
+   return ms
 }
 func (ms *MemorySearcher) SearchBase(req *search.Request) (ldap.ServerSearchResult, error) {
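A reduced illustration of the constructor difference in this hunk: on the `-` side, loading users and groups lives in a separate fetch method so it can run both on first construction and when an existing searcher is re-initialised; on the `+` side the load happens inline in the constructor. The API client and pagination are stubbed out below, so this is only a sketch of the shape, not the outpost code.

```go
package main

import "fmt"

type memorySearcher struct {
	users  []string
	groups []string
}

// newMemorySearcher reuses an existing searcher when one is supplied and
// simply refreshes its data in place; otherwise it builds a new one.
func newMemorySearcher(existing *memorySearcher) *memorySearcher {
	if existing != nil {
		existing.fetch()
		return existing
	}
	ms := &memorySearcher{}
	ms.fetch()
	return ms
}

// fetch stands in for the paginated user/group listing; errors are ignored
// there as well, which can leave empty or truncated lists.
func (ms *memorySearcher) fetch() {
	ms.users = []string{"alice", "bob"}
	ms.groups = []string{"admins"}
}

func main() {
	ms := newMemorySearcher(nil)
	ms = newMemorySearcher(ms) // re-initialise: same instance, data re-fetched
	fmt.Println(ms.users, ms.groups)
}
```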


@@ -5,7 +5,6 @@ import (
    "crypto/sha256"
    "crypto/tls"
    "encoding/gob"
-   "encoding/hex"
    "fmt"
    "html/template"
    "net/http"
@@ -119,8 +118,8 @@ func NewApplication(p api.ProxyOutpostConfig, c *http.Client, server Server, old
    mux := mux.NewRouter()
    // Save cookie name, based on hashed client ID
-   hs := sha256.Sum256([]byte(*p.ClientId))
-   bs := hex.EncodeToString(hs[:])
+   h := sha256.New()
+   bs := string(h.Sum([]byte(*p.ClientId)))
    sessionName := fmt.Sprintf("authentik_proxy_%s", bs[:8])
    // When HOST_BROWSER is set, use that as Host header for token requests to make the issuer match
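Because the two sides of this hunk derive the cookie-name suffix differently, a small standalone comparison of what each expression computes with Go's crypto/sha256: `sha256.Sum256` hashes the input directly, while calling `Sum` on a fresh hash appends the digest of the (empty) written data to its argument, so the first bytes of that result are the input itself. The client ID value below is invented for illustration.

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

func main() {
	clientID := "example-client-id" // illustrative value

	// Variant one: hash the client ID, then hex-encode the digest.
	hs := sha256.Sum256([]byte(clientID))
	bs := hex.EncodeToString(hs[:])
	fmt.Println("sum256+hex:", bs[:8]) // first 8 hex chars of the digest

	// Variant two: Sum appends the digest of the data written so far (here:
	// nothing) to its argument, so the prefix is the client ID itself.
	h := sha256.New()
	raw := string(h.Sum([]byte(clientID)))
	fmt.Println("new+sum:   ", raw[:8]) // "example-"
}
```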


@@ -3,7 +3,6 @@ package application
 type ProxyClaims struct {
    UserAttributes  map[string]interface{} `json:"user_attributes"`
    BackendOverride string                 `json:"backend_override"`
-   HostHeader      string                 `json:"host_header"`
    IsSuperuser     bool                   `json:"is_superuser"`
 }


@@ -74,18 +74,13 @@ func (a *Application) proxyModifyRequest(ou *url.URL) func(req *http.Request) {
        r.URL.Scheme = ou.Scheme
        r.URL.Host = ou.Host
        claims := a.getClaimsFromSession(r)
-       if claims != nil && claims.Proxy != nil {
-           if claims.Proxy.BackendOverride != "" {
-               u, err := url.Parse(claims.Proxy.BackendOverride)
-               if err != nil {
-                   a.log.WithField("backend_override", claims.Proxy.BackendOverride).WithError(err).Warning("failed parse user backend override")
-               } else {
-                   r.URL.Scheme = u.Scheme
-                   r.URL.Host = u.Host
-               }
-           }
-           if claims.Proxy.HostHeader != "" {
-               r.Host = claims.Proxy.HostHeader
-           }
+       if claims != nil && claims.Proxy != nil && claims.Proxy.BackendOverride != "" {
+           u, err := url.Parse(claims.Proxy.BackendOverride)
+           if err != nil {
+               a.log.WithField("backend_override", claims.Proxy.BackendOverride).WithError(err).Warning("failed parse user backend override")
+           } else {
+               r.URL.Scheme = u.Scheme
+               r.URL.Host = u.Host
+           }
        }
        a.log.WithField("upstream_url", r.URL.String()).Trace("final upstream url")


@@ -2,7 +2,6 @@ package radius
 import (
    "crypto/sha512"
-   "encoding/hex"
    "time"
    "github.com/getsentry/sentry-go"
@@ -69,9 +68,7 @@ func (rs *RadiusServer) ServeRADIUS(w radius.ResponseWriter, r *radius.Request)
        }
    }
    if pi == nil {
-       hs := sha512.Sum512([]byte(r.Secret))
-       bs := hex.EncodeToString(hs[:])
-       nr.Log().WithField("hashed_secret", bs).Warning("No provider found")
+       nr.Log().WithField("hashed_secret", string(sha512.New().Sum(r.Secret))).Warning("No provider found")
        _ = w.Write(r.Response(radius.CodeAccessReject))
        return
    }
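As with the proxy cookie change above, the `-` lines here hash the shared secret and hex-encode it before logging it. A tiny helper showing that pattern in isolation; the secret value is invented for the example.

```go
package main

import (
	"crypto/sha512"
	"encoding/hex"
	"fmt"
)

// hashedSecret returns a hex digest suitable for logging, so the shared
// secret itself never ends up in the log output.
func hashedSecret(secret []byte) string {
	sum := sha512.Sum512(secret)
	return hex.EncodeToString(sum[:])
}

func main() {
	secret := []byte("radius-shared-secret") // illustrative value
	fmt.Println("hashed_secret:", hashedSecret(secret))
}
```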


@@ -67,15 +67,11 @@ func (ws *WebServer) configureStatic() {
    // Media files, if backend is file
    if config.Get().Storage.Media.Backend == "file" {
-       fsMedia := http.FileServer(http.Dir(config.Get().Storage.Media.File.Path))
-       indexLessRouter.PathPrefix(config.Get().Web.Path).PathPrefix("/media/").Handler(pathStripper(
-           http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
-               w.Header().Set("Content-Security-Policy", "default-src 'none'; style-src 'unsafe-inline'; sandbox")
-               fsMedia.ServeHTTP(w, r)
-           }),
-           "media/",
-           config.Get().Web.Path,
-       ))
+       fsMedia := http.StripPrefix("/media", http.FileServer(http.Dir(config.Get().Storage.Media.File.Path)))
+       indexLessRouter.PathPrefix(config.Get().Web.Path).PathPrefix("/media/").HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+           w.Header().Set("Content-Security-Policy", "default-src 'none'; style-src 'unsafe-inline'; sandbox")
+           fsMedia.ServeHTTP(w, r)
+       })
    }
    staticRouter.PathPrefix(config.Get().Web.Path).PathPrefix("/if/help/").Handler(pathStripper(
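A runnable reduction of the media-handler shapes in this hunk: serve files with the `/media/` prefix stripped and attach the same restrictive Content-Security-Policy header to every response. `pathStripper` is internal to authentik, so the standard `http.StripPrefix` stands in for it here.

```go
package main

import (
	"log"
	"net/http"
)

// mediaHandler serves files from dir under /media/, attaching the same CSP
// header as the hunk above so uploaded files cannot run scripts.
func mediaHandler(dir string) http.Handler {
	fs := http.FileServer(http.Dir(dir))
	withCSP := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Security-Policy", "default-src 'none'; style-src 'unsafe-inline'; sandbox")
		fs.ServeHTTP(w, r)
	})
	// http.StripPrefix plays the role of authentik's pathStripper here.
	return http.StripPrefix("/media/", withCSP)
}

func main() {
	mux := http.NewServeMux()
	mux.Handle("/media/", mediaHandler("./media"))
	log.Fatal(http.ListenAndServe("127.0.0.1:8080", mux))
}
```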


@@ -83,8 +83,7 @@ if [[ "$1" == "server" ]]; then
     run_authentik
 elif [[ "$1" == "worker" ]]; then
     set_mode "worker"
-    shift
-    check_if_root "python -m manage worker $@"
+    check_if_root "python -m manage worker"
 elif [[ "$1" == "worker-status" ]]; then
     wait_for_db
     celery -A authentik.root.celery flower \


@@ -9,7 +9,7 @@
   "version": "0.0.0",
   "license": "MIT",
   "devDependencies": {
-    "aws-cdk": "^2.1016.1",
+    "aws-cdk": "^2.1016.0",
     "cross-env": "^7.0.3"
   },
   "engines": {
@@ -17,9 +17,9 @@
     }
   },
   "node_modules/aws-cdk": {
-      "version": "2.1016.1",
-      "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.1016.1.tgz",
-      "integrity": "sha512-248TBiluT8jHUjkpzvWJOHv2fS+An9fiII3eji8H7jwfTu5yMBk7on4B/AVNr9A1GXJk9I32qf9Q0A3rLWRYPQ==",
+      "version": "2.1016.0",
+      "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.1016.0.tgz",
+      "integrity": "sha512-zdJ/tQp0iE/s8l8zLQPgdUJUHpS6KblkzdP5nOYC/NbD5OCdhS8QS7vLBkT8M7mNyZh3Ep3C+/m6NsxrurRe0A==",
       "dev": true,
       "license": "Apache-2.0",
       "bin": {


@@ -10,7 +10,7 @@
     "node": ">=20"
   },
   "devDependencies": {
-    "aws-cdk": "^2.1016.1",
+    "aws-cdk": "^2.1016.0",
     "cross-env": "^7.0.3"
   }
 }


@@ -26,7 +26,7 @@ Parameters:
     Description: authentik Docker image
   AuthentikVersion:
     Type: String
-    Default: 2025.6.3
+    Default: 2025.4.1
     Description: authentik Docker image tag
   AuthentikServerCPU:
     Type: Number

Binary file not shown.


@ -32,17 +32,15 @@
# datenschmutz, 2025 # datenschmutz, 2025
# 97cce0ae0cad2a2cc552d3165d04643e_de3d740, 2025 # 97cce0ae0cad2a2cc552d3165d04643e_de3d740, 2025
# Dominic Wagner <mail@dominic-wagner.de>, 2025 # Dominic Wagner <mail@dominic-wagner.de>, 2025
# Till-Frederik Riechard, 2025
# Alexander Mnich, 2025
# #
#, fuzzy #, fuzzy
msgid "" msgid ""
msgstr "" msgstr ""
"Project-Id-Version: PACKAGE VERSION\n" "Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n" "Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2025-05-28 11:25+0000\n" "POT-Creation-Date: 2025-04-23 09:00+0000\n"
"PO-Revision-Date: 2022-09-26 16:47+0000\n" "PO-Revision-Date: 2022-09-26 16:47+0000\n"
"Last-Translator: Alexander Mnich, 2025\n" "Last-Translator: Dominic Wagner <mail@dominic-wagner.de>, 2025\n"
"Language-Team: German (https://app.transifex.com/authentik/teams/119923/de/)\n" "Language-Team: German (https://app.transifex.com/authentik/teams/119923/de/)\n"
"MIME-Version: 1.0\n" "MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n" "Content-Type: text/plain; charset=UTF-8\n"
@ -134,10 +132,6 @@ msgstr ""
msgid "Web Certificate used by the authentik Core webserver." msgid "Web Certificate used by the authentik Core webserver."
msgstr "Vom Authentik-Core-Webserver verwendetes Zertifikat." msgstr "Vom Authentik-Core-Webserver verwendetes Zertifikat."
#: authentik/brands/models.py
msgid "Certificates used for client authentication."
msgstr ""
#: authentik/brands/models.py #: authentik/brands/models.py
msgid "Brand" msgid "Brand"
msgstr "Marke" msgstr "Marke"
@ -411,7 +405,7 @@ msgstr "Eigenschaften"
#: authentik/core/models.py #: authentik/core/models.py
msgid "session data" msgid "session data"
msgstr "Sitzungsdaten" msgstr ""
#: authentik/core/models.py #: authentik/core/models.py
msgid "Session" msgid "Session"
@ -539,7 +533,7 @@ msgstr ""
#: authentik/enterprise/policies/unique_password/models.py #: authentik/enterprise/policies/unique_password/models.py
msgid "Number of passwords to check against." msgid "Number of passwords to check against."
msgstr "Anzahl Passwörter, gegen die geprüft wird." msgstr ""
#: authentik/enterprise/policies/unique_password/models.py #: authentik/enterprise/policies/unique_password/models.py
#: authentik/policies/password/models.py #: authentik/policies/password/models.py
@ -549,20 +543,18 @@ msgstr "Passwort nicht im Kontext festgelegt"
#: authentik/enterprise/policies/unique_password/models.py #: authentik/enterprise/policies/unique_password/models.py
msgid "This password has been used previously. Please choose a different one." msgid "This password has been used previously. Please choose a different one."
msgstr "" msgstr ""
"Dieses Passwort wurde in Vergangenheit bereits verwendet. Bitte nutzen Sie "
"ein anderes."
#: authentik/enterprise/policies/unique_password/models.py #: authentik/enterprise/policies/unique_password/models.py
msgid "Password Uniqueness Policy" msgid "Password Uniqueness Policy"
msgstr "Passwort-Einzigartigkeits-Richtlinie" msgstr ""
#: authentik/enterprise/policies/unique_password/models.py #: authentik/enterprise/policies/unique_password/models.py
msgid "Password Uniqueness Policies" msgid "Password Uniqueness Policies"
msgstr "Passwort-Einzigartigkeits-Richtlinien" msgstr ""
#: authentik/enterprise/policies/unique_password/models.py #: authentik/enterprise/policies/unique_password/models.py
msgid "User Password History" msgid "User Password History"
msgstr "Nutzer-Passwort-Historie" msgstr ""
#: authentik/enterprise/policy.py #: authentik/enterprise/policy.py
msgid "Enterprise required to access this feature." msgid "Enterprise required to access this feature."
@ -701,33 +693,6 @@ msgstr "Endgeräte"
msgid "Verifying your browser..." msgid "Verifying your browser..."
msgstr "Verifiziere deinen Browser..." msgstr "Verifiziere deinen Browser..."
#: authentik/enterprise/stages/mtls/models.py
msgid ""
"Configure certificate authorities to validate the certificate against. This "
"option has a higher priority than the `client_certificate` option on "
"`Brand`."
msgstr ""
#: authentik/enterprise/stages/mtls/models.py
msgid "Mutual TLS Stage"
msgstr ""
#: authentik/enterprise/stages/mtls/models.py
msgid "Mutual TLS Stages"
msgstr ""
#: authentik/enterprise/stages/mtls/models.py
msgid "Permissions to pass Certificates for outposts."
msgstr ""
#: authentik/enterprise/stages/mtls/stage.py
msgid "Certificate required but no certificate was given."
msgstr ""
#: authentik/enterprise/stages/mtls/stage.py
msgid "No user found for certificate."
msgstr ""
#: authentik/enterprise/stages/source/models.py #: authentik/enterprise/stages/source/models.py
msgid "" msgid ""
"Amount of time a user can take to return from the source to continue the " "Amount of time a user can take to return from the source to continue the "
@ -1023,7 +988,7 @@ msgstr ""
#: authentik/flows/models.py #: authentik/flows/models.py
msgid "Evaluate policies when the Stage is presented to the user." msgid "Evaluate policies when the Stage is presented to the user."
msgstr "Richtlinien auswerten, wenn die Phase dem Benutzer angezeigt wird." msgstr ""
#: authentik/flows/models.py #: authentik/flows/models.py
msgid "" msgid ""
@ -1078,12 +1043,9 @@ msgid "Starting full provider sync"
msgstr "Starte komplette Provider Synchronisation." msgstr "Starte komplette Provider Synchronisation."
#: authentik/lib/sync/outgoing/tasks.py #: authentik/lib/sync/outgoing/tasks.py
msgid "Syncing users" #, python-brace-format
msgstr "" msgid "Syncing page {page} of users"
msgstr "Synchonisiere Benutzer Seite {page}"
#: authentik/lib/sync/outgoing/tasks.py
msgid "Syncing groups"
msgstr ""
#: authentik/lib/sync/outgoing/tasks.py #: authentik/lib/sync/outgoing/tasks.py
#, python-brace-format #, python-brace-format
@ -1631,11 +1593,11 @@ msgstr "ES256 (Asymmetrische Verschlüsselung)"
#: authentik/providers/oauth2/models.py #: authentik/providers/oauth2/models.py
msgid "ES384 (Asymmetric Encryption)" msgid "ES384 (Asymmetric Encryption)"
msgstr "ES384 (Asymmetrische Verschlüsselung)" msgstr ""
#: authentik/providers/oauth2/models.py #: authentik/providers/oauth2/models.py
msgid "ES512 (Asymmetric Encryption)" msgid "ES512 (Asymmetric Encryption)"
msgstr "ES5122 (Asymmetrische Verschlüsselung)" msgstr ""
#: authentik/providers/oauth2/models.py #: authentik/providers/oauth2/models.py
msgid "Scope used by the client" msgid "Scope used by the client"
@ -2221,11 +2183,11 @@ msgstr "Standard"
#: authentik/providers/scim/models.py #: authentik/providers/scim/models.py
msgid "AWS" msgid "AWS"
msgstr "AWS" msgstr ""
#: authentik/providers/scim/models.py #: authentik/providers/scim/models.py
msgid "Slack" msgid "Slack"
msgstr "Slack" msgstr ""
#: authentik/providers/scim/models.py #: authentik/providers/scim/models.py
msgid "Base URL to SCIM requests, usually ends in /v2" msgid "Base URL to SCIM requests, usually ends in /v2"
@ -2237,7 +2199,7 @@ msgstr "Authentifizierungstoken"
#: authentik/providers/scim/models.py #: authentik/providers/scim/models.py
msgid "SCIM Compatibility Mode" msgid "SCIM Compatibility Mode"
msgstr "SCIM Kompatibilitätsmodus" msgstr ""
#: authentik/providers/scim/models.py #: authentik/providers/scim/models.py
msgid "Alter authentik behavior for vendor-specific SCIM implementations." msgid "Alter authentik behavior for vendor-specific SCIM implementations."
@ -2269,7 +2231,7 @@ msgstr "Rollen"
#: authentik/rbac/models.py #: authentik/rbac/models.py
msgid "Initial Permissions" msgid "Initial Permissions"
msgstr "Initiale Berechtigungen" msgstr ""
#: authentik/rbac/models.py #: authentik/rbac/models.py
msgid "System permission" msgid "System permission"
@ -2525,12 +2487,6 @@ msgid ""
"Active Directory" "Active Directory"
msgstr "" msgstr ""
#: authentik/sources/ldap/models.py
msgid ""
"Delete authentik users and groups which were previously supplied by this "
"source, but are now missing from it."
msgstr ""
#: authentik/sources/ldap/models.py #: authentik/sources/ldap/models.py
msgid "LDAP Source" msgid "LDAP Source"
msgstr "LDAP Quelle" msgstr "LDAP Quelle"
@ -2548,25 +2504,20 @@ msgid "LDAP Source Property Mappings"
msgstr "LDAP Quelle Eigenschafts-Zuordnungen" msgstr "LDAP Quelle Eigenschafts-Zuordnungen"
#: authentik/sources/ldap/models.py #: authentik/sources/ldap/models.py
msgid "" msgid "User LDAP Source Connection"
"Unique ID used while checking if this object still exists in the directory."
msgstr "" msgstr ""
#: authentik/sources/ldap/models.py
msgid "User LDAP Source Connection"
msgstr "Benutzer LDAP-Quellverbindung"
#: authentik/sources/ldap/models.py #: authentik/sources/ldap/models.py
msgid "User LDAP Source Connections" msgid "User LDAP Source Connections"
msgstr "Benutzer LDAP-Quellverbindungen" msgstr ""
#: authentik/sources/ldap/models.py #: authentik/sources/ldap/models.py
msgid "Group LDAP Source Connection" msgid "Group LDAP Source Connection"
msgstr "LDAP Gruppen Quellverbindung" msgstr ""
#: authentik/sources/ldap/models.py #: authentik/sources/ldap/models.py
msgid "Group LDAP Source Connections" msgid "Group LDAP Source Connections"
msgstr "LDAP Gruppen Quellverbindungen" msgstr ""
#: authentik/sources/ldap/signals.py #: authentik/sources/ldap/signals.py
msgid "Password does not match Active Directory Complexity." msgid "Password does not match Active Directory Complexity."
@ -2579,7 +2530,7 @@ msgstr "Kein Token empfangen."
#: authentik/sources/oauth/models.py #: authentik/sources/oauth/models.py
msgid "HTTP Basic Authentication" msgid "HTTP Basic Authentication"
msgstr "HTTP Basic Authentifizierung" msgstr ""
#: authentik/sources/oauth/models.py #: authentik/sources/oauth/models.py
msgid "Include the client ID and secret as request parameters" msgid "Include the client ID and secret as request parameters"
@ -2945,11 +2896,6 @@ msgstr "SAML Gruppen Quellverbindung"
msgid "Group SAML Source Connections" msgid "Group SAML Source Connections"
msgstr "SAML Gruppen Quellverbindungen" msgstr "SAML Gruppen Quellverbindungen"
#: authentik/sources/saml/views.py
#, python-brace-format
msgid "Continue to {source_name}"
msgstr ""
#: authentik/sources/scim/models.py #: authentik/sources/scim/models.py
msgid "SCIM Source" msgid "SCIM Source"
msgstr "SCIM Quelle" msgstr "SCIM Quelle"
@ -2984,7 +2930,7 @@ msgstr "Duo Geräte"
#: authentik/stages/authenticator_email/models.py #: authentik/stages/authenticator_email/models.py
msgid "Email OTP" msgid "Email OTP"
msgstr "E-Mail Einmalpasswort" msgstr ""
#: authentik/stages/authenticator_email/models.py #: authentik/stages/authenticator_email/models.py
#: authentik/stages/email/models.py #: authentik/stages/email/models.py
@ -3017,11 +2963,11 @@ msgstr "Beim Rendern der E-Mail-Vorlage ist ein Fehler aufgetreten"
#: authentik/stages/authenticator_email/models.py #: authentik/stages/authenticator_email/models.py
msgid "Email Device" msgid "Email Device"
msgstr "E-Mail Gerät" msgstr ""
#: authentik/stages/authenticator_email/models.py #: authentik/stages/authenticator_email/models.py
msgid "Email Devices" msgid "Email Devices"
msgstr "E-Mail Geräte" msgstr ""
#: authentik/stages/authenticator_email/stage.py #: authentik/stages/authenticator_email/stage.py
#: authentik/stages/authenticator_sms/stage.py #: authentik/stages/authenticator_sms/stage.py
@ -3031,7 +2977,7 @@ msgstr "Code stimmt nicht überein"
#: authentik/stages/authenticator_email/stage.py #: authentik/stages/authenticator_email/stage.py
msgid "Invalid email" msgid "Invalid email"
msgstr "Ungültige E-Mail" msgstr ""
#: authentik/stages/authenticator_email/templates/email/email_otp.html #: authentik/stages/authenticator_email/templates/email/email_otp.html
#: authentik/stages/email/templates/email/password_reset.html #: authentik/stages/email/templates/email/password_reset.html
@ -3327,10 +3273,6 @@ msgstr "Zustimmung der Benutzer"
msgid "User Consents" msgid "User Consents"
msgstr "Zustimmungen der Benutzer" msgstr "Zustimmungen der Benutzer"
#: authentik/stages/consent/stage.py
msgid "Invalid consent token, re-showing prompt"
msgstr ""
#: authentik/stages/deny/models.py #: authentik/stages/deny/models.py
msgid "Deny Stage" msgid "Deny Stage"
msgstr "Verweigerungsstufe" msgstr "Verweigerungsstufe"
@ -3347,14 +3289,6 @@ msgstr "Dummy Stufe"
msgid "Dummy Stages" msgid "Dummy Stages"
msgstr "Dummy Stufen" msgstr "Dummy Stufen"
#: authentik/stages/email/flow.py
msgid "Continue to confirm this email address."
msgstr ""
#: authentik/stages/email/flow.py
msgid "Link was already used, please request a new link."
msgstr ""
#: authentik/stages/email/models.py #: authentik/stages/email/models.py
msgid "Password Reset" msgid "Password Reset"
msgstr "Passwort zurücksetzen" msgstr "Passwort zurücksetzen"
@ -3956,11 +3890,10 @@ msgstr ""
#: authentik/tenants/models.py #: authentik/tenants/models.py
msgid "Reputation cannot decrease lower than this value. Zero or negative." msgid "Reputation cannot decrease lower than this value. Zero or negative."
msgstr "" msgstr ""
"Reputation kann nicht niedriger als dieser Wert sein. Null oder negativ."
#: authentik/tenants/models.py #: authentik/tenants/models.py
msgid "Reputation cannot increase higher than this value. Zero or positive." msgid "Reputation cannot increase higher than this value. Zero or positive."
msgstr "Reputation kann nicht höher als dieser Wert sein. Null oder positiv." msgstr ""
#: authentik/tenants/models.py #: authentik/tenants/models.py
msgid "The option configures the footer links on the flow executor pages." msgid "The option configures the footer links on the flow executor pages."

View File

@ -8,7 +8,7 @@ msgid ""
msgstr "" msgstr ""
"Project-Id-Version: PACKAGE VERSION\n" "Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n" "Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2025-06-02 00:12+0000\n" "POT-Creation-Date: 2025-04-23 09:00+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n" "Language-Team: LANGUAGE <LL@li.org>\n"
@ -93,10 +93,6 @@ msgstr ""
msgid "Web Certificate used by the authentik Core webserver." msgid "Web Certificate used by the authentik Core webserver."
msgstr "" msgstr ""
#: authentik/brands/models.py
msgid "Certificates used for client authentication."
msgstr ""
#: authentik/brands/models.py #: authentik/brands/models.py
msgid "Brand" msgid "Brand"
msgstr "" msgstr ""
@ -620,32 +616,6 @@ msgstr ""
msgid "Verifying your browser..." msgid "Verifying your browser..."
msgstr "" msgstr ""
#: authentik/enterprise/stages/mtls/models.py
msgid ""
"Configure certificate authorities to validate the certificate against. This "
"option has a higher priority than the `client_certificate` option on `Brand`."
msgstr ""
#: authentik/enterprise/stages/mtls/models.py
msgid "Mutual TLS Stage"
msgstr ""
#: authentik/enterprise/stages/mtls/models.py
msgid "Mutual TLS Stages"
msgstr ""
#: authentik/enterprise/stages/mtls/models.py
msgid "Permissions to pass Certificates for outposts."
msgstr ""
#: authentik/enterprise/stages/mtls/stage.py
msgid "Certificate required but no certificate was given."
msgstr ""
#: authentik/enterprise/stages/mtls/stage.py
msgid "No user found for certificate."
msgstr ""
#: authentik/enterprise/stages/source/models.py #: authentik/enterprise/stages/source/models.py
msgid "" msgid ""
"Amount of time a user can take to return from the source to continue the " "Amount of time a user can take to return from the source to continue the "
@ -961,11 +931,8 @@ msgid "Starting full provider sync"
msgstr "" msgstr ""
#: authentik/lib/sync/outgoing/tasks.py #: authentik/lib/sync/outgoing/tasks.py
msgid "Syncing users" #, python-brace-format
msgstr "" msgid "Syncing page {page} of users"
#: authentik/lib/sync/outgoing/tasks.py
msgid "Syncing groups"
msgstr "" msgstr ""
#: authentik/lib/sync/outgoing/tasks.py #: authentik/lib/sync/outgoing/tasks.py
@ -2226,10 +2193,6 @@ msgstr ""
msgid "Consider Objects matching this filter to be Users." msgid "Consider Objects matching this filter to be Users."
msgstr "" msgstr ""
#: authentik/sources/ldap/models.py
msgid "Attribute which matches the value of `group_membership_field`."
msgstr ""
#: authentik/sources/ldap/models.py #: authentik/sources/ldap/models.py
msgid "Field which contains members of a group." msgid "Field which contains members of a group."
msgstr "" msgstr ""
@ -2259,12 +2222,6 @@ msgid ""
"Active Directory" "Active Directory"
msgstr "" msgstr ""
#: authentik/sources/ldap/models.py
msgid ""
"Delete authentik users and groups which were previously supplied by this "
"source, but are now missing from it."
msgstr ""
#: authentik/sources/ldap/models.py #: authentik/sources/ldap/models.py
msgid "LDAP Source" msgid "LDAP Source"
msgstr "" msgstr ""
@ -2281,11 +2238,6 @@ msgstr ""
msgid "LDAP Source Property Mappings" msgid "LDAP Source Property Mappings"
msgstr "" msgstr ""
#: authentik/sources/ldap/models.py
msgid ""
"Unique ID used while checking if this object still exists in the directory."
msgstr ""
#: authentik/sources/ldap/models.py #: authentik/sources/ldap/models.py
msgid "User LDAP Source Connection" msgid "User LDAP Source Connection"
msgstr "" msgstr ""
@ -2657,11 +2609,6 @@ msgstr ""
msgid "Group SAML Source Connections" msgid "Group SAML Source Connections"
msgstr "" msgstr ""
#: authentik/sources/saml/views.py
#, python-brace-format
msgid "Continue to {source_name}"
msgstr ""
#: authentik/sources/scim/models.py #: authentik/sources/scim/models.py
msgid "SCIM Source" msgid "SCIM Source"
msgstr "" msgstr ""
@ -3017,10 +2964,6 @@ msgstr ""
msgid "User Consents" msgid "User Consents"
msgstr "" msgstr ""
#: authentik/stages/consent/stage.py
msgid "Invalid consent token, re-showing prompt"
msgstr ""
#: authentik/stages/deny/models.py #: authentik/stages/deny/models.py
msgid "Deny Stage" msgid "Deny Stage"
msgstr "" msgstr ""
@ -3037,14 +2980,6 @@ msgstr ""
msgid "Dummy Stages" msgid "Dummy Stages"
msgstr "" msgstr ""
#: authentik/stages/email/flow.py
msgid "Continue to confirm this email address."
msgstr ""
#: authentik/stages/email/flow.py
msgid "Link was already used, please request a new link."
msgstr ""
#: authentik/stages/email/models.py #: authentik/stages/email/models.py
msgid "Password Reset" msgid "Password Reset"
msgstr "" msgstr ""
@ -3497,6 +3432,10 @@ msgstr ""
msgid "No Pending user to login." msgid "No Pending user to login."
msgstr "" msgstr ""
#: authentik/stages/user_login/stage.py
msgid "Successfully logged in!"
msgstr ""
#: authentik/stages/user_logout/models.py #: authentik/stages/user_logout/models.py
msgid "User Logout Stage" msgid "User Logout Stage"
msgstr "" msgstr ""

Binary file not shown.


@ -15,7 +15,7 @@ msgid ""
msgstr "" msgstr ""
"Project-Id-Version: PACKAGE VERSION\n" "Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n" "Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2025-05-28 11:25+0000\n" "POT-Creation-Date: 2025-04-23 09:00+0000\n"
"PO-Revision-Date: 2022-09-26 16:47+0000\n" "PO-Revision-Date: 2022-09-26 16:47+0000\n"
"Last-Translator: Jens L. <jens@goauthentik.io>, 2025\n" "Last-Translator: Jens L. <jens@goauthentik.io>, 2025\n"
"Language-Team: Spanish (https://app.transifex.com/authentik/teams/119923/es/)\n" "Language-Team: Spanish (https://app.transifex.com/authentik/teams/119923/es/)\n"
@ -109,10 +109,6 @@ msgstr ""
msgid "Web Certificate used by the authentik Core webserver." msgid "Web Certificate used by the authentik Core webserver."
msgstr "Certificado Web usado por el servidor web Core de authentik" msgstr "Certificado Web usado por el servidor web Core de authentik"
#: authentik/brands/models.py
msgid "Certificates used for client authentication."
msgstr ""
#: authentik/brands/models.py #: authentik/brands/models.py
msgid "Brand" msgid "Brand"
msgstr "Marca" msgstr "Marca"
@ -675,33 +671,6 @@ msgstr "Dispositivos de Punto de Conexión"
msgid "Verifying your browser..." msgid "Verifying your browser..."
msgstr "Verificando tu navegador..." msgstr "Verificando tu navegador..."
#: authentik/enterprise/stages/mtls/models.py
msgid ""
"Configure certificate authorities to validate the certificate against. This "
"option has a higher priority than the `client_certificate` option on "
"`Brand`."
msgstr ""
#: authentik/enterprise/stages/mtls/models.py
msgid "Mutual TLS Stage"
msgstr ""
#: authentik/enterprise/stages/mtls/models.py
msgid "Mutual TLS Stages"
msgstr ""
#: authentik/enterprise/stages/mtls/models.py
msgid "Permissions to pass Certificates for outposts."
msgstr ""
#: authentik/enterprise/stages/mtls/stage.py
msgid "Certificate required but no certificate was given."
msgstr ""
#: authentik/enterprise/stages/mtls/stage.py
msgid "No user found for certificate."
msgstr ""
#: authentik/enterprise/stages/source/models.py #: authentik/enterprise/stages/source/models.py
msgid "" msgid ""
"Amount of time a user can take to return from the source to continue the " "Amount of time a user can take to return from the source to continue the "
@ -1040,12 +1009,9 @@ msgid "Starting full provider sync"
msgstr "Iniciando sincronización completa de proveedor" msgstr "Iniciando sincronización completa de proveedor"
#: authentik/lib/sync/outgoing/tasks.py #: authentik/lib/sync/outgoing/tasks.py
msgid "Syncing users" #, python-brace-format
msgstr "" msgid "Syncing page {page} of users"
msgstr "Sincronizando página {page} de usuarios"
#: authentik/lib/sync/outgoing/tasks.py
msgid "Syncing groups"
msgstr ""
#: authentik/lib/sync/outgoing/tasks.py #: authentik/lib/sync/outgoing/tasks.py
#, python-brace-format #, python-brace-format
@ -2486,12 +2452,6 @@ msgid ""
"Active Directory" "Active Directory"
msgstr "" msgstr ""
#: authentik/sources/ldap/models.py
msgid ""
"Delete authentik users and groups which were previously supplied by this "
"source, but are now missing from it."
msgstr ""
#: authentik/sources/ldap/models.py #: authentik/sources/ldap/models.py
msgid "LDAP Source" msgid "LDAP Source"
msgstr "Fuente de LDAP" msgstr "Fuente de LDAP"
@ -2508,11 +2468,6 @@ msgstr "Asignación de Propiedades de Fuente de LDAP"
msgid "LDAP Source Property Mappings" msgid "LDAP Source Property Mappings"
msgstr "Asignaciones de Propiedades de Fuente de LDAP" msgstr "Asignaciones de Propiedades de Fuente de LDAP"
#: authentik/sources/ldap/models.py
msgid ""
"Unique ID used while checking if this object still exists in the directory."
msgstr ""
#: authentik/sources/ldap/models.py #: authentik/sources/ldap/models.py
msgid "User LDAP Source Connection" msgid "User LDAP Source Connection"
msgstr "" msgstr ""
@ -2904,11 +2859,6 @@ msgstr "Conexión de Fuente de SAML de Grupo"
msgid "Group SAML Source Connections" msgid "Group SAML Source Connections"
msgstr "Conexiones de Fuente de SAML de Grupo" msgstr "Conexiones de Fuente de SAML de Grupo"
#: authentik/sources/saml/views.py
#, python-brace-format
msgid "Continue to {source_name}"
msgstr ""
#: authentik/sources/scim/models.py #: authentik/sources/scim/models.py
msgid "SCIM Source" msgid "SCIM Source"
msgstr "Fuente de SCIM" msgstr "Fuente de SCIM"
@ -3295,10 +3245,6 @@ msgstr "Consentimiento del usuario"
msgid "User Consents" msgid "User Consents"
msgstr "Consentimientos del usuario" msgstr "Consentimientos del usuario"
#: authentik/stages/consent/stage.py
msgid "Invalid consent token, re-showing prompt"
msgstr ""
#: authentik/stages/deny/models.py #: authentik/stages/deny/models.py
msgid "Deny Stage" msgid "Deny Stage"
msgstr "Etapa de denegación" msgstr "Etapa de denegación"
@ -3315,14 +3261,6 @@ msgstr "Escenario ficticio"
msgid "Dummy Stages" msgid "Dummy Stages"
msgstr "Etapas ficticias" msgstr "Etapas ficticias"
#: authentik/stages/email/flow.py
msgid "Continue to confirm this email address."
msgstr ""
#: authentik/stages/email/flow.py
msgid "Link was already used, please request a new link."
msgstr ""
#: authentik/stages/email/models.py #: authentik/stages/email/models.py
msgid "Password Reset" msgid "Password Reset"
msgstr "Restablecimiento de contraseña" msgstr "Restablecimiento de contraseña"

Binary file not shown.


@ -15,7 +15,7 @@ msgid ""
msgstr "" msgstr ""
"Project-Id-Version: PACKAGE VERSION\n" "Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n" "Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2025-05-28 11:25+0000\n" "POT-Creation-Date: 2025-04-23 09:00+0000\n"
"PO-Revision-Date: 2022-09-26 16:47+0000\n" "PO-Revision-Date: 2022-09-26 16:47+0000\n"
"Last-Translator: Ville Ranki, 2025\n" "Last-Translator: Ville Ranki, 2025\n"
"Language-Team: Finnish (https://app.transifex.com/authentik/teams/119923/fi/)\n" "Language-Team: Finnish (https://app.transifex.com/authentik/teams/119923/fi/)\n"
@ -106,10 +106,6 @@ msgstr ""
msgid "Web Certificate used by the authentik Core webserver." msgid "Web Certificate used by the authentik Core webserver."
msgstr "Web-sertifikaatti, jota authentik Core -verkkopalvelin käyttää." msgstr "Web-sertifikaatti, jota authentik Core -verkkopalvelin käyttää."
#: authentik/brands/models.py
msgid "Certificates used for client authentication."
msgstr ""
#: authentik/brands/models.py #: authentik/brands/models.py
msgid "Brand" msgid "Brand"
msgstr "Brändi" msgstr "Brändi"
@ -662,33 +658,6 @@ msgstr "Päätelaitteet"
msgid "Verifying your browser..." msgid "Verifying your browser..."
msgstr "Selaintasi varmennetaan..." msgstr "Selaintasi varmennetaan..."
#: authentik/enterprise/stages/mtls/models.py
msgid ""
"Configure certificate authorities to validate the certificate against. This "
"option has a higher priority than the `client_certificate` option on "
"`Brand`."
msgstr ""
#: authentik/enterprise/stages/mtls/models.py
msgid "Mutual TLS Stage"
msgstr ""
#: authentik/enterprise/stages/mtls/models.py
msgid "Mutual TLS Stages"
msgstr ""
#: authentik/enterprise/stages/mtls/models.py
msgid "Permissions to pass Certificates for outposts."
msgstr ""
#: authentik/enterprise/stages/mtls/stage.py
msgid "Certificate required but no certificate was given."
msgstr ""
#: authentik/enterprise/stages/mtls/stage.py
msgid "No user found for certificate."
msgstr ""
#: authentik/enterprise/stages/source/models.py #: authentik/enterprise/stages/source/models.py
msgid "" msgid ""
"Amount of time a user can take to return from the source to continue the " "Amount of time a user can take to return from the source to continue the "
@ -1027,12 +996,9 @@ msgid "Starting full provider sync"
msgstr "Käynnistetään palveluntarjoajan täysi synkronisointi" msgstr "Käynnistetään palveluntarjoajan täysi synkronisointi"
#: authentik/lib/sync/outgoing/tasks.py #: authentik/lib/sync/outgoing/tasks.py
msgid "Syncing users" #, python-brace-format
msgstr "" msgid "Syncing page {page} of users"
msgstr "Synkronoidaan käyttäjien sivua {page}"
#: authentik/lib/sync/outgoing/tasks.py
msgid "Syncing groups"
msgstr ""
#: authentik/lib/sync/outgoing/tasks.py #: authentik/lib/sync/outgoing/tasks.py
#, python-brace-format #, python-brace-format
@ -2463,12 +2429,6 @@ msgid ""
"Active Directory" "Active Directory"
msgstr "" msgstr ""
#: authentik/sources/ldap/models.py
msgid ""
"Delete authentik users and groups which were previously supplied by this "
"source, but are now missing from it."
msgstr ""
#: authentik/sources/ldap/models.py #: authentik/sources/ldap/models.py
msgid "LDAP Source" msgid "LDAP Source"
msgstr "LDAP-lähde" msgstr "LDAP-lähde"
@ -2485,11 +2445,6 @@ msgstr "LDAP-lähteen ominaisuuskytkentä"
msgid "LDAP Source Property Mappings" msgid "LDAP Source Property Mappings"
msgstr "LDAP-lähteen ominaisuuskytkennät" msgstr "LDAP-lähteen ominaisuuskytkennät"
#: authentik/sources/ldap/models.py
msgid ""
"Unique ID used while checking if this object still exists in the directory."
msgstr ""
#: authentik/sources/ldap/models.py #: authentik/sources/ldap/models.py
msgid "User LDAP Source Connection" msgid "User LDAP Source Connection"
msgstr "" msgstr ""
@ -2882,11 +2837,6 @@ msgstr "Ryhmän SAML-lähteen yhteys"
msgid "Group SAML Source Connections" msgid "Group SAML Source Connections"
msgstr "Ryhmän SAML-lähteen yhteydet" msgstr "Ryhmän SAML-lähteen yhteydet"
#: authentik/sources/saml/views.py
#, python-brace-format
msgid "Continue to {source_name}"
msgstr ""
#: authentik/sources/scim/models.py #: authentik/sources/scim/models.py
msgid "SCIM Source" msgid "SCIM Source"
msgstr "SCIM-lähde" msgstr "SCIM-lähde"
@ -3266,10 +3216,6 @@ msgstr "Käyttäjän hyväksyntä"
msgid "User Consents" msgid "User Consents"
msgstr "Käyttäjän hyväksynnät" msgstr "Käyttäjän hyväksynnät"
#: authentik/stages/consent/stage.py
msgid "Invalid consent token, re-showing prompt"
msgstr ""
#: authentik/stages/deny/models.py #: authentik/stages/deny/models.py
msgid "Deny Stage" msgid "Deny Stage"
msgstr "Kieltovaihe" msgstr "Kieltovaihe"
@ -3286,14 +3232,6 @@ msgstr "Valevaihe"
msgid "Dummy Stages" msgid "Dummy Stages"
msgstr "Valevaiheet" msgstr "Valevaiheet"
#: authentik/stages/email/flow.py
msgid "Continue to confirm this email address."
msgstr ""
#: authentik/stages/email/flow.py
msgid "Link was already used, please request a new link."
msgstr ""
#: authentik/stages/email/models.py #: authentik/stages/email/models.py
msgid "Password Reset" msgid "Password Reset"
msgstr "Salasanan nollaus" msgstr "Salasanan nollaus"


@ -19,7 +19,7 @@ msgid ""
msgstr "" msgstr ""
"Project-Id-Version: PACKAGE VERSION\n" "Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n" "Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2025-05-28 11:25+0000\n" "POT-Creation-Date: 2025-04-23 09:00+0000\n"
"PO-Revision-Date: 2022-09-26 16:47+0000\n" "PO-Revision-Date: 2022-09-26 16:47+0000\n"
"Last-Translator: Marc Schmitt, 2025\n" "Last-Translator: Marc Schmitt, 2025\n"
"Language-Team: French (https://app.transifex.com/authentik/teams/119923/fr/)\n" "Language-Team: French (https://app.transifex.com/authentik/teams/119923/fr/)\n"
@ -113,10 +113,6 @@ msgstr ""
msgid "Web Certificate used by the authentik Core webserver." msgid "Web Certificate used by the authentik Core webserver."
msgstr "Certificate Web utilisé par le serveur web d'authentik core." msgstr "Certificate Web utilisé par le serveur web d'authentik core."
#: authentik/brands/models.py
msgid "Certificates used for client authentication."
msgstr "Certificats utilisés pour l'authentification client."
#: authentik/brands/models.py #: authentik/brands/models.py
msgid "Brand" msgid "Brand"
msgstr "Marque" msgstr "Marque"
@ -679,36 +675,6 @@ msgstr "Appareils point de terminaison"
msgid "Verifying your browser..." msgid "Verifying your browser..."
msgstr "Vérification de votre navigateur..." msgstr "Vérification de votre navigateur..."
#: authentik/enterprise/stages/mtls/models.py
msgid ""
"Configure certificate authorities to validate the certificate against. This "
"option has a higher priority than the `client_certificate` option on "
"`Brand`."
msgstr ""
"Configurez les autorités de certification pour valider le certificat. Cette "
"option a une priorité plus élevée que l'option `client_certificate` sur "
"`Marques`."
#: authentik/enterprise/stages/mtls/models.py
msgid "Mutual TLS Stage"
msgstr "Étape TLS mutuel"
#: authentik/enterprise/stages/mtls/models.py
msgid "Mutual TLS Stages"
msgstr "Étapes TLS mutuel"
#: authentik/enterprise/stages/mtls/models.py
msgid "Permissions to pass Certificates for outposts."
msgstr "Autorisations de délivrer des certificats pour les avant-postes."
#: authentik/enterprise/stages/mtls/stage.py
msgid "Certificate required but no certificate was given."
msgstr "Certificat requis mais aucun certificat n'a été fourni."
#: authentik/enterprise/stages/mtls/stage.py
msgid "No user found for certificate."
msgstr "Aucun utilisateur trouvé pour le certificat."
#: authentik/enterprise/stages/source/models.py #: authentik/enterprise/stages/source/models.py
msgid "" msgid ""
"Amount of time a user can take to return from the source to continue the " "Amount of time a user can take to return from the source to continue the "
@ -1056,12 +1022,9 @@ msgid "Starting full provider sync"
msgstr "Démarrage d'une synchronisation complète du fournisseur" msgstr "Démarrage d'une synchronisation complète du fournisseur"
#: authentik/lib/sync/outgoing/tasks.py #: authentik/lib/sync/outgoing/tasks.py
msgid "Syncing users" #, python-brace-format
msgstr "Synchronisation des utilisateurs" msgid "Syncing page {page} of users"
msgstr "Synchronisation de la page {page} d'utilisateurs"
#: authentik/lib/sync/outgoing/tasks.py
msgid "Syncing groups"
msgstr "Synchronisation des groupes"
#: authentik/lib/sync/outgoing/tasks.py #: authentik/lib/sync/outgoing/tasks.py
#, python-brace-format #, python-brace-format
@ -2511,14 +2474,6 @@ msgstr ""
"plutôt que sur un attribut de groupe. Cela permet la résolution des groupes " "plutôt que sur un attribut de groupe. Cela permet la résolution des groupes "
"imbriqués sur des systèmes tels que FreeIPA et Active Directory." "imbriqués sur des systèmes tels que FreeIPA et Active Directory."
#: authentik/sources/ldap/models.py
msgid ""
"Delete authentik users and groups which were previously supplied by this "
"source, but are now missing from it."
msgstr ""
"Supprimer les utilisateurs et les groupes authentik qui étaient auparavant "
"fournis par cette source, mais qui en sont maintenant absents."
#: authentik/sources/ldap/models.py #: authentik/sources/ldap/models.py
msgid "LDAP Source" msgid "LDAP Source"
msgstr "Source LDAP" msgstr "Source LDAP"
@ -2535,13 +2490,6 @@ msgstr "Mappage de propriété source LDAP"
msgid "LDAP Source Property Mappings" msgid "LDAP Source Property Mappings"
msgstr "Mappages de propriété source LDAP" msgstr "Mappages de propriété source LDAP"
#: authentik/sources/ldap/models.py
msgid ""
"Unique ID used while checking if this object still exists in the directory."
msgstr ""
"ID unique utilisé pour vérifier si cet objet existe toujours dans le "
"répertoire."
#: authentik/sources/ldap/models.py #: authentik/sources/ldap/models.py
msgid "User LDAP Source Connection" msgid "User LDAP Source Connection"
msgstr "Connexion de l'utilisateur à la source LDAP" msgstr "Connexion de l'utilisateur à la source LDAP"
@ -2936,11 +2884,6 @@ msgstr "Connexion du groupe à la source SAML"
msgid "Group SAML Source Connections" msgid "Group SAML Source Connections"
msgstr "Connexions du groupe à la source SAML" msgstr "Connexions du groupe à la source SAML"
#: authentik/sources/saml/views.py
#, python-brace-format
msgid "Continue to {source_name}"
msgstr "Continuer vers {source_name}"
#: authentik/sources/scim/models.py #: authentik/sources/scim/models.py
msgid "SCIM Source" msgid "SCIM Source"
msgstr "Source SCIM" msgstr "Source SCIM"
@ -3331,10 +3274,6 @@ msgstr "Consentement Utilisateur"
msgid "User Consents" msgid "User Consents"
msgstr "Consentements Utilisateur" msgstr "Consentements Utilisateur"
#: authentik/stages/consent/stage.py
msgid "Invalid consent token, re-showing prompt"
msgstr "Jeton de consentement invalide, réaffichage de l'invite"
#: authentik/stages/deny/models.py #: authentik/stages/deny/models.py
msgid "Deny Stage" msgid "Deny Stage"
msgstr "Étape de Refus" msgstr "Étape de Refus"
@ -3351,14 +3290,6 @@ msgstr "Étape factice"
msgid "Dummy Stages" msgid "Dummy Stages"
msgstr "Étapes factices" msgstr "Étapes factices"
#: authentik/stages/email/flow.py
msgid "Continue to confirm this email address."
msgstr "Continuer pour confirmer cette adresse courriel."
#: authentik/stages/email/flow.py
msgid "Link was already used, please request a new link."
msgstr "Ce lien a déjà été utilisé, veuillez en demander un nouveau."
#: authentik/stages/email/models.py #: authentik/stages/email/models.py
msgid "Password Reset" msgid "Password Reset"
msgstr "Réinitialiser le Mot de Passe" msgstr "Réinitialiser le Mot de Passe"


@ -20,7 +20,7 @@ msgid ""
msgstr "" msgstr ""
"Project-Id-Version: PACKAGE VERSION\n" "Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n" "Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2025-05-28 11:25+0000\n" "POT-Creation-Date: 2025-04-23 09:00+0000\n"
"PO-Revision-Date: 2022-09-26 16:47+0000\n" "PO-Revision-Date: 2022-09-26 16:47+0000\n"
"Last-Translator: Kowalski Dragon (kowalski7cc) <kowalski.7cc@gmail.com>, 2025\n" "Last-Translator: Kowalski Dragon (kowalski7cc) <kowalski.7cc@gmail.com>, 2025\n"
"Language-Team: Italian (https://app.transifex.com/authentik/teams/119923/it/)\n" "Language-Team: Italian (https://app.transifex.com/authentik/teams/119923/it/)\n"
@ -114,10 +114,6 @@ msgstr ""
msgid "Web Certificate used by the authentik Core webserver." msgid "Web Certificate used by the authentik Core webserver."
msgstr "Certificato Web utilizzato dal server Web authentik Core." msgstr "Certificato Web utilizzato dal server Web authentik Core."
#: authentik/brands/models.py
msgid "Certificates used for client authentication."
msgstr ""
#: authentik/brands/models.py #: authentik/brands/models.py
msgid "Brand" msgid "Brand"
msgstr "Brand" msgstr "Brand"
@ -676,33 +672,6 @@ msgstr "Dispositivi di Accesso"
msgid "Verifying your browser..." msgid "Verifying your browser..."
msgstr "Verifica del tuo browser..." msgstr "Verifica del tuo browser..."
#: authentik/enterprise/stages/mtls/models.py
msgid ""
"Configure certificate authorities to validate the certificate against. This "
"option has a higher priority than the `client_certificate` option on "
"`Brand`."
msgstr ""
#: authentik/enterprise/stages/mtls/models.py
msgid "Mutual TLS Stage"
msgstr ""
#: authentik/enterprise/stages/mtls/models.py
msgid "Mutual TLS Stages"
msgstr ""
#: authentik/enterprise/stages/mtls/models.py
msgid "Permissions to pass Certificates for outposts."
msgstr ""
#: authentik/enterprise/stages/mtls/stage.py
msgid "Certificate required but no certificate was given."
msgstr ""
#: authentik/enterprise/stages/mtls/stage.py
msgid "No user found for certificate."
msgstr ""
#: authentik/enterprise/stages/source/models.py #: authentik/enterprise/stages/source/models.py
msgid "" msgid ""
"Amount of time a user can take to return from the source to continue the " "Amount of time a user can take to return from the source to continue the "
@ -1049,12 +1018,9 @@ msgid "Starting full provider sync"
msgstr "Avvio della sincronizzazione completa del provider" msgstr "Avvio della sincronizzazione completa del provider"
#: authentik/lib/sync/outgoing/tasks.py #: authentik/lib/sync/outgoing/tasks.py
msgid "Syncing users" #, python-brace-format
msgstr "" msgid "Syncing page {page} of users"
msgstr "Sincronizzando pagina {page} degli utenti"
#: authentik/lib/sync/outgoing/tasks.py
msgid "Syncing groups"
msgstr ""
#: authentik/lib/sync/outgoing/tasks.py #: authentik/lib/sync/outgoing/tasks.py
#, python-brace-format #, python-brace-format
@ -2497,12 +2463,6 @@ msgstr ""
"attributo di gruppo. Questo consente la risoluzione di gruppi nidificati su " "attributo di gruppo. Questo consente la risoluzione di gruppi nidificati su "
"sistemi come FreeIPA e Active Directory." "sistemi come FreeIPA e Active Directory."
#: authentik/sources/ldap/models.py
msgid ""
"Delete authentik users and groups which were previously supplied by this "
"source, but are now missing from it."
msgstr ""
#: authentik/sources/ldap/models.py #: authentik/sources/ldap/models.py
msgid "LDAP Source" msgid "LDAP Source"
msgstr "Sorgente LDAP" msgstr "Sorgente LDAP"
@ -2519,11 +2479,6 @@ msgstr "Mappatura delle proprietà sorgente LDAP"
msgid "LDAP Source Property Mappings" msgid "LDAP Source Property Mappings"
msgstr "Mappature delle proprietà della sorgente LDAP" msgstr "Mappature delle proprietà della sorgente LDAP"
#: authentik/sources/ldap/models.py
msgid ""
"Unique ID used while checking if this object still exists in the directory."
msgstr ""
#: authentik/sources/ldap/models.py #: authentik/sources/ldap/models.py
msgid "User LDAP Source Connection" msgid "User LDAP Source Connection"
msgstr "Connessione Sorgente LDAP Utente" msgstr "Connessione Sorgente LDAP Utente"
@ -2917,11 +2872,6 @@ msgstr "Connessione sorgente SAML di gruppo"
msgid "Group SAML Source Connections" msgid "Group SAML Source Connections"
msgstr "Connessioni sorgente SAML di gruppo" msgstr "Connessioni sorgente SAML di gruppo"
#: authentik/sources/saml/views.py
#, python-brace-format
msgid "Continue to {source_name}"
msgstr ""
#: authentik/sources/scim/models.py #: authentik/sources/scim/models.py
msgid "SCIM Source" msgid "SCIM Source"
msgstr "Sorgente SCIM" msgstr "Sorgente SCIM"
@ -3319,10 +3269,6 @@ msgstr "Consenso utente"
msgid "User Consents" msgid "User Consents"
msgstr "Consensi utente" msgstr "Consensi utente"
#: authentik/stages/consent/stage.py
msgid "Invalid consent token, re-showing prompt"
msgstr ""
#: authentik/stages/deny/models.py #: authentik/stages/deny/models.py
msgid "Deny Stage" msgid "Deny Stage"
msgstr "Fase di negazione" msgstr "Fase di negazione"
@ -3339,14 +3285,6 @@ msgstr "Fase fittizia"
msgid "Dummy Stages" msgid "Dummy Stages"
msgstr "Fasi fittizie" msgstr "Fasi fittizie"
#: authentik/stages/email/flow.py
msgid "Continue to confirm this email address."
msgstr ""
#: authentik/stages/email/flow.py
msgid "Link was already used, please request a new link."
msgstr ""
#: authentik/stages/email/models.py #: authentik/stages/email/models.py
msgid "Password Reset" msgid "Password Reset"
msgstr "Ripristino password" msgstr "Ripristino password"

Some files were not shown because too many files have changed in this diff.