Compare commits

5 Commits

website/do ... consistent

| Author | SHA1 | Date |
|---|---|---|
|  | 6d1bffc9f9 |  |
|  | 1a132a733f |  |
|  | 7d8bcdf8e7 |  |
|  | 2f2883edb4 |  |
|  | 95ee88c946 |  |
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 2025.6.1
current_version = 2025.4.1
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
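Note: taken literally, the `\\d` in the parse pattern would match a backslash rather than digits; assuming it is an escaping artifact and reads as a plain `\d`, a quick sketch of what the pattern captures:

```python
import re

# Assumed reading of the bumpversion parse pattern above, with \\d taken as \d.
PARSE = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?"
)

for version in ("2025.6.1", "2025.4.1", "2025.6.0-rc2"):
    match = PARSE.fullmatch(version)
    print(version, "->", match.groupdict())
# 2025.6.1 -> {'major': '2025', 'minor': '6', 'patch': '1', 'rc_t': None, 'rc_n': None}
# 2025.4.1 -> {'major': '2025', 'minor': '4', 'patch': '1', 'rc_t': None, 'rc_n': None}
# 2025.6.0-rc2 -> {'major': '2025', 'minor': '6', 'patch': '0', 'rc_t': 'rc', 'rc_n': '2'}
```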
@@ -5,10 +5,8 @@ dist/**
build/**
build_docs/**
*Dockerfile
**/*Dockerfile
blueprints/local
.git
!gen-ts-api/node_modules
!gen-ts-api/dist/**
!gen-go-api/
.venv

.github/actions/setup/action.yml  (vendored, 2 changes)

@@ -36,7 +36,7 @@ runs:
      with:
        go-version-file: "go.mod"
    - name: Setup docker cache
      uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7
      uses: ScribeMD/docker-cache@0.5.0
      with:
        key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }}
    - name: Setup dependencies
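For intuition about when this cache key rotates, here is a rough Python approximation; the exact digest construction inside GitHub's `hashFiles()` is a runner implementation detail, so this sketch simply hashes the matched files' contents in a fixed order, which is enough to reason about invalidation:

```python
import hashlib
from pathlib import Path

def cache_key(runner_os: str, postgresql_version: str) -> str:
    """Approximate the docker-images cache key from the action above.

    hashFiles() is approximated as one SHA-256 over the matched files'
    contents; any edit to either file (or a new postgres version) yields
    a new key and therefore a cold cache.
    """
    digest = hashlib.sha256()
    for name in (".github/actions/setup/docker-compose.yml", "Makefile"):
        digest.update(Path(name).read_bytes())
    return f"docker-images-{runner_os}-{digest.hexdigest()}-{postgresql_version}"

print(cache_key("Linux", "16-alpine"))
```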
.github/dependabot.yml  (vendored, 7 changes)

@@ -100,13 +100,6 @@ updates:
      goauthentik:
        patterns:
          - "@goauthentik/*"
      eslint:
        patterns:
          - "@eslint/*"
          - "@typescript-eslint/*"
          - "eslint-*"
          - "eslint"
          - "typescript-eslint"
  - package-ecosystem: npm
    directory: "/lifecycle/aws"
    schedule:
.github/workflows/ci-main.yml  (vendored, 2 changes)

@@ -62,7 +62,6 @@ jobs:
        psql:
          - 15-alpine
          - 16-alpine
          - 17-alpine
        run_id: [1, 2, 3, 4, 5]
    steps:
      - uses: actions/checkout@v4
@@ -117,7 +116,6 @@ jobs:
        psql:
          - 15-alpine
          - 16-alpine
          - 17-alpine
        run_id: [1, 2, 3, 4, 5]
    steps:
      - uses: actions/checkout@v4
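One side of this diff carries a third PostgreSQL version, so the change trims the test matrix; the job count per workflow run is simply the product of the matrix axes. A sketch:

```python
from itertools import product

# Matrix axes as they appear on the two-version side of the diff.
psql = ["15-alpine", "16-alpine"]
run_ids = [1, 2, 3, 4, 5]

jobs = list(product(psql, run_ids))
print(len(jobs))  # 10 job instances; with "17-alpine" it would be 15
for version, run_id in jobs:
    print(f"test (psql={version}, run_id={run_id})")
```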
.github/workflows/ci-website.yml  (vendored, 62 changes)

@@ -41,60 +41,32 @@ jobs:
      - name: test
        working-directory: website/
        run: npm test
  build-container:
  build:
    runs-on: ubuntu-latest
    permissions:
      # Needed to upload container images to ghcr.io
      packages: write
      # Needed for attestation
      id-token: write
      attestations: write
    name: ${{ matrix.job }}
    strategy:
      fail-fast: false
      matrix:
        job:
          - build
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3.6.0
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
        with:
          image-name: ghcr.io/goauthentik/dev-docs
      - name: Login to Container Registry
        if: ${{ steps.ev.outputs.shouldPush == 'true' }}
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Build Docker Image
        id: push
        uses: docker/build-push-action@v6
        with:
          tags: ${{ steps.ev.outputs.imageTags }}
          file: website/Dockerfile
          push: ${{ steps.ev.outputs.shouldPush == 'true' }}
          platforms: linux/amd64,linux/arm64
          context: .
          cache-from: type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache
          cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache,mode=max' || '' }}
      - uses: actions/attest-build-provenance@v2
        id: attest
        if: ${{ steps.ev.outputs.shouldPush == 'true' }}
        with:
          subject-name: ${{ steps.ev.outputs.attestImageNames }}
          subject-digest: ${{ steps.push.outputs.digest }}
          push-to-registry: true
          node-version-file: website/package.json
          cache: "npm"
          cache-dependency-path: website/package-lock.json
      - working-directory: website/
        run: npm ci
      - name: build
        working-directory: website/
        run: npm run ${{ matrix.job }}
  ci-website-mark:
    if: always()
    needs:
      - lint
      - test
      - build-container
      - build
    runs-on: ubuntu-latest
    steps:
      - uses: re-actors/alls-green@release/v1
.github/workflows/packages-npm-publish.yml  (vendored, 17 changes)

@@ -7,7 +7,7 @@ on:
      - packages/eslint-config/**
      - packages/prettier-config/**
      - packages/tsconfig/**
      - web/packages/esbuild-plugin-live-reload/**
      - packages/web/esbuild-plugin-live-reload/**
  workflow_dispatch:
jobs:
  publish:
@@ -17,28 +17,27 @@ jobs:
      fail-fast: false
      matrix:
        package:
          - packages/docusaurus-config
          - packages/eslint-config
          - packages/prettier-config
          - packages/tsconfig
          - web/packages/esbuild-plugin-live-reload
          - docusaurus-config
          - eslint-config
          - prettier-config
          - tsconfig
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 2
      - uses: actions/setup-node@v4
        with:
          node-version-file: ${{ matrix.package }}/package.json
          node-version-file: packages/${{ matrix.package }}/package.json
          registry-url: "https://registry.npmjs.org"
      - name: Get changed files
        id: changed-files
        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c
        with:
          files: |
            ${{ matrix.package }}/package.json
            packages/${{ matrix.package }}/package.json
      - name: Publish package
        if: steps.changed-files.outputs.any_changed == 'true'
        working-directory: ${{ matrix.package }}
        working-directory: packages/${{ matrix.package }}
        run: |
          npm ci
          npm run build
.github/workflows/release-publish.yml  (vendored, 45 changes)

@@ -20,49 +20,6 @@ jobs:
      release: true
      registry_dockerhub: true
      registry_ghcr: true
  build-docs:
    runs-on: ubuntu-latest
    permissions:
      # Needed to upload container images to ghcr.io
      packages: write
      # Needed for attestation
      id-token: write
      attestations: write
    steps:
      - uses: actions/checkout@v4
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3.6.0
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
        with:
          image-name: ghcr.io/goauthentik/docs
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Build Docker Image
        id: push
        uses: docker/build-push-action@v6
        with:
          tags: ${{ steps.ev.outputs.imageTags }}
          file: website/Dockerfile
          push: true
          platforms: linux/amd64,linux/arm64
          context: .
      - uses: actions/attest-build-provenance@v2
        id: attest
        if: true
        with:
          subject-name: ${{ steps.ev.outputs.attestImageNames }}
          subject-digest: ${{ steps.push.outputs.digest }}
          push-to-registry: true
  build-outpost:
    runs-on: ubuntu-latest
    permissions:
@@ -236,6 +193,6 @@ jobs:
          SENTRY_ORG: authentik-security-inc
          SENTRY_PROJECT: authentik
        with:
          release: authentik@${{ steps.ev.outputs.version }}
          version: authentik@${{ steps.ev.outputs.version }}
          sourcemaps: "./web/dist"
          url_prefix: "~/static/dist"
Dockerfile  (50 changes)

@@ -1,7 +1,26 @@
# syntax=docker/dockerfile:1

# Stage 1: Build webui
FROM --platform=${BUILDPLATFORM} docker.io/library/node:24-slim AS node-builder
# Stage 1: Build website
FROM --platform=${BUILDPLATFORM} docker.io/library/node:24 AS website-builder

ENV NODE_ENV=production

WORKDIR /work/website

RUN --mount=type=bind,target=/work/website/package.json,src=./website/package.json \
    --mount=type=bind,target=/work/website/package-lock.json,src=./website/package-lock.json \
    --mount=type=cache,id=npm-website,sharing=shared,target=/root/.npm \
    npm ci --include=dev

COPY ./website /work/website/
COPY ./blueprints /work/blueprints/
COPY ./schema.yml /work/
COPY ./SECURITY.md /work/

RUN npm run build-bundled

# Stage 2: Build webui
FROM --platform=${BUILDPLATFORM} docker.io/library/node:24 AS web-builder

ARG GIT_BUILD_HASH
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
@@ -13,7 +32,7 @@ RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \
    --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \
    --mount=type=bind,target=/work/web/packages/sfe/package.json,src=./web/packages/sfe/package.json \
    --mount=type=bind,target=/work/web/scripts,src=./web/scripts \
    --mount=type=cache,id=npm-ak,sharing=shared,target=/root/.npm \
    --mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \
    npm ci --include=dev

COPY ./package.json /work
@@ -24,7 +43,7 @@ COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api
RUN npm run build && \
    npm run build:sfe

# Stage 2: Build go proxy
# Stage 3: Build go proxy
FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.24-bookworm AS go-builder

ARG TARGETOS
@@ -49,8 +68,8 @@ RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \
COPY ./cmd /go/src/goauthentik.io/cmd
COPY ./authentik/lib /go/src/goauthentik.io/authentik/lib
COPY ./web/static.go /go/src/goauthentik.io/web/static.go
COPY --from=node-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt
COPY --from=node-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt
COPY --from=web-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt
COPY --from=web-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt
COPY ./internal /go/src/goauthentik.io/internal
COPY ./go.mod /go/src/goauthentik.io/go.mod
COPY ./go.sum /go/src/goauthentik.io/go.sum
@@ -61,7 +80,7 @@ RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
    CGO_ENABLED=1 GOFIPS140=latest GOARM="${TARGETVARIANT#v}" \
    go build -o /go/authentik ./cmd/server

# Stage 3: MaxMind GeoIP
# Stage 4: MaxMind GeoIP
FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip

ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN"
@@ -74,10 +93,10 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
    mkdir -p /usr/share/GeoIP && \
    /bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"

# Stage 4: Download uv
FROM ghcr.io/astral-sh/uv:0.7.12 AS uv
# Stage 5: Base python image
FROM ghcr.io/goauthentik/fips-python:3.13.4-slim-bookworm-fips AS python-base
# Stage 5: Download uv
FROM ghcr.io/astral-sh/uv:0.7.7 AS uv
# Stage 6: Base python image
FROM ghcr.io/goauthentik/fips-python:3.13.3-slim-bookworm-fips AS python-base

ENV VENV_PATH="/ak-root/.venv" \
    PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \
@@ -90,7 +109,7 @@ WORKDIR /ak-root/

COPY --from=uv /uv /uvx /bin/

# Stage 6: Python dependencies
# Stage 7: Python dependencies
FROM python-base AS python-deps

ARG TARGETARCH
@@ -125,7 +144,7 @@ RUN --mount=type=bind,target=pyproject.toml,src=pyproject.toml \
    --mount=type=cache,target=/root/.cache/uv \
    uv sync --frozen --no-install-project --no-dev

# Stage 7: Run
# Stage 8: Run
FROM python-base AS final-image

ARG VERSION
@@ -168,8 +187,9 @@ COPY ./lifecycle/ /lifecycle
COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf
COPY --from=go-builder /go/authentik /bin/authentik
COPY --from=python-deps /ak-root/.venv /ak-root/.venv
COPY --from=node-builder /work/web/dist/ /web/dist/
COPY --from=node-builder /work/web/authentik/ /web/authentik/
COPY --from=web-builder /work/web/dist/ /web/dist/
COPY --from=web-builder /work/web/authentik/ /web/authentik/
COPY --from=website-builder /work/website/build/ /website/help/
COPY --from=geoip /usr/share/GeoIP /geoip

USER 1000
Makefile  (2 changes)

@@ -1,6 +1,6 @@
.PHONY: gen dev-reset all clean test web website

SHELL := /usr/bin/env bash
SHELL := /bin/bash
.SHELLFLAGS += ${SHELLFLAGS} -e -o pipefail
PWD = $(shell pwd)
UID = $(shell id -u)
@@ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardening

| Version   | Supported |
| --------- | --------- |
| 2025.2.x  | ✅        |
| 2025.4.x  | ✅        |
| 2025.6.x  | ✅        |

## Reporting a Vulnerability
@@ -2,7 +2,7 @@

from os import environ

__version__ = "2025.6.1"
__version__ = "2025.4.1"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"


authentik/admin/api/metrics.py  (normal file, 79 changes)
@@ -0,0 +1,79 @@
"""authentik administration metrics"""

from datetime import timedelta

from django.db.models.functions import ExtractHour
from drf_spectacular.utils import extend_schema, extend_schema_field
from guardian.shortcuts import get_objects_for_user
from rest_framework.fields import IntegerField, SerializerMethodField
from rest_framework.permissions import IsAuthenticated
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView

from authentik.core.api.utils import PassiveSerializer
from authentik.events.models import EventAction


class CoordinateSerializer(PassiveSerializer):
    """Coordinates for diagrams"""

    x_cord = IntegerField(read_only=True)
    y_cord = IntegerField(read_only=True)


class LoginMetricsSerializer(PassiveSerializer):
    """Login Metrics per 1h"""

    logins = SerializerMethodField()
    logins_failed = SerializerMethodField()
    authorizations = SerializerMethodField()

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_logins(self, _):
        """Get successful logins per 8 hours for the last 7 days"""
        user = self.context["user"]
        return (
            get_objects_for_user(user, "authentik_events.view_event").filter(
                action=EventAction.LOGIN
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
        )

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_logins_failed(self, _):
        """Get failed logins per 8 hours for the last 7 days"""
        user = self.context["user"]
        return (
            get_objects_for_user(user, "authentik_events.view_event").filter(
                action=EventAction.LOGIN_FAILED
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
        )

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_authorizations(self, _):
        """Get successful authorizations per 8 hours for the last 7 days"""
        user = self.context["user"]
        return (
            get_objects_for_user(user, "authentik_events.view_event").filter(
                action=EventAction.AUTHORIZE_APPLICATION
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
        )


class AdministrationMetricsViewSet(APIView):
    """Login Metrics per 1h"""

    permission_classes = [IsAuthenticated]

    @extend_schema(responses={200: LoginMetricsSerializer(many=False)})
    def get(self, request: Request) -> Response:
        """Login Metrics per 1h"""
        serializer = LoginMetricsSerializer(True)
        serializer.context["user"] = request.user
        return Response(serializer.data)
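For context, a sketch of consuming the endpoint this file exposes (wired up as `admin/metrics/` in the URL diff further down). The `/api/v3` base path and bearer-token auth are assumptions for illustration; the response fields follow the serializer above:

```python
import requests

BASE_URL = "https://authentik.example.com/api/v3"  # assumed API mount point
TOKEN = "..."  # an API token whose user can view events

response = requests.get(
    f"{BASE_URL}/admin/metrics/",
    headers={"Authorization": f"Bearer {TOKEN}"},
    timeout=10,
)
response.raise_for_status()
metrics = response.json()

# Each field is a list of {x_cord, y_cord} coordinates,
# 3 data points per day over the last 7 days.
for series in ("logins", "logins_failed", "authorizations"):
    print(series, metrics[series][:3])
```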
@@ -1,7 +1,6 @@
"""authentik administration overview"""

from django.core.cache import cache
from django_tenants.utils import get_public_schema_name
from drf_spectacular.utils import extend_schema
from packaging.version import parse
from rest_framework.fields import SerializerMethodField
@@ -14,7 +13,6 @@ from authentik import __version__, get_build_hash
from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version
from authentik.core.api.utils import PassiveSerializer
from authentik.outposts.models import Outpost
from authentik.tenants.utils import get_current_tenant


class VersionSerializer(PassiveSerializer):
@@ -37,8 +35,6 @@ class VersionSerializer(PassiveSerializer):

    def get_version_latest(self, _) -> str:
        """Get latest version from cache"""
        if get_current_tenant().schema_name == get_public_schema_name():
            return __version__
        version_in_cache = cache.get(VERSION_CACHE_KEY)
        if not version_in_cache:  # pragma: no cover
            update_latest_version.delay()

@@ -14,19 +14,3 @@ class AuthentikAdminConfig(ManagedAppConfig):
    label = "authentik_admin"
    verbose_name = "authentik Admin"
    default = True

    @ManagedAppConfig.reconcile_global
    def clear_update_notifications(self):
        """Clear update notifications on startup if the notification was for the version
        we're running now."""
        from packaging.version import parse

        from authentik.admin.tasks import LOCAL_VERSION
        from authentik.events.models import EventAction, Notification

        for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE):
            if "new_version" not in notification.event.context:
                continue
            notification_version = notification.event.context["new_version"]
            if LOCAL_VERSION >= parse(notification_version):
                notification.delete()

@@ -1,7 +1,6 @@
"""authentik admin settings"""

from celery.schedules import crontab
from django_tenants.utils import get_public_schema_name

from authentik.lib.utils.time import fqdn_rand

@@ -9,7 +8,6 @@ CELERY_BEAT_SCHEDULE = {
    "admin_latest_version": {
        "task": "authentik.admin.tasks.update_latest_version",
        "schedule": crontab(minute=fqdn_rand("admin_latest_version"), hour="*"),
        "tenant_schemas": [get_public_schema_name()],
        "options": {"queue": "authentik_scheduled"},
    }
}

@@ -1,6 +1,7 @@
"""authentik admin tasks"""

from django.core.cache import cache
from django.db import DatabaseError, InternalError, ProgrammingError
from django.utils.translation import gettext_lazy as _
from packaging.version import parse
from requests import RequestException
@@ -8,7 +9,7 @@ from structlog.stdlib import get_logger

from authentik import __version__, get_build_hash
from authentik.admin.apps import PROM_INFO
from authentik.events.models import Event, EventAction
from authentik.events.models import Event, EventAction, Notification
from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
from authentik.lib.config import CONFIG
from authentik.lib.utils.http import get_http_session
@@ -32,6 +33,20 @@ def _set_prom_info():
    )


@CELERY_APP.task(
    throws=(DatabaseError, ProgrammingError, InternalError),
)
def clear_update_notifications():
    """Clear update notifications on startup if the notification was for the version
    we're running now."""
    for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE):
        if "new_version" not in notification.event.context:
            continue
        notification_version = notification.event.context["new_version"]
        if LOCAL_VERSION >= parse(notification_version):
            notification.delete()


@CELERY_APP.task(bind=True, base=SystemTask)
@prefill_task
def update_latest_version(self: SystemTask):
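The deletion condition in `clear_update_notifications` relies on PEP 440 ordering via `packaging.version.parse`; a sketch of the comparison in isolation (the running version is assumed for illustration):

```python
from packaging.version import parse

LOCAL_VERSION = parse("2025.6.1")  # assumed running version

for new_version in ("1.1.1", "2025.6.1", "99999999.9999999.9999999"):
    # A notification is stale once the running version has caught up
    # with (or passed) the version it announced.
    stale = LOCAL_VERSION >= parse(new_version)
    print(new_version, "-> delete" if stale else "-> keep")
# 1.1.1 -> delete
# 2025.6.1 -> delete
# 99999999.9999999.9999999 -> keep
```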
@@ -36,6 +36,11 @@ class TestAdminAPI(TestCase):
        body = loads(response.content)
        self.assertEqual(len(body), 0)

    def test_metrics(self):
        """Test metrics API"""
        response = self.client.get(reverse("authentik_api:admin_metrics"))
        self.assertEqual(response.status_code, 200)

    def test_apps(self):
        """Test apps API"""
        response = self.client.get(reverse("authentik_api:apps-list"))
@@ -1,12 +1,12 @@
"""test admin tasks"""

from django.apps import apps
from django.core.cache import cache
from django.test import TestCase
from requests_mock import Mocker

from authentik.admin.tasks import (
    VERSION_CACHE_KEY,
    clear_update_notifications,
    update_latest_version,
)
from authentik.events.models import Event, EventAction
@@ -72,13 +72,12 @@ class TestAdminTasks(TestCase):

    def test_clear_update_notifications(self):
        """Test clear of previous notification"""
        admin_config = apps.get_app_config("authentik_admin")
        Event.objects.create(
            action=EventAction.UPDATE_AVAILABLE, context={"new_version": "99999999.9999999.9999999"}
        )
        Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={"new_version": "1.1.1"})
        Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={})
        admin_config.clear_update_notifications()
        clear_update_notifications()
        self.assertFalse(
            Event.objects.filter(
                action=EventAction.UPDATE_AVAILABLE, context__new_version="1.1"
@@ -3,6 +3,7 @@
from django.urls import path

from authentik.admin.api.meta import AppsViewSet, ModelViewSet
from authentik.admin.api.metrics import AdministrationMetricsViewSet
from authentik.admin.api.system import SystemView
from authentik.admin.api.version import VersionView
from authentik.admin.api.version_history import VersionHistoryViewSet
@@ -11,6 +12,11 @@ from authentik.admin.api.workers import WorkerView
api_urlpatterns = [
    ("admin/apps", AppsViewSet, "apps"),
    ("admin/models", ModelViewSet, "models"),
    path(
        "admin/metrics/",
        AdministrationMetricsViewSet.as_view(),
        name="admin_metrics",
    ),
    path("admin/version/", VersionView.as_view(), name="admin_version"),
    ("admin/version/history", VersionHistoryViewSet, "version_history"),
    path("admin/workers/", WorkerView.as_view(), name="admin_workers"),
@@ -1,13 +1,12 @@
"""authentik API AppConfig"""

from authentik.blueprints.apps import ManagedAppConfig
from django.apps import AppConfig


class AuthentikAPIConfig(ManagedAppConfig):
class AuthentikAPIConfig(AppConfig):
    """authentik API Config"""

    name = "authentik.api"
    label = "authentik_api"
    mountpoint = "api/"
    verbose_name = "authentik API"
    default = True
@@ -1,14 +0,0 @@
from django.test import TestCase

from authentik.blueprints.apps import ManagedAppConfig
from authentik.enterprise.apps import EnterpriseConfig
from authentik.lib.utils.reflection import get_apps


class TestManagedAppConfig(TestCase):
    def test_apps_use_managed_app_config(self):
        for app in get_apps():
            if app.name.startswith("authentik.enterprise"):
                self.assertIn(EnterpriseConfig, app.__class__.__bases__)
            else:
                self.assertIn(ManagedAppConfig, app.__class__.__bases__)
@@ -1,9 +1,9 @@
"""authentik brands app"""

from authentik.blueprints.apps import ManagedAppConfig
from django.apps import AppConfig


class AuthentikBrandsConfig(ManagedAppConfig):
class AuthentikBrandsConfig(AppConfig):
    """authentik Brand app"""

    name = "authentik.brands"
@@ -12,4 +12,3 @@ class AuthentikBrandsConfig(ManagedAppConfig):
    mountpoints = {
        "authentik.brands.urls_root": "",
    }
    default = True
@@ -148,14 +148,3 @@ class TestBrands(APITestCase):
                "default_locale": "",
            },
        )

    def test_custom_css(self):
        """Test custom_css"""
        brand = create_test_brand()
        brand.branding_custom_css = """* {
            font-family: "Foo bar";
        }"""
        brand.save()
        res = self.client.get(reverse("authentik_core:if-user"))
        self.assertEqual(res.status_code, 200)
        self.assertIn(brand.branding_custom_css, res.content.decode())
@@ -5,8 +5,6 @@ from typing import Any
from django.db.models import F, Q
from django.db.models import Value as V
from django.http.request import HttpRequest
from django.utils.html import _json_script_escapes
from django.utils.safestring import mark_safe

from authentik import get_full_version
from authentik.brands.models import Brand
@@ -34,13 +32,8 @@ def context_processor(request: HttpRequest) -> dict[str, Any]:
    """Context Processor that injects brand object into every template"""
    brand = getattr(request, "brand", DEFAULT_BRAND)
    tenant = getattr(request, "tenant", Tenant())
    # similarly to `json_script` we escape everything HTML-related, however django
    # only directly exposes this as a function that also wraps it in a <script> tag
    # which we dont want for CSS
    brand_css = mark_safe(str(brand.branding_custom_css).translate(_json_script_escapes))  # nosec
    return {
        "brand": brand,
        "brand_css": brand_css,
        "footer_links": tenant.footer_links,
        "html_meta": {**get_http_meta()},
        "version": get_full_version(),
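The `brand_css` logic on one side of this diff escapes HTML-significant characters the same way Django's `json_script` does, so user-supplied CSS cannot close the surrounding `<style>` tag. A standalone sketch of the idea; the escape table below mirrors `django.utils.html._json_script_escapes`:

```python
# Escape table mirroring django.utils.html._json_script_escapes:
# <, > and & are replaced with \uXXXX sequences.
_json_script_escapes = {
    ord(">"): "\\u003E",
    ord("<"): "\\u003C",
    ord("&"): "\\u0026",
}

# Hypothetical custom CSS attempting a </style> breakout.
malicious_css = '* { color: red } </style><script>alert(1)</script>'
print(malicious_css.translate(_json_script_escapes))
# * { color: red } \u003C/style\u003E\u003Cscript\u003Ealert(1)\u003C/script\u003E
```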
@@ -2,9 +2,11 @@

from collections.abc import Iterator
from copy import copy
from datetime import timedelta

from django.core.cache import cache
from django.db.models import QuerySet
from django.db.models.functions import ExtractHour
from django.shortcuts import get_object_or_404
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
@@ -18,6 +20,7 @@ from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger

from authentik.admin.api.metrics import CoordinateSerializer
from authentik.api.pagination import Pagination
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.providers import ProviderSerializer
@@ -25,6 +28,7 @@ from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.core.models import Application, User
from authentik.events.logs import LogEventSerializer, capture_logs
from authentik.events.models import EventAction
from authentik.lib.utils.file import (
    FilePathSerializer,
    FileUploadSerializer,
@@ -317,3 +321,18 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
        """Set application icon (as URL)"""
        app: Application = self.get_object()
        return set_file_url(request, app, "meta_icon")

    @permission_required("authentik_core.view_application", ["authentik_events.view_event"])
    @extend_schema(responses={200: CoordinateSerializer(many=True)})
    @action(detail=True, pagination_class=None, filter_backends=[])
    def metrics(self, request: Request, slug: str):
        """Metrics for application logins"""
        app = self.get_object()
        return Response(
            get_objects_for_user(request.user, "authentik_events.view_event").filter(
                action=EventAction.AUTHORIZE_APPLICATION,
                context__authorized_application__pk=app.pk.hex,
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
        )
@@ -6,6 +6,7 @@ from typing import Any

from django.contrib.auth import update_session_auth_hash
from django.contrib.auth.models import Permission
from django.db.models.functions import ExtractHour
from django.db.transaction import atomic
from django.db.utils import IntegrityError
from django.urls import reverse_lazy
@@ -51,6 +52,7 @@ from rest_framework.validators import UniqueValidator
from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger

from authentik.admin.api.metrics import CoordinateSerializer
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.brands.models import Brand
from authentik.core.api.used_by import UsedByMixin
@@ -82,7 +84,6 @@ from authentik.flows.views.executor import QS_KEY_TOKEN
from authentik.lib.avatars import get_avatar
from authentik.rbac.decorators import permission_required
from authentik.rbac.models import get_permission_choices
from authentik.stages.email.flow import pickle_flow_token_for_email
from authentik.stages.email.models import EmailStage
from authentik.stages.email.tasks import send_mails
from authentik.stages.email.utils import TemplateEmailMessage
@@ -315,6 +316,53 @@ class SessionUserSerializer(PassiveSerializer):
    original = UserSelfSerializer(required=False)


class UserMetricsSerializer(PassiveSerializer):
    """User Metrics"""

    logins = SerializerMethodField()
    logins_failed = SerializerMethodField()
    authorizations = SerializerMethodField()

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_logins(self, _):
        """Get successful logins per 8 hours for the last 7 days"""
        user = self.context["user"]
        request = self.context["request"]
        return (
            get_objects_for_user(request.user, "authentik_events.view_event").filter(
                action=EventAction.LOGIN, user__pk=user.pk
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
        )

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_logins_failed(self, _):
        """Get failed logins per 8 hours for the last 7 days"""
        user = self.context["user"]
        request = self.context["request"]
        return (
            get_objects_for_user(request.user, "authentik_events.view_event").filter(
                action=EventAction.LOGIN_FAILED, context__username=user.username
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
        )

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_authorizations(self, _):
        """Get failed logins per 8 hours for the last 7 days"""
        user = self.context["user"]
        request = self.context["request"]
        return (
            get_objects_for_user(request.user, "authentik_events.view_event").filter(
                action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
        )


class UsersFilter(FilterSet):
    """Filter for users"""

@@ -403,7 +451,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
    def list(self, request, *args, **kwargs):
        return super().list(request, *args, **kwargs)

    def _create_recovery_link(self, for_email=False) -> tuple[str, Token]:
    def _create_recovery_link(self) -> tuple[str, Token]:
        """Create a recovery link (when the current brand has a recovery flow set),
        that can either be shown to an admin or sent to the user directly"""
        brand: Brand = self.request._request.brand
@@ -425,16 +473,12 @@ class UserViewSet(UsedByMixin, ModelViewSet):
            raise ValidationError(
                {"non_field_errors": "Recovery flow not applicable to user"}
            ) from None
        _plan = FlowToken.pickle(plan)
        if for_email:
            _plan = pickle_flow_token_for_email(plan)
        token, __ = FlowToken.objects.update_or_create(
            identifier=f"{user.uid}-password-reset",
            defaults={
                "user": user,
                "flow": flow,
                "_plan": _plan,
                "revoke_on_execution": not for_email,
                "_plan": FlowToken.pickle(plan),
            },
        )
        querystring = urlencode({QS_KEY_TOKEN: token.key})
@@ -558,6 +602,17 @@ class UserViewSet(UsedByMixin, ModelViewSet):
            update_session_auth_hash(self.request, user)
        return Response(status=204)

    @permission_required("authentik_core.view_user", ["authentik_events.view_event"])
    @extend_schema(responses={200: UserMetricsSerializer(many=False)})
    @action(detail=True, pagination_class=None, filter_backends=[])
    def metrics(self, request: Request, pk: int) -> Response:
        """User metrics per 1h"""
        user: User = self.get_object()
        serializer = UserMetricsSerializer(instance={})
        serializer.context["user"] = user
        serializer.context["request"] = request
        return Response(serializer.data)

    @permission_required("authentik_core.reset_user_password")
    @extend_schema(
        responses={
@@ -593,7 +648,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
        if for_user.email == "":
            LOGGER.debug("User doesn't have an email address")
            raise ValidationError({"non_field_errors": "User does not have an email address set."})
        link, token = self._create_recovery_link(for_email=True)
        link, token = self._create_recovery_link()
        # Lookup the email stage to assure the current user can access it
        stages = get_objects_for_user(
            request.user, "authentik_stages_email.view_emailstage"
@@ -79,7 +79,6 @@ def _migrate_session(
        AuthenticatedSession.objects.using(db_alias).create(
            session=session,
            user=old_auth_session.user,
            uuid=old_auth_session.uuid,
        )
@@ -1,81 +1,10 @@
# Generated by Django 5.1.9 on 2025-05-14 11:15

from django.apps.registry import Apps, apps as global_apps
from django.apps.registry import Apps
from django.db import migrations
from django.contrib.contenttypes.management import create_contenttypes
from django.contrib.auth.management import create_permissions
from django.db.backends.base.schema import BaseDatabaseSchemaEditor


def migrate_authenticated_session_permissions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
    """Migrate permissions from OldAuthenticatedSession to AuthenticatedSession"""
    db_alias = schema_editor.connection.alias

    # `apps` here is just an instance of `django.db.migrations.state.AppConfigStub`, we need the
    # real config for creating permissions and content types
    authentik_core_config = global_apps.get_app_config("authentik_core")
    # These are only ran by django after all migrations, but we need them right now.
    # `global_apps` is needed,
    create_permissions(authentik_core_config, using=db_alias, verbosity=1)
    create_contenttypes(authentik_core_config, using=db_alias, verbosity=1)

    # But from now on, this is just a regular migration, so use `apps`
    Permission = apps.get_model("auth", "Permission")
    ContentType = apps.get_model("contenttypes", "ContentType")

    try:
        old_ct = ContentType.objects.using(db_alias).get(
            app_label="authentik_core", model="oldauthenticatedsession"
        )
        new_ct = ContentType.objects.using(db_alias).get(
            app_label="authentik_core", model="authenticatedsession"
        )
    except ContentType.DoesNotExist:
        # This should exist at this point, but if not, let's cut our losses
        return

    # Get all permissions for the old content type
    old_perms = Permission.objects.using(db_alias).filter(content_type=old_ct)

    # Create equivalent permissions for the new content type
    for old_perm in old_perms:
        new_perm = (
            Permission.objects.using(db_alias)
            .filter(
                content_type=new_ct,
                codename=old_perm.codename,
            )
            .first()
        )
        if not new_perm:
            # This should exist at this point, but if not, let's cut our losses
            continue

        # Global user permissions
        User = apps.get_model("authentik_core", "User")
        User.user_permissions.through.objects.using(db_alias).filter(
            permission=old_perm
        ).all().update(permission=new_perm)

        # Global role permissions
        DjangoGroup = apps.get_model("auth", "Group")
        DjangoGroup.permissions.through.objects.using(db_alias).filter(
            permission=old_perm
        ).all().update(permission=new_perm)

        # Object user permissions
        UserObjectPermission = apps.get_model("guardian", "UserObjectPermission")
        UserObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update(
            permission=new_perm, content_type=new_ct
        )

        # Object role permissions
        GroupObjectPermission = apps.get_model("guardian", "GroupObjectPermission")
        GroupObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update(
            permission=new_perm, content_type=new_ct
        )


def remove_old_authenticated_session_content_type(
    apps: Apps, schema_editor: BaseDatabaseSchemaEditor
):
@@ -92,12 +21,7 @@ class Migration(migrations.Migration):
    ]

    operations = [
        migrations.RunPython(
            code=migrate_authenticated_session_permissions,
            reverse_code=migrations.RunPython.noop,
        ),
        migrations.RunPython(
            code=remove_old_authenticated_session_content_type,
            reverse_code=migrations.RunPython.noop,
        ),
    ]
@@ -16,7 +16,7 @@
        {% block head_before %}
        {% endblock %}
        <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}">
        <style>{{ brand_css }}</style>
        <style>{{ brand.branding_custom_css }}</style>
        <script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script>
        <script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script>
        {% block head %}
@@ -10,7 +10,7 @@
{% endblock %}

{% block body %}
<ak-message-container alignment="bottom"></ak-message-container>
<ak-message-container></ak-message-container>
<ak-interface-admin>
    <ak-loading></ak-loading>
</ak-interface-admin>
@@ -81,6 +81,22 @@ class TestUsersAPI(APITestCase):
        response = self.client.get(reverse("authentik_api:user-list"), {"include_groups": "true"})
        self.assertEqual(response.status_code, 200)

    def test_metrics(self):
        """Test user's metrics"""
        self.client.force_login(self.admin)
        response = self.client.get(
            reverse("authentik_api:user-metrics", kwargs={"pk": self.user.pk})
        )
        self.assertEqual(response.status_code, 200)

    def test_metrics_denied(self):
        """Test user's metrics (non-superuser)"""
        self.client.force_login(self.user)
        response = self.client.get(
            reverse("authentik_api:user-metrics", kwargs={"pk": self.user.pk})
        )
        self.assertEqual(response.status_code, 403)

    def test_recovery_no_flow(self):
        """Test user recovery link (no recovery flow set)"""
        self.client.force_login(self.admin)
| @ -1,36 +1,28 @@ | ||||
| """Events API Views""" | ||||
|  | ||||
| from datetime import timedelta | ||||
| from json import loads | ||||
|  | ||||
| import django_filters | ||||
| from django.db.models import Count, ExpressionWrapper, F, QuerySet | ||||
| from django.db.models import DateTimeField as DjangoDateTimeField | ||||
| from django.db.models.aggregates import Count | ||||
| from django.db.models.fields.json import KeyTextTransform, KeyTransform | ||||
| from django.db.models.functions import TruncHour | ||||
| from django.db.models.functions import ExtractDay, ExtractHour | ||||
| from django.db.models.query_utils import Q | ||||
| from django.utils.timezone import now | ||||
| from drf_spectacular.types import OpenApiTypes | ||||
| from drf_spectacular.utils import OpenApiParameter, extend_schema | ||||
| from guardian.shortcuts import get_objects_for_user | ||||
| from rest_framework.decorators import action | ||||
| from rest_framework.fields import ChoiceField, DateTimeField, DictField, IntegerField | ||||
| from rest_framework.fields import DictField, IntegerField | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
|  | ||||
| from authentik.admin.api.metrics import CoordinateSerializer | ||||
| from authentik.core.api.object_types import TypeCreateSerializer | ||||
| from authentik.core.api.utils import ModelSerializer, PassiveSerializer | ||||
| from authentik.events.models import Event, EventAction | ||||
|  | ||||
|  | ||||
| class EventVolumeSerializer(PassiveSerializer): | ||||
|     """Count of events of action created on day""" | ||||
|  | ||||
|     action = ChoiceField(choices=EventAction.choices) | ||||
|     time = DateTimeField() | ||||
|     count = IntegerField() | ||||
|  | ||||
|  | ||||
| class EventSerializer(ModelSerializer): | ||||
|     """Event Serializer""" | ||||
|  | ||||
| @ -61,7 +53,7 @@ class EventsFilter(django_filters.FilterSet): | ||||
|     """Filter for events""" | ||||
|  | ||||
|     username = django_filters.CharFilter( | ||||
|         field_name="user", label="Username", method="filter_username" | ||||
|         field_name="user", lookup_expr="username", label="Username" | ||||
|     ) | ||||
|     context_model_pk = django_filters.CharFilter( | ||||
|         field_name="context", | ||||
| @ -86,19 +78,12 @@ class EventsFilter(django_filters.FilterSet): | ||||
|         field_name="action", | ||||
|         lookup_expr="icontains", | ||||
|     ) | ||||
|     actions = django_filters.MultipleChoiceFilter( | ||||
|         field_name="action", | ||||
|         choices=EventAction.choices, | ||||
|     ) | ||||
|     brand_name = django_filters.CharFilter( | ||||
|         field_name="brand", | ||||
|         lookup_expr="name", | ||||
|         label="Brand name", | ||||
|     ) | ||||
|  | ||||
|     def filter_username(self, queryset, name, value): | ||||
|         return queryset.filter(Q(user__username=value) | Q(context__username=value)) | ||||
|  | ||||
|     def filter_context_model_pk(self, queryset, name, value): | ||||
|         """Because we store the PK as UUID.hex, | ||||
|         we need to remove the dashes that a client may send. We can't use a | ||||
| @ -171,37 +156,45 @@ class EventViewSet(ModelViewSet): | ||||
|         return Response(EventTopPerUserSerializer(instance=events, many=True).data) | ||||
|  | ||||
|     @extend_schema( | ||||
|         responses={200: EventVolumeSerializer(many=True)}, | ||||
|         parameters=[ | ||||
|             OpenApiParameter( | ||||
|                 "history_days", | ||||
|                 type=OpenApiTypes.NUMBER, | ||||
|                 location=OpenApiParameter.QUERY, | ||||
|                 required=False, | ||||
|                 default=7, | ||||
|             ), | ||||
|         ], | ||||
|         responses={200: CoordinateSerializer(many=True)}, | ||||
|     ) | ||||
|     @action(detail=False, methods=["GET"], pagination_class=None) | ||||
|     def volume(self, request: Request) -> Response: | ||||
|         """Get event volume for specified filters and timeframe""" | ||||
|         queryset: QuerySet[Event] = self.filter_queryset(self.get_queryset()) | ||||
|         delta = timedelta(days=7) | ||||
|         time_delta = request.query_params.get("history_days", 7) | ||||
|         if time_delta: | ||||
|             delta = timedelta(days=min(int(time_delta), 60)) | ||||
|         queryset = self.filter_queryset(self.get_queryset()) | ||||
|         return Response(queryset.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)) | ||||
|  | ||||
|     @extend_schema( | ||||
|         responses={200: CoordinateSerializer(many=True)}, | ||||
|         filters=[], | ||||
|         parameters=[ | ||||
|             OpenApiParameter( | ||||
|                 "action", | ||||
|                 type=OpenApiTypes.STR, | ||||
|                 location=OpenApiParameter.QUERY, | ||||
|                 required=False, | ||||
|             ), | ||||
|             OpenApiParameter( | ||||
|                 "query", | ||||
|                 type=OpenApiTypes.STR, | ||||
|                 location=OpenApiParameter.QUERY, | ||||
|                 required=False, | ||||
|             ), | ||||
|         ], | ||||
|     ) | ||||
|     @action(detail=False, methods=["GET"], pagination_class=None) | ||||
|     def per_month(self, request: Request): | ||||
|         """Get the count of events per month""" | ||||
|         filtered_action = request.query_params.get("action", EventAction.LOGIN) | ||||
|         try: | ||||
|             query = loads(request.query_params.get("query", "{}")) | ||||
|         except ValueError: | ||||
|             return Response(status=400) | ||||
|         return Response( | ||||
|             queryset.filter(created__gte=now() - delta) | ||||
|             .annotate(hour=TruncHour("created")) | ||||
|             .annotate( | ||||
|                 time=ExpressionWrapper( | ||||
|                     F("hour") - (F("hour__hour") % 6) * timedelta(hours=1), | ||||
|                     output_field=DjangoDateTimeField(), | ||||
|                 ) | ||||
|             ) | ||||
|             .values("time", "action") | ||||
|             .annotate(count=Count("pk")) | ||||
|             .order_by("time", "action") | ||||
|             get_objects_for_user(request.user, "authentik_events.view_event") | ||||
|             .filter(action=filtered_action) | ||||
|             .filter(**query) | ||||
|             .get_events_per(timedelta(weeks=4), ExtractDay, 30) | ||||
|         ) | ||||
|  | ||||
|     @extend_schema(responses={200: TypeCreateSerializer(many=True)}) | ||||
|  | ||||
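For context, a sketch of how a client might call the per_month action shown above; the URL path and token are assumptions, since the router registration is not part of this diff:

    import json

    import requests

    # Hypothetical deployment URL and API token
    resp = requests.get(
        "https://authentik.local/api/v3/events/events/per_month/",
        headers={"Authorization": "Bearer <token>"},
        # "query" is parsed with loads() server-side and applied as a filter
        params={"action": "login", "query": json.dumps({})},
        timeout=10,
    )
    resp.raise_for_status()
    # One coordinate per day over the last four weeks, per
    # get_events_per(timedelta(weeks=4), ExtractDay, 30)
    for point in resp.json():
        print(point["x_cord"], point["y_cord"])
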
| @ -1,5 +1,7 @@ | ||||
| """authentik events models""" | ||||
|  | ||||
| import time | ||||
| from collections import Counter | ||||
| from datetime import timedelta | ||||
| from difflib import get_close_matches | ||||
| from functools import lru_cache | ||||
| @ -9,6 +11,11 @@ from uuid import uuid4 | ||||
|  | ||||
| from django.apps import apps | ||||
| from django.db import connection, models | ||||
| from django.db.models import Count, ExpressionWrapper, F | ||||
| from django.db.models.fields import DurationField | ||||
| from django.db.models.functions import Extract | ||||
| from django.db.models.manager import Manager | ||||
| from django.db.models.query import QuerySet | ||||
| from django.http import HttpRequest | ||||
| from django.http.request import QueryDict | ||||
| from django.utils.timezone import now | ||||
| @ -117,6 +124,60 @@ class EventAction(models.TextChoices): | ||||
|     CUSTOM_PREFIX = "custom_" | ||||
|  | ||||
|  | ||||
| class EventQuerySet(QuerySet): | ||||
|     """Custom events query set with helper functions""" | ||||
|  | ||||
|     def get_events_per( | ||||
|         self, | ||||
|         time_since: timedelta, | ||||
|         extract: Extract, | ||||
|         data_points: int, | ||||
|     ) -> list[dict[str, int]]: | ||||
|         """Get event counts bucketed by the given interval since time_since, filling gaps with zeros""" | ||||
|         _now = now() | ||||
|         max_since = timedelta(days=60) | ||||
|         # Allow maximum of 60 days to limit load | ||||
|         if time_since.total_seconds() > max_since.total_seconds(): | ||||
|             time_since = max_since | ||||
|         date_from = _now - time_since | ||||
|         result = ( | ||||
|             self.filter(created__gte=date_from) | ||||
|             .annotate(age=ExpressionWrapper(_now - F("created"), output_field=DurationField())) | ||||
|             .annotate(age_interval=extract("age")) | ||||
|             .values("age_interval") | ||||
|             .annotate(count=Count("pk")) | ||||
|             .order_by("age_interval") | ||||
|         ) | ||||
|         data = Counter({int(d["age_interval"]): d["count"] for d in result}) | ||||
|         results = [] | ||||
|         interval_delta = time_since / data_points | ||||
|         for interval in range(1, -data_points, -1): | ||||
|             results.append( | ||||
|                 { | ||||
|                     "x_cord": time.mktime((_now + (interval_delta * interval)).timetuple()) * 1000, | ||||
|                     "y_cord": data[interval * -1], | ||||
|                 } | ||||
|             ) | ||||
|         return results | ||||
|  | ||||
|  | ||||
| class EventManager(Manager): | ||||
|     """Custom helper methods for Events""" | ||||
|  | ||||
|     def get_queryset(self) -> QuerySet: | ||||
|         """use custom queryset""" | ||||
|         return EventQuerySet(self.model, using=self._db) | ||||
|  | ||||
|     def get_events_per( | ||||
|         self, | ||||
|         time_since: timedelta, | ||||
|         extract: Extract, | ||||
|         data_points: int, | ||||
|     ) -> list[dict[str, int]]: | ||||
|         """Wrap method from queryset""" | ||||
|         return self.get_queryset().get_events_per(time_since, extract, data_points) | ||||
|  | ||||
|  | ||||
| class Event(SerializerModel, ExpiringModel): | ||||
|     """An individual Audit/Metrics/Notification/Error Event""" | ||||
|  | ||||
| @ -132,6 +193,8 @@ class Event(SerializerModel, ExpiringModel): | ||||
|     # Shadow the expires attribute from ExpiringModel to override the default duration | ||||
|     expires = models.DateTimeField(default=default_event_duration) | ||||
|  | ||||
|     objects = EventManager() | ||||
|  | ||||
|     @staticmethod | ||||
|     def _get_app_from_request(request: HttpRequest) -> str: | ||||
|         if not isinstance(request, HttpRequest): | ||||
|  | ||||
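The EventQuerySet.get_events_per helper above buckets events by the age interval extracted from created, and back-fills missing intervals with zero counts. A minimal usage sketch, assuming the EventManager shown above is installed as Event.objects:

    from datetime import timedelta

    from django.db.models.functions import ExtractHour

    from authentik.events.models import Event

    # 7 days of history, 21 data points, matching the volume endpoint above
    points = Event.objects.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
    for point in points:
        # x_cord is a unix timestamp in milliseconds, y_cord the event count
        print(point["x_cord"], point["y_cord"])
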
| @ -1,18 +0,0 @@ | ||||
| # Generated by Django 5.1.9 on 2025-05-27 12:52 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_flows", "0027_auto_20231028_1424"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="flowtoken", | ||||
|             name="revoke_on_execution", | ||||
|             field=models.BooleanField(default=True), | ||||
|         ), | ||||
|     ] | ||||
| @ -303,10 +303,9 @@ class FlowToken(Token): | ||||
|  | ||||
|     flow = models.ForeignKey(Flow, on_delete=models.CASCADE) | ||||
|     _plan = models.TextField() | ||||
|     revoke_on_execution = models.BooleanField(default=True) | ||||
|  | ||||
|     @staticmethod | ||||
|     def pickle(plan: "FlowPlan") -> str: | ||||
|     def pickle(plan) -> str: | ||||
|         """Pickle into string""" | ||||
|         data = dumps(plan) | ||||
|         return b64encode(data).decode() | ||||
|  | ||||
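FlowToken.pickle above serializes a plan into a base64 string for the _plan text field. A round-trip sketch, assuming dumps and loads come from pickle, as the rest of this module suggests:

    from base64 import b64decode, b64encode
    from pickle import dumps, loads  # nosec

    def pickle_plan(plan) -> str:
        """Serialize a flow plan into a base64-encoded string"""
        return b64encode(dumps(plan)).decode()

    def unpickle_plan(raw: str):
        """Restore a flow plan from its base64-encoded string"""
        return loads(b64decode(raw))  # nosec
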
| @ -99,10 +99,9 @@ class ChallengeStageView(StageView): | ||||
|             self.logger.debug("Got StageInvalidException", exc=exc) | ||||
|             return self.executor.stage_invalid() | ||||
|         if not challenge.is_valid(): | ||||
|             self.logger.error( | ||||
|             self.logger.warning( | ||||
|                 "f(ch): Invalid challenge", | ||||
|                 errors=challenge.errors, | ||||
|                 challenge=challenge.data, | ||||
|             ) | ||||
|         return HttpChallengeResponse(challenge) | ||||
|  | ||||
|  | ||||
| @ -146,8 +146,7 @@ class FlowExecutorView(APIView): | ||||
|         except (AttributeError, EOFError, ImportError, IndexError) as exc: | ||||
|             LOGGER.warning("f(exec): Failed to restore token plan", exc=exc) | ||||
|         finally: | ||||
|             if token.revoke_on_execution: | ||||
|                 token.delete() | ||||
|             token.delete() | ||||
|         if not isinstance(plan, FlowPlan): | ||||
|             return None | ||||
|         plan.context[PLAN_CONTEXT_IS_RESTORED] = token | ||||
|  | ||||
| @ -81,6 +81,7 @@ debugger: false | ||||
|  | ||||
| log_level: info | ||||
|  | ||||
| session_storage: cache | ||||
| sessions: | ||||
|   unauthenticated_age: days=1 | ||||
|  | ||||
|  | ||||
| @ -1,7 +1,6 @@ | ||||
| from collections.abc import Callable | ||||
| from dataclasses import asdict | ||||
|  | ||||
| from celery import group | ||||
| from celery.exceptions import Retry | ||||
| from celery.result import allow_join_result | ||||
| from django.core.paginator import Paginator | ||||
| @ -83,41 +82,21 @@ class SyncTasks: | ||||
|                 self.logger.debug("Failed to acquire sync lock, skipping", provider=provider.name) | ||||
|                 return | ||||
|             try: | ||||
|                 messages.append(_("Syncing users")) | ||||
|                 user_results = ( | ||||
|                     group( | ||||
|                         [ | ||||
|                             sync_objects.signature( | ||||
|                                 args=(class_to_path(User), page, provider_pk), | ||||
|                                 time_limit=PAGE_TIMEOUT, | ||||
|                                 soft_time_limit=PAGE_TIMEOUT, | ||||
|                             ) | ||||
|                             for page in users_paginator.page_range | ||||
|                         ] | ||||
|                     ) | ||||
|                     .apply_async() | ||||
|                     .get() | ||||
|                 ) | ||||
|                 for result in user_results: | ||||
|                     for msg in result: | ||||
|                 for page in users_paginator.page_range: | ||||
|                     messages.append(_("Syncing page {page} of users").format(page=page)) | ||||
|                     for msg in sync_objects.apply_async( | ||||
|                         args=(class_to_path(User), page, provider_pk), | ||||
|                         time_limit=PAGE_TIMEOUT, | ||||
|                         soft_time_limit=PAGE_TIMEOUT, | ||||
|                     ).get(): | ||||
|                         messages.append(LogEvent(**msg)) | ||||
|                 messages.append(_("Syncing groups")) | ||||
|                 group_results = ( | ||||
|                     group( | ||||
|                         [ | ||||
|                             sync_objects.signature( | ||||
|                                 args=(class_to_path(Group), page, provider_pk), | ||||
|                                 time_limit=PAGE_TIMEOUT, | ||||
|                                 soft_time_limit=PAGE_TIMEOUT, | ||||
|                             ) | ||||
|                             for page in groups_paginator.page_range | ||||
|                         ] | ||||
|                     ) | ||||
|                     .apply_async() | ||||
|                     .get() | ||||
|                 ) | ||||
|                 for result in group_results: | ||||
|                     for msg in result: | ||||
|                 for page in groups_paginator.page_range: | ||||
|                     messages.append(_("Syncing page {page} of groups").format(page=page)) | ||||
|                     for msg in sync_objects.apply_async( | ||||
|                         args=(class_to_path(Group), page, provider_pk), | ||||
|                         time_limit=PAGE_TIMEOUT, | ||||
|                         soft_time_limit=PAGE_TIMEOUT, | ||||
|                     ).get(): | ||||
|                         messages.append(LogEvent(**msg)) | ||||
|             except TransientSyncException as exc: | ||||
|                 self.logger.warning("transient sync exception", exc=exc) | ||||
| @ -130,7 +109,7 @@ class SyncTasks: | ||||
|     def sync_objects( | ||||
|         self, object_type: str, page: int, provider_pk: int, override_dry_run=False, **filter | ||||
|     ): | ||||
|         _object_type: type[Model] = path_to_class(object_type) | ||||
|         _object_type = path_to_class(object_type) | ||||
|         self.logger = get_logger().bind( | ||||
|             provider_type=class_to_path(self._provider_model), | ||||
|             provider_pk=provider_pk, | ||||
| @ -153,19 +132,6 @@ class SyncTasks: | ||||
|             self.logger.debug("starting discover") | ||||
|             client.discover() | ||||
|         self.logger.debug("starting sync for page", page=page) | ||||
|         messages.append( | ||||
|             asdict( | ||||
|                 LogEvent( | ||||
|                     _( | ||||
|                         "Syncing page {page} of {object_type}".format( | ||||
|                             page=page, object_type=_object_type._meta.verbose_name_plural | ||||
|                         ) | ||||
|                     ), | ||||
|                     log_level="info", | ||||
|                     logger=f"{provider._meta.verbose_name}@{object_type}", | ||||
|                 ) | ||||
|             ) | ||||
|         ) | ||||
|         for obj in paginator.page(page).object_list: | ||||
|             obj: Model | ||||
|             try: | ||||
|  | ||||
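The hunk above trades the Celery group fan-out for a sequential per-page loop. A sketch of the two dispatch shapes, where pages, model_path, provider_pk and handle stand in for the values used above; note that calling .get() inside a worker needs celery's allow_join_result(), which this module already imports:

    from celery import group

    # Fan-out: every page task is dispatched at once, results are gathered
    # afterwards as a list of per-page message lists
    results = group(
        sync_objects.signature(args=(model_path, page, provider_pk))
        for page in pages
    ).apply_async().get()

    # Sequential: each page is dispatched and awaited before the next starts,
    # so progress messages arrive in page order
    for page in pages:
        for msg in sync_objects.apply_async(args=(model_path, page, provider_pk)).get():
            handle(msg)
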
| @ -37,9 +37,6 @@ class WebsocketMessageInstruction(IntEnum): | ||||
|     # Provider specific message | ||||
|     PROVIDER_SPECIFIC = 3 | ||||
|  | ||||
|     # Session ended | ||||
|     SESSION_END = 4 | ||||
|  | ||||
|  | ||||
| @dataclass(slots=True) | ||||
| class WebsocketMessage: | ||||
| @ -148,14 +145,6 @@ class OutpostConsumer(JsonWebsocketConsumer): | ||||
|             asdict(WebsocketMessage(instruction=WebsocketMessageInstruction.TRIGGER_UPDATE)) | ||||
|         ) | ||||
|  | ||||
|     def event_session_end(self, event): | ||||
|         """Event handler which is called when a session is ended""" | ||||
|         self.send_json( | ||||
|             asdict( | ||||
|                 WebsocketMessage(instruction=WebsocketMessageInstruction.SESSION_END, args=event) | ||||
|             ) | ||||
|         ) | ||||
|  | ||||
|     def event_provider_specific(self, event): | ||||
|         """Event handler which can be called by provider-specific | ||||
|         implementations to send specific messages to the outpost""" | ||||
|  | ||||
| @ -1,24 +1,17 @@ | ||||
| """authentik outpost signals""" | ||||
|  | ||||
| from django.contrib.auth.signals import user_logged_out | ||||
| from django.core.cache import cache | ||||
| from django.db.models import Model | ||||
| from django.db.models.signals import m2m_changed, post_save, pre_delete, pre_save | ||||
| from django.dispatch import receiver | ||||
| from django.http import HttpRequest | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.brands.models import Brand | ||||
| from authentik.core.models import AuthenticatedSession, Provider, User | ||||
| from authentik.core.models import Provider | ||||
| from authentik.crypto.models import CertificateKeyPair | ||||
| from authentik.lib.utils.reflection import class_to_path | ||||
| from authentik.outposts.models import Outpost, OutpostServiceConnection | ||||
| from authentik.outposts.tasks import ( | ||||
|     CACHE_KEY_OUTPOST_DOWN, | ||||
|     outpost_controller, | ||||
|     outpost_post_save, | ||||
|     outpost_session_end, | ||||
| ) | ||||
| from authentik.outposts.tasks import CACHE_KEY_OUTPOST_DOWN, outpost_controller, outpost_post_save | ||||
|  | ||||
| LOGGER = get_logger() | ||||
| UPDATE_TRIGGERING_MODELS = ( | ||||
| @ -80,17 +73,3 @@ def pre_delete_cleanup(sender, instance: Outpost, **_): | ||||
|     instance.user.delete() | ||||
|     cache.set(CACHE_KEY_OUTPOST_DOWN % instance.pk.hex, instance) | ||||
|     outpost_controller.delay(instance.pk.hex, action="down", from_cache=True) | ||||
|  | ||||
|  | ||||
| @receiver(user_logged_out) | ||||
| def logout_revoke_direct(sender: type[User], request: HttpRequest, **_): | ||||
|     """Catch logout by direct logout and forward to providers""" | ||||
|     if not request.session or not request.session.session_key: | ||||
|         return | ||||
|     outpost_session_end.delay(request.session.session_key) | ||||
|  | ||||
|  | ||||
| @receiver(pre_delete, sender=AuthenticatedSession) | ||||
| def logout_revoke(sender: type[AuthenticatedSession], instance: AuthenticatedSession, **_): | ||||
|     """Catch logout by expiring sessions being deleted""" | ||||
|     outpost_session_end.delay(instance.session.session_key) | ||||
|  | ||||
| @ -1,6 +1,5 @@ | ||||
| """outpost tasks""" | ||||
|  | ||||
| from hashlib import sha256 | ||||
| from os import R_OK, access | ||||
| from pathlib import Path | ||||
| from socket import gethostname | ||||
| @ -50,11 +49,6 @@ LOGGER = get_logger() | ||||
| CACHE_KEY_OUTPOST_DOWN = "goauthentik.io/outposts/teardown/%s" | ||||
|  | ||||
|  | ||||
| def hash_session_key(session_key: str) -> str: | ||||
|     """Hash the session key for sending session end signals""" | ||||
|     return sha256(session_key.encode("ascii")).hexdigest() | ||||
|  | ||||
|  | ||||
| def controller_for_outpost(outpost: Outpost) -> type[BaseController] | None: | ||||
|     """Get a controller for the outpost, when a service connection is defined""" | ||||
|     if not outpost.service_connection: | ||||
| @ -295,20 +289,3 @@ def outpost_connection_discovery(self: SystemTask): | ||||
|                 url=unix_socket_path, | ||||
|             ) | ||||
|     self.set_status(TaskStatus.SUCCESSFUL, *messages) | ||||
|  | ||||
|  | ||||
| @CELERY_APP.task() | ||||
| def outpost_session_end(session_id: str): | ||||
|     """Send a session-end signal to all outposts for the given session""" | ||||
|     layer = get_channel_layer() | ||||
|     hashed_session_id = hash_session_key(session_id) | ||||
|     for outpost in Outpost.objects.all(): | ||||
|         LOGGER.info("Sending session end signal to outpost", outpost=outpost) | ||||
|         group = OUTPOST_GROUP % {"outpost_pk": str(outpost.pk)} | ||||
|         async_to_sync(layer.group_send)( | ||||
|             group, | ||||
|             { | ||||
|                 "type": "event.session.end", | ||||
|                 "session_id": hashed_session_id, | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
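For reference on the removed session-end plumbing: Channels routes a group message to the consumer method named after its type, with dots replaced by underscores, which is how "event.session.end" reached event_session_end on the OutpostConsumer. A minimal sketch; outpost_pk and hashed_session_id are placeholders, and the group name shape is taken from the OUTPOST_GROUP usage above:

    from asgiref.sync import async_to_sync
    from channels.layers import get_channel_layer

    layer = get_channel_layer()
    async_to_sync(layer.group_send)(
        OUTPOST_GROUP % {"outpost_pk": str(outpost_pk)},  # one group per outpost
        {
            # dispatched to the consumer's event_session_end() method
            "type": "event.session.end",
            "session_id": hashed_session_id,
        },
    )
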
| @ -1,12 +1,11 @@ | ||||
| """Authentik policy dummy app config""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikPolicyDummyConfig(ManagedAppConfig): | ||||
| class AuthentikPolicyDummyConfig(AppConfig): | ||||
|     """Authentik policy_dummy app config""" | ||||
|  | ||||
|     name = "authentik.policies.dummy" | ||||
|     label = "authentik_policies_dummy" | ||||
|     verbose_name = "authentik Policies.Dummy" | ||||
|     default = True | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """authentik Event Matcher policy app config""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikPoliciesEventMatcherConfig(ManagedAppConfig): | ||||
| class AuthentikPoliciesEventMatcherConfig(AppConfig): | ||||
|     """authentik Event Matcher policy app config""" | ||||
|  | ||||
|     name = "authentik.policies.event_matcher" | ||||
|     label = "authentik_policies_event_matcher" | ||||
|     verbose_name = "authentik Policies.Event Matcher" | ||||
|     default = True | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """Authentik policy_expiry app config""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikPolicyExpiryConfig(ManagedAppConfig): | ||||
| class AuthentikPolicyExpiryConfig(AppConfig): | ||||
|     """Authentik policy_expiry app config""" | ||||
|  | ||||
|     name = "authentik.policies.expiry" | ||||
|     label = "authentik_policies_expiry" | ||||
|     verbose_name = "authentik Policies.Expiry" | ||||
|     default = True | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """Authentik policy_expression app config""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikPolicyExpressionConfig(ManagedAppConfig): | ||||
| class AuthentikPolicyExpressionConfig(AppConfig): | ||||
|     """Authentik policy_expression app config""" | ||||
|  | ||||
|     name = "authentik.policies.expression" | ||||
|     label = "authentik_policies_expression" | ||||
|     verbose_name = "authentik Policies.Expression" | ||||
|     default = True | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """Authentik policy geoip app config""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikPolicyGeoIPConfig(ManagedAppConfig): | ||||
| class AuthentikPolicyGeoIPConfig(AppConfig): | ||||
|     """Authentik policy_geoip app config""" | ||||
|  | ||||
|     name = "authentik.policies.geoip" | ||||
|     label = "authentik_policies_geoip" | ||||
|     verbose_name = "authentik Policies.GeoIP" | ||||
|     default = True | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """authentik Password policy app config""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikPoliciesPasswordConfig(ManagedAppConfig): | ||||
| class AuthentikPoliciesPasswordConfig(AppConfig): | ||||
|     """authentik Password policy app config""" | ||||
|  | ||||
|     name = "authentik.policies.password" | ||||
|     label = "authentik_policies_password" | ||||
|     verbose_name = "authentik Policies.Password" | ||||
|     default = True | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """authentik ldap provider app config""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikProviderLDAPConfig(ManagedAppConfig): | ||||
| class AuthentikProviderLDAPConfig(AppConfig): | ||||
|     """authentik ldap provider app config""" | ||||
|  | ||||
|     name = "authentik.providers.ldap" | ||||
|     label = "authentik_providers_ldap" | ||||
|     verbose_name = "authentik Providers.LDAP" | ||||
|     default = True | ||||
|  | ||||
| @ -10,11 +10,3 @@ class AuthentikProviderProxyConfig(ManagedAppConfig): | ||||
|     label = "authentik_providers_proxy" | ||||
|     verbose_name = "authentik Providers.Proxy" | ||||
|     default = True | ||||
|  | ||||
|     @ManagedAppConfig.reconcile_tenant | ||||
|     def proxy_set_defaults(self): | ||||
|         from authentik.providers.proxy.models import ProxyProvider | ||||
|  | ||||
|         for provider in ProxyProvider.objects.all(): | ||||
|             provider.set_oauth_defaults() | ||||
|             provider.save() | ||||
|  | ||||
							
								
								
									
authentik/providers/proxy/signals.py | 23 (new file)
							| @ -0,0 +1,23 @@ | ||||
| """Proxy provider signals""" | ||||
|  | ||||
| from django.contrib.auth.signals import user_logged_out | ||||
| from django.db.models.signals import pre_delete | ||||
| from django.dispatch import receiver | ||||
| from django.http import HttpRequest | ||||
|  | ||||
| from authentik.core.models import AuthenticatedSession, User | ||||
| from authentik.providers.proxy.tasks import proxy_on_logout | ||||
|  | ||||
|  | ||||
| @receiver(user_logged_out) | ||||
| def logout_proxy_revoke_direct(sender: type[User], request: HttpRequest, **_): | ||||
|     """Catch logout by direct logout and forward to proxy providers""" | ||||
|     if not request.session or not request.session.session_key: | ||||
|         return | ||||
|     proxy_on_logout.delay(request.session.session_key) | ||||
|  | ||||
|  | ||||
| @receiver(pre_delete, sender=AuthenticatedSession) | ||||
| def logout_proxy_revoke(sender: type[AuthenticatedSession], instance: AuthenticatedSession, **_): | ||||
|     """Catch logout by expiring sessions being deleted""" | ||||
|     proxy_on_logout.delay(instance.session.session_key) | ||||
							
								
								
									
authentik/providers/proxy/tasks.py | 38 (new file)
							| @ -0,0 +1,38 @@ | ||||
| """proxy provider tasks""" | ||||
|  | ||||
| from asgiref.sync import async_to_sync | ||||
| from channels.layers import get_channel_layer | ||||
| from django.db import DatabaseError, InternalError, ProgrammingError | ||||
|  | ||||
| from authentik.outposts.consumer import OUTPOST_GROUP | ||||
| from authentik.outposts.models import Outpost, OutpostType | ||||
| from authentik.providers.oauth2.id_token import hash_session_key | ||||
| from authentik.providers.proxy.models import ProxyProvider | ||||
| from authentik.root.celery import CELERY_APP | ||||
|  | ||||
|  | ||||
| @CELERY_APP.task( | ||||
|     throws=(DatabaseError, ProgrammingError, InternalError), | ||||
| ) | ||||
| def proxy_set_defaults(): | ||||
|     """Ensure correct defaults are set for all providers""" | ||||
|     for provider in ProxyProvider.objects.all(): | ||||
|         provider.set_oauth_defaults() | ||||
|         provider.save() | ||||
|  | ||||
|  | ||||
| @CELERY_APP.task() | ||||
| def proxy_on_logout(session_id: str): | ||||
|     """Notify all proxy outposts that the given session has ended""" | ||||
|     layer = get_channel_layer() | ||||
|     hashed_session_id = hash_session_key(session_id) | ||||
|     for outpost in Outpost.objects.filter(type=OutpostType.PROXY): | ||||
|         group = OUTPOST_GROUP % {"outpost_pk": str(outpost.pk)} | ||||
|         async_to_sync(layer.group_send)( | ||||
|             group, | ||||
|             { | ||||
|                 "type": "event.provider.specific", | ||||
|                 "sub_type": "logout", | ||||
|                 "session_id": hashed_session_id, | ||||
|             }, | ||||
|         ) | ||||
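The new signals module above only takes effect once it is imported; in plain Django that usually happens in the app config's ready() hook. A generic sketch of that pattern (authentik's ManagedAppConfig may already import signal modules automatically, in which case no explicit hook is needed):

    from django.apps import AppConfig

    class ProxySignalsConfig(AppConfig):
        """Hypothetical config demonstrating signal registration"""

        name = "authentik.providers.proxy"

        def ready(self):
            # Importing for its side effects registers the @receiver handlers
            from authentik.providers.proxy import signals  # noqa: F401
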
| @ -166,6 +166,7 @@ class ConnectionToken(ExpiringModel): | ||||
|         always_merger.merge(settings, default_settings) | ||||
|         always_merger.merge(settings, self.endpoint.provider.settings) | ||||
|         always_merger.merge(settings, self.endpoint.settings) | ||||
|         always_merger.merge(settings, self.settings) | ||||
|  | ||||
|         def mapping_evaluator(mappings: QuerySet): | ||||
|             for mapping in mappings: | ||||
| @ -190,7 +191,6 @@ class ConnectionToken(ExpiringModel): | ||||
|         mapping_evaluator( | ||||
|             RACPropertyMapping.objects.filter(endpoint__in=[self.endpoint]).order_by("name") | ||||
|         ) | ||||
|         always_merger.merge(settings, self.settings) | ||||
|  | ||||
|         settings["drive-path"] = f"/tmp/connection/{self.token}"  # nosec | ||||
|         settings["create-drive-path"] = "true" | ||||
|  | ||||
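The ConnectionToken hunk above moves the token-settings merge ahead of the property-mapping evaluation, which changes precedence: with deepmerge's always_merger, the later merge wins on conflicting keys. A minimal sketch of that semantics:

    from deepmerge import always_merger

    settings: dict = {}
    always_merger.merge(settings, {"level": "default"})   # defaults
    always_merger.merge(settings, {"level": "provider"})  # provider settings
    always_merger.merge(settings, {"level": "endpoint"})  # endpoint settings
    always_merger.merge(settings, {"level": "token"})     # token settings
    assert settings["level"] == "token"  # the most recent merge wins
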
| @ -90,6 +90,23 @@ class TestModels(TransactionTestCase): | ||||
|                 "resize-method": "display-update", | ||||
|             }, | ||||
|         ) | ||||
|         # Set settings in token | ||||
|         token.settings = { | ||||
|             "level": "token", | ||||
|         } | ||||
|         token.save() | ||||
|         self.assertEqual( | ||||
|             token.get_settings(), | ||||
|             { | ||||
|                 "hostname": self.endpoint.host.split(":")[0], | ||||
|                 "port": "1324", | ||||
|                 "client-name": f"authentik - {self.user}", | ||||
|                 "drive-path": path, | ||||
|                 "create-drive-path": "true", | ||||
|                 "level": "token", | ||||
|                 "resize-method": "display-update", | ||||
|             }, | ||||
|         ) | ||||
|         # Set settings in property mapping (provider) | ||||
|         mapping = RACPropertyMapping.objects.create( | ||||
|             name=generate_id(), | ||||
| @ -134,22 +151,3 @@ class TestModels(TransactionTestCase): | ||||
|                 "resize-method": "display-update", | ||||
|             }, | ||||
|         ) | ||||
|         # Set settings in token | ||||
|         token.settings = { | ||||
|             "level": "token", | ||||
|         } | ||||
|         token.save() | ||||
|         self.assertEqual( | ||||
|             token.get_settings(), | ||||
|             { | ||||
|                 "hostname": self.endpoint.host.split(":")[0], | ||||
|                 "port": "1324", | ||||
|                 "client-name": f"authentik - {self.user}", | ||||
|                 "drive-path": path, | ||||
|                 "create-drive-path": "true", | ||||
|                 "foo": "true", | ||||
|                 "bar": "6", | ||||
|                 "resize-method": "display-update", | ||||
|                 "level": "token", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """authentik radius provider app config""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikProviderRadiusConfig(ManagedAppConfig): | ||||
| class AuthentikProviderRadiusConfig(AppConfig): | ||||
|     """authentik radius provider app config""" | ||||
|  | ||||
|     name = "authentik.providers.radius" | ||||
|     label = "authentik_providers_radius" | ||||
|     verbose_name = "authentik Providers.Radius" | ||||
|     default = True | ||||
|  | ||||
| @ -1,13 +1,12 @@ | ||||
| """authentik SAML IdP app config""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikProviderSAMLConfig(ManagedAppConfig): | ||||
| class AuthentikProviderSAMLConfig(AppConfig): | ||||
|     """authentik SAML IdP app config""" | ||||
|  | ||||
|     name = "authentik.providers.saml" | ||||
|     label = "authentik_providers_saml" | ||||
|     verbose_name = "authentik Providers.SAML" | ||||
|     mountpoint = "application/saml/" | ||||
|     default = True | ||||
|  | ||||
| @ -47,16 +47,15 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]): | ||||
|  | ||||
|     def to_schema(self, obj: Group, connection: SCIMProviderGroup) -> SCIMGroupSchema: | ||||
|         """Convert authentik user into SCIM""" | ||||
|         raw_scim_group = super().to_schema(obj, connection) | ||||
|         raw_scim_group = super().to_schema( | ||||
|             obj, | ||||
|             connection, | ||||
|             schemas=(SCIM_GROUP_SCHEMA,), | ||||
|         ) | ||||
|         try: | ||||
|             scim_group = SCIMGroupSchema.model_validate(delete_none_values(raw_scim_group)) | ||||
|         except ValidationError as exc: | ||||
|             raise StopSync(exc, obj) from exc | ||||
|         if SCIM_GROUP_SCHEMA not in scim_group.schemas: | ||||
|             scim_group.schemas.insert(0, SCIM_GROUP_SCHEMA) | ||||
|         # As this might be unset, we need to tell pydantic it's set to ensure the schemas | ||||
|         # are included, even if it's just the defaults | ||||
|         scim_group.schemas = list(scim_group.schemas) | ||||
|         if not scim_group.externalId: | ||||
|             scim_group.externalId = str(obj.pk) | ||||
|  | ||||
|  | ||||
| @ -31,16 +31,15 @@ class SCIMUserClient(SCIMClient[User, SCIMProviderUser, SCIMUserSchema]): | ||||
|  | ||||
|     def to_schema(self, obj: User, connection: SCIMProviderUser) -> SCIMUserSchema: | ||||
|         """Convert authentik user into SCIM""" | ||||
|         raw_scim_user = super().to_schema(obj, connection) | ||||
|         raw_scim_user = super().to_schema( | ||||
|             obj, | ||||
|             connection, | ||||
|             schemas=(SCIM_USER_SCHEMA,), | ||||
|         ) | ||||
|         try: | ||||
|             scim_user = SCIMUserSchema.model_validate(delete_none_values(raw_scim_user)) | ||||
|         except ValidationError as exc: | ||||
|             raise StopSync(exc, obj) from exc | ||||
|         if SCIM_USER_SCHEMA not in scim_user.schemas: | ||||
|             scim_user.schemas.insert(0, SCIM_USER_SCHEMA) | ||||
|         # As this might be unset, we need to tell pydantic it's set to ensure the schemas | ||||
|         # are included, even if it's just the defaults | ||||
|         scim_user.schemas = list(scim_user.schemas) | ||||
|         if not scim_user.externalId: | ||||
|             scim_user.externalId = str(obj.uid) | ||||
|         return scim_user | ||||
|  | ||||
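The re-assignment of schemas in both SCIM clients above works because pydantic v2 tracks explicitly set fields: assigning to a defaulted field adds it to model_fields_set, so it survives serialization that excludes unset fields. A minimal sketch of that behaviour:

    from pydantic import BaseModel

    class Payload(BaseModel):
        schemas: list[str] = ["urn:example:default"]

    obj = Payload()
    # The default alone counts as unset and would be dropped
    assert obj.model_dump(exclude_unset=True) == {}

    # Re-assigning marks the field as set, so the default is now included
    obj.schemas = list(obj.schemas)
    assert obj.model_dump(exclude_unset=True) == {"schemas": ["urn:example:default"]}
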
| @ -91,57 +91,6 @@ class SCIMUserTests(TestCase): | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     @Mocker() | ||||
|     def test_user_create_custom_schema(self, mock: Mocker): | ||||
|         """Test user creation with custom schema""" | ||||
|         schema = SCIMMapping.objects.create( | ||||
|             name="custom_schema", | ||||
|             expression="""return {"schemas": ["foo"]}""", | ||||
|         ) | ||||
|         self.provider.property_mappings.add(schema) | ||||
|         scim_id = generate_id() | ||||
|         mock.get( | ||||
|             "https://localhost/ServiceProviderConfig", | ||||
|             json={}, | ||||
|         ) | ||||
|         mock.post( | ||||
|             "https://localhost/Users", | ||||
|             json={ | ||||
|                 "id": scim_id, | ||||
|             }, | ||||
|         ) | ||||
|         uid = generate_id() | ||||
|         user = User.objects.create( | ||||
|             username=uid, | ||||
|             name=f"{uid} {uid}", | ||||
|             email=f"{uid}@goauthentik.io", | ||||
|         ) | ||||
|         self.assertEqual(mock.call_count, 2) | ||||
|         self.assertEqual(mock.request_history[0].method, "GET") | ||||
|         self.assertEqual(mock.request_history[1].method, "POST") | ||||
|         self.assertJSONEqual( | ||||
|             mock.request_history[1].body, | ||||
|             { | ||||
|                 "schemas": ["urn:ietf:params:scim:schemas:core:2.0:User", "foo"], | ||||
|                 "active": True, | ||||
|                 "emails": [ | ||||
|                     { | ||||
|                         "primary": True, | ||||
|                         "type": "other", | ||||
|                         "value": f"{uid}@goauthentik.io", | ||||
|                     } | ||||
|                 ], | ||||
|                 "externalId": user.uid, | ||||
|                 "name": { | ||||
|                     "familyName": uid, | ||||
|                     "formatted": f"{uid} {uid}", | ||||
|                     "givenName": uid, | ||||
|                 }, | ||||
|                 "displayName": f"{uid} {uid}", | ||||
|                 "userName": uid, | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     @Mocker() | ||||
|     def test_user_create_different_provider_same_id(self, mock: Mocker): | ||||
|         """Test user creation with multiple providers that happen | ||||
| @ -435,7 +384,7 @@ class SCIMUserTests(TestCase): | ||||
|                 self.assertIn(request.method, SAFE_METHODS) | ||||
|         task = SystemTask.objects.filter(uid=slugify(self.provider.name)).first() | ||||
|         self.assertIsNotNone(task) | ||||
|         drop_msg = task.messages[3] | ||||
|         drop_msg = task.messages[2] | ||||
|         self.assertEqual(drop_msg["event"], "Dropping mutating request due to dry run") | ||||
|         self.assertIsNotNone(drop_msg["attributes"]["url"]) | ||||
|         self.assertIsNotNone(drop_msg["attributes"]["body"]) | ||||
|  | ||||
| @ -1,29 +1,12 @@ | ||||
| """test decorators api""" | ||||
|  | ||||
| from django.urls import reverse | ||||
| from guardian.shortcuts import assign_perm | ||||
| from rest_framework.decorators import action | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.test import APITestCase | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
|  | ||||
| from authentik.core.models import Application | ||||
| from authentik.core.tests.utils import create_test_user | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.lib.tests.utils import get_request | ||||
| from authentik.rbac.decorators import permission_required | ||||
|  | ||||
|  | ||||
| class MVS(ModelViewSet): | ||||
|  | ||||
|     queryset = Application.objects.all() | ||||
|     lookup_field = "slug" | ||||
|  | ||||
|     @permission_required("authentik_core.view_application", ["authentik_events.view_event"]) | ||||
|     @action(detail=True, pagination_class=None, filter_backends=[]) | ||||
|     def test(self, request: Request, slug: str): | ||||
|         self.get_object() | ||||
|         return Response(status=200) | ||||
|  | ||||
|  | ||||
| class TestAPIDecorators(APITestCase): | ||||
| @ -35,33 +18,41 @@ class TestAPIDecorators(APITestCase): | ||||
|  | ||||
|     def test_obj_perm_denied(self): | ||||
|         """Test object perm denied""" | ||||
|         request = get_request("", user=self.user) | ||||
|         self.client.force_login(self.user) | ||||
|         app = Application.objects.create(name=generate_id(), slug=generate_id()) | ||||
|         response = MVS.as_view({"get": "test"})(request, slug=app.slug) | ||||
|         response = self.client.get( | ||||
|             reverse("authentik_api:application-metrics", kwargs={"slug": app.slug}) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 403) | ||||
|  | ||||
|     def test_obj_perm_global(self): | ||||
|         """Test object perm successful (global)""" | ||||
|         assign_perm("authentik_core.view_application", self.user) | ||||
|         assign_perm("authentik_events.view_event", self.user) | ||||
|         self.client.force_login(self.user) | ||||
|         app = Application.objects.create(name=generate_id(), slug=generate_id()) | ||||
|         request = get_request("", user=self.user) | ||||
|         response = MVS.as_view({"get": "test"})(request, slug=app.slug) | ||||
|         self.assertEqual(response.status_code, 200, response.data) | ||||
|         response = self.client.get( | ||||
|             reverse("authentik_api:application-metrics", kwargs={"slug": app.slug}) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|  | ||||
|     def test_obj_perm_scoped(self): | ||||
|         """Test object perm successful (scoped)""" | ||||
|         assign_perm("authentik_events.view_event", self.user) | ||||
|         app = Application.objects.create(name=generate_id(), slug=generate_id()) | ||||
|         assign_perm("authentik_core.view_application", self.user, app) | ||||
|         request = get_request("", user=self.user) | ||||
|         response = MVS.as_view({"get": "test"})(request, slug=app.slug) | ||||
|         self.client.force_login(self.user) | ||||
|         response = self.client.get( | ||||
|             reverse("authentik_api:application-metrics", kwargs={"slug": app.slug}) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|  | ||||
|     def test_other_perm_denied(self): | ||||
|         """Test other perm denied""" | ||||
|         self.client.force_login(self.user) | ||||
|         app = Application.objects.create(name=generate_id(), slug=generate_id()) | ||||
|         assign_perm("authentik_core.view_application", self.user, app) | ||||
|         request = get_request("", user=self.user) | ||||
|         response = MVS.as_view({"get": "test"})(request, slug=app.slug) | ||||
|         response = self.client.get( | ||||
|             reverse("authentik_api:application-metrics", kwargs={"slug": app.slug}) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 403) | ||||
|  | ||||
| @ -1,13 +1,12 @@ | ||||
| """authentik Recovery app config""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikRecoveryConfig(ManagedAppConfig): | ||||
| class AuthentikRecoveryConfig(AppConfig): | ||||
|     """authentik Recovery app config""" | ||||
|  | ||||
|     name = "authentik.recovery" | ||||
|     label = "authentik_recovery" | ||||
|     verbose_name = "authentik Recovery" | ||||
|     mountpoint = "recovery/" | ||||
|     default = True | ||||
|  | ||||
| @ -98,7 +98,13 @@ def _get_startup_tasks_default_tenant() -> list[Callable]: | ||||
|  | ||||
| def _get_startup_tasks_all_tenants() -> list[Callable]: | ||||
|     """Get all tasks to be run on startup for all tenants""" | ||||
|     return [] | ||||
|     from authentik.admin.tasks import clear_update_notifications | ||||
|     from authentik.providers.proxy.tasks import proxy_set_defaults | ||||
|  | ||||
|     return [ | ||||
|         clear_update_notifications, | ||||
|         proxy_set_defaults, | ||||
|     ] | ||||
|  | ||||
|  | ||||
| @worker_ready.connect | ||||
|  | ||||
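A hedged sketch of how these startup-task lists are typically consumed; the hook body below is an assumption, since the diff only shows the @worker_ready.connect decorator:

    from celery.signals import worker_ready

    @worker_ready.connect
    def on_worker_ready(sender=None, **_):
        # Assumption: each entry is a Celery task queued once on startup
        for task in _get_startup_tasks_all_tenants():
            task.delay()
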
| @ -424,7 +424,7 @@ else: | ||||
|         "BACKEND": "authentik.root.storages.FileStorage", | ||||
|         "OPTIONS": { | ||||
|             "location": Path(CONFIG.get("storage.media.file.path")), | ||||
|             "base_url": CONFIG.get("web.path", "/") + "media/", | ||||
|             "base_url": "/media/", | ||||
|         }, | ||||
|     } | ||||
|     # Compatibility for apps not supporting top-level STORAGES | ||||
|  | ||||
| @ -7,7 +7,6 @@ from unittest import TestCase | ||||
| import pytest | ||||
| from django.conf import settings | ||||
| from django.test.runner import DiscoverRunner | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.lib.sentry import sentry_init | ||||
| @ -23,7 +22,6 @@ class PytestTestRunner(DiscoverRunner):  # pragma: no cover | ||||
|  | ||||
|     def __init__(self, **kwargs): | ||||
|         super().__init__(**kwargs) | ||||
|         self.logger = get_logger().bind(runner="pytest") | ||||
|  | ||||
|         self.args = [] | ||||
|         if self.failfast: | ||||
| @ -33,36 +31,23 @@ class PytestTestRunner(DiscoverRunner):  # pragma: no cover | ||||
|  | ||||
|         if kwargs.get("randomly_seed", None): | ||||
|             self.args.append(f"--randomly-seed={kwargs['randomly_seed']}") | ||||
|         if kwargs.get("no_capture", False): | ||||
|             self.args.append("--capture=no") | ||||
|  | ||||
|         self._setup_test_environment() | ||||
|  | ||||
|     def _setup_test_environment(self): | ||||
|         """Configure test environment settings""" | ||||
|         settings.TEST = True | ||||
|         settings.CELERY["task_always_eager"] = True | ||||
|  | ||||
|         # Test-specific configuration | ||||
|         test_config = { | ||||
|             "events.context_processors.geoip": "tests/GeoLite2-City-Test.mmdb", | ||||
|             "events.context_processors.asn": "tests/GeoLite2-ASN-Test.mmdb", | ||||
|             "blueprints_dir": "./blueprints", | ||||
|             "outposts.container_image_base": f"ghcr.io/goauthentik/dev-%(type)s:{get_docker_tag()}", | ||||
|             "tenants.enabled": False, | ||||
|             "outposts.disable_embedded_outpost": False, | ||||
|             "error_reporting.sample_rate": 0, | ||||
|             "error_reporting.environment": "testing", | ||||
|             "error_reporting.send_pii": True, | ||||
|         } | ||||
|  | ||||
|         for key, value in test_config.items(): | ||||
|             CONFIG.set(key, value) | ||||
|  | ||||
|         CONFIG.set("events.context_processors.geoip", "tests/GeoLite2-City-Test.mmdb") | ||||
|         CONFIG.set("events.context_processors.asn", "tests/GeoLite2-ASN-Test.mmdb") | ||||
|         CONFIG.set("blueprints_dir", "./blueprints") | ||||
|         CONFIG.set( | ||||
|             "outposts.container_image_base", | ||||
|             f"ghcr.io/goauthentik/dev-%(type)s:{get_docker_tag()}", | ||||
|         ) | ||||
|         CONFIG.set("tenants.enabled", False) | ||||
|         CONFIG.set("outposts.disable_embedded_outpost", False) | ||||
|         CONFIG.set("error_reporting.sample_rate", 0) | ||||
|         CONFIG.set("error_reporting.environment", "testing") | ||||
|         CONFIG.set("error_reporting.send_pii", True) | ||||
|         sentry_init() | ||||
|         self.logger.debug("Test environment configured") | ||||
|  | ||||
|         # Send startup signals | ||||
|         pre_startup.send(sender=self, mode="test") | ||||
|         startup.send(sender=self, mode="test") | ||||
|         post_startup.send(sender=self, mode="test") | ||||
| @ -79,27 +64,8 @@ class PytestTestRunner(DiscoverRunner):  # pragma: no cover | ||||
|             "Default behaviour: use random.Random().getrandbits(32), so the seed is " | ||||
|             "different on each run.", | ||||
|         ) | ||||
|         parser.add_argument( | ||||
|             "--no-capture", | ||||
|             action="store_true", | ||||
|             help="Disable any capturing of stdout/stderr during tests.", | ||||
|         ) | ||||
|  | ||||
|     def _validate_test_label(self, label: str) -> bool: | ||||
|         """Validate test label format""" | ||||
|         if not label: | ||||
|             return False | ||||
|  | ||||
|         # Check for invalid characters, but allow forward slashes and colons | ||||
|         # for paths and pytest markers | ||||
|         invalid_chars = set('\\*?"<>|') | ||||
|         if any(c in label for c in invalid_chars): | ||||
|             self.logger.error("Invalid characters in test label", label=label) | ||||
|             return False | ||||
|  | ||||
|         return True | ||||
|  | ||||
|     def run_tests(self, test_labels: list[str], extra_tests=None, **kwargs): | ||||
|     def run_tests(self, test_labels, extra_tests=None, **kwargs): | ||||
|         """Run pytest and return the exitcode. | ||||
|  | ||||
|         It translates some of Django's test command option to pytest's. | ||||
| @ -109,17 +75,10 @@ class PytestTestRunner(DiscoverRunner):  # pragma: no cover | ||||
|         The extra_tests argument has been deprecated since Django 5.x | ||||
|         It is kept for compatibility with PyCharm's Django test runner. | ||||
|         """ | ||||
|         if not test_labels: | ||||
|             self.logger.error("No test files specified") | ||||
|             return 1 | ||||
|  | ||||
|         for label in test_labels: | ||||
|             if not self._validate_test_label(label): | ||||
|                 return 1 | ||||
|  | ||||
|             valid_label_found = False | ||||
|             label_as_path = os.path.abspath(label) | ||||
|  | ||||
|             # File path has been specified | ||||
|             if os.path.exists(label_as_path): | ||||
|                 self.args.append(label_as_path) | ||||
| @ -127,30 +86,24 @@ class PytestTestRunner(DiscoverRunner):  # pragma: no cover | ||||
|             elif "::" in label: | ||||
|                 self.args.append(label) | ||||
|                 valid_label_found = True | ||||
|             # Convert dotted module path to file_path::class::method | ||||
|             else: | ||||
|                 # Check if the label is a dotted module path | ||||
|                 path_pieces = label.split(".") | ||||
|                 # Check whether only class or class and method are specified | ||||
|                 for i in range(-1, -3, -1): | ||||
|                     try: | ||||
|                         path = os.path.join(*path_pieces[:i]) + ".py" | ||||
|                         if os.path.exists(path): | ||||
|                             if i < -1: | ||||
|                                 path_method = path + "::" + "::".join(path_pieces[i:]) | ||||
|                                 self.args.append(path_method) | ||||
|                             else: | ||||
|                                 self.args.append(path) | ||||
|                             valid_label_found = True | ||||
|                             break | ||||
|                     except (TypeError, IndexError): | ||||
|                         continue | ||||
|                     path = os.path.join(*path_pieces[:i]) + ".py" | ||||
|                     label_as_path = os.path.abspath(path) | ||||
|                     if os.path.exists(label_as_path): | ||||
|                         path_method = label_as_path + "::" + "::".join(path_pieces[i:]) | ||||
|                         self.args.append(path_method) | ||||
|                         valid_label_found = True | ||||
|                         break | ||||
|  | ||||
|             if not valid_label_found: | ||||
|                 self.logger.error("Test file not found", label=label) | ||||
|                 return 1 | ||||
|                 raise RuntimeError( | ||||
|                     f"The test label {label!r} is not supported. " | ||||
|                     "Use a dotted module name or a path instead." | ||||
|                 ) | ||||
|  | ||||
|         self.logger.info("Running tests", test_files=self.args) | ||||
|         try: | ||||
|             return pytest.main(self.args) | ||||
|         except Exception as e: | ||||
|             self.logger.error("Error running tests", error=str(e), test_files=self.args) | ||||
|             return 1 | ||||
|         return pytest.main(self.args) | ||||
|  | ||||
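The dotted-label branch above probes successively shorter module prefixes until it finds a file, then appends the remaining pieces as pytest node selectors. The same transformation, extracted as a standalone sketch:

    import os

    def dotted_to_pytest(label: str) -> str | None:
        """e.g. authentik.events.tests.TestX.test_y ->
        /abs/authentik/events/tests.py::TestX::test_y"""
        pieces = label.split(".")
        # Try stripping one trailing piece (class), then two (class and method)
        for i in range(-1, -3, -1):
            path = os.path.join(*pieces[:i]) + ".py"
            if os.path.exists(path):
                return os.path.abspath(path) + "::" + "::".join(pieces[i:])
        return None
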
| @ -103,7 +103,6 @@ class LDAPSourceSerializer(SourceSerializer): | ||||
|             "user_object_filter", | ||||
|             "group_object_filter", | ||||
|             "group_membership_field", | ||||
|             "user_membership_attribute", | ||||
|             "object_uniqueness_field", | ||||
|             "password_login_update_internal_password", | ||||
|             "sync_users", | ||||
| @ -112,7 +111,6 @@ class LDAPSourceSerializer(SourceSerializer): | ||||
|             "sync_parent_group", | ||||
|             "connectivity", | ||||
|             "lookup_groups_from_user", | ||||
|             "delete_not_found_objects", | ||||
|         ] | ||||
|         extra_kwargs = {"bind_password": {"write_only": True}} | ||||
|  | ||||
| @ -140,7 +138,6 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet): | ||||
|         "user_object_filter", | ||||
|         "group_object_filter", | ||||
|         "group_membership_field", | ||||
|         "user_membership_attribute", | ||||
|         "object_uniqueness_field", | ||||
|         "password_login_update_internal_password", | ||||
|         "sync_users", | ||||
| @ -150,7 +147,6 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet): | ||||
|         "user_property_mappings", | ||||
|         "group_property_mappings", | ||||
|         "lookup_groups_from_user", | ||||
|         "delete_not_found_objects", | ||||
|     ] | ||||
|     search_fields = ["name", "slug"] | ||||
|     ordering = ["name"] | ||||
|  | ||||
| @ -1,48 +0,0 @@ | ||||
| # Generated by Django 5.1.9 on 2025-05-28 08:15 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_core", "0048_delete_oldauthenticatedsession_content_type"), | ||||
|         ("authentik_sources_ldap", "0008_groupldapsourceconnection_userldapsourceconnection"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="groupldapsourceconnection", | ||||
|             name="validated_by", | ||||
|             field=models.UUIDField( | ||||
|                 blank=True, | ||||
|                 help_text="Unique ID used while checking if this object still exists in the directory.", | ||||
|                 null=True, | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name="ldapsource", | ||||
|             name="delete_not_found_objects", | ||||
|             field=models.BooleanField( | ||||
|                 default=False, | ||||
|                 help_text="Delete authentik users and groups which were previously supplied by this source, but are now missing from it.", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name="userldapsourceconnection", | ||||
|             name="validated_by", | ||||
|             field=models.UUIDField( | ||||
|                 blank=True, | ||||
|                 help_text="Unique ID used while checking if this object still exists in the directory.", | ||||
|                 null=True, | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="groupldapsourceconnection", | ||||
|             index=models.Index(fields=["validated_by"], name="authentik_s_validat_b70447_idx"), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="userldapsourceconnection", | ||||
|             index=models.Index(fields=["validated_by"], name="authentik_s_validat_ff2ebc_idx"), | ||||
|         ), | ||||
|     ] | ||||
| @ -1,32 +0,0 @@ | ||||
| # Generated by Django 5.1.9 on 2025-05-29 11:22 | ||||
|  | ||||
| from django.apps.registry import Apps | ||||
| from django.db import migrations, models | ||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||
|  | ||||
|  | ||||
| def set_user_membership_attribute(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||
|     LDAPSource = apps.get_model("authentik_sources_ldap", "LDAPSource") | ||||
|     db_alias = schema_editor.connection.alias | ||||
|  | ||||
|     LDAPSource.objects.using(db_alias).filter(group_membership_field="memberUid").all().update( | ||||
|         user_membership_attribute="ldap_uniq" | ||||
|     ) | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|     dependencies = [ | ||||
|         ("authentik_sources_ldap", "0009_groupldapsourceconnection_validated_by_and_more"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="ldapsource", | ||||
|             name="user_membership_attribute", | ||||
|             field=models.TextField( | ||||
|                 default="distinguishedName", | ||||
|                 help_text="Attribute which matches the value of `group_membership_field`.", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.RunPython(set_user_membership_attribute, migrations.RunPython.noop), | ||||
|     ] | ||||
| @ -100,10 +100,6 @@ class LDAPSource(Source): | ||||
|         default="(objectClass=person)", | ||||
|         help_text=_("Consider Objects matching this filter to be Users."), | ||||
|     ) | ||||
|     user_membership_attribute = models.TextField( | ||||
|         default=LDAP_DISTINGUISHED_NAME, | ||||
|         help_text=_("Attribute which matches the value of `group_membership_field`."), | ||||
|     ) | ||||
|     group_membership_field = models.TextField( | ||||
|         default="member", help_text=_("Field which contains members of a group.") | ||||
|     ) | ||||
| @ -141,14 +137,6 @@ class LDAPSource(Source): | ||||
|         ), | ||||
|     ) | ||||
|  | ||||
|     delete_not_found_objects = models.BooleanField( | ||||
|         default=False, | ||||
|         help_text=_( | ||||
|             "Delete authentik users and groups which were previously supplied by this source, " | ||||
|             "but are now missing from it." | ||||
|         ), | ||||
|     ) | ||||
|  | ||||
|     @property | ||||
|     def component(self) -> str: | ||||
|         return "ak-source-ldap-form" | ||||
| @ -333,12 +321,6 @@ class LDAPSourcePropertyMapping(PropertyMapping): | ||||
|  | ||||
|  | ||||
| class UserLDAPSourceConnection(UserSourceConnection): | ||||
|     validated_by = models.UUIDField( | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         help_text=_("Unique ID used while checking if this object still exists in the directory."), | ||||
|     ) | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> type[Serializer]: | ||||
|         from authentik.sources.ldap.api import ( | ||||
| @ -350,18 +332,9 @@ class UserLDAPSourceConnection(UserSourceConnection): | ||||
|     class Meta: | ||||
|         verbose_name = _("User LDAP Source Connection") | ||||
|         verbose_name_plural = _("User LDAP Source Connections") | ||||
|         indexes = [ | ||||
|             models.Index(fields=["validated_by"]), | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class GroupLDAPSourceConnection(GroupSourceConnection): | ||||
|     validated_by = models.UUIDField( | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         help_text=_("Unique ID used while checking if this object still exists in the directory."), | ||||
|     ) | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> type[Serializer]: | ||||
|         from authentik.sources.ldap.api import ( | ||||
| @ -373,6 +346,3 @@ class GroupLDAPSourceConnection(GroupSourceConnection): | ||||
|     class Meta: | ||||
|         verbose_name = _("Group LDAP Source Connection") | ||||
|         verbose_name_plural = _("Group LDAP Source Connections") | ||||
|         indexes = [ | ||||
|             models.Index(fields=["validated_by"]), | ||||
|         ] | ||||
|  | ||||
| @ -9,7 +9,7 @@ from structlog.stdlib import BoundLogger, get_logger | ||||
| from authentik.core.sources.mapper import SourceMapper | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.lib.sync.mapper import PropertyMappingManager | ||||
| from authentik.sources.ldap.models import LDAPSource, flatten | ||||
| from authentik.sources.ldap.models import LDAPSource | ||||
|  | ||||
|  | ||||
| class BaseLDAPSynchronizer: | ||||
| @ -77,16 +77,6 @@ class BaseLDAPSynchronizer: | ||||
|         """Get objects from LDAP, implemented in subclass""" | ||||
|         raise NotImplementedError() | ||||
|  | ||||
|     def get_attributes(self, object): | ||||
|         if "attributes" not in object: | ||||
|             return | ||||
|         return object.get("attributes", {}) | ||||
|  | ||||
|     def get_identifier(self, attributes: dict): | ||||
|         if not attributes.get(self._source.object_uniqueness_field): | ||||
|             return | ||||
|         return flatten(attributes[self._source.object_uniqueness_field]) | ||||
|  | ||||
|     def search_paginator(  # noqa: PLR0913 | ||||
|         self, | ||||
|         search_base, | ||||
|  | ||||
| @ -1,61 +0,0 @@ | ||||
| from collections.abc import Generator | ||||
| from itertools import batched | ||||
| from uuid import uuid4 | ||||
|  | ||||
| from ldap3 import SUBTREE | ||||
|  | ||||
| from authentik.core.models import Group | ||||
| from authentik.sources.ldap.models import GroupLDAPSourceConnection | ||||
| from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer | ||||
| from authentik.sources.ldap.sync.forward_delete_users import DELETE_CHUNK_SIZE, UPDATE_CHUNK_SIZE | ||||
|  | ||||
|  | ||||
| class GroupLDAPForwardDeletion(BaseLDAPSynchronizer): | ||||
|     """Delete LDAP Groups from authentik""" | ||||
|  | ||||
|     @staticmethod | ||||
|     def name() -> str: | ||||
|         return "group_deletions" | ||||
|  | ||||
|     def get_objects(self, **kwargs) -> Generator: | ||||
|         if not self._source.sync_groups or not self._source.delete_not_found_objects: | ||||
|             self.message("Group syncing is disabled for this Source") | ||||
|             return iter(()) | ||||
|  | ||||
|         uuid = uuid4() | ||||
|         groups = self._source.connection().extend.standard.paged_search( | ||||
|             search_base=self.base_dn_groups, | ||||
|             search_filter=self._source.group_object_filter, | ||||
|             search_scope=SUBTREE, | ||||
|             attributes=[self._source.object_uniqueness_field], | ||||
|             generator=True, | ||||
|             **kwargs, | ||||
|         ) | ||||
|         for batch in batched(groups, UPDATE_CHUNK_SIZE, strict=False): | ||||
|             identifiers = [] | ||||
|             for group in batch: | ||||
|                 if not (attributes := self.get_attributes(group)): | ||||
|                     continue | ||||
|                 if identifier := self.get_identifier(attributes): | ||||
|                     identifiers.append(identifier) | ||||
|             GroupLDAPSourceConnection.objects.filter(identifier__in=identifiers).update( | ||||
|                 validated_by=uuid | ||||
|             ) | ||||
|  | ||||
|         return batched( | ||||
|             GroupLDAPSourceConnection.objects.filter(source=self._source) | ||||
|             .exclude(validated_by=uuid) | ||||
|             .values_list("group", flat=True) | ||||
|             .iterator(chunk_size=DELETE_CHUNK_SIZE), | ||||
|             DELETE_CHUNK_SIZE, | ||||
|             strict=False, | ||||
|         ) | ||||
|  | ||||
|     def sync(self, group_pks: tuple) -> int: | ||||
|         """Delete authentik groups""" | ||||
|         if not self._source.sync_groups or not self._source.delete_not_found_objects: | ||||
|             self.message("Group syncing is disabled for this Source") | ||||
|             return -1 | ||||
|         self._logger.debug("Deleting groups", group_pks=group_pks) | ||||
|         _, deleted_per_type = Group.objects.filter(pk__in=group_pks).delete() | ||||
|         return deleted_per_type.get(Group._meta.label, 0) | ||||
| @ -1,63 +0,0 @@ | ||||
| from collections.abc import Generator | ||||
| from itertools import batched | ||||
| from uuid import uuid4 | ||||
|  | ||||
| from ldap3 import SUBTREE | ||||
|  | ||||
| from authentik.core.models import User | ||||
| from authentik.sources.ldap.models import UserLDAPSourceConnection | ||||
| from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer | ||||
|  | ||||
| UPDATE_CHUNK_SIZE = 10_000 | ||||
| DELETE_CHUNK_SIZE = 50 | ||||
|  | ||||
|  | ||||
| class UserLDAPForwardDeletion(BaseLDAPSynchronizer): | ||||
|     """Delete LDAP Users from authentik""" | ||||
|  | ||||
|     @staticmethod | ||||
|     def name() -> str: | ||||
|         return "user_deletions" | ||||
|  | ||||
|     def get_objects(self, **kwargs) -> Generator: | ||||
|         if not self._source.sync_users or not self._source.delete_not_found_objects: | ||||
|             self.message("User syncing is disabled for this Source") | ||||
|             return iter(()) | ||||
|  | ||||
|         uuid = uuid4() | ||||
|         users = self._source.connection().extend.standard.paged_search( | ||||
|             search_base=self.base_dn_users, | ||||
|             search_filter=self._source.user_object_filter, | ||||
|             search_scope=SUBTREE, | ||||
|             attributes=[self._source.object_uniqueness_field], | ||||
|             generator=True, | ||||
|             **kwargs, | ||||
|         ) | ||||
|         for batch in batched(users, UPDATE_CHUNK_SIZE, strict=False): | ||||
|             identifiers = [] | ||||
|             for user in batch: | ||||
|                 if not (attributes := self.get_attributes(user)): | ||||
|                     continue | ||||
|                 if identifier := self.get_identifier(attributes): | ||||
|                     identifiers.append(identifier) | ||||
|             UserLDAPSourceConnection.objects.filter(identifier__in=identifiers).update( | ||||
|                 validated_by=uuid | ||||
|             ) | ||||
|  | ||||
|         return batched( | ||||
|             UserLDAPSourceConnection.objects.filter(source=self._source) | ||||
|             .exclude(validated_by=uuid) | ||||
|             .values_list("user", flat=True) | ||||
|             .iterator(chunk_size=DELETE_CHUNK_SIZE), | ||||
|             DELETE_CHUNK_SIZE, | ||||
|             strict=False, | ||||
|         ) | ||||
|  | ||||
|     def sync(self, user_pks: tuple) -> int: | ||||
|         """Delete authentik users""" | ||||
|         if not self._source.sync_users or not self._source.delete_not_found_objects: | ||||
|             self.message("User syncing is disabled for this Source") | ||||
|             return -1 | ||||
|         self._logger.debug("Deleting users", user_pks=user_pks) | ||||
|         _, deleted_per_type = User.objects.filter(pk__in=user_pks).delete() | ||||
|         return deleted_per_type.get(User._meta.label, 0) | ||||
| @ -58,16 +58,18 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer): | ||||
|             return -1 | ||||
|         group_count = 0 | ||||
|         for group in page_data: | ||||
|             if (attributes := self.get_attributes(group)) is None: | ||||
|             if "attributes" not in group: | ||||
|                 continue | ||||
|             attributes = group.get("attributes", {}) | ||||
|             group_dn = flatten(flatten(group.get("entryDN", group.get("dn")))) | ||||
|             if not (uniq := self.get_identifier(attributes)): | ||||
|             if not attributes.get(self._source.object_uniqueness_field): | ||||
|                 self.message( | ||||
|                     f"Uniqueness field not found/not set in attributes: '{group_dn}'", | ||||
|                     attributes=attributes.keys(), | ||||
|                     dn=group_dn, | ||||
|                 ) | ||||
|                 continue | ||||
|             uniq = flatten(attributes[self._source.object_uniqueness_field]) | ||||
|             try: | ||||
|                 defaults = { | ||||
|                     k: flatten(v) | ||||
|  | ||||
| @ -63,19 +63,25 @@ class MembershipLDAPSynchronizer(BaseLDAPSynchronizer): | ||||
|                     group_member_dn = group_member.get("dn", {}) | ||||
|                     members.append(group_member_dn) | ||||
|             else: | ||||
|                 if (attributes := self.get_attributes(group)) is None: | ||||
|                 if "attributes" not in group: | ||||
|                     continue | ||||
|                 members = attributes.get(self._source.group_membership_field, []) | ||||
|                 members = group.get("attributes", {}).get(self._source.group_membership_field, []) | ||||
|  | ||||
|             ak_group = self.get_group(group) | ||||
|             if not ak_group: | ||||
|                 continue | ||||
|  | ||||
|             membership_mapping_attribute = LDAP_DISTINGUISHED_NAME | ||||
|             if self._source.group_membership_field == "memberUid": | ||||
|                 # If memberships are based on the posixGroup's 'memberUid' | ||||
|                 # attribute, we use the RDN instead of the full DN to look up members. | ||||
|                 membership_mapping_attribute = LDAP_UNIQUENESS | ||||
|  | ||||
|             users = User.objects.filter( | ||||
|                 Q(**{f"attributes__{self._source.user_membership_attribute}__in": members}) | ||||
|                 Q(**{f"attributes__{membership_mapping_attribute}__in": members}) | ||||
|                 | Q( | ||||
|                     **{ | ||||
|                         f"attributes__{self._source.user_membership_attribute}__isnull": True, | ||||
|                         f"attributes__{membership_mapping_attribute}__isnull": True, | ||||
|                         "ak_groups__in": [ak_group], | ||||
|                     } | ||||
|                 ) | ||||
|  | ||||
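Editorial context for the memberUid branch above: a groupOfNames stores full member DNs, while a posixGroup's memberUid holds bare uid values, so members must be matched on the user's uniqueness attribute instead of the DN. A sketch with made-up entries:

    # Hypothetical LDAP entries, reduced to dicts for illustration.
    group_of_names = {"member": ["cn=jdoe,ou=users,dc=example,dc=org"]}  # full DNs
    posix_group = {"memberUid": ["jdoe"]}  # bare uid values (the RDN value only)
    # Hence LDAP_UNIQUENESS, not LDAP_DISTINGUISHED_NAME, is the lookup key
    # whenever group_membership_field == "memberUid".
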
| @ -60,16 +60,18 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer): | ||||
|             return -1 | ||||
|         user_count = 0 | ||||
|         for user in page_data: | ||||
|             if (attributes := self.get_attributes(user)) is None: | ||||
|             if "attributes" not in user: | ||||
|                 continue | ||||
|             attributes = user.get("attributes", {}) | ||||
|             user_dn = flatten(user.get("entryDN", user.get("dn"))) | ||||
|             if not (uniq := self.get_identifier(attributes)): | ||||
|             if not attributes.get(self._source.object_uniqueness_field): | ||||
|                 self.message( | ||||
|                     f"Uniqueness field not found/not set in attributes: '{user_dn}'", | ||||
|                     attributes=attributes.keys(), | ||||
|                     dn=user_dn, | ||||
|                 ) | ||||
|                 continue | ||||
|             uniq = flatten(attributes[self._source.object_uniqueness_field]) | ||||
|             try: | ||||
|                 defaults = { | ||||
|                     k: flatten(v) | ||||
|  | ||||
| @ -17,8 +17,6 @@ from authentik.lib.utils.reflection import class_to_path, path_to_class | ||||
| from authentik.root.celery import CELERY_APP | ||||
| from authentik.sources.ldap.models import LDAPSource | ||||
| from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer | ||||
| from authentik.sources.ldap.sync.forward_delete_groups import GroupLDAPForwardDeletion | ||||
| from authentik.sources.ldap.sync.forward_delete_users import UserLDAPForwardDeletion | ||||
| from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer | ||||
| from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer | ||||
| from authentik.sources.ldap.sync.users import UserLDAPSynchronizer | ||||
| @ -54,11 +52,11 @@ def ldap_connectivity_check(pk: str | None = None): | ||||
|  | ||||
|  | ||||
| @CELERY_APP.task( | ||||
|     # We multiply the configured timeout hours by 3.5: user and | ||||
|     # group sync run in parallel, then membership, then deletions, so 3x covers the serial tasks, | ||||
|     # We multiply the configured timeout hours by 2.5: user and | ||||
|     # group sync run in parallel, then membership, so 2x covers the serial tasks, | ||||
|     # and 0.5x on top of that gives some more leeway | ||||
|     soft_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 3.5, | ||||
|     task_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 3.5, | ||||
|     soft_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 2.5, | ||||
|     task_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 2.5, | ||||
| ) | ||||
| def ldap_sync_single(source_pk: str): | ||||
|     """Sync a single source""" | ||||
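A quick worked example for the multiplier comment above (editor's illustration; the ldap.task_timeout_hours value is assumed):

    hours = 2  # assumed value of ldap.task_timeout_hours
    soft_time_limit = (60 * 60 * hours) * 2.5
    # 18_000 seconds, i.e. 5 h: 1x for the parallel user/group phase,
    # 1x for the serial membership phase, 0.5x leeway.
    assert soft_time_limit == 18_000
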
| @ -81,25 +79,6 @@ def ldap_sync_single(source_pk: str): | ||||
|             group( | ||||
|                 ldap_sync_paginator(source, MembershipLDAPSynchronizer), | ||||
|             ), | ||||
|             # Finally, deletions. What we'd really like to do here is something like | ||||
|             # ``` | ||||
|             # user_identifiers = <ldap query> | ||||
|             # User.objects.exclude( | ||||
|             #     usersourceconnection__identifier__in=user_uniqueness_identifiers, | ||||
|             # ).delete() | ||||
|             # ``` | ||||
|             # This runs into performance issues in large installations. So instead we spread the | ||||
|             # work out into three steps: | ||||
|             # 1. Get every object from the LDAP source. | ||||
|             # 2. Mark every object as "safe" in the database. This is quick, but any error could | ||||
|             #    mean deleting users which should not be deleted, so we do it immediately, in | ||||
|             #    large chunks, and only queue the deletion step afterwards. | ||||
|             # 3. Delete every unmarked item. This is slow, so we spread it over many tasks in | ||||
|             #    small chunks. | ||||
|             group( | ||||
|                 ldap_sync_paginator(source, UserLDAPForwardDeletion) | ||||
|                 + ldap_sync_paginator(source, GroupLDAPForwardDeletion), | ||||
|             ), | ||||
|         ) | ||||
|         task() | ||||
|  | ||||
|  | ||||
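The removed comment above outlines a mark-then-delete strategy; condensed, the deleted forward_delete_* modules implement it roughly as follows (editor's reconstruction; SourceConnection and fetch_ldap_identifiers are stand-in names):

    from itertools import batched  # Python 3.12+; the strict kwarg needs 3.13
    from uuid import uuid4

    UPDATE_CHUNK_SIZE = 10_000  # marking is cheap, so use large chunks
    DELETE_CHUNK_SIZE = 50  # deleting cascades and is slow, so spread it thin

    def forward_delete(source, fetch_ldap_identifiers, SourceConnection):
        sync_id = uuid4()
        # Steps 1+2: fetch every identifier from LDAP, mark matching rows "safe".
        for chunk in batched(fetch_ldap_identifiers(source), UPDATE_CHUNK_SIZE, strict=False):
            SourceConnection.objects.filter(identifier__in=chunk).update(validated_by=sync_id)
        # Step 3: delete everything left unmarked, in small batches.
        stale = (
            SourceConnection.objects.filter(source=source)
            .exclude(validated_by=sync_id)
            .values_list("pk", flat=True)
            .iterator(chunk_size=DELETE_CHUNK_SIZE)
        )
        for batch in batched(stale, DELETE_CHUNK_SIZE, strict=False):
            SourceConnection.objects.filter(pk__in=batch).delete()
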
| @ -2,33 +2,6 @@ | ||||
|  | ||||
| from ldap3 import MOCK_SYNC, OFFLINE_SLAPD_2_4, Connection, Server | ||||
|  | ||||
| # The mock modifies these dicts in place, so each value is defined as its own string constant | ||||
| user_in_slapd_dn = "cn=user_in_slapd_cn,ou=users,dc=goauthentik,dc=io" | ||||
| user_in_slapd_cn = "user_in_slapd_cn" | ||||
| user_in_slapd_uid = "user_in_slapd_uid" | ||||
| user_in_slapd_object_class = "person" | ||||
| user_in_slapd = { | ||||
|     "dn": user_in_slapd_dn, | ||||
|     "attributes": { | ||||
|         "cn": user_in_slapd_cn, | ||||
|         "uid": user_in_slapd_uid, | ||||
|         "objectClass": user_in_slapd_object_class, | ||||
|     }, | ||||
| } | ||||
| group_in_slapd_dn = "cn=group_in_slapd_cn,ou=groups,dc=goauthentik,dc=io" | ||||
| group_in_slapd_cn = "group_in_slapd_cn" | ||||
| group_in_slapd_uid = "group_in_slapd_uid" | ||||
| group_in_slapd_object_class = "groupOfNames" | ||||
| group_in_slapd = { | ||||
|     "dn": group_in_slapd_dn, | ||||
|     "attributes": { | ||||
|         "cn": group_in_slapd_cn, | ||||
|         "uid": group_in_slapd_uid, | ||||
|         "objectClass": group_in_slapd_object_class, | ||||
|         "member": [user_in_slapd["dn"]], | ||||
|     }, | ||||
| } | ||||
|  | ||||
|  | ||||
| def mock_slapd_connection(password: str) -> Connection: | ||||
|     """Create mock SLAPD connection""" | ||||
| @ -123,14 +96,5 @@ def mock_slapd_connection(password: str) -> Connection: | ||||
|             "objectClass": "posixAccount", | ||||
|         }, | ||||
|     ) | ||||
|     # Known user and group | ||||
|     connection.strategy.add_entry( | ||||
|         user_in_slapd["dn"], | ||||
|         user_in_slapd["attributes"], | ||||
|     ) | ||||
|     connection.strategy.add_entry( | ||||
|         group_in_slapd["dn"], | ||||
|         group_in_slapd["attributes"], | ||||
|     ) | ||||
|     connection.bind() | ||||
|     return connection | ||||
|  | ||||
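Editorial note on the removed "modifies these in place" comment above: ldap3's MOCK_SYNC strategy normalizes the attribute dict handed to add_entry, which is why the fixtures kept every value in its own module-level string. A minimal sketch (server name, DN, and password are illustrative):

    from ldap3 import MOCK_SYNC, OFFLINE_SLAPD_2_4, Connection, Server

    server = Server("fake-slapd", get_info=OFFLINE_SLAPD_2_4)
    connection = Connection(
        server,
        user="cn=my_user,dc=goauthentik,dc=io",
        password="pw",  # nosec: mock credentials
        client_strategy=MOCK_SYNC,
    )
    attributes = {"cn": "user_in_slapd_cn"}
    connection.strategy.add_entry("cn=user_in_slapd_cn,ou=users,dc=goauthentik,dc=io", attributes)
    # `attributes` may have been rewritten in place by the mock, so tests should
    # assert against independently kept strings, as the deleted fixtures did.
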
| @ -13,26 +13,14 @@ from authentik.events.system_tasks import TaskStatus | ||||
| from authentik.lib.generators import generate_id, generate_key | ||||
| from authentik.lib.sync.outgoing.exceptions import StopSync | ||||
| from authentik.lib.utils.reflection import class_to_path | ||||
| from authentik.sources.ldap.models import ( | ||||
|     GroupLDAPSourceConnection, | ||||
|     LDAPSource, | ||||
|     LDAPSourcePropertyMapping, | ||||
|     UserLDAPSourceConnection, | ||||
| ) | ||||
| from authentik.sources.ldap.sync.forward_delete_users import DELETE_CHUNK_SIZE | ||||
| from authentik.sources.ldap.models import LDAPSource, LDAPSourcePropertyMapping | ||||
| from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer | ||||
| from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer | ||||
| from authentik.sources.ldap.sync.users import UserLDAPSynchronizer | ||||
| from authentik.sources.ldap.tasks import ldap_sync, ldap_sync_all | ||||
| from authentik.sources.ldap.tests.mock_ad import mock_ad_connection | ||||
| from authentik.sources.ldap.tests.mock_freeipa import mock_freeipa_connection | ||||
| from authentik.sources.ldap.tests.mock_slapd import ( | ||||
|     group_in_slapd_cn, | ||||
|     group_in_slapd_uid, | ||||
|     mock_slapd_connection, | ||||
|     user_in_slapd_cn, | ||||
|     user_in_slapd_uid, | ||||
| ) | ||||
| from authentik.sources.ldap.tests.mock_slapd import mock_slapd_connection | ||||
|  | ||||
| LDAP_PASSWORD = generate_key() | ||||
|  | ||||
| @ -269,56 +257,12 @@ class LDAPSyncTests(TestCase): | ||||
|         self.source.group_membership_field = "memberUid" | ||||
|         self.source.user_object_filter = "(objectClass=posixAccount)" | ||||
|         self.source.group_object_filter = "(objectClass=posixGroup)" | ||||
|         self.source.user_membership_attribute = "uid" | ||||
|         self.source.user_property_mappings.set( | ||||
|             [ | ||||
|                 *LDAPSourcePropertyMapping.objects.filter( | ||||
|                     Q(managed__startswith="goauthentik.io/sources/ldap/default") | ||||
|                     | Q(managed__startswith="goauthentik.io/sources/ldap/openldap") | ||||
|                 ).all(), | ||||
|                 LDAPSourcePropertyMapping.objects.create( | ||||
|                     name="name", | ||||
|                     expression='return {"attributes": {"uid": list_flatten(ldap.get("uid"))}}', | ||||
|                 ), | ||||
|             ] | ||||
|         ) | ||||
|         self.source.group_property_mappings.set( | ||||
|             LDAPSourcePropertyMapping.objects.filter( | ||||
|                 managed="goauthentik.io/sources/ldap/openldap-cn" | ||||
|                 Q(managed__startswith="goauthentik.io/sources/ldap/default") | ||||
|                 | Q(managed__startswith="goauthentik.io/sources/ldap/openldap") | ||||
|             ) | ||||
|         ) | ||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||
|             self.source.save() | ||||
|             user_sync = UserLDAPSynchronizer(self.source) | ||||
|             user_sync.sync_full() | ||||
|             group_sync = GroupLDAPSynchronizer(self.source) | ||||
|             group_sync.sync_full() | ||||
|             membership_sync = MembershipLDAPSynchronizer(self.source) | ||||
|             membership_sync.sync_full() | ||||
|             # Test if membership mapping based on memberUid works. | ||||
|             posix_group = Group.objects.filter(name="group-posix").first() | ||||
|             self.assertTrue(posix_group.users.filter(name="user-posix").exists()) | ||||
|  | ||||
|     def test_sync_groups_openldap_posix_group_nonstandard_membership_attribute(self): | ||||
|         """Test posix group sync""" | ||||
|         self.source.object_uniqueness_field = "cn" | ||||
|         self.source.group_membership_field = "memberUid" | ||||
|         self.source.user_object_filter = "(objectClass=posixAccount)" | ||||
|         self.source.group_object_filter = "(objectClass=posixGroup)" | ||||
|         self.source.user_membership_attribute = "cn" | ||||
|         self.source.user_property_mappings.set( | ||||
|             [ | ||||
|                 *LDAPSourcePropertyMapping.objects.filter( | ||||
|                     Q(managed__startswith="goauthentik.io/sources/ldap/default") | ||||
|                     | Q(managed__startswith="goauthentik.io/sources/ldap/openldap") | ||||
|                 ).all(), | ||||
|                 LDAPSourcePropertyMapping.objects.create( | ||||
|                     name="name", | ||||
|                     expression='return {"attributes": {"cn": list_flatten(ldap.get("cn"))}}', | ||||
|                 ), | ||||
|             ] | ||||
|         ) | ||||
|         self.source.group_property_mappings.set( | ||||
|             LDAPSourcePropertyMapping.objects.filter( | ||||
|                 managed="goauthentik.io/sources/ldap/openldap-cn" | ||||
| @ -364,160 +308,3 @@ class LDAPSyncTests(TestCase): | ||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||
|             ldap_sync_all.delay().get() | ||||
|  | ||||
|     def test_user_deletion(self): | ||||
|         """Test user deletion""" | ||||
|         user = User.objects.create_user(username="not-in-the-source") | ||||
|         UserLDAPSourceConnection.objects.create( | ||||
|             user=user, source=self.source, identifier="not-in-the-source" | ||||
|         ) | ||||
|         self.source.object_uniqueness_field = "uid" | ||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||
|         self.source.delete_not_found_objects = True | ||||
|         self.source.save() | ||||
|  | ||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||
|             ldap_sync_all.delay().get() | ||||
|         self.assertFalse(User.objects.filter(username="not-in-the-source").exists()) | ||||
|  | ||||
|     def test_user_deletion_still_in_source(self): | ||||
|         """Test that user is not deleted if it's still in the source""" | ||||
|         username = user_in_slapd_cn | ||||
|         identifier = user_in_slapd_uid | ||||
|         user = User.objects.create_user(username=username) | ||||
|         UserLDAPSourceConnection.objects.create( | ||||
|             user=user, source=self.source, identifier=identifier | ||||
|         ) | ||||
|         self.source.object_uniqueness_field = "uid" | ||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||
|         self.source.delete_not_found_objects = True | ||||
|         self.source.save() | ||||
|  | ||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||
|             ldap_sync_all.delay().get() | ||||
|         self.assertTrue(User.objects.filter(username=username).exists()) | ||||
|  | ||||
|     def test_user_deletion_no_sync(self): | ||||
|         """Test that user is not deleted if sync_users is False""" | ||||
|         user = User.objects.create_user(username="not-in-the-source") | ||||
|         UserLDAPSourceConnection.objects.create( | ||||
|             user=user, source=self.source, identifier="not-in-the-source" | ||||
|         ) | ||||
|         self.source.object_uniqueness_field = "uid" | ||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||
|         self.source.delete_not_found_objects = True | ||||
|         self.source.sync_users = False | ||||
|         self.source.save() | ||||
|  | ||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||
|             ldap_sync_all.delay().get() | ||||
|         self.assertTrue(User.objects.filter(username="not-in-the-source").exists()) | ||||
|  | ||||
|     def test_user_deletion_no_delete(self): | ||||
|         """Test that user is not deleted if delete_not_found_objects is False""" | ||||
|         user = User.objects.create_user(username="not-in-the-source") | ||||
|         UserLDAPSourceConnection.objects.create( | ||||
|             user=user, source=self.source, identifier="not-in-the-source" | ||||
|         ) | ||||
|         self.source.object_uniqueness_field = "uid" | ||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||
|         self.source.save() | ||||
|  | ||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||
|             ldap_sync_all.delay().get() | ||||
|         self.assertTrue(User.objects.filter(username="not-in-the-source").exists()) | ||||
|  | ||||
|     def test_group_deletion(self): | ||||
|         """Test group deletion""" | ||||
|         group = Group.objects.create(name="not-in-the-source") | ||||
|         GroupLDAPSourceConnection.objects.create( | ||||
|             group=group, source=self.source, identifier="not-in-the-source" | ||||
|         ) | ||||
|         self.source.object_uniqueness_field = "uid" | ||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||
|         self.source.delete_not_found_objects = True | ||||
|         self.source.save() | ||||
|  | ||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||
|             ldap_sync_all.delay().get() | ||||
|         self.assertFalse(Group.objects.filter(name="not-in-the-source").exists()) | ||||
|  | ||||
|     def test_group_deletion_still_in_source(self): | ||||
|         """Test that group is not deleted if it's still in the source""" | ||||
|         groupname = group_in_slapd_cn | ||||
|         identifier = group_in_slapd_uid | ||||
|         group = Group.objects.create(name=groupname) | ||||
|         GroupLDAPSourceConnection.objects.create( | ||||
|             group=group, source=self.source, identifier=identifier | ||||
|         ) | ||||
|         self.source.object_uniqueness_field = "uid" | ||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||
|         self.source.delete_not_found_objects = True | ||||
|         self.source.save() | ||||
|  | ||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||
|             ldap_sync_all.delay().get() | ||||
|         self.assertTrue(Group.objects.filter(name=groupname).exists()) | ||||
|  | ||||
|     def test_group_deletion_no_sync(self): | ||||
|         """Test that group is not deleted if sync_groups is False""" | ||||
|         group = Group.objects.create(name="not-in-the-source") | ||||
|         GroupLDAPSourceConnection.objects.create( | ||||
|             group=group, source=self.source, identifier="not-in-the-source" | ||||
|         ) | ||||
|         self.source.object_uniqueness_field = "uid" | ||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||
|         self.source.delete_not_found_objects = True | ||||
|         self.source.sync_groups = False | ||||
|         self.source.save() | ||||
|  | ||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||
|             ldap_sync_all.delay().get() | ||||
|         self.assertTrue(Group.objects.filter(name="not-in-the-source").exists()) | ||||
|  | ||||
|     def test_group_deletion_no_delete(self): | ||||
|         """Test that group is not deleted if delete_not_found_objects is False""" | ||||
|         group = Group.objects.create(name="not-in-the-source") | ||||
|         GroupLDAPSourceConnection.objects.create( | ||||
|             group=group, source=self.source, identifier="not-in-the-source" | ||||
|         ) | ||||
|         self.source.object_uniqueness_field = "uid" | ||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||
|         self.source.save() | ||||
|  | ||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||
|             ldap_sync_all.delay().get() | ||||
|         self.assertTrue(Group.objects.filter(name="not-in-the-source").exists()) | ||||
|  | ||||
|     def test_batch_deletion(self): | ||||
|         """Test batch deletion""" | ||||
|         BATCH_SIZE = DELETE_CHUNK_SIZE + 1 | ||||
|         for i in range(BATCH_SIZE): | ||||
|             user = User.objects.create_user(username=f"not-in-the-source-{i}") | ||||
|             group = Group.objects.create(name=f"not-in-the-source-{i}") | ||||
|             group.users.add(user) | ||||
|             UserLDAPSourceConnection.objects.create( | ||||
|                 user=user, source=self.source, identifier=f"not-in-the-source-{i}-user" | ||||
|             ) | ||||
|             GroupLDAPSourceConnection.objects.create( | ||||
|                 group=group, source=self.source, identifier=f"not-in-the-source-{i}-group" | ||||
|             ) | ||||
|         self.source.object_uniqueness_field = "uid" | ||||
|         self.source.group_object_filter = "(objectClass=groupOfNames)" | ||||
|         self.source.delete_not_found_objects = True | ||||
|         self.source.save() | ||||
|  | ||||
|         connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD)) | ||||
|         with patch("authentik.sources.ldap.models.LDAPSource.connection", connection): | ||||
|             ldap_sync_all.delay().get() | ||||
|  | ||||
|         self.assertFalse(User.objects.filter(username__startswith="not-in-the-source").exists()) | ||||
|         self.assertFalse(Group.objects.filter(name__startswith="not-in-the-source").exists()) | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """authentik plex config""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikSourcePlexConfig(ManagedAppConfig): | ||||
| class AuthentikSourcePlexConfig(AppConfig): | ||||
|     """authentik source plex config""" | ||||
|  | ||||
|     name = "authentik.sources.plex" | ||||
|     label = "authentik_sources_plex" | ||||
|     verbose_name = "authentik Sources.Plex" | ||||
|     default = True | ||||
|  | ||||
| @ -9,7 +9,6 @@ from django.http.response import HttpResponseBadRequest | ||||
| from django.shortcuts import get_object_or_404, redirect | ||||
| from django.utils.decorators import method_decorator | ||||
| from django.utils.http import urlencode | ||||
| from django.utils.translation import gettext as _ | ||||
| from django.views import View | ||||
| from django.views.decorators.csrf import csrf_exempt | ||||
| from structlog.stdlib import get_logger | ||||
| @ -129,9 +128,7 @@ class InitiateView(View): | ||||
|         # otherwise we default to POST_AUTO, with direct redirect | ||||
|         if source.binding_type == SAMLBindingTypes.POST: | ||||
|             injected_stages.append(in_memory_stage(ConsentStageView)) | ||||
|             plan_kwargs[PLAN_CONTEXT_CONSENT_HEADER] = _( | ||||
|                 "Continue to {source_name}".format(source_name=source.name) | ||||
|             ) | ||||
|             plan_kwargs[PLAN_CONTEXT_CONSENT_HEADER] = f"Continue to {source.name}" | ||||
|         injected_stages.append(in_memory_stage(AutosubmitStageView)) | ||||
|         return self.handle_login_flow( | ||||
|             source, | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """Authenticator""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikStageAuthenticatorConfig(ManagedAppConfig): | ||||
| class AuthentikStageAuthenticatorConfig(AppConfig): | ||||
|     """Authenticator App config""" | ||||
|  | ||||
|     name = "authentik.stages.authenticator" | ||||
|     label = "authentik_stages_authenticator" | ||||
|     verbose_name = "authentik Stages.Authenticator" | ||||
|     default = True | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """SMS""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikStageAuthenticatorSMSConfig(ManagedAppConfig): | ||||
| class AuthentikStageAuthenticatorSMSConfig(AppConfig): | ||||
|     """SMS App config""" | ||||
|  | ||||
|     name = "authentik.stages.authenticator_sms" | ||||
|     label = "authentik_stages_authenticator_sms" | ||||
|     verbose_name = "authentik Stages.Authenticator.SMS" | ||||
|     default = True | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """TOTP""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikStageAuthenticatorTOTPConfig(ManagedAppConfig): | ||||
| class AuthentikStageAuthenticatorTOTPConfig(AppConfig): | ||||
|     """TOTP App config""" | ||||
|  | ||||
|     name = "authentik.stages.authenticator_totp" | ||||
|     label = "authentik_stages_authenticator_totp" | ||||
|     verbose_name = "authentik Stages.Authenticator.TOTP" | ||||
|     default = True | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """Authenticator Validation Stage""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikStageAuthenticatorValidateConfig(ManagedAppConfig): | ||||
| class AuthentikStageAuthenticatorValidateConfig(AppConfig): | ||||
|     """Authenticator Validation Stage""" | ||||
|  | ||||
|     name = "authentik.stages.authenticator_validate" | ||||
|     label = "authentik_stages_authenticator_validate" | ||||
|     verbose_name = "authentik Stages.Authenticator.Validate" | ||||
|     default = True | ||||
|  | ||||
| @ -151,7 +151,9 @@ class AuthenticatorValidateStageWebAuthnTests(FlowTestCase): | ||||
|             webauthn_user_verification=UserVerification.PREFERRED, | ||||
|         ) | ||||
|         stage.webauthn_allowed_device_types.set( | ||||
|             WebAuthnDeviceType.objects.filter(description="YubiKey 5 Series") | ||||
|             WebAuthnDeviceType.objects.filter( | ||||
|                 description="Android Authenticator with SafetyNet Attestation" | ||||
|             ) | ||||
|         ) | ||||
|         session = self.client.session | ||||
|         plan = FlowPlan(flow_pk=flow.pk.hex) | ||||
| @ -337,7 +339,9 @@ class AuthenticatorValidateStageWebAuthnTests(FlowTestCase): | ||||
|             device_classes=[DeviceClasses.WEBAUTHN], | ||||
|         ) | ||||
|         stage.webauthn_allowed_device_types.set( | ||||
|             WebAuthnDeviceType.objects.filter(description="YubiKey 5 Series") | ||||
|             WebAuthnDeviceType.objects.filter( | ||||
|                 description="Android Authenticator with SafetyNet Attestation" | ||||
|             ) | ||||
|         ) | ||||
|         session = self.client.session | ||||
|         plan = FlowPlan(flow_pk=flow.pk.hex) | ||||
|  | ||||
										
											
File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long
							| @ -141,7 +141,9 @@ class TestAuthenticatorWebAuthnStage(FlowTestCase): | ||||
|         """Test registration with restricted devices (fail)""" | ||||
|         webauthn_mds_import.delay(force=True).get() | ||||
|         self.stage.device_type_restrictions.set( | ||||
|             WebAuthnDeviceType.objects.filter(description="YubiKey 5 Series") | ||||
|             WebAuthnDeviceType.objects.filter( | ||||
|                 description="Android Authenticator with SafetyNet Attestation" | ||||
|             ) | ||||
|         ) | ||||
|  | ||||
|         plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]) | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """authentik captcha app""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikStageCaptchaConfig(ManagedAppConfig): | ||||
| class AuthentikStageCaptchaConfig(AppConfig): | ||||
|     """authentik captcha app""" | ||||
|  | ||||
|     name = "authentik.stages.captcha" | ||||
|     label = "authentik_stages_captcha" | ||||
|     verbose_name = "authentik Stages.Captcha" | ||||
|     default = True | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """authentik consent app""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikStageConsentConfig(ManagedAppConfig): | ||||
| class AuthentikStageConsentConfig(AppConfig): | ||||
|     """authentik consent app""" | ||||
|  | ||||
|     name = "authentik.stages.consent" | ||||
|     label = "authentik_stages_consent" | ||||
|     verbose_name = "authentik Stages.Consent" | ||||
|     default = True | ||||
|  | ||||
| @ -4,8 +4,6 @@ from uuid import uuid4 | ||||
|  | ||||
| from django.http import HttpRequest, HttpResponse | ||||
| from django.utils.timezone import now | ||||
| from django.utils.translation import gettext as _ | ||||
| from rest_framework.exceptions import ValidationError | ||||
| from rest_framework.fields import CharField | ||||
|  | ||||
| from authentik.core.api.utils import PassiveSerializer | ||||
| @ -49,11 +47,6 @@ class ConsentChallengeResponse(ChallengeResponse): | ||||
|     component = CharField(default="ak-stage-consent") | ||||
|     token = CharField(required=True) | ||||
|  | ||||
|     def validate_token(self, token: str): | ||||
|         if token != self.stage.executor.request.session[SESSION_KEY_CONSENT_TOKEN]: | ||||
|             raise ValidationError(_("Invalid consent token, re-showing prompt")) | ||||
|         return token | ||||
|  | ||||
|  | ||||
| class ConsentStageView(ChallengeStageView): | ||||
|     """Simple consent checker.""" | ||||
| @ -127,6 +120,9 @@ class ConsentStageView(ChallengeStageView): | ||||
|         return super().get(request, *args, **kwargs) | ||||
|  | ||||
|     def challenge_valid(self, response: ChallengeResponse) -> HttpResponse: | ||||
|         if response.data["token"] != self.request.session[SESSION_KEY_CONSENT_TOKEN]: | ||||
|             self.logger.info("Invalid consent token, re-showing prompt") | ||||
|             return self.get(self.request) | ||||
|         if self.should_always_prompt(): | ||||
|             return self.executor.stage_ok() | ||||
|         current_stage: ConsentStage = self.executor.current_stage | ||||
|  | ||||
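Both the removed serializer validator and the added challenge_valid check above implement the same session-nonce round trip: GET stores a fresh token in the session and embeds it in the challenge; POST must echo it back or the prompt is re-shown. A toy model (editor's sketch; the session is reduced to a dict and the key's value is made up):

    import secrets

    SESSION_KEY_CONSENT_TOKEN = "consent_token"  # placeholder value
    session: dict[str, str] = {}

    def get_challenge() -> dict:
        # On GET: mint a nonce, remember it server-side, send it to the client.
        token = secrets.token_urlsafe(16)
        session[SESSION_KEY_CONSENT_TOKEN] = token
        return {"component": "ak-stage-consent", "token": token}

    def post_response(data: dict) -> str:
        # On POST: a mismatch means a stale or forged form; re-show the prompt.
        if data.get("token") != session.get(SESSION_KEY_CONSENT_TOKEN):
            return "re-show prompt"
        return "consent recorded"

    challenge = get_challenge()
    assert post_response({"token": challenge["token"]}) == "consent recorded"
    assert post_response({"token": "bogus"}) == "re-show prompt"
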
| @ -17,7 +17,6 @@ from authentik.flows.views.executor import SESSION_KEY_PLAN | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.stages.consent.models import ConsentMode, ConsentStage, UserConsent | ||||
| from authentik.stages.consent.stage import ( | ||||
|     PLAN_CONTEXT_CONSENT_HEADER, | ||||
|     PLAN_CONTEXT_CONSENT_PERMISSIONS, | ||||
|     SESSION_KEY_CONSENT_TOKEN, | ||||
| ) | ||||
| @ -34,40 +33,6 @@ class TestConsentStage(FlowTestCase): | ||||
|             slug=generate_id(), | ||||
|         ) | ||||
|  | ||||
|     def test_mismatched_token(self): | ||||
|         """Test incorrect token""" | ||||
|         flow = create_test_flow(FlowDesignation.AUTHENTICATION) | ||||
|         stage = ConsentStage.objects.create(name=generate_id(), mode=ConsentMode.ALWAYS_REQUIRE) | ||||
|         binding = FlowStageBinding.objects.create(target=flow, stage=stage, order=2) | ||||
|  | ||||
|         plan = FlowPlan(flow_pk=flow.pk.hex, bindings=[binding], markers=[StageMarker()]) | ||||
|         session = self.client.session | ||||
|         session[SESSION_KEY_PLAN] = plan | ||||
|         session.save() | ||||
|         response = self.client.get( | ||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}), | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|  | ||||
|         session = self.client.session | ||||
|         response = self.client.post( | ||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}), | ||||
|             { | ||||
|                 "token": generate_id(), | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         self.assertStageResponse( | ||||
|             response, | ||||
|             flow, | ||||
|             component="ak-stage-consent", | ||||
|             response_errors={ | ||||
|                 "token": [{"string": "Invalid consent token, re-showing prompt", "code": "invalid"}] | ||||
|             }, | ||||
|         ) | ||||
|         self.assertFalse(UserConsent.objects.filter(user=self.user).exists()) | ||||
|  | ||||
|     def test_always_required(self): | ||||
|         """Test always required consent""" | ||||
|         flow = create_test_flow(FlowDesignation.AUTHENTICATION) | ||||
| @ -193,7 +158,6 @@ class TestConsentStage(FlowTestCase): | ||||
|             context={ | ||||
|                 PLAN_CONTEXT_APPLICATION: self.application, | ||||
|                 PLAN_CONTEXT_CONSENT_PERMISSIONS: [PermissionDict(id="foo", name="foo-desc")], | ||||
|                 PLAN_CONTEXT_CONSENT_HEADER: "test header", | ||||
|             }, | ||||
|         ) | ||||
|         session = self.client.session | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """authentik deny stage app config""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikStageDenyConfig(ManagedAppConfig): | ||||
| class AuthentikStageDenyConfig(AppConfig): | ||||
|     """authentik deny stage config""" | ||||
|  | ||||
|     name = "authentik.stages.deny" | ||||
|     label = "authentik_stages_deny" | ||||
|     verbose_name = "authentik Stages.Deny" | ||||
|     default = True | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """authentik dummy stage config""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikStageDummyConfig(ManagedAppConfig): | ||||
| class AuthentikStageDummyConfig(AppConfig): | ||||
|     """authentik dummy stage config""" | ||||
|  | ||||
|     name = "authentik.stages.dummy" | ||||
|     label = "authentik_stages_dummy" | ||||
|     verbose_name = "authentik Stages.Dummy" | ||||
|     default = True | ||||
|  | ||||
| @ -1,38 +0,0 @@ | ||||
| from base64 import b64encode | ||||
| from copy import deepcopy | ||||
| from pickle import dumps  # nosec | ||||
|  | ||||
| from django.utils.translation import gettext as _ | ||||
|  | ||||
| from authentik.flows.models import FlowToken, in_memory_stage | ||||
| from authentik.flows.planner import PLAN_CONTEXT_IS_RESTORED, FlowPlan | ||||
| from authentik.stages.consent.stage import PLAN_CONTEXT_CONSENT_HEADER, ConsentStageView | ||||
|  | ||||
|  | ||||
| def pickle_flow_token_for_email(plan: FlowPlan): | ||||
|     """Insert a consent stage into the flow plan and pickle it for a FlowToken, | ||||
|     to be sent via Email. This is to prevent automated email scanners, which sometimes | ||||
|     open links in emails in a full browser, from breaking the link.""" | ||||
|     plan_copy = deepcopy(plan) | ||||
|     plan_copy.insert_stage(in_memory_stage(EmailTokenRevocationConsentStageView), index=0) | ||||
|     plan_copy.context[PLAN_CONTEXT_CONSENT_HEADER] = _("Continue to confirm this email address.") | ||||
|     data = dumps(plan_copy) | ||||
|     return b64encode(data).decode() | ||||
|  | ||||
|  | ||||
| class EmailTokenRevocationConsentStageView(ConsentStageView): | ||||
|  | ||||
|     def get(self, request, *args, **kwargs): | ||||
|         token: FlowToken = self.executor.plan.context[PLAN_CONTEXT_IS_RESTORED] | ||||
|         try: | ||||
|             token.refresh_from_db() | ||||
|         except FlowToken.DoesNotExist: | ||||
|             return self.executor.stage_invalid( | ||||
|                 _("Link was already used, please request a new link.") | ||||
|             ) | ||||
|         return super().get(request, *args, **kwargs) | ||||
|  | ||||
|     def challenge_valid(self, response): | ||||
|         token: FlowToken = self.executor.plan.context[PLAN_CONTEXT_IS_RESTORED] | ||||
|         token.delete() | ||||
|         return super().challenge_valid(response) | ||||
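The deleted helper above boils down to pickling a modified flow plan and base64-encoding it for storage on the token; a sketch of the round trip (editor's note; the decode half is an assumed mirror of how the token is restored, not code from this diff):

    from base64 import b64decode, b64encode
    from pickle import dumps, loads  # nosec: server-generated payloads only

    def pack_plan(plan: object) -> str:
        # Mirrors the encoding step in pickle_flow_token_for_email above.
        return b64encode(dumps(plan)).decode()

    def unpack_plan(data: str) -> object:
        # Assumed inverse, applied when the FlowToken is redeemed.
        return loads(b64decode(data))

    plan = {"context": {"consent_header": "Continue to confirm this email address."}}
    assert unpack_plan(pack_plan(plan)) == plan
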
| @ -23,7 +23,6 @@ from authentik.flows.stage import ChallengeStageView | ||||
| from authentik.flows.views.executor import QS_KEY_TOKEN, QS_QUERY | ||||
| from authentik.lib.utils.errors import exception_to_string | ||||
| from authentik.lib.utils.time import timedelta_from_string | ||||
| from authentik.stages.email.flow import pickle_flow_token_for_email | ||||
| from authentik.stages.email.models import EmailStage | ||||
| from authentik.stages.email.tasks import send_mails | ||||
| from authentik.stages.email.utils import TemplateEmailMessage | ||||
| @ -87,8 +86,7 @@ class EmailStageView(ChallengeStageView): | ||||
|                 user=pending_user, | ||||
|                 identifier=identifier, | ||||
|                 flow=self.executor.flow, | ||||
|                 _plan=pickle_flow_token_for_email(self.executor.plan), | ||||
|                 revoke_on_execution=False, | ||||
|                 _plan=FlowToken.pickle(self.executor.plan), | ||||
|             ) | ||||
|         token = tokens.first() | ||||
|         # Check if token is expired and rotate key if so | ||||
|  | ||||
| @ -100,11 +100,9 @@ def send_mail( | ||||
|         # Because we use the Message-ID as UID for the task, manually assign it | ||||
|         message_object.extra_headers["Message-ID"] = message_id | ||||
|  | ||||
|         # Add the logo if it is used in the email body (we can't add it in the | ||||
|         # previous message since MIMEImage can't be converted to json) | ||||
|         body = get_email_body(message_object) | ||||
|         if "cid:logo" in body: | ||||
|             message_object.attach(logo_data()) | ||||
|         # Add the logo (we can't add it in the previous message since MIMEImage | ||||
|         # can't be converted to json) | ||||
|         message_object.attach(logo_data()) | ||||
|  | ||||
|         if ( | ||||
|             message_object.to | ||||
|  | ||||
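Context for the logo comment above (editorial): Celery serializes task arguments to JSON, and a MIMEImage is not JSON-serializable, so the message travels without the image and it is attached inside the task right before sending. A simplified stand-in for the logo_data() helper (the file path is illustrative):

    from email.mime.image import MIMEImage

    def logo_data() -> MIMEImage:
        # Tag the image so an HTML body can reference it via a cid:logo-style URL.
        with open("web/icons/icon_left_brand.png", "rb") as f:  # illustrative path
            logo = MIMEImage(f.read())
        logo.add_header("Content-ID", "<logo>")
        return logo
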
| @ -96,7 +96,7 @@ | ||||
|                 <table width="100%" style="background-color: #FFFFFF; border-spacing: 0; margin-top: 15px;"> | ||||
|                   <tr height="80"> | ||||
|                     <td align="center" style="padding: 20px 0;"> | ||||
|                       <img src="{% block logo_url %}cid:logo{% endblock %}" border="0" alt="authentik logo" class="flexibleImage logo"> | ||||
|                       <img src="{% block logo_url %}cid:logo.png{% endblock %}" border="0" alt="authentik logo" class="flexibleImage logo"> | ||||
|                     </td> | ||||
|                   </tr> | ||||
|                   {% block content %} | ||||
|  | ||||
| @ -174,5 +174,5 @@ class TestEmailStageSending(FlowTestCase): | ||||
|                 response = self.client.post(url) | ||||
|             response = self.client.post(url) | ||||
|             self.assertEqual(response.status_code, 200) | ||||
|             self.assertGreaterEqual(len(mail.outbox), 1) | ||||
|             self.assertTrue(len(mail.outbox) >= 1) | ||||
|             self.assertEqual(mail.outbox[0].subject, "authentik") | ||||
|  | ||||
| @ -17,7 +17,6 @@ from authentik.flows.tests import FlowTestCase | ||||
| from authentik.flows.views.executor import QS_KEY_TOKEN, SESSION_KEY_PLAN, FlowExecutorView | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.stages.consent.stage import SESSION_KEY_CONSENT_TOKEN | ||||
| from authentik.stages.email.models import EmailStage | ||||
| from authentik.stages.email.stage import PLAN_CONTEXT_EMAIL_OVERRIDE, EmailStageView | ||||
|  | ||||
| @ -161,17 +160,6 @@ class TestEmailStage(FlowTestCase): | ||||
|                     kwargs={"flow_slug": self.flow.slug}, | ||||
|                 ) | ||||
|             ) | ||||
|             self.assertStageResponse(response, self.flow, component="ak-stage-consent") | ||||
|             response = self.client.post( | ||||
|                 reverse( | ||||
|                     "authentik_api:flow-executor", | ||||
|                     kwargs={"flow_slug": self.flow.slug}, | ||||
|                 ), | ||||
|                 data={ | ||||
|                     "token": self.client.session[SESSION_KEY_CONSENT_TOKEN], | ||||
|                 }, | ||||
|                 follow=True, | ||||
|             ) | ||||
|  | ||||
|             self.assertEqual(response.status_code, 200) | ||||
|             self.assertStageRedirects(response, reverse("authentik_core:root-redirect")) | ||||
| @ -194,7 +182,6 @@ class TestEmailStage(FlowTestCase): | ||||
|         # Set flow token user to a different user | ||||
|         token: FlowToken = FlowToken.objects.get(user=self.user) | ||||
|         token.user = create_test_admin_user() | ||||
|         token.revoke_on_execution = True | ||||
|         token.save() | ||||
|  | ||||
|         with patch("authentik.flows.views.executor.FlowExecutorView.cancel", MagicMock()): | ||||
|  | ||||
Some files were not shown because too many files have changed in this diff