Compare commits

4 Commits

version-20 ... safari-adm

| Author | SHA1 | Date |
|---|---|---|
|  | b19271bb03 |  |
|  | 88f112db87 |  |
|  | a1de44cd07 |  |
|  | 8869df4b1d |  |
@ -1,5 +1,5 @@
[bumpversion]
current_version = 2025.6.3
current_version = 2025.2.4
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?

@ -21,8 +21,6 @@ optional_value = final

[bumpversion:file:package.json]

[bumpversion:file:package-lock.json]

[bumpversion:file:docker-compose.yml]

[bumpversion:file:schema.yml]

@ -33,4 +31,6 @@ optional_value = final

[bumpversion:file:internal/constants/constants.go]

[bumpversion:file:web/src/common/constants.ts]

[bumpversion:file:lifecycle/aws/template.yaml]
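For context, the `parse` setting above is a regular expression with named groups for the version components. A minimal sketch of matching it in Python, assuming the doubled backslash in the cfg is an escaping artifact and the intended character class is `\d`; the example version strings are illustrative only:

```python
import re

# Named-group version pattern from the [bumpversion] section above
# (assumption: "\\d" in the cfg stands for the regex class \d).
VERSION_PATTERN = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?"
)

release = VERSION_PATTERN.match("2025.2.4")
assert release and release.group("major", "minor", "patch") == ("2025", "2", "4")

candidate = VERSION_PATTERN.match("2025.2.4-rc1")
assert candidate and candidate.group("rc_t", "rc_n") == ("rc", "1")
```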
							
								
								
									
.github/actions/setup/action.yml (2 changes, vendored)

@ -36,7 +36,7 @@ runs:
      with:
        go-version-file: "go.mod"
    - name: Setup docker cache
      uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7
      uses: ScribeMD/docker-cache@0.5.0
      with:
        key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }}
    - name: Setup dependencies
							
								
								
									
.github/dependabot.yml (26 changes, vendored)

@ -23,13 +23,7 @@ updates:
  - package-ecosystem: npm
    directories:
      - "/web"
      - "/web/packages/sfe"
      - "/web/packages/core"
      - "/web/packages/esbuild-plugin-live-reload"
      - "/packages/prettier-config"
      - "/packages/tsconfig"
      - "/packages/docusaurus-config"
      - "/packages/eslint-config"
      - "/web/sfe"
    schedule:
      interval: daily
      time: "04:00"

@ -74,9 +68,6 @@ updates:
      wdio:
        patterns:
          - "@wdio/*"
      goauthentik:
        patterns:
          - "@goauthentik/*"
  - package-ecosystem: npm
    directory: "/website"
    schedule:

@ -97,9 +88,6 @@ updates:
          - "swc-*"
          - "lightningcss*"
          - "@rspack/binding*"
      goauthentik:
        patterns:
          - "@goauthentik/*"
  - package-ecosystem: npm
    directory: "/lifecycle/aws"
    schedule:

@ -130,15 +118,3 @@ updates:
      prefix: "core:"
    labels:
      - dependencies
  - package-ecosystem: docker-compose
    directories:
      # - /scripts # Maybe
      - /tests/e2e
    schedule:
      interval: daily
      time: "04:00"
    open-pull-requests-limit: 10
    commit-message:
      prefix: "core:"
    labels:
      - dependencies
							
								
								
									
.github/workflows/api-ts-publish.yml (1 change, vendored)

@ -53,7 +53,6 @@ jobs:
          signoff: true
          # ID from https://api.github.com/users/authentik-automation[bot]
          author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
          labels: dependencies
      - uses: peter-evans/enable-pull-request-automerge@v3
        with:
          token: ${{ steps.generate_token.outputs.token }}
							
								
								
									
.github/workflows/ci-main.yml (17 changes, vendored)

@ -62,7 +62,6 @@ jobs:
        psql:
          - 15-alpine
          - 16-alpine
          - 17-alpine
        run_id: [1, 2, 3, 4, 5]
    steps:
      - uses: actions/checkout@v4

@ -71,18 +70,22 @@ jobs:
      - name: checkout stable
        run: |
          # Copy current, latest config to local
          # Temporarly comment the .github backup while migrating to uv
          cp authentik/lib/default.yml local.env.yml
          cp -R .github ..
          # cp -R .github ..
          cp -R scripts ..
          git checkout $(git tag --sort=version:refname | grep '^version/' | grep -vE -- '-rc[0-9]+$' | tail -n1)
          rm -rf .github/ scripts/
          mv ../.github ../scripts .
          # rm -rf .github/ scripts/
          # mv ../.github ../scripts .
          rm -rf scripts/
          mv ../scripts .
      - name: Setup authentik env (stable)
        uses: ./.github/actions/setup
        with:
          postgresql_version: ${{ matrix.psql }}
        continue-on-error: true
      - name: run migrations to stable
        run: uv run python -m lifecycle.migrate
        run: poetry run python -m lifecycle.migrate
      - name: checkout current code
        run: |
          set -x

@ -117,7 +120,6 @@ jobs:
        psql:
          - 15-alpine
          - 16-alpine
          - 17-alpine
        run_id: [1, 2, 3, 4, 5]
    steps:
      - uses: actions/checkout@v4

@ -202,7 +204,7 @@ jobs:
        uses: actions/cache@v4
        with:
          path: web/dist
          key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
          key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }}
      - name: prepare web ui
        if: steps.cache-web.outputs.cache-hit != 'true'
        working-directory: web

@ -210,7 +212,6 @@ jobs:
          npm ci
          make -C .. gen-client-ts
          npm run build
          npm run build:sfe
      - name: run e2e
        run: |
          uv run coverage run manage.py test ${{ matrix.job.glob }}
							
								
								
									
.github/workflows/ci-outpost.yml (2 changes, vendored)

@ -29,7 +29,7 @@ jobs:
      - name: Generate API
        run: make gen-client-go
      - name: golangci-lint
        uses: golangci/golangci-lint-action@v8
        uses: golangci/golangci-lint-action@v7
        with:
          version: latest
          args: --timeout 5000s --verbose
							
								
								
									
.github/workflows/ci-website.yml (1 change, vendored)

@ -49,7 +49,6 @@ jobs:
      matrix:
        job:
          - build
          - build:integrations
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
							
								
								
									
.github/workflows/codeql-analysis.yml (2 changes, vendored)

@ -2,7 +2,7 @@ name: "CodeQL"

on:
  push:
    branches: [main, next, version*]
    branches: [main, "*", next, version*]
  pull_request:
    branches: [main]
  schedule:

@ -37,7 +37,6 @@ jobs:
          signoff: true
          # ID from https://api.github.com/users/authentik-automation[bot]
          author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
          labels: dependencies
      - uses: peter-evans/enable-pull-request-automerge@v3
        with:
          token: ${{ steps.generate_token.outputs.token }}
							
								
								
									
.github/workflows/image-compress.yml (1 change, vendored)

@ -53,7 +53,6 @@ jobs:
          body: ${{ steps.compress.outputs.markdown }}
          delete-branch: true
          signoff: true
          labels: dependencies
      - uses: peter-evans/enable-pull-request-automerge@v3
        if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
        with:
							
								
								
									
.github/workflows/packages-npm-publish.yml (24 changes, vendored)

@ -3,11 +3,10 @@ on:
  push:
    branches: [main]
    paths:
      - packages/docusaurus-config/**
      - packages/eslint-config/**
      - packages/prettier-config/**
      - packages/tsconfig/**
      - web/packages/esbuild-plugin-live-reload/**
      - packages/docusaurus-config
      - packages/eslint-config
      - packages/prettier-config
      - packages/tsconfig
  workflow_dispatch:
jobs:
  publish:

@ -17,28 +16,27 @@ jobs:
      fail-fast: false
      matrix:
        package:
          - packages/docusaurus-config
          - packages/eslint-config
          - packages/prettier-config
          - packages/tsconfig
          - web/packages/esbuild-plugin-live-reload
          - docusaurus-config
          - eslint-config
          - prettier-config
          - tsconfig
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 2
      - uses: actions/setup-node@v4
        with:
          node-version-file: ${{ matrix.package }}/package.json
          node-version-file: packages/${{ matrix.package }}/package.json
          registry-url: "https://registry.npmjs.org"
      - name: Get changed files
        id: changed-files
        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c
        with:
          files: |
            ${{ matrix.package }}/package.json
            packages/${{ matrix.package }}/package.json
      - name: Publish package
        if: steps.changed-files.outputs.any_changed == 'true'
        working-directory: ${{ matrix.package }}
        working-directory: packages/${{ matrix.package}}
        run: |
          npm ci
          npm run build

@ -52,6 +52,3 @@ jobs:
          body: "core, web: update translations"
          delete-branch: true
          signoff: true
          labels: dependencies
          # ID from https://api.github.com/users/authentik-automation[bot]
          author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
							
								
								
									
.github/workflows/translation-rename.yml (15 changes, vendored)

@ -15,7 +15,6 @@ jobs:
    runs-on: ubuntu-latest
    if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}}
    steps:
      - uses: actions/checkout@v4
      - id: generate_token
        uses: tibdex/github-app-token@v2
        with:

@ -26,13 +25,23 @@ jobs:
        env:
          GH_TOKEN: ${{ steps.generate_token.outputs.token }}
        run: |
          title=$(gh pr view ${{ github.event.pull_request.number }} --json  "title" -q ".title")
          title=$(curl -q -L \
            -H "Accept: application/vnd.github+json" \
            -H "Authorization: Bearer ${GH_TOKEN}" \
            -H "X-GitHub-Api-Version: 2022-11-28" \
            https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} | jq -r .title)
          echo "title=${title}" >> "$GITHUB_OUTPUT"
      - name: Rename
        env:
          GH_TOKEN: ${{ steps.generate_token.outputs.token }}
        run: |
          gh pr edit ${{ github.event.pull_request.number }} -t "translate: ${{ steps.title.outputs.title }}" --add-label dependencies
          curl -L \
            -X PATCH \
            -H "Accept: application/vnd.github+json" \
            -H "Authorization: Bearer ${GH_TOKEN}" \
            -H "X-GitHub-Api-Version: 2022-11-28" \
            https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} \
            -d "{\"title\":\"translate: ${{ steps.title.outputs.title }}\"}"
      - uses: peter-evans/enable-pull-request-automerge@v3
        with:
          token: ${{ steps.generate_token.outputs.token }}
							
								
								
									
.vscode/settings.json (6 changes, vendored)

@ -16,7 +16,7 @@
    ],
    "typescript.preferences.importModuleSpecifier": "non-relative",
    "typescript.preferences.importModuleSpecifierEnding": "index",
    "typescript.tsdk": "./node_modules/typescript/lib",
    "typescript.tsdk": "./web/node_modules/typescript/lib",
    "typescript.enablePromptUseWorkspaceTsdk": true,
    "yaml.schemas": {
        "./blueprints/schema.json": "blueprints/**/*.yaml"

@ -30,5 +30,7 @@
        }
    ],
    "go.testFlags": ["-count=1"],
    "github-actions.workflows.pinned.workflows": [".github/workflows/ci-main.yml"]
    "github-actions.workflows.pinned.workflows": [
        ".github/workflows/ci-main.yml"
    ]
}
							
								
								
									
Dockerfile (14 changes)

@ -1,7 +1,7 @@
# syntax=docker/dockerfile:1

# Stage 1: Build website
FROM --platform=${BUILDPLATFORM} docker.io/library/node:24 AS website-builder
FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS website-builder

ENV NODE_ENV=production

@ -20,7 +20,7 @@ COPY ./SECURITY.md /work/
RUN npm run build-bundled

# Stage 2: Build webui
FROM --platform=${BUILDPLATFORM} docker.io/library/node:24 AS web-builder
FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS web-builder

ARG GIT_BUILD_HASH
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH

@ -40,8 +40,7 @@ COPY ./web /work/web/
COPY ./website /work/website/
COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api

RUN npm run build && \
    npm run build:sfe
RUN npm run build

# Stage 3: Build go proxy
FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.24-bookworm AS go-builder

@ -86,17 +85,18 @@ FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip
ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN"
ENV GEOIPUPDATE_VERBOSE="1"
ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID"
ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY"

USER root
RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
    --mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \
    mkdir -p /usr/share/GeoIP && \
    /bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
    /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"

# Stage 5: Download uv
FROM ghcr.io/astral-sh/uv:0.7.8 AS uv
FROM ghcr.io/astral-sh/uv:0.6.16 AS uv
# Stage 6: Base python image
FROM ghcr.io/goauthentik/fips-python:3.13.5-slim-bookworm-fips AS python-base
FROM ghcr.io/goauthentik/fips-python:3.12.10-slim-bookworm-fips AS python-base

ENV VENV_PATH="/ak-root/.venv" \
    PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \
							
								
								
									
Makefile (55 changes)

@ -1,7 +1,6 @@
.PHONY: gen dev-reset all clean test web website

SHELL := /usr/bin/env bash
.SHELLFLAGS += ${SHELLFLAGS} -e -o pipefail
.SHELLFLAGS += ${SHELLFLAGS} -e
PWD = $(shell pwd)
UID = $(shell id -u)
GID = $(shell id -g)

@ -9,9 +8,9 @@ NPM_VERSION = $(shell python -m scripts.generate_semver)
PY_SOURCES = authentik tests scripts lifecycle .github
DOCKER_IMAGE ?= "authentik:test"

GEN_API_TS = gen-ts-api
GEN_API_PY = gen-py-api
GEN_API_GO = gen-go-api
GEN_API_TS = "gen-ts-api"
GEN_API_PY = "gen-py-api"
GEN_API_GO = "gen-go-api"

pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null)
pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null)

@ -86,10 +85,6 @@ dev-create-db:

dev-reset: dev-drop-db dev-create-db migrate  ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state.

update-test-mmdb:  ## Update test GeoIP and ASN Databases
	curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-ASN-Test.mmdb -o ${PWD}/tests/GeoLite2-ASN-Test.mmdb
	curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-City-Test.mmdb -o ${PWD}/tests/GeoLite2-City-Test.mmdb

#########################
## API Schema
#########################

@ -122,19 +117,14 @@ gen-diff:  ## (Release) generate the changelog diff between the current schema a
	npx prettier --write diff.md

gen-clean-ts:  ## Remove generated API client for Typescript
	rm -rf ${PWD}/${GEN_API_TS}/
	rm -rf ${PWD}/web/node_modules/@goauthentik/api/
	rm -rf ./${GEN_API_TS}/
	rm -rf ./web/node_modules/@goauthentik/api/

gen-clean-go:  ## Remove generated API client for Go
	mkdir -p ${PWD}/${GEN_API_GO}
ifneq ($(wildcard ${PWD}/${GEN_API_GO}/.*),)
	make -C ${PWD}/${GEN_API_GO} clean
else
	rm -rf ${PWD}/${GEN_API_GO}
endif
	rm -rf ./${GEN_API_GO}/

gen-clean-py:  ## Remove generated API client for Python
	rm -rf ${PWD}/${GEN_API_PY}/
	rm -rf ./${GEN_API_PY}/

gen-clean: gen-clean-ts gen-clean-go gen-clean-py  ## Remove generated API clients

@ -151,8 +141,8 @@ gen-client-ts: gen-clean-ts  ## Build and install the authentik API for Typescri
		--git-repo-id authentik \
		--git-user-id goauthentik
	mkdir -p web/node_modules/@goauthentik/api
	cd ${PWD}/${GEN_API_TS} && npm i
	\cp -rf ${PWD}/${GEN_API_TS}/* web/node_modules/@goauthentik/api
	cd ./${GEN_API_TS} && npm i
	\cp -rf ./${GEN_API_TS}/* web/node_modules/@goauthentik/api

gen-client-py: gen-clean-py ## Build and install the authentik API for Python
	docker run \

@ -166,17 +156,24 @@ gen-client-py: gen-clean-py ## Build and install the authentik API for Python
		--additional-properties=packageVersion=${NPM_VERSION} \
		--git-repo-id authentik \
		--git-user-id goauthentik
	pip install ./${GEN_API_PY}

gen-client-go: gen-clean-go  ## Build and install the authentik API for Golang
	mkdir -p ${PWD}/${GEN_API_GO}
ifeq ($(wildcard ${PWD}/${GEN_API_GO}/.*),)
	git clone --depth 1 https://github.com/goauthentik/client-go.git ${PWD}/${GEN_API_GO}
else
	cd ${PWD}/${GEN_API_GO} && git pull
endif
	cp ${PWD}/schema.yml ${PWD}/${GEN_API_GO}
	make -C ${PWD}/${GEN_API_GO} build
	mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates
	wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./${GEN_API_GO}/config.yaml
	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./${GEN_API_GO}/templates/README.mustache
	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./${GEN_API_GO}/templates/go.mod.mustache
	cp schema.yml ./${GEN_API_GO}/
	docker run \
		--rm -v ${PWD}/${GEN_API_GO}:/local \
		--user ${UID}:${GID} \
		docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
		-i /local/schema.yml \
		-g go \
		-o /local/ \
		-c /local/config.yaml
	go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO}
	rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/

gen-dev-config:  ## Generate a local development config file
	uv run scripts/generate_config.py

@ -247,7 +244,7 @@ docker:  ## Build a docker image of the current source tree
	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}

test-docker:
	BUILD=true ${PWD}/scripts/test_docker.sh
	BUILD=true ./scripts/test_docker.sh

#########################
## CI
@ -42,4 +42,4 @@ See [SECURITY.md](SECURITY.md)

## Adoption and Contributions

Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [contribution guide](https://docs.goauthentik.io/docs/developer-docs?utm_source=github).
Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md).
@ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni

| Version   | Supported |
| --------- | --------- |
| 2025.4.x  | ✅        |
| 2025.6.x  | ✅        |
| 2024.12.x | ✅        |
| 2025.2.x  | ✅        |

## Reporting a Vulnerability
@ -2,7 +2,7 @@

from os import environ

__version__ = "2025.6.3"
__version__ = "2025.2.4"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
@ -1,12 +1,9 @@
"""API Authentication"""

from hmac import compare_digest
from pathlib import Path
from tempfile import gettempdir
from typing import Any

from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from drf_spectacular.extensions import OpenApiAuthenticationExtension
from rest_framework.authentication import BaseAuthentication, get_authorization_header
from rest_framework.exceptions import AuthenticationFailed

@ -14,17 +11,11 @@ from rest_framework.request import Request
from structlog.stdlib import get_logger

from authentik.core.middleware import CTX_AUTH_VIA
from authentik.core.models import Token, TokenIntents, User, UserTypes
from authentik.core.models import Token, TokenIntents, User
from authentik.outposts.models import Outpost
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API

LOGGER = get_logger()
_tmp = Path(gettempdir())
try:
    with open(_tmp / "authentik-core-ipc.key") as _f:
        ipc_key = _f.read()
except OSError:
    ipc_key = None


def validate_auth(header: bytes) -> str | None:

@ -82,11 +73,6 @@ def auth_user_lookup(raw_header: bytes) -> User | None:
    if user:
        CTX_AUTH_VIA.set("secret_key")
        return user
    # then try to auth via secret key (for embedded outpost/etc)
    user = token_ipc(auth_credentials)
    if user:
        CTX_AUTH_VIA.set("ipc")
        return user
    raise AuthenticationFailed("Token invalid/expired")


@ -104,43 +90,6 @@ def token_secret_key(value: str) -> User | None:
    return outpost.user


class IPCUser(AnonymousUser):
    """'Virtual' user for IPC communication between authentik core and the authentik router"""

    username = "authentik:system"
    is_active = True
    is_superuser = True

    @property
    def type(self):
        return UserTypes.INTERNAL_SERVICE_ACCOUNT

    def has_perm(self, perm, obj=None):
        return True

    def has_perms(self, perm_list, obj=None):
        return True

    def has_module_perms(self, module):
        return True

    @property
    def is_anonymous(self):
        return False

    @property
    def is_authenticated(self):
        return True


def token_ipc(value: str) -> User | None:
    """Check if the token is the secret key
    and return the service account for the managed outpost"""
    if not ipc_key or not compare_digest(value, ipc_key):
        return None
    return IPCUser()


class TokenAuthentication(BaseAuthentication):
    """Token-based authentication using HTTP Bearer authentication"""
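For context, the IPC authentication path above reads a key file from the temp directory and compares the presented credential in constant time. A minimal standalone sketch of that pattern follows; the key file name mirrors the code above, while the helper name and values are illustrative only:

```python
from hmac import compare_digest
from pathlib import Path
from tempfile import gettempdir

# Mirrors the lookup above: the key is optional, so a missing file simply disables IPC auth.
_key_file = Path(gettempdir()) / "authentik-core-ipc.key"
ipc_key = _key_file.read_text() if _key_file.exists() else None


def is_ipc_credential(value: str) -> bool:
    """Constant-time comparison, so the check does not leak timing information about the key."""
    return bool(ipc_key) and compare_digest(value, ipc_key)
```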
@ -54,7 +54,7 @@ def create_component(generator: SchemaGenerator, name, schema, type_=ResolvedCom
    return component


def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):
def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):  # noqa: W0613
    """Workaround to set a default response for endpoints.
    Workaround suggested at
    <https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357>
@ -164,7 +164,9 @@ class BlueprintEntry:
        """Get the blueprint model, with yaml tags resolved if present"""
        return str(self.tag_resolver(self.model, blueprint))

    def get_permissions(self, blueprint: "Blueprint") -> Generator[BlueprintEntryPermission]:
    def get_permissions(
        self, blueprint: "Blueprint"
    ) -> Generator[BlueprintEntryPermission, None, None]:
        """Get permissions of this entry, with all yaml tags resolved"""
        for perm in self.permissions:
            yield BlueprintEntryPermission(
@ -59,7 +59,6 @@ class BrandSerializer(ModelSerializer):
            "flow_device_code",
            "default_application",
            "web_certificate",
            "client_certificates",
            "attributes",
        ]
        extra_kwargs = {

@ -121,7 +120,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet):
        "domain",
        "branding_title",
        "web_certificate__name",
        "client_certificates__name",
    ]
    filterset_fields = [
        "brand_uuid",

@ -138,7 +136,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet):
        "flow_user_settings",
        "flow_device_code",
        "web_certificate",
        "client_certificates",
    ]
    ordering = ["domain"]
@ -16,7 +16,7 @@ def migrate_custom_css(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
    if not path.exists():
        return
    css = path.read_text()
    Brand.objects.using(db_alias).all().update(branding_custom_css=css)
    Brand.objects.using(db_alias).update(branding_custom_css=css)


class Migration(migrations.Migration):
@ -1,37 +0,0 @@
# Generated by Django 5.1.9 on 2025-05-19 15:09

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_brands", "0009_brand_branding_default_flow_background"),
        ("authentik_crypto", "0004_alter_certificatekeypair_name"),
    ]

    operations = [
        migrations.AddField(
            model_name="brand",
            name="client_certificates",
            field=models.ManyToManyField(
                blank=True,
                default=None,
                help_text="Certificates used for client authentication.",
                to="authentik_crypto.certificatekeypair",
            ),
        ),
        migrations.AlterField(
            model_name="brand",
            name="web_certificate",
            field=models.ForeignKey(
                default=None,
                help_text="Web Certificate used by the authentik Core webserver.",
                null=True,
                on_delete=django.db.models.deletion.SET_DEFAULT,
                related_name="+",
                to="authentik_crypto.certificatekeypair",
            ),
        ),
    ]
@ -73,13 +73,6 @@ class Brand(SerializerModel):
        default=None,
        on_delete=models.SET_DEFAULT,
        help_text=_("Web Certificate used by the authentik Core webserver."),
        related_name="+",
    )
    client_certificates = models.ManyToManyField(
        CertificateKeyPair,
        default=None,
        blank=True,
        help_text=_("Certificates used for client authentication."),
    )
    attributes = models.JSONField(default=dict, blank=True)
@ -148,14 +148,3 @@ class TestBrands(APITestCase):
                "default_locale": "",
            },
        )

    def test_custom_css(self):
        """Test custom_css"""
        brand = create_test_brand()
        brand.branding_custom_css = """* {
            font-family: "Foo bar";
        }"""
        brand.save()
        res = self.client.get(reverse("authentik_core:if-user"))
        self.assertEqual(res.status_code, 200)
        self.assertIn(brand.branding_custom_css, res.content.decode())
@ -5,12 +5,10 @@ from typing import Any
from django.db.models import F, Q
from django.db.models import Value as V
from django.http.request import HttpRequest
from django.utils.html import _json_script_escapes
from django.utils.safestring import mark_safe
from sentry_sdk import get_current_span

from authentik import get_full_version
from authentik.brands.models import Brand
from authentik.lib.sentry import get_http_meta
from authentik.tenants.models import Tenant

_q_default = Q(default=True)

@ -34,14 +32,13 @@ def context_processor(request: HttpRequest) -> dict[str, Any]:
    """Context Processor that injects brand object into every template"""
    brand = getattr(request, "brand", DEFAULT_BRAND)
    tenant = getattr(request, "tenant", Tenant())
    # similarly to `json_script` we escape everything HTML-related, however django
    # only directly exposes this as a function that also wraps it in a <script> tag
    # which we dont want for CSS
    brand_css = mark_safe(str(brand.branding_custom_css).translate(_json_script_escapes))  # nosec
    trace = ""
    span = get_current_span()
    if span:
        trace = span.to_traceparent()
    return {
        "brand": brand,
        "brand_css": brand_css,
        "footer_links": tenant.footer_links,
        "html_meta": {**get_http_meta()},
        "sentry_trace": trace,
        "version": get_full_version(),
    }
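For context, the comment in the hunk above explains that the custom CSS is escaped the way `json_script` escapes its payload, just without the `<script>` wrapper Django would add. A minimal hedged sketch of the same idea with a hand-rolled escape table (the table and function below are illustrative, not Django's private `_json_script_escapes`):

```python
# Illustrative escape table in the spirit of json_script: HTML-significant
# characters are replaced by unicode escapes so user CSS cannot break out of the tag.
_HTML_ESCAPES = {
    ord("<"): "\\u003C",
    ord(">"): "\\u003E",
    ord("&"): "\\u0026",
}


def escape_for_style_tag(css: str) -> str:
    """Escape CSS before inlining it into a <style> element."""
    return css.translate(_HTML_ESCAPES)


assert "</style>" not in escape_for_style_tag("</style><script>alert(1)</script>")
```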
@ -99,8 +99,9 @@ class GroupSerializer(ModelSerializer):
            if superuser
            else "authentik_core.disable_group_superuser"
        )
        if self.instance or superuser:
            has_perm = user.has_perm(perm) or user.has_perm(perm, self.instance)
        has_perm = user.has_perm(perm)
        if self.instance and not has_perm:
            has_perm = user.has_perm(perm, self.instance)
        if not has_perm:
            raise ValidationError(
                _(
@ -84,7 +84,6 @@ from authentik.flows.views.executor import QS_KEY_TOKEN
from authentik.lib.avatars import get_avatar
from authentik.rbac.decorators import permission_required
from authentik.rbac.models import get_permission_choices
from authentik.stages.email.flow import pickle_flow_token_for_email
from authentik.stages.email.models import EmailStage
from authentik.stages.email.tasks import send_mails
from authentik.stages.email.utils import TemplateEmailMessage

@ -452,7 +451,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
    def list(self, request, *args, **kwargs):
        return super().list(request, *args, **kwargs)

    def _create_recovery_link(self, for_email=False) -> tuple[str, Token]:
    def _create_recovery_link(self) -> tuple[str, Token]:
        """Create a recovery link (when the current brand has a recovery flow set),
        that can either be shown to an admin or sent to the user directly"""
        brand: Brand = self.request._request.brand

@ -474,16 +473,12 @@ class UserViewSet(UsedByMixin, ModelViewSet):
            raise ValidationError(
                {"non_field_errors": "Recovery flow not applicable to user"}
            ) from None
        _plan = FlowToken.pickle(plan)
        if for_email:
            _plan = pickle_flow_token_for_email(plan)
        token, __ = FlowToken.objects.update_or_create(
            identifier=f"{user.uid}-password-reset",
            defaults={
                "user": user,
                "flow": flow,
                "_plan": _plan,
                "revoke_on_execution": not for_email,
                "_plan": FlowToken.pickle(plan),
            },
        )
        querystring = urlencode({QS_KEY_TOKEN: token.key})

@ -653,7 +648,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
        if for_user.email == "":
            LOGGER.debug("User doesn't have an email address")
            raise ValidationError({"non_field_errors": "User does not have an email address set."})
        link, token = self._create_recovery_link(for_email=True)
        link, token = self._create_recovery_link()
        # Lookup the email stage to assure the current user can access it
        stages = get_objects_for_user(
            request.user, "authentik_stages_email.view_emailstage"
@ -2,7 +2,6 @@

from django.apps import apps
from django.contrib.auth.management import create_permissions
from django.core.management import call_command
from django.core.management.base import BaseCommand, no_translations
from guardian.management import create_anonymous_user

@ -17,10 +16,6 @@ class Command(BaseCommand):
        """Check permissions for all apps"""
        for tenant in Tenant.objects.filter(ready=True):
            with tenant:
                # See https://code.djangoproject.com/ticket/28417
                # Remove potential lingering old permissions
                call_command("remove_stale_contenttypes", "--no-input")

                for app in apps.get_app_configs():
                    self.stdout.write(f"Checking app {app.name} ({app.label})\n")
                    create_permissions(app, verbosity=0)
@ -31,10 +31,7 @@ class PickleSerializer:

    def loads(self, data):
        """Unpickle data to be loaded from redis"""
        try:
        return pickle.loads(data)  # nosec
        except Exception:
            return {}


def _migrate_session(

@ -79,7 +76,6 @@ def _migrate_session(
        AuthenticatedSession.objects.using(db_alias).create(
            session=session,
            user=old_auth_session.user,
            uuid=old_auth_session.uuid,
        )
@ -1,103 +0,0 @@
# Generated by Django 5.1.9 on 2025-05-14 11:15

from django.apps.registry import Apps, apps as global_apps
from django.db import migrations
from django.contrib.contenttypes.management import create_contenttypes
from django.contrib.auth.management import create_permissions
from django.db.backends.base.schema import BaseDatabaseSchemaEditor


def migrate_authenticated_session_permissions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
    """Migrate permissions from OldAuthenticatedSession to AuthenticatedSession"""
    db_alias = schema_editor.connection.alias

    # apps here is just an instance of django.db.migrations.state.AppConfigStub, we need the
    # real config for creating permissions and content types
    authentik_core_config = global_apps.get_app_config("authentik_core")
    # These are only ran by django after all migrations, but we need them right now.
    # global_apps is needed,
    create_permissions(authentik_core_config, using=db_alias, verbosity=1)
    create_contenttypes(authentik_core_config, using=db_alias, verbosity=1)

    # But from now on, this is just a regular migration, so use apps
    Permission = apps.get_model("auth", "Permission")
    ContentType = apps.get_model("contenttypes", "ContentType")

    try:
        old_ct = ContentType.objects.using(db_alias).get(
            app_label="authentik_core", model="oldauthenticatedsession"
        )
        new_ct = ContentType.objects.using(db_alias).get(
            app_label="authentik_core", model="authenticatedsession"
        )
    except ContentType.DoesNotExist:
        # This should exist at this point, but if not, let's cut our losses
        return

    # Get all permissions for the old content type
    old_perms = Permission.objects.using(db_alias).filter(content_type=old_ct)

    # Create equivalent permissions for the new content type
    for old_perm in old_perms:
        new_perm = (
            Permission.objects.using(db_alias)
            .filter(
                content_type=new_ct,
                codename=old_perm.codename,
            )
            .first()
        )
        if not new_perm:
            # This should exist at this point, but if not, let's cut our losses
            continue

        # Global user permissions
        User = apps.get_model("authentik_core", "User")
        User.user_permissions.through.objects.using(db_alias).filter(
            permission=old_perm
        ).all().update(permission=new_perm)

        # Global role permissions
        DjangoGroup = apps.get_model("auth", "Group")
        DjangoGroup.permissions.through.objects.using(db_alias).filter(
            permission=old_perm
        ).all().update(permission=new_perm)

        # Object user permissions
        UserObjectPermission = apps.get_model("guardian", "UserObjectPermission")
        UserObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update(
            permission=new_perm, content_type=new_ct
        )

        # Object role permissions
        GroupObjectPermission = apps.get_model("guardian", "GroupObjectPermission")
        GroupObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update(
            permission=new_perm, content_type=new_ct
        )


def remove_old_authenticated_session_content_type(
    apps: Apps, schema_editor: BaseDatabaseSchemaEditor
):
    db_alias = schema_editor.connection.alias
    ContentType = apps.get_model("contenttypes", "ContentType")

    ContentType.objects.using(db_alias).filter(model="oldauthenticatedsession").delete()


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0047_delete_oldauthenticatedsession"),
    ]

    operations = [
        migrations.RunPython(
            code=migrate_authenticated_session_permissions,
            reverse_code=migrations.RunPython.noop,
        ),
        migrations.RunPython(
            code=remove_old_authenticated_session_content_type,
            reverse_code=migrations.RunPython.noop,
        ),
    ]
@ -16,14 +16,12 @@
        {% block head_before %}
        {% endblock %}
        <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}">
        <style>{{ brand_css }}</style>
        <style>{{ brand.branding_custom_css }}</style>
        <script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script>
        <script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script>
        {% block head %}
        {% endblock %}
        {% for key, value in html_meta.items %}
        <meta name="{{key}}" content="{{ value }}" />
        {% endfor %}
        <meta name="sentry-trace" content="{{ sentry_trace }}" />
    </head>
    <body>
        {% block body %}
@ -124,16 +124,6 @@ class TestGroupsAPI(APITestCase):
            {"is_superuser": ["User does not have permission to set superuser status to True."]},
        )

    def test_superuser_no_perm_no_superuser(self):
        """Test creating a group without permission and without superuser flag"""
        assign_perm("authentik_core.add_group", self.login_user)
        self.client.force_login(self.login_user)
        res = self.client.post(
            reverse("authentik_api:group-list"),
            data={"name": generate_id(), "is_superuser": False},
        )
        self.assertEqual(res.status_code, 201)

    def test_superuser_update_no_perm(self):
        """Test updating a superuser group without permission"""
        group = Group.objects.create(name=generate_id(), is_superuser=True)
@ -30,7 +30,6 @@ from structlog.stdlib import get_logger

from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer, PassiveSerializer
from authentik.core.models import UserTypes
from authentik.crypto.apps import MANAGED_KEY
from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg
from authentik.crypto.models import CertificateKeyPair

@ -273,7 +272,6 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
    def view_certificate(self, request: Request, pk: str) -> Response:
        """Return certificate-key pairs certificate and log access"""
        certificate: CertificateKeyPair = self.get_object()
        if request.user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT:
        Event.new(  # noqa # nosec
            EventAction.SECRET_VIEW,
            secret=certificate,

@ -304,7 +302,6 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
    def view_private_key(self, request: Request, pk: str) -> Response:
        """Return certificate-key pairs private key and log access"""
        certificate: CertificateKeyPair = self.get_object()
        if request.user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT:
        Event.new(  # noqa # nosec
            EventAction.SECRET_VIEW,
            secret=certificate,
@@ -132,14 +132,13 @@ class LicenseKey:
        """Get a summarized version of all (not expired) licenses"""
        total = LicenseKey(get_license_aud(), 0, "Summarized license", 0, 0)
        for lic in License.objects.all():
            if lic.is_valid:
            total.internal_users += lic.internal_users
            total.external_users += lic.external_users
                total.license_flags.extend(lic.status.license_flags)
            exp_ts = int(mktime(lic.expiry.timetuple()))
            if total.exp == 0:
                total.exp = exp_ts
            total.exp = max(total.exp, exp_ts)
            total.license_flags.extend(lic.status.license_flags)
        return total

    @staticmethod
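For readers skimming the hunk above: the summary object folds every installed license into a single record. A minimal standalone sketch of that aggregation pattern, assuming plain dicts with internal_users, external_users, exp and flags keys (names borrowed from the hunk; everything else is illustrative):

from dataclasses import dataclass, field

@dataclass
class SummedLicense:
    internal_users: int = 0
    external_users: int = 0
    exp: int = 0
    license_flags: list[str] = field(default_factory=list)

def summarize(licenses: list[dict]) -> SummedLicense:
    """Fold all licenses into one total; user counts add up, expiry tracks the latest timestamp."""
    total = SummedLicense()
    for lic in licenses:
        total.internal_users += lic["internal_users"]
        total.external_users += lic["external_users"]
        total.exp = max(total.exp, lic["exp"])
        total.license_flags.extend(lic.get("flags", []))
    return total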
@@ -39,10 +39,6 @@ class License(SerializerModel):
    internal_users = models.BigIntegerField()
    external_users = models.BigIntegerField()

    @property
    def is_valid(self) -> bool:
        return self.expiry >= now()

    @property
    def serializer(self) -> type[BaseSerializer]:
        from authentik.enterprise.api import LicenseSerializer
@@ -25,7 +25,7 @@ class GoogleWorkspaceGroupClient(
    """Google client for groups"""

    connection_type = GoogleWorkspaceProviderGroup
    connection_attr = "googleworkspaceprovidergroup_set"
    connection_type_query = "group"
    can_discover = True

    def __init__(self, provider: GoogleWorkspaceProvider) -> None:

@@ -20,7 +20,7 @@ class GoogleWorkspaceUserClient(GoogleWorkspaceSyncClient[User, GoogleWorkspaceP
    """Sync authentik users into google workspace"""

    connection_type = GoogleWorkspaceProviderUser
    connection_attr = "googleworkspaceprovideruser_set"
    connection_type_query = "user"
    can_discover = True

    def __init__(self, provider: GoogleWorkspaceProvider) -> None:
@@ -132,11 +132,7 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider):
        if type == User:
            # Get queryset of all users with consistent ordering
            # according to the provider's settings
            base = (
                User.objects.prefetch_related("googleworkspaceprovideruser_set")
                .all()
                .exclude_anonymous()
            )
            base = User.objects.all().exclude_anonymous()
            if self.exclude_users_service_account:
                base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude(
                    type=UserTypes.INTERNAL_SERVICE_ACCOUNT
@@ -146,11 +142,7 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider):
            return base.order_by("pk")
        if type == Group:
            # Get queryset of all groups with consistent ordering
            return (
                Group.objects.prefetch_related("googleworkspaceprovidergroup_set")
                .all()
                .order_by("pk")
            )
            return Group.objects.all().order_by("pk")
        raise ValueError(f"Invalid type {type}")

    def google_credentials(self):
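The queryset change above drops the prefetch_related call; the remaining filtering pattern, sketched standalone (assuming a Django User model with a type field and an exclude_anonymous() manager method, as in the hunk):

def sync_user_queryset(exclude_service_accounts: bool):
    # Stable primary-key ordering keeps paginated sync batches consistent between requests.
    base = User.objects.all().exclude_anonymous()
    if exclude_service_accounts:
        base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude(
            type=UserTypes.INTERNAL_SERVICE_ACCOUNT
        )
    return base.order_by("pk")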
@@ -29,7 +29,7 @@ class MicrosoftEntraGroupClient(
    """Microsoft client for groups"""

    connection_type = MicrosoftEntraProviderGroup
    connection_attr = "microsoftentraprovidergroup_set"
    connection_type_query = "group"
    can_discover = True

    def __init__(self, provider: MicrosoftEntraProvider) -> None:

@@ -24,7 +24,7 @@ class MicrosoftEntraUserClient(MicrosoftEntraSyncClient[User, MicrosoftEntraProv
    """Sync authentik users into microsoft entra"""

    connection_type = MicrosoftEntraProviderUser
    connection_attr = "microsoftentraprovideruser_set"
    connection_type_query = "user"
    can_discover = True

    def __init__(self, provider: MicrosoftEntraProvider) -> None:
@@ -121,11 +121,7 @@ class MicrosoftEntraProvider(OutgoingSyncProvider, BackchannelProvider):
        if type == User:
            # Get queryset of all users with consistent ordering
            # according to the provider's settings
            base = (
                User.objects.prefetch_related("microsoftentraprovideruser_set")
                .all()
                .exclude_anonymous()
            )
            base = User.objects.all().exclude_anonymous()
            if self.exclude_users_service_account:
                base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude(
                    type=UserTypes.INTERNAL_SERVICE_ACCOUNT
@@ -135,11 +131,7 @@ class MicrosoftEntraProvider(OutgoingSyncProvider, BackchannelProvider):
            return base.order_by("pk")
        if type == Group:
            # Get queryset of all groups with consistent ordering
            return (
                Group.objects.prefetch_related("microsoftentraprovidergroup_set")
                .all()
                .order_by("pk")
            )
            return Group.objects.all().order_by("pk")
        raise ValueError(f"Invalid type {type}")

    def microsoft_credentials(self):
@@ -19,7 +19,6 @@ TENANT_APPS = [
    "authentik.enterprise.providers.microsoft_entra",
    "authentik.enterprise.providers.ssf",
    "authentik.enterprise.stages.authenticator_endpoint_gdtc",
    "authentik.enterprise.stages.mtls",
    "authentik.enterprise.stages.source",
]
@@ -1,31 +0,0 @@
"""Mutual TLS Stage API Views"""

from rest_framework.viewsets import ModelViewSet

from authentik.core.api.used_by import UsedByMixin
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.stages.mtls.models import MutualTLSStage
from authentik.flows.api.stages import StageSerializer


class MutualTLSStageSerializer(EnterpriseRequiredMixin, StageSerializer):
    """MutualTLSStage Serializer"""

    class Meta:
        model = MutualTLSStage
        fields = StageSerializer.Meta.fields + [
            "mode",
            "certificate_authorities",
            "cert_attribute",
            "user_attribute",
        ]


class MutualTLSStageViewSet(UsedByMixin, ModelViewSet):
    """MutualTLSStage Viewset"""

    queryset = MutualTLSStage.objects.all()
    serializer_class = MutualTLSStageSerializer
    filterset_fields = "__all__"
    ordering = ["name"]
    search_fields = ["name"]
@@ -1,12 +0,0 @@
"""authentik stage app config"""

from authentik.enterprise.apps import EnterpriseConfig


class AuthentikEnterpriseStageMTLSConfig(EnterpriseConfig):
    """authentik MTLS stage config"""

    name = "authentik.enterprise.stages.mtls"
    label = "authentik_stages_mtls"
    verbose_name = "authentik Enterprise.Stages.MTLS"
    default = True
@@ -1,68 +0,0 @@
# Generated by Django 5.1.9 on 2025-05-19 18:29

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ("authentik_crypto", "0004_alter_certificatekeypair_name"),
        ("authentik_flows", "0027_auto_20231028_1424"),
    ]

    operations = [
        migrations.CreateModel(
            name="MutualTLSStage",
            fields=[
                (
                    "stage_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="authentik_flows.stage",
                    ),
                ),
                (
                    "mode",
                    models.TextField(choices=[("optional", "Optional"), ("required", "Required")]),
                ),
                (
                    "cert_attribute",
                    models.TextField(
                        choices=[
                            ("subject", "Subject"),
                            ("common_name", "Common Name"),
                            ("email", "Email"),
                        ]
                    ),
                ),
                (
                    "user_attribute",
                    models.TextField(choices=[("username", "Username"), ("email", "Email")]),
                ),
                (
                    "certificate_authorities",
                    models.ManyToManyField(
                        blank=True,
                        default=None,
                        help_text="Configure certificate authorities to validate the certificate against. This option has a higher priority than the `client_certificate` option on `Brand`.",
                        to="authentik_crypto.certificatekeypair",
                    ),
                ),
            ],
            options={
                "verbose_name": "Mutual TLS Stage",
                "verbose_name_plural": "Mutual TLS Stages",
                "permissions": [
                    ("pass_outpost_certificate", "Permissions to pass Certificates for outposts.")
                ],
            },
            bases=("authentik_flows.stage",),
        ),
    ]
@@ -1,71 +0,0 @@
from django.db import models
from django.utils.translation import gettext_lazy as _
from rest_framework.serializers import Serializer

from authentik.crypto.models import CertificateKeyPair
from authentik.flows.models import Stage
from authentik.flows.stage import StageView


class TLSMode(models.TextChoices):
    """Modes the TLS Stage can operate in"""

    OPTIONAL = "optional"
    REQUIRED = "required"


class CertAttributes(models.TextChoices):
    """Certificate attribute used for user matching"""

    SUBJECT = "subject"
    COMMON_NAME = "common_name"
    EMAIL = "email"


class UserAttributes(models.TextChoices):
    """User attribute for user matching"""

    USERNAME = "username"
    EMAIL = "email"


class MutualTLSStage(Stage):
    """Authenticate/enroll users using a client-certificate."""

    mode = models.TextField(choices=TLSMode.choices)

    certificate_authorities = models.ManyToManyField(
        CertificateKeyPair,
        default=None,
        blank=True,
        help_text=_(
            "Configure certificate authorities to validate the certificate against. "
            "This option has a higher priority than the `client_certificate` option on `Brand`."
        ),
    )

    cert_attribute = models.TextField(choices=CertAttributes.choices)
    user_attribute = models.TextField(choices=UserAttributes.choices)

    @property
    def view(self) -> type[StageView]:
        from authentik.enterprise.stages.mtls.stage import MTLSStageView

        return MTLSStageView

    @property
    def serializer(self) -> type[Serializer]:
        from authentik.enterprise.stages.mtls.api import MutualTLSStageSerializer

        return MutualTLSStageSerializer

    @property
    def component(self) -> str:
        return "ak-stage-mtls-form"

    class Meta:
        verbose_name = _("Mutual TLS Stage")
        verbose_name_plural = _("Mutual TLS Stages")
        permissions = [
            ("pass_outpost_certificate", _("Permissions to pass Certificates for outposts.")),
        ]
@@ -1,230 +0,0 @@
from binascii import hexlify
from urllib.parse import unquote_plus

from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives import hashes
from cryptography.x509 import (
    Certificate,
    NameOID,
    ObjectIdentifier,
    UnsupportedGeneralNameType,
    load_pem_x509_certificate,
)
from cryptography.x509.verification import PolicyBuilder, Store, VerificationError
from django.utils.translation import gettext_lazy as _

from authentik.brands.models import Brand
from authentik.core.models import User
from authentik.crypto.models import CertificateKeyPair
from authentik.enterprise.stages.mtls.models import (
    CertAttributes,
    MutualTLSStage,
    TLSMode,
    UserAttributes,
)
from authentik.flows.challenge import AccessDeniedChallenge
from authentik.flows.models import FlowDesignation
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
from authentik.flows.stage import ChallengeStageView
from authentik.root.middleware import ClientIPMiddleware
from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS
from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT

# All of these headers must only be accepted from "trusted" reverse proxies
# See internal/web/proxy.go:39
HEADER_PROXY_FORWARDED = "X-Forwarded-Client-Cert"
HEADER_NGINX_FORWARDED = "SSL-Client-Cert"
HEADER_TRAEFIK_FORWARDED = "X-Forwarded-TLS-Client-Cert"
HEADER_OUTPOST_FORWARDED = "X-Authentik-Outpost-Certificate"


PLAN_CONTEXT_CERTIFICATE = "certificate"
class MTLSStageView(ChallengeStageView):

    def __parse_single_cert(self, raw: str | None) -> list[Certificate]:
        """Helper to parse a single certificate"""
        if not raw:
            return []
        try:
            cert = load_pem_x509_certificate(unquote_plus(raw).encode())
            return [cert]
        except ValueError as exc:
            self.logger.info("Failed to parse certificate", exc=exc)
            return []

    def _parse_cert_xfcc(self) -> list[Certificate]:
        """Parse certificates in the format given to us in
        the format of the authentik router/envoy"""
        xfcc_raw = self.request.headers.get(HEADER_PROXY_FORWARDED)
        if not xfcc_raw:
            return []
        certs = []
        for r_cert in xfcc_raw.split(","):
            el = r_cert.split(";")
            raw_cert = {k.split("=")[0]: k.split("=")[1] for k in el}
            if "Cert" not in raw_cert:
                continue
            certs.extend(self.__parse_single_cert(raw_cert["Cert"]))
        return certs

    def _parse_cert_nginx(self) -> list[Certificate]:
        """Parse certificates in the format nginx-ingress gives to us"""
        sslcc_raw = self.request.headers.get(HEADER_NGINX_FORWARDED)
        return self.__parse_single_cert(sslcc_raw)

    def _parse_cert_traefik(self) -> list[Certificate]:
        """Parse certificates in the format traefik gives to us"""
        ftcc_raw = self.request.headers.get(HEADER_TRAEFIK_FORWARDED)
        return self.__parse_single_cert(ftcc_raw)

    def _parse_cert_outpost(self) -> list[Certificate]:
        """Parse certificates in the format outposts give to us. Also authenticates
        the outpost to ensure it has the permission to do so"""
        user = ClientIPMiddleware.get_outpost_user(self.request)
        if not user:
            return []
        if not user.has_perm(
            "pass_outpost_certificate", self.executor.current_stage
        ) and not user.has_perm("authentik_stages_mtls.pass_outpost_certificate"):
            return []
        outpost_raw = self.request.headers.get(HEADER_OUTPOST_FORWARDED)
        return self.__parse_single_cert(outpost_raw)
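As a rough illustration of the X-Forwarded-Client-Cert handling above: each comma-separated element carries ;-separated Key=Value pairs, and the Cert value is a URL-encoded PEM. A self-contained sketch (hypothetical header value, simplified splitting):

from urllib.parse import quote_plus, unquote_plus

pem = "-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----\n"  # placeholder PEM
xfcc = f"By=spiffe://cluster/ns/default/sa/proxy;Cert={quote_plus(pem)}"

for element in xfcc.split(","):
    pairs = dict(kv.split("=", 1) for kv in element.split(";"))
    if "Cert" in pairs:
        raw_pem = unquote_plus(pairs["Cert"])  # ready for load_pem_x509_certificate()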
    def get_authorities(self) -> list[CertificateKeyPair] | None:
        # We can't access `certificate_authorities` on `self.executor.current_stage`, as that would
        # load the certificate into the directly referenced foreign key, which we have to pickle
        # as part of the flow plan, and cryptography certs can't be pickled
        stage: MutualTLSStage = (
            MutualTLSStage.objects.filter(pk=self.executor.current_stage.pk)
            .prefetch_related("certificate_authorities")
            .first()
        )
        if stage.certificate_authorities.exists():
            return stage.certificate_authorities.order_by("name")
        brand: Brand = self.request.brand
        if brand.client_certificates.exists():
            return brand.client_certificates.order_by("name")
        return None

    def validate_cert(self, authorities: list[CertificateKeyPair], certs: list[Certificate]):
        authorities_cert = [x.certificate for x in authorities]
        for _cert in certs:
            try:
                PolicyBuilder().store(Store(authorities_cert)).build_client_verifier().verify(
                    _cert, []
                )
                return _cert
            except (
                InvalidSignature,
                TypeError,
                ValueError,
                VerificationError,
                UnsupportedGeneralNameType,
            ) as exc:
                self.logger.warning("Discarding invalid certificate", cert=_cert, exc=exc)
                continue
        return None
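validate_cert() above relies on the verification API of recent cryptography releases. A standalone sketch of the same call, assuming ca_pem and client_pem hold the PEM bytes of a CA and a client certificate it issued:

from cryptography.x509 import load_pem_x509_certificate
from cryptography.x509.verification import PolicyBuilder, Store, VerificationError

ca = load_pem_x509_certificate(ca_pem)
client = load_pem_x509_certificate(client_pem)

verifier = PolicyBuilder().store(Store([ca])).build_client_verifier()
try:
    verifier.verify(client, [])  # second argument: untrusted intermediates, none here
    print("certificate chains to the configured CA")
except VerificationError as exc:
    print(f"certificate rejected: {exc}")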
    def check_if_user(self, cert: Certificate):
        stage: MutualTLSStage = self.executor.current_stage
        cert_attr = None
        user_attr = None
        match stage.cert_attribute:
            case CertAttributes.SUBJECT:
                cert_attr = cert.subject.rfc4514_string()
            case CertAttributes.COMMON_NAME:
                cert_attr = self.get_cert_attribute(cert, NameOID.COMMON_NAME)
            case CertAttributes.EMAIL:
                cert_attr = self.get_cert_attribute(cert, NameOID.EMAIL_ADDRESS)
        match stage.user_attribute:
            case UserAttributes.USERNAME:
                user_attr = "username"
            case UserAttributes.EMAIL:
                user_attr = "email"
        if not user_attr or not cert_attr:
            return None
        return User.objects.filter(**{user_attr: cert_attr}).first()

    def _cert_to_dict(self, cert: Certificate) -> dict:
        """Represent a certificate in a dictionary, as certificate objects cannot be pickled"""
        return {
            "serial_number": str(cert.serial_number),
            "subject": cert.subject.rfc4514_string(),
            "issuer": cert.issuer.rfc4514_string(),
            "fingerprint_sha256": hexlify(cert.fingerprint(hashes.SHA256()), ":").decode("utf-8"),
            "fingerprint_sha1": hexlify(cert.fingerprint(hashes.SHA1()), ":").decode(  # nosec
                "utf-8"
            ),
        }

    def auth_user(self, user: User, cert: Certificate):
        self.executor.plan.context[PLAN_CONTEXT_PENDING_USER] = user
        self.executor.plan.context.setdefault(PLAN_CONTEXT_METHOD, "mtls")
        self.executor.plan.context.setdefault(PLAN_CONTEXT_METHOD_ARGS, {})
        self.executor.plan.context[PLAN_CONTEXT_METHOD_ARGS].update(
            {"certificate": self._cert_to_dict(cert)}
        )

    def enroll_prepare_user(self, cert: Certificate):
        self.executor.plan.context.setdefault(PLAN_CONTEXT_PROMPT, {})
        self.executor.plan.context[PLAN_CONTEXT_PROMPT].update(
            {
                "email": self.get_cert_attribute(cert, NameOID.EMAIL_ADDRESS),
                "name": self.get_cert_attribute(cert, NameOID.COMMON_NAME),
            }
        )
        self.executor.plan.context[PLAN_CONTEXT_CERTIFICATE] = self._cert_to_dict(cert)

    def get_cert_attribute(self, cert: Certificate, oid: ObjectIdentifier) -> str | None:
        attr = cert.subject.get_attributes_for_oid(oid)
        if len(attr) < 1:
            return None
        return str(attr[0].value)
    def dispatch(self, request, *args, **kwargs):
        stage: MutualTLSStage = self.executor.current_stage
        certs = [
            *self._parse_cert_xfcc(),
            *self._parse_cert_nginx(),
            *self._parse_cert_traefik(),
            *self._parse_cert_outpost(),
        ]
        authorities = self.get_authorities()
        if not authorities:
            self.logger.warning("No Certificate authority found")
            if stage.mode == TLSMode.OPTIONAL:
                return self.executor.stage_ok()
            if stage.mode == TLSMode.REQUIRED:
                return super().dispatch(request, *args, **kwargs)
        cert = self.validate_cert(authorities, certs)
        if not cert and stage.mode == TLSMode.REQUIRED:
            self.logger.warning("Client certificate required but no certificates given")
            return super().dispatch(
                request,
                *args,
                error_message=_("Certificate required but no certificate was given."),
                **kwargs,
            )
        if not cert and stage.mode == TLSMode.OPTIONAL:
            self.logger.info("No certificate given, continuing")
            return self.executor.stage_ok()
        existing_user = self.check_if_user(cert)
        if self.executor.flow.designation == FlowDesignation.ENROLLMENT:
            self.enroll_prepare_user(cert)
        elif existing_user:
            self.auth_user(existing_user, cert)
        else:
            return super().dispatch(
                request, *args, error_message=_("No user found for certificate."), **kwargs
            )
        return self.executor.stage_ok()

    def get_challenge(self, *args, error_message: str | None = None, **kwargs):
        return AccessDeniedChallenge(
            data={
                "component": "ak-stage-access-denied",
                "error_message": str(error_message or "Unknown error"),
            }
        )
@@ -1,31 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIFXDCCA0SgAwIBAgIUBmV7zREyC1SPr72/75/L9zpwV18wDQYJKoZIhvcNAQEL
BQAwRjEaMBgGA1UEAwwRYXV0aGVudGlrIFRlc3QgQ0ExEjAQBgNVBAoMCWF1dGhl
bnRpazEUMBIGA1UECwwLU2VsZi1zaWduZWQwHhcNMjUwNDI3MTgzMDUwWhcNMzUw
MzA3MTgzMDUwWjBGMRowGAYDVQQDDBFhdXRoZW50aWsgVGVzdCBDQTESMBAGA1UE
CgwJYXV0aGVudGlrMRQwEgYDVQQLDAtTZWxmLXNpZ25lZDCCAiIwDQYJKoZIhvcN
AQEBBQADggIPADCCAgoCggIBAMc0NxZj7j1mPu0aRToo8oMPdC3T99xgxnqdr18x
LV4pWyi/YLghgZHqNQY2xNP6JIlSeUZD6KFUYT2sPL4Av/zSg5zO8bl+/lf7ckje
O1/Bt5A8xtL0CpmpMDGiI6ibdDElaywM6AohisbxrV29pygSKGq2wugF/urqGtE+
5z4y5Kt6qMdKkd0iXT+WagbQTIUlykFKgB0+qqTLzDl01lVDa/DoLl8Hqp45mVx2
pqrGsSa3TCErLIv9hUlZklF7A8UV4ZB4JL20UKcP8dKzQClviNie17tpsUpOuy3A
SQ6+guWTHTLJNCSdLn1xIqc5q+f5wd2dIDf8zXCTHj+Xp0bJE3Vgaq5R31K9+b+1
2dDWz1KcNJaLEnw2+b0O8M64wTMLxhqOv7QfLUr6Pmg1ZymghjLcZ6bnU9e31Vza
hlPKhxjqYQUC4Kq+oaYF6qdUeJy+dsYf0iDv5tTC+eReZDWIjxTPrNpwA773ZwT7
WVmL7ULGpuP2g9rNvFBcZiN+i6d7CUoN+jd/iRdo79lrI0dfXiyy4bYgW/2HeZfF
HaOsc1xsoqnJdWbWkX/ooyaCjAfm07kS3HiOzz4q3QW4wgGrwV8lEraLPxYYeOQu
YcGMOM8NfnVkjc8gmyXUxedCje5Vz/Tu5fKrQEInnCmXxVsWbwr/LzEjMKAM/ivY
0TXxAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0G
A1UdDgQWBBTa+Ns6QzqlNvnTGszkouQQtZnVJDANBgkqhkiG9w0BAQsFAAOCAgEA
NpJEDMXjuEIzSzafkxSshvjnt5sMYmzmvjNoRlkxgN2YcWvPoxbalGAYzcpyggT2
6xZY8R4tvB1oNTCArqwf860kkofUoJCr88D/pU3Cv4JhjCWs4pmXTsvSqlBSlJbo
+jPBZwbn6it/6jcit6Be3rW2PtHe8tASd9Lf8/2r1ZvupXwPzcR84R4Z10ve2lqV
xxcWlMmBh51CaYI0b1/WTe9Ua+wgkCVkxbf9zNcDQXjxw2ICWK+nR/4ld4nmqVm2
C7nhvXwU8FAHl7ZgR2Z3PLrwPuhd+kd6NXQqNkS9A+n+1vSRLbRjmV8pwIPpdPEq
nslUAGJJBHDUBArxC3gOJSB+WtmaCfzDu2gepMf9Ng1H2ZhwSF/FH3v3fsJqZkzz
NBstT9KuNGQRYiCmAPJaoVAc9BoLa+BFML1govtWtpdmbFk8PZEcuUsP7iAZqFF1
uuldPyZ8huGpQSR6Oq2bILRHowfGY0npTZAyxg0Vs8UMy1HTwNOp9OuRtArMZmsJ
jFIx1QzRf9S1i6bYpOzOudoXj4ARkS1KmVExGjJFcIT0xlFSSERie2fEKSeEYOyG
G+PA2qRt/F51FGOMm1ZscjPXqk2kt3C4BFbz6Vvxsq7D3lmhvFLn4jVA8+OidsM0
YUrVMtWET/RkjEIbADbgRXxNUNo+jtQZDU9C1IiAdfk=
-----END CERTIFICATE-----
@@ -1,31 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIFWTCCA0GgAwIBAgIUDEnKCSmIXG/akySGes7bhOGrN/8wDQYJKoZIhvcNAQEL
BQAwRjEaMBgGA1UEAwwRYXV0aGVudGlrIFRlc3QgQ0ExEjAQBgNVBAoMCWF1dGhl
bnRpazEUMBIGA1UECwwLU2VsZi1zaWduZWQwHhcNMjUwNTE5MTIzODQ2WhcNMjYw
NTE1MTIzODQ2WjARMQ8wDQYDVQQDDAZjbGllbnQwggIiMA0GCSqGSIb3DQEBAQUA
A4ICDwAwggIKAoICAQCkPkS1V6l0gj0ulxMznkxkgrw4p9Tjd8teSsGZt02A2Eo6
7D8FbJ7pp3d5fYW/TWuEKVBLWTID6rijW5EGcdgTM5Jxf/QR+aZTEK6umQxUd4yO
mOtp+xVS3KlcsSej2dFpeE5h5VkZizHpvh5xkoAP8W5VtQLOVF0hIeumHnJmaeLj
+mhK9PBFpO7k9SFrYYhd/uLrYbIdANihbIO2Q74rNEJHewhFNM7oNSjjEWzRd/7S
qNdQij9JGrVG7u8YJJscEQHqyHMYFVCEMjxmsge5BO6Vx5OWmUE3wXPzb5TbyTS4
+yg88g9rYTUXrzz+poCyKpaur45qBsdw35lJ8nq69VJj2xJLGQDwoTgGSXRuPciC
3OilQI+Ma+j8qQGJxJ8WJxISlf1cuhp+V4ZUd1lawlM5hAXyXmHRlH4pun4y+g7O
O34+fE3pK25JjVCicMT/rC2A/sb95j/fHTzzJpbB70U0I50maTcIsOkyw6aiF//E
0ShTDz14x22SCMolUc6hxTDZvBB6yrcJHd7d9CCnFH2Sgo13QrtNJ/atXgm13HGh
wBzRwK38XUGl/J4pJaxAupTVCPriStUM3m0EYHNelRRUE91pbyeGT0rvOuv00uLw
Rj7K7hJZR8avTKWmKrVBVpq+gSojGW1DwBS0NiDNkZs0d/IjB1wkzczEgdZjXwID
AQABo3QwcjAfBgNVHSMEGDAWgBTa+Ns6QzqlNvnTGszkouQQtZnVJDAdBgNVHSUE
FjAUBggrBgEFBQcDAgYIKwYBBQUHAwEwEQYDVR0RBAowCIIGY2xpZW50MB0GA1Ud
DgQWBBT1xg5sXkypRBwvCxBuyfoanaiZ5jANBgkqhkiG9w0BAQsFAAOCAgEAvUAz
YwIjxY/0KHZDU8owdILVqKChzfLcy9OHNPyEI3TSOI8X6gNtBO+HE6r8aWGcC9vw
zzeIsNQ3UEjvRWi2r+vUVbiPTbFdZboNDSZv6ZmGHxwd85VsjXRGoXV6koCT/9zi
9/lCM1DwqwYSwBphMJdRVFRUMluSYk1oHflGeA18xgGuts4eFivJwhabGm1AdVVQ
/CYvqCuTxd/DCzWZBdyxYpDru64i/kyeJCt1pThKEFDWmpumFdBI4CxJ0OhxVSGp
dOXzK+Y6ULepxCvi6/OpSog52jQ6PnNd1ghiYtq7yO1T4GQz65M1vtHHVvQ3gfBE
AuKYQp6io7ypitRx+LpjsBQenyP4FFGfrq7pm90nLluOBOArfSdF0N+CP2wo/YFV
9BGf89OtvRi3BXCm2NXkE/Sc4We26tY8x7xNLOmNs8YOT0O3r/EQ690W9GIwRMx0
m0r/RXWn5V3o4Jib9r8eH9NzaDstD8g9dECcGfM4fHoM/DAGFaRrNcjMsS1APP3L
jp7+BfBSXtrz9V6rVJ3CBLXlLK0AuSm7bqd1MJsGA9uMLpsVZIUA+KawcmPGdPU+
NxdpBCtzyurQSUyaTLtVqSeP35gMAwaNzUDph8Uh+vHz+kRwgXS19OQvTaud5LJu
nQe4JNS+u5e2VDEBWUxt8NTpu6eShDN0iIEHtxA=
-----END CERTIFICATE-----
@@ -1,228 +0,0 @@
from unittest.mock import MagicMock, patch
from urllib.parse import quote_plus

from django.urls import reverse
from guardian.shortcuts import assign_perm

from authentik.core.models import User
from authentik.core.tests.utils import (
    create_test_brand,
    create_test_cert,
    create_test_flow,
    create_test_user,
)
from authentik.crypto.models import CertificateKeyPair
from authentik.enterprise.stages.mtls.models import (
    CertAttributes,
    MutualTLSStage,
    TLSMode,
    UserAttributes,
)
from authentik.enterprise.stages.mtls.stage import PLAN_CONTEXT_CERTIFICATE
from authentik.flows.models import FlowDesignation, FlowStageBinding
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
from authentik.flows.tests import FlowTestCase
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import load_fixture
from authentik.outposts.models import Outpost, OutpostType
from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT


class MTLSStageTests(FlowTestCase):

    def setUp(self):
        super().setUp()
        self.flow = create_test_flow(FlowDesignation.AUTHENTICATION)
        self.ca = CertificateKeyPair.objects.create(
            name=generate_id(),
            certificate_data=load_fixture("fixtures/ca.pem"),
        )
        self.stage = MutualTLSStage.objects.create(
            name=generate_id(),
            mode=TLSMode.REQUIRED,
            cert_attribute=CertAttributes.COMMON_NAME,
            user_attribute=UserAttributes.USERNAME,
        )

        self.stage.certificate_authorities.add(self.ca)
        self.binding = FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=0)
        self.client_cert = load_fixture("fixtures/cert_client.pem")
        # User matching the certificate
        User.objects.filter(username="client").delete()
        self.cert_user = create_test_user(username="client")
    def test_parse_xfcc(self):
        """Test authentik Proxy/Envoy's XFCC format"""
        with self.assertFlowFinishes() as plan:
            res = self.client.get(
                reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
                headers={"X-Forwarded-Client-Cert": f"Cert={quote_plus(self.client_cert)}"},
            )
            self.assertEqual(res.status_code, 200)
            self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
        self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)

    def test_parse_nginx(self):
        """Test nginx's format"""
        with self.assertFlowFinishes() as plan:
            res = self.client.get(
                reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
                headers={"SSL-Client-Cert": quote_plus(self.client_cert)},
            )
            self.assertEqual(res.status_code, 200)
            self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
        self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)

    def test_parse_traefik(self):
        """Test traefik's format"""
        with self.assertFlowFinishes() as plan:
            res = self.client.get(
                reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
                headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
            )
            self.assertEqual(res.status_code, 200)
            self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
        self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)

    def test_parse_outpost_object(self):
        """Test outposts's format"""
        outpost = Outpost.objects.create(name=generate_id(), type=OutpostType.PROXY)
        assign_perm("pass_outpost_certificate", outpost.user, self.stage)
        with patch(
            "authentik.root.middleware.ClientIPMiddleware.get_outpost_user",
            MagicMock(return_value=outpost.user),
        ):
            with self.assertFlowFinishes() as plan:
                res = self.client.get(
                    reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
                    headers={"X-Authentik-Outpost-Certificate": quote_plus(self.client_cert)},
                )
                self.assertEqual(res.status_code, 200)
                self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
            self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)
    def test_parse_outpost_global(self):
        """Test outposts's format"""
        outpost = Outpost.objects.create(name=generate_id(), type=OutpostType.PROXY)
        assign_perm("authentik_stages_mtls.pass_outpost_certificate", outpost.user)
        with patch(
            "authentik.root.middleware.ClientIPMiddleware.get_outpost_user",
            MagicMock(return_value=outpost.user),
        ):
            with self.assertFlowFinishes() as plan:
                res = self.client.get(
                    reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
                    headers={"X-Authentik-Outpost-Certificate": quote_plus(self.client_cert)},
                )
                self.assertEqual(res.status_code, 200)
                self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
            self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)

    def test_parse_outpost_no_perm(self):
        """Test outposts's format"""
        outpost = Outpost.objects.create(name=generate_id(), type=OutpostType.PROXY)
        with patch(
            "authentik.root.middleware.ClientIPMiddleware.get_outpost_user",
            MagicMock(return_value=outpost.user),
        ):
            res = self.client.get(
                reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
                headers={"X-Authentik-Outpost-Certificate": quote_plus(self.client_cert)},
            )
            self.assertEqual(res.status_code, 200)
            self.assertStageResponse(res, self.flow, component="ak-stage-access-denied")

    def test_invalid_cert(self):
        """Test invalid certificate"""
        cert = create_test_cert()
        with self.assertFlowFinishes() as plan:
            res = self.client.get(
                reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
                headers={"X-Forwarded-TLS-Client-Cert": quote_plus(cert.certificate_data)},
            )
            self.assertEqual(res.status_code, 200)
            self.assertStageResponse(res, self.flow, component="ak-stage-access-denied")
        self.assertNotIn(PLAN_CONTEXT_PENDING_USER, plan().context)
    def test_auth_no_user(self):
        """Test auth with no user"""
        User.objects.filter(username="client").delete()
        res = self.client.get(
            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
            headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
        )
        self.assertEqual(res.status_code, 200)
        self.assertStageResponse(res, self.flow, component="ak-stage-access-denied")

    def test_brand_ca(self):
        """Test using a CA from the brand"""
        self.stage.certificate_authorities.clear()

        brand = create_test_brand()
        brand.client_certificates.add(self.ca)
        with self.assertFlowFinishes() as plan:
            res = self.client.get(
                reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
                headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
            )
            self.assertEqual(res.status_code, 200)
            self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
        self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)

    def test_no_ca_optional(self):
        """Test using no CA Set"""
        self.stage.mode = TLSMode.OPTIONAL
        self.stage.certificate_authorities.clear()
        self.stage.save()
        res = self.client.get(
            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
            headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
        )
        self.assertEqual(res.status_code, 200)
        self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))

    def test_no_ca_required(self):
        """Test using no CA Set"""
        self.stage.certificate_authorities.clear()
        self.stage.save()
        res = self.client.get(
            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
            headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
        )
        self.assertEqual(res.status_code, 200)
        self.assertStageResponse(res, self.flow, component="ak-stage-access-denied")
    def test_no_cert_optional(self):
        """Test using no cert Set"""
        self.stage.mode = TLSMode.OPTIONAL
        self.stage.save()
        res = self.client.get(
            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
        )
        self.assertEqual(res.status_code, 200)
        self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))

    def test_enroll(self):
        """Test Enrollment flow"""
        self.flow.designation = FlowDesignation.ENROLLMENT
        self.flow.save()
        with self.assertFlowFinishes() as plan:
            res = self.client.get(
                reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
                headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
            )
            self.assertEqual(res.status_code, 200)
            self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
        self.assertEqual(plan().context[PLAN_CONTEXT_PROMPT], {"email": None, "name": "client"})
        self.assertEqual(
            plan().context[PLAN_CONTEXT_CERTIFICATE],
            {
                "fingerprint_sha1": "52:39:ca:1e:3a:1f:78:3a:9f:26:3b:c2:84:99:48:68:99:99:81:8a",
                "fingerprint_sha256": (
                    "c1:07:8b:7c:e9:02:57:87:1e:92:e5:81:83:21:bc:92:c7:47:65:e3:97:fb:05:97:6f:36:9e:b5:31:77:98:b7"
                ),
                "issuer": "OU=Self-signed,O=authentik,CN=authentik Test CA",
                "serial_number": "70153443448884702681996102271549704759327537151",
                "subject": "CN=client",
            },
        )
@@ -1,5 +0,0 @@
"""API URLs"""

from authentik.enterprise.stages.mtls.api import MutualTLSStageViewSet

api_urlpatterns = [("stages/mtls", MutualTLSStageViewSet)]
@@ -8,7 +8,6 @@ from django.test import TestCase
from django.utils.timezone import now
from rest_framework.exceptions import ValidationError

from authentik.core.models import User
from authentik.enterprise.license import LicenseKey
from authentik.enterprise.models import (
    THRESHOLD_READ_ONLY_WEEKS,
@@ -72,9 +71,9 @@ class TestEnterpriseLicense(TestCase):
    )
    def test_valid_multiple(self):
        """Check license verification"""
        lic = License.objects.create(key=generate_id(), expiry=expiry_valid)
        lic = License.objects.create(key=generate_id())
        self.assertTrue(lic.status.status().is_valid)
        lic2 = License.objects.create(key=generate_id(), expiry=expiry_valid)
        lic2 = License.objects.create(key=generate_id())
        self.assertTrue(lic2.status.status().is_valid)
        total = LicenseKey.get_total()
        self.assertEqual(total.internal_users, 200)
@@ -233,9 +232,7 @@ class TestEnterpriseLicense(TestCase):
    )
    def test_expiry_expired(self):
        """Check license verification"""
        User.objects.all().delete()
        License.objects.all().delete()
        License.objects.create(key=generate_id(), expiry=expiry_expired)
        License.objects.create(key=generate_id())
        self.assertEqual(LicenseKey.get_total().summary().status, LicenseUsageStatus.EXPIRED)

    @patch(
@@ -15,13 +15,13 @@ class MMDBContextProcessor(EventContextProcessor):
        self.reader: Reader | None = None
        self._last_mtime: float = 0.0
        self.logger = get_logger()
        self.load()
        self.open()

    def path(self) -> str | None:
        """Get the path to the MMDB file to load"""
        raise NotImplementedError

    def load(self):
    def open(self):
        """Get GeoIP Reader, if configured, otherwise none"""
        path = self.path()
        if path == "" or not path:
@@ -44,7 +44,7 @@ class MMDBContextProcessor(EventContextProcessor):
            diff = self._last_mtime < mtime
            if diff > 0:
                self.logger.info("Found new MMDB Database, reopening", diff=diff, path=path)
                self.load()
                self.open()
        except OSError as exc:
            self.logger.warning("Failed to check MMDB age", exc=exc)
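The load-to-open rename above doesn't change the reload logic: the processor remembers the database file's mtime and reopens the reader whenever the file on disk is newer. A standalone sketch of that check (open_reader() is a hypothetical stand-in for constructing the geoip2 Reader):

import os

class MMDBWatcher:
    def __init__(self, path: str):
        self.path = path
        self._last_mtime = 0.0
        self.open()

    def open(self):
        # Remember when we opened the file so we can detect a replacement later.
        self._last_mtime = os.stat(self.path).st_mtime
        self.reader = open_reader(self.path)  # hypothetical helper building the Reader

    def check_expired(self):
        try:
            if os.stat(self.path).st_mtime > self._last_mtime:
                self.open()  # database file was replaced, reopen it
        except OSError as exc:
            print(f"Failed to check MMDB age: {exc}")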
@@ -57,7 +57,7 @@ class LogEventSerializer(PassiveSerializer):


@contextmanager
def capture_logs(log_default_output=True) -> Generator[list[LogEvent]]:
def capture_logs(log_default_output=True) -> Generator[list[LogEvent], None, None]:
    """Capture log entries created"""
    logs = []
    cap = LogCapture()
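On the capture_logs() signature above: both annotations describe a generator that only yields. The single-argument form relies on the type-parameter defaults added to collections.abc.Generator in newer Python releases, while the three-argument form spells out the send and return types and works on older versions too. A quick illustration with a plain string list standing in for LogEvent:

from collections.abc import Generator
from contextlib import contextmanager

@contextmanager
def capture(sink: list[str]) -> Generator[list[str], None, None]:
    """Yield a list the caller can inspect after the with-block; nothing is sent or returned."""
    yield sink

with capture([]) as logs:
    logs.append("example entry")
print(logs)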
@@ -1,18 +0,0 @@
# Generated by Django 5.1.9 on 2025-05-27 12:52

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_flows", "0027_auto_20231028_1424"),
    ]

    operations = [
        migrations.AddField(
            model_name="flowtoken",
            name="revoke_on_execution",
            field=models.BooleanField(default=True),
        ),
    ]
@@ -303,10 +303,9 @@ class FlowToken(Token):

    flow = models.ForeignKey(Flow, on_delete=models.CASCADE)
    _plan = models.TextField()
    revoke_on_execution = models.BooleanField(default=True)

    @staticmethod
    def pickle(plan: "FlowPlan") -> str:
    def pickle(plan) -> str:
        """Pickle into string"""
        data = dumps(plan)
        return b64encode(data).decode()
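The FlowToken.pickle() hunk above stores the plan as base64-wrapped pickle data in _plan. A minimal round-trip sketch of that encoding (illustrative only; pickling is acceptable here because the data never leaves the server):

from base64 import b64decode, b64encode
from pickle import dumps, loads  # nosec - only ever applied to server-generated data

def pickle_plan(plan) -> str:
    return b64encode(dumps(plan)).decode()

def unpickle_plan(raw: str):
    return loads(b64decode(raw))

token_plan = pickle_plan({"pending_user": "client"})
assert unpickle_plan(token_plan) == {"pending_user": "client"}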
@@ -99,10 +99,9 @@ class ChallengeStageView(StageView):
            self.logger.debug("Got StageInvalidException", exc=exc)
            return self.executor.stage_invalid()
        if not challenge.is_valid():
            self.logger.error(
            self.logger.warning(
                "f(ch): Invalid challenge",
                errors=challenge.errors,
                challenge=challenge.data,
            )
        return HttpChallengeResponse(challenge)
@@ -15,7 +15,6 @@
        {% endblock %}
        <link rel="stylesheet" type="text/css" href="{% static 'dist/sfe/bootstrap.min.css' %}">
        <meta name="sentry-trace" content="{{ sentry_trace }}" />
        <link rel="prefetch" href="{{ flow_background_url }}" />
        {% include "base/header_js.html" %}
        <style>
          html,
@@ -23,7 +22,7 @@
            height: 100%;
          }
          body {
            background-image: url("{{ flow_background_url }}");
            background-image: url("{{ flow.background_url }}");
            background-repeat: no-repeat;
            background-size: cover;
          }

@@ -5,9 +5,9 @@

{% block head_before %}
{{ block.super }}
<link rel="prefetch" href="{{ flow_background_url }}" />
<link rel="prefetch" href="{{ flow.background_url }}" />
{% if flow.compatibility_mode and not inspector %}
<script>ShadyDOM = { force: true };</script>
<script>ShadyDOM = { force: !navigator.webdriver };</script>
{% endif %}
{% include "base/header_js.html" %}
<script>
@@ -21,7 +21,7 @@ window.authentik.flow = {
<script src="{% versioned_script 'dist/flow/FlowInterface-%v.js' %}" type="module"></script>
<style>
:root {
    --ak-flow-background: url("{{ flow_background_url }}");
    --ak-flow-background: url("{{ flow.background_url }}");
}
</style>
{% endblock %}
@@ -1,10 +1,7 @@
"""Test helpers"""

from collections.abc import Callable, Generator
from contextlib import contextmanager
from json import loads
from typing import Any
from unittest.mock import MagicMock, patch

from django.http.response import HttpResponse
from django.urls.base import reverse
@@ -12,8 +9,6 @@ from rest_framework.test import APITestCase

from authentik.core.models import User
from authentik.flows.models import Flow
from authentik.flows.planner import FlowPlan
from authentik.flows.views.executor import SESSION_KEY_PLAN


class FlowTestCase(APITestCase):
@@ -49,12 +44,3 @@ class FlowTestCase(APITestCase):
    def assertStageRedirects(self, response: HttpResponse, to: str) -> dict[str, Any]:
        """Wrapper around assertStageResponse that checks for a redirect"""
        return self.assertStageResponse(response, component="xak-flow-redirect", to=to)

    @contextmanager
    def assertFlowFinishes(self) -> Generator[Callable[[], FlowPlan]]:
        """Capture the flow plan before the flow finishes and return it"""
        try:
            with patch("authentik.flows.views.executor.FlowExecutorView.cancel", MagicMock()):
                yield lambda: self.client.session.get(SESSION_KEY_PLAN)
        finally:
            pass
@@ -146,7 +146,6 @@ class FlowExecutorView(APIView):
        except (AttributeError, EOFError, ImportError, IndexError) as exc:
            LOGGER.warning("f(exec): Failed to restore token plan", exc=exc)
        finally:
            if token.revoke_on_execution:
            token.delete()
        if not isinstance(plan, FlowPlan):
            return None

@@ -13,9 +13,7 @@ class FlowInterfaceView(InterfaceView):
    """Flow interface"""

    def get_context_data(self, **kwargs: Any) -> dict[str, Any]:
        flow = get_object_or_404(Flow, slug=self.kwargs.get("flow_slug"))
        kwargs["flow"] = flow
        kwargs["flow_background_url"] = flow.background_url(self.request)
        kwargs["flow"] = get_object_or_404(Flow, slug=self.kwargs.get("flow_slug"))
        kwargs["inspector"] = "inspector" in self.request.GET
        return super().get_context_data(**kwargs)


@@ -363,9 +363,6 @@ def django_db_config(config: ConfigLoader | None = None) -> dict:
        pool_options = config.get_dict_from_b64_json("postgresql.pool_options", True)
        if not pool_options:
            pool_options = True
    # FIXME: Temporarily force pool to be deactivated.
    # See https://github.com/goauthentik/authentik/issues/14320
    pool_options = False

    db = {
        "default": {

@@ -81,6 +81,7 @@ debugger: false

log_level: info

session_storage: cache
sessions:
  unauthenticated_age: days=1


@@ -17,7 +17,7 @@ from ldap3.core.exceptions import LDAPException
from redis.exceptions import ConnectionError as RedisConnectionError
from redis.exceptions import RedisError, ResponseError
from rest_framework.exceptions import APIException
from sentry_sdk import HttpTransport, get_current_scope
from sentry_sdk import HttpTransport
from sentry_sdk import init as sentry_sdk_init
from sentry_sdk.api import set_tag
from sentry_sdk.integrations.argv import ArgvIntegration
@@ -27,7 +27,6 @@ from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations.socket import SocketIntegration
from sentry_sdk.integrations.stdlib import StdlibIntegration
from sentry_sdk.integrations.threading import ThreadingIntegration
from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME
from structlog.stdlib import get_logger
from websockets.exceptions import WebSocketException

@@ -96,8 +95,6 @@ def traces_sampler(sampling_context: dict) -> float:
        return 0
    if _type == "websocket":
        return 0
    if CONFIG.get_bool("debug"):
        return 1
    return float(CONFIG.get("error_reporting.sample_rate", 0.1))


@@ -170,14 +167,3 @@ def before_send(event: dict, hint: dict) -> dict | None:
    if settings.DEBUG:
        return None
    return event


def get_http_meta():
    """Get sentry-related meta key-values"""
    scope = get_current_scope()
    meta = {
        SENTRY_TRACE_HEADER_NAME: scope.get_traceparent() or "",
    }
    if bag := scope.get_baggage():
        meta[BAGGAGE_HEADER_NAME] = bag.serialize()
    return meta

@@ -59,7 +59,7 @@ class PropertyMappingManager:
        request: HttpRequest | None,
        return_mapping: bool = False,
        **kwargs,
    ) -> Generator[tuple[dict, PropertyMapping]]:
    ) -> Generator[tuple[dict, PropertyMapping], None]:
        """Iterate over all mappings that were pre-compiled and
        execute all of them with the given context"""
        if not self.__has_compiled:

@@ -23,6 +23,7 @@ if TYPE_CHECKING:


class Direction(StrEnum):

    add = "add"
    remove = "remove"

@@ -36,16 +37,13 @@ SAFE_METHODS = [


class BaseOutgoingSyncClient[
    TModel: "Model",
    TConnection: "Model",
    TSchema: dict,
    TProvider: "OutgoingSyncProvider",
    TModel: "Model", TConnection: "Model", TSchema: dict, TProvider: "OutgoingSyncProvider"
]:
    """Basic Outgoing sync client Client"""

    provider: TProvider
    connection_type: type[TConnection]
    connection_attr: str
    connection_type_query: str
    mapper: PropertyMappingManager

    can_discover = False
@@ -65,7 +63,9 @@ class BaseOutgoingSyncClient[
    def write(self, obj: TModel) -> tuple[TConnection, bool]:
        """Write object to destination. Uses self.create and self.update, but
        can be overwritten for further logic"""
        connection = getattr(obj, self.connection_attr).filter(provider=self.provider).first()
        connection = self.connection_type.objects.filter(
            provider=self.provider, **{self.connection_type_query: obj}
        ).first()
        try:
            if not connection:
                connection = self.create(obj)

@@ -1,7 +1,6 @@
from collections.abc import Callable
from dataclasses import asdict

from celery import group
from celery.exceptions import Retry
from celery.result import allow_join_result
from django.core.paginator import Paginator
@@ -83,41 +82,21 @@ class SyncTasks:
                self.logger.debug("Failed to acquire sync lock, skipping", provider=provider.name)
                return
            try:
                messages.append(_("Syncing users"))
                user_results = (
                    group(
                        [
                            sync_objects.signature(
                for page in users_paginator.page_range:
                    messages.append(_("Syncing page {page} of users".format(page=page)))
                    for msg in sync_objects.apply_async(
                        args=(class_to_path(User), page, provider_pk),
                        time_limit=PAGE_TIMEOUT,
                        soft_time_limit=PAGE_TIMEOUT,
                            )
                            for page in users_paginator.page_range
                        ]
                    )
                    .apply_async()
                    .get()
                )
                for result in user_results:
                    for msg in result:
                    ).get():
                        messages.append(LogEvent(**msg))
                messages.append(_("Syncing groups"))
                group_results = (
                    group(
                        [
                            sync_objects.signature(
                for page in groups_paginator.page_range:
                    messages.append(_("Syncing page {page} of groups".format(page=page)))
                    for msg in sync_objects.apply_async(
                        args=(class_to_path(Group), page, provider_pk),
                        time_limit=PAGE_TIMEOUT,
                        soft_time_limit=PAGE_TIMEOUT,
                            )
                            for page in groups_paginator.page_range
                        ]
                    )
                    .apply_async()
                    .get()
                )
                for result in group_results:
                    for msg in result:
                    ).get():
                        messages.append(LogEvent(**msg))
            except TransientSyncException as exc:
                self.logger.warning("transient sync exception", exc=exc)
@@ -130,7 +109,7 @@ class SyncTasks:
    def sync_objects(
        self, object_type: str, page: int, provider_pk: int, override_dry_run=False, **filter
    ):
        _object_type: type[Model] = path_to_class(object_type)
        _object_type = path_to_class(object_type)
        self.logger = get_logger().bind(
            provider_type=class_to_path(self._provider_model),
            provider_pk=provider_pk,
@@ -153,19 +132,6 @@ class SyncTasks:
            self.logger.debug("starting discover")
            client.discover()
        self.logger.debug("starting sync for page", page=page)
        messages.append(
            asdict(
                LogEvent(
                    _(
                        "Syncing page {page} of {object_type}".format(
                            page=page, object_type=_object_type._meta.verbose_name_plural
                        )
                    ),
                    log_level="info",
                    logger=f"{provider._meta.verbose_name}@{object_type}",
                )
            )
        )
        for obj in paginator.page(page).object_list:
            obj: Model
            try:

@@ -494,88 +494,86 @@ class TestConfig(TestCase):
            },
        )

    # FIXME: Temporarily force pool to be deactivated.
    # See https://github.com/goauthentik/authentik/issues/14320
    # def test_db_pool(self):
    #     """Test DB Config with pool"""
    #     config = ConfigLoader()
    #     config.set("postgresql.host", "foo")
    #     config.set("postgresql.name", "foo")
    #     config.set("postgresql.user", "foo")
    #     config.set("postgresql.password", "foo")
    #     config.set("postgresql.port", "foo")
    #     config.set("postgresql.test.name", "foo")
    #     config.set("postgresql.use_pool", True)
    #     conf = django_db_config(config)
    #     self.assertEqual(
    #         conf,
    #         {
    #             "default": {
    #                 "ENGINE": "authentik.root.db",
    #                 "HOST": "foo",
    #                 "NAME": "foo",
    #                 "OPTIONS": {
    #                     "pool": True,
    #                     "sslcert": None,
    #                     "sslkey": None,
    #                     "sslmode": None,
    #                     "sslrootcert": None,
    #                 },
    #                 "PASSWORD": "foo",
    #                 "PORT": "foo",
    #                 "TEST": {"NAME": "foo"},
    #                 "USER": "foo",
    #                 "CONN_MAX_AGE": 0,
    #                 "CONN_HEALTH_CHECKS": False,
    #                 "DISABLE_SERVER_SIDE_CURSORS": False,
    #             }
    #         },
    #     )
    def test_db_pool(self):
        """Test DB Config with pool"""
        config = ConfigLoader()
        config.set("postgresql.host", "foo")
        config.set("postgresql.name", "foo")
        config.set("postgresql.user", "foo")
        config.set("postgresql.password", "foo")
        config.set("postgresql.port", "foo")
        config.set("postgresql.test.name", "foo")
        config.set("postgresql.use_pool", True)
        conf = django_db_config(config)
        self.assertEqual(
            conf,
            {
                "default": {
                    "ENGINE": "authentik.root.db",
                    "HOST": "foo",
                    "NAME": "foo",
                    "OPTIONS": {
                        "pool": True,
                        "sslcert": None,
                        "sslkey": None,
                        "sslmode": None,
                        "sslrootcert": None,
                    },
                    "PASSWORD": "foo",
                    "PORT": "foo",
                    "TEST": {"NAME": "foo"},
                    "USER": "foo",
                    "CONN_MAX_AGE": 0,
                    "CONN_HEALTH_CHECKS": False,
                    "DISABLE_SERVER_SIDE_CURSORS": False,
                }
            },
        )

    # def test_db_pool_options(self):
    #     """Test DB Config with pool"""
    #     config = ConfigLoader()
    #     config.set("postgresql.host", "foo")
    #     config.set("postgresql.name", "foo")
    #     config.set("postgresql.user", "foo")
    #     config.set("postgresql.password", "foo")
    #     config.set("postgresql.port", "foo")
    #     config.set("postgresql.test.name", "foo")
    #     config.set("postgresql.use_pool", True)
    #     config.set(
    #         "postgresql.pool_options",
    #         base64.b64encode(
    #             dumps(
    #                 {
    #                     "max_size": 15,
    #                 }
    #             ).encode()
    #         ).decode(),
    #     )
    #     conf = django_db_config(config)
    #     self.assertEqual(
    #         conf,
    #         {
    #             "default": {
    #                 "ENGINE": "authentik.root.db",
    #                 "HOST": "foo",
    #                 "NAME": "foo",
    #                 "OPTIONS": {
    #                     "pool": {
    #                         "max_size": 15,
    #                     },
    #                     "sslcert": None,
    #                     "sslkey": None,
    #                     "sslmode": None,
    #                     "sslrootcert": None,
    #                 },
    #                 "PASSWORD": "foo",
    #                 "PORT": "foo",
    #                 "TEST": {"NAME": "foo"},
    #                 "USER": "foo",
    #                 "CONN_MAX_AGE": 0,
    #                 "CONN_HEALTH_CHECKS": False,
    #                 "DISABLE_SERVER_SIDE_CURSORS": False,
    #             }
    #         },
    #     )
    def test_db_pool_options(self):
        """Test DB Config with pool"""
        config = ConfigLoader()
        config.set("postgresql.host", "foo")
        config.set("postgresql.name", "foo")
        config.set("postgresql.user", "foo")
        config.set("postgresql.password", "foo")
        config.set("postgresql.port", "foo")
        config.set("postgresql.test.name", "foo")
        config.set("postgresql.use_pool", True)
        config.set(
            "postgresql.pool_options",
            base64.b64encode(
                dumps(
                    {
                        "max_size": 15,
                    }
                ).encode()
            ).decode(),
        )
        conf = django_db_config(config)
        self.assertEqual(
            conf,
            {
                "default": {
                    "ENGINE": "authentik.root.db",
                    "HOST": "foo",
                    "NAME": "foo",
                    "OPTIONS": {
                        "pool": {
                            "max_size": 15,
                        },
                        "sslcert": None,
                        "sslkey": None,
                        "sslmode": None,
                        "sslrootcert": None,
                    },
                    "PASSWORD": "foo",
                    "PORT": "foo",
                    "TEST": {"NAME": "foo"},
                    "USER": "foo",
                    "CONN_MAX_AGE": 0,
                    "CONN_HEALTH_CHECKS": False,
                    "DISABLE_SERVER_SIDE_CURSORS": False,
                }
            },
        )

@@ -74,8 +74,6 @@ class OutpostConfig:
    kubernetes_ingress_annotations: dict[str, str] = field(default_factory=dict)
    kubernetes_ingress_secret_name: str = field(default="authentik-outpost-tls")
    kubernetes_ingress_class_name: str | None = field(default=None)
    kubernetes_httproute_annotations: dict[str, str] = field(default_factory=dict)
    kubernetes_httproute_parent_refs: list[dict[str, str]] = field(default_factory=list)
    kubernetes_service_type: str = field(default="ClusterIP")
    kubernetes_disabled_components: list[str] = field(default_factory=list)
    kubernetes_image_pull_secrets: list[str] = field(default_factory=list)

@@ -38,7 +38,6 @@ class TestOutpostWS(TransactionTestCase):
        )
        connected, _ = await communicator.connect()
        self.assertFalse(connected)
        await communicator.disconnect()

    async def test_auth_valid(self):
        """Test auth with token"""
@@ -49,7 +48,6 @@ class TestOutpostWS(TransactionTestCase):
        )
        connected, _ = await communicator.connect()
        self.assertTrue(connected)
        await communicator.disconnect()

    async def test_send(self):
        """Test sending of Hello"""

@@ -7,8 +7,10 @@ from django.db import migrations


def migrate_search_group(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
    from authentik.core.models import User
    from django.apps import apps as real_apps
    from django.contrib.auth.management import create_permissions
    from guardian.shortcuts import UserObjectPermission

    db_alias = schema_editor.connection.alias


@@ -50,4 +50,3 @@ AMR_PASSWORD = "pwd"  # nosec
AMR_MFA = "mfa"
AMR_OTP = "otp"
AMR_WEBAUTHN = "user"
AMR_SMART_CARD = "sc"

@@ -16,7 +16,6 @@ from authentik.providers.oauth2.constants import (
    ACR_AUTHENTIK_DEFAULT,
    AMR_MFA,
    AMR_PASSWORD,
    AMR_SMART_CARD,
    AMR_WEBAUTHN,
)
from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS
@@ -140,9 +139,8 @@ class IDToken:
                amr.append(AMR_PASSWORD)
            if method == "auth_webauthn_pwl":
                amr.append(AMR_WEBAUTHN)
            if "certificate" in method_args:
                amr.append(AMR_SMART_CARD)
            if "mfa_devices" in method_args:
                if len(amr) > 0:
                    amr.append(AMR_MFA)
            if amr:
                id_token.amr = amr

@@ -1,234 +0,0 @@
from dataclasses import asdict, dataclass, field
from typing import TYPE_CHECKING
from urllib.parse import urlparse

from dacite.core import from_dict
from kubernetes.client import ApiextensionsV1Api, CustomObjectsApi, V1ObjectMeta

from authentik.outposts.controllers.base import FIELD_MANAGER
from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler
from authentik.outposts.controllers.k8s.triggers import NeedsUpdate
from authentik.outposts.controllers.kubernetes import KubernetesController
from authentik.providers.proxy.models import ProxyMode, ProxyProvider

if TYPE_CHECKING:
    from authentik.outposts.controllers.kubernetes import KubernetesController


@dataclass(slots=True)
class RouteBackendRef:
    name: str
    port: int


@dataclass(slots=True)
class RouteSpecParentRefs:
    name: str
    sectionName: str | None = None
    port: int | None = None
    namespace: str | None = None
    kind: str = "Gateway"
    group: str = "gateway.networking.k8s.io"


@dataclass(slots=True)
class HTTPRouteSpecRuleMatchPath:
    type: str
    value: str


@dataclass(slots=True)
class HTTPRouteSpecRuleMatchHeader:
    name: str
    value: str
    type: str = "Exact"


@dataclass(slots=True)
class HTTPRouteSpecRuleMatch:
    path: HTTPRouteSpecRuleMatchPath
    headers: list[HTTPRouteSpecRuleMatchHeader]


@dataclass(slots=True)
class HTTPRouteSpecRule:
    backendRefs: list[RouteBackendRef]
    matches: list[HTTPRouteSpecRuleMatch]


@dataclass(slots=True)
class HTTPRouteSpec:
    parentRefs: list[RouteSpecParentRefs]
    hostnames: list[str]
    rules: list[HTTPRouteSpecRule]


@dataclass(slots=True)
class HTTPRouteMetadata:
    name: str
    namespace: str
    annotations: dict = field(default_factory=dict)
    labels: dict = field(default_factory=dict)


@dataclass(slots=True)
class HTTPRoute:
    apiVersion: str
    kind: str
    metadata: HTTPRouteMetadata
    spec: HTTPRouteSpec


class HTTPRouteReconciler(KubernetesObjectReconciler):
    """Kubernetes Gateway API HTTPRoute Reconciler"""

    def __init__(self, controller: "KubernetesController") -> None:
        super().__init__(controller)
        self.api_ex = ApiextensionsV1Api(controller.client)
        self.api = CustomObjectsApi(controller.client)
        self.crd_group = "gateway.networking.k8s.io"
        self.crd_version = "v1"
        self.crd_plural = "httproutes"

    @staticmethod
    def reconciler_name() -> str:
        return "httproute"

    @property
    def noop(self) -> bool:
        if not self.crd_exists():
            self.logger.debug("CRD doesn't exist")
            return True
        if not self.controller.outpost.config.kubernetes_httproute_parent_refs:
            self.logger.debug("HTTPRoute parentRefs not set.")
            return True
        return False

    def crd_exists(self) -> bool:
        """Check if the Gateway API resources exists"""
        return bool(
            len(
                self.api_ex.list_custom_resource_definition(
                    field_selector=f"metadata.name={self.crd_plural}.{self.crd_group}"
                ).items
            )
        )

    def reconcile(self, current: HTTPRoute, reference: HTTPRoute):
        super().reconcile(current, reference)
        if current.metadata.annotations != reference.metadata.annotations:
            raise NeedsUpdate()
        if current.spec.parentRefs != reference.spec.parentRefs:
            raise NeedsUpdate()
        if current.spec.hostnames != reference.spec.hostnames:
            raise NeedsUpdate()
        if current.spec.rules != reference.spec.rules:
            raise NeedsUpdate()

    def get_object_meta(self, **kwargs) -> V1ObjectMeta:
        return super().get_object_meta(
            **kwargs,
        )

    def get_reference_object(self) -> HTTPRoute:
        hostnames = []
        rules = []

        for proxy_provider in ProxyProvider.objects.filter(outpost__in=[self.controller.outpost]):
            proxy_provider: ProxyProvider
            external_host_name = urlparse(proxy_provider.external_host)
            if proxy_provider.mode in [ProxyMode.FORWARD_SINGLE, ProxyMode.FORWARD_DOMAIN]:
                rule = HTTPRouteSpecRule(
                    backendRefs=[RouteBackendRef(name=self.name, port=9000)],
                    matches=[
                        HTTPRouteSpecRuleMatch(
                            headers=[
                                HTTPRouteSpecRuleMatchHeader(
                                    name="Host",
                                    value=external_host_name.hostname,
                                )
                            ],
                            path=HTTPRouteSpecRuleMatchPath(
                                type="PathPrefix", value="/outpost.goauthentik.io"
                            ),
                        )
                    ],
                )
            else:
                rule = HTTPRouteSpecRule(
                    backendRefs=[RouteBackendRef(name=self.name, port=9000)],
                    matches=[
                        HTTPRouteSpecRuleMatch(
                            headers=[
                                HTTPRouteSpecRuleMatchHeader(
                                    name="Host",
                                    value=external_host_name.hostname,
                                )
                            ],
                            path=HTTPRouteSpecRuleMatchPath(type="PathPrefix", value="/"),
                        )
                    ],
                )
            hostnames.append(external_host_name.hostname)
            rules.append(rule)

        return HTTPRoute(
            apiVersion=f"{self.crd_group}/{self.crd_version}",
            kind="HTTPRoute",
            metadata=HTTPRouteMetadata(
                name=self.name,
                namespace=self.namespace,
                annotations=self.controller.outpost.config.kubernetes_httproute_annotations,
                labels=self.get_object_meta().labels,
            ),
            spec=HTTPRouteSpec(
                parentRefs=[
                    from_dict(RouteSpecParentRefs, spec)
                    for spec in self.controller.outpost.config.kubernetes_httproute_parent_refs
                ],
                hostnames=hostnames,
                rules=rules,
            ),
        )

    def create(self, reference: HTTPRoute):
        return self.api.create_namespaced_custom_object(
            group=self.crd_group,
            version=self.crd_version,
            plural=self.crd_plural,
            namespace=self.namespace,
            body=asdict(reference),
            field_manager=FIELD_MANAGER,
        )

    def delete(self, reference: HTTPRoute):
        return self.api.delete_namespaced_custom_object(
            group=self.crd_group,
            version=self.crd_version,
            plural=self.crd_plural,
            namespace=self.namespace,
            name=self.name,
        )

    def retrieve(self) -> HTTPRoute:
        return from_dict(
            HTTPRoute,
            self.api.get_namespaced_custom_object(
                group=self.crd_group,
                version=self.crd_version,
                plural=self.crd_plural,
                namespace=self.namespace,
                name=self.name,
            ),
        )

    def update(self, current: HTTPRoute, reference: HTTPRoute):
        return self.api.patch_namespaced_custom_object(
            group=self.crd_group,
            version=self.crd_version,
            plural=self.crd_plural,
            namespace=self.namespace,
            name=self.name,
            body=asdict(reference),
            field_manager=FIELD_MANAGER,
        )
@@ -47,8 +47,6 @@ class IngressReconciler(KubernetesObjectReconciler[V1Ingress]):
    def reconcile(self, current: V1Ingress, reference: V1Ingress):
        super().reconcile(current, reference)
        self._check_annotations(current, reference)
        if current.spec.ingress_class_name != reference.spec.ingress_class_name:
            raise NeedsUpdate()
        # Create a list of all expected host and tls hosts
        expected_hosts = []
        expected_hosts_tls = []

@@ -3,7 +3,6 @@
from authentik.outposts.controllers.base import DeploymentPort
from authentik.outposts.controllers.kubernetes import KubernetesController
from authentik.outposts.models import KubernetesServiceConnection, Outpost
from authentik.providers.proxy.controllers.k8s.httproute import HTTPRouteReconciler
from authentik.providers.proxy.controllers.k8s.ingress import IngressReconciler
from authentik.providers.proxy.controllers.k8s.traefik import TraefikMiddlewareReconciler

@@ -19,10 +18,8 @@ class ProxyKubernetesController(KubernetesController):
            DeploymentPort(9443, "https", "tcp"),
        ]
        self.reconcilers[IngressReconciler.reconciler_name()] = IngressReconciler
        self.reconcilers[HTTPRouteReconciler.reconciler_name()] = HTTPRouteReconciler
        self.reconcilers[TraefikMiddlewareReconciler.reconciler_name()] = (
            TraefikMiddlewareReconciler
        )
        self.reconcile_order.append(IngressReconciler.reconciler_name())
        self.reconcile_order.append(HTTPRouteReconciler.reconciler_name())
        self.reconcile_order.append(TraefikMiddlewareReconciler.reconciler_name())

@@ -66,10 +66,7 @@ class RACClientConsumer(AsyncWebsocketConsumer):
    def init_outpost_connection(self):
        """Initialize guac connection settings"""
        self.token = (
            ConnectionToken.filter_not_expired(
                token=self.scope["url_route"]["kwargs"]["token"],
                session__session__session_key=self.scope["session"].session_key,
            )
            ConnectionToken.filter_not_expired(token=self.scope["url_route"]["kwargs"]["token"])
            .select_related("endpoint", "provider", "session", "session__user")
            .first()
        )

@@ -166,6 +166,7 @@ class ConnectionToken(ExpiringModel):
        always_merger.merge(settings, default_settings)
        always_merger.merge(settings, self.endpoint.provider.settings)
        always_merger.merge(settings, self.endpoint.settings)
        always_merger.merge(settings, self.settings)

        def mapping_evaluator(mappings: QuerySet):
            for mapping in mappings:
@@ -190,7 +191,6 @@ class ConnectionToken(ExpiringModel):
        mapping_evaluator(
            RACPropertyMapping.objects.filter(endpoint__in=[self.endpoint]).order_by("name")
        )
        always_merger.merge(settings, self.settings)

        settings["drive-path"] = f"/tmp/connection/{self.token}"  # nosec
        settings["create-drive-path"] = "true"

@@ -90,6 +90,23 @@ class TestModels(TransactionTestCase):
                "resize-method": "display-update",
            },
        )
        # Set settings in token
        token.settings = {
            "level": "token",
        }
        token.save()
        self.assertEqual(
            token.get_settings(),
            {
                "hostname": self.endpoint.host.split(":")[0],
                "port": "1324",
                "client-name": f"authentik - {self.user}",
                "drive-path": path,
                "create-drive-path": "true",
                "level": "token",
                "resize-method": "display-update",
            },
        )
        # Set settings in property mapping (provider)
        mapping = RACPropertyMapping.objects.create(
            name=generate_id(),
@@ -134,22 +151,3 @@ class TestModels(TransactionTestCase):
                "resize-method": "display-update",
            },
        )
        # Set settings in token
        token.settings = {
            "level": "token",
        }
        token.save()
        self.assertEqual(
            token.get_settings(),
            {
                "hostname": self.endpoint.host.split(":")[0],
                "port": "1324",
                "client-name": f"authentik - {self.user}",
                "drive-path": path,
                "create-drive-path": "true",
                "foo": "true",
                "bar": "6",
                "resize-method": "display-update",
                "level": "token",
            },
        )

@@ -87,22 +87,3 @@ class TestRACViews(APITestCase):
        )
        body = loads(flow_response.content)
        self.assertEqual(body["component"], "ak-stage-access-denied")

    def test_different_session(self):
        """Test request"""
        self.client.force_login(self.user)
        response = self.client.get(
            reverse(
                "authentik_providers_rac:start",
                kwargs={"app": self.app.slug, "endpoint": str(self.endpoint.pk)},
            )
        )
        self.assertEqual(response.status_code, 302)
        flow_response = self.client.get(
            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug})
        )
        body = loads(flow_response.content)
        next_url = body["to"]
        self.client.logout()
        final_response = self.client.get(next_url)
        self.assertEqual(final_response.url, reverse("authentik_core:if-user"))

@@ -20,9 +20,6 @@ from authentik.lib.utils.time import timedelta_from_string
from authentik.policies.engine import PolicyEngine
from authentik.policies.views import PolicyAccessView
from authentik.providers.rac.models import ConnectionToken, Endpoint, RACProvider
from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT

PLAN_CONNECTION_SETTINGS = "connection_settings"


class RACStartView(PolicyAccessView):
@@ -68,10 +65,7 @@ class RACInterface(InterfaceView):

    def dispatch(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
        # Early sanity check to ensure token still exists
        token = ConnectionToken.filter_not_expired(
            token=self.kwargs["token"],
            session__session__session_key=request.session.session_key,
        ).first()
        token = ConnectionToken.filter_not_expired(token=self.kwargs["token"]).first()
        if not token:
            return redirect("authentik_core:if-user")
        self.token = token
@@ -115,15 +109,10 @@ class RACFinalStage(RedirectStage):
        return super().dispatch(request, *args, **kwargs)

    def get_challenge(self, *args, **kwargs) -> RedirectChallenge:
        settings = self.executor.plan.context.get(PLAN_CONNECTION_SETTINGS)
        if not settings:
            settings = self.executor.plan.context.get(PLAN_CONTEXT_PROMPT, {}).get(
                PLAN_CONNECTION_SETTINGS
            )
        token = ConnectionToken.objects.create(
            provider=self.provider,
            endpoint=self.endpoint,
            settings=settings or {},
            settings=self.executor.plan.context.get("connection_settings", {}),
            session=self.request.session["authenticatedsession"],
            expires=now() + timedelta_from_string(self.provider.connection_expiry),
            expiring=True,

@@ -34,7 +34,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):
    """SCIM client for groups"""

    connection_type = SCIMProviderGroup
    connection_attr = "scimprovidergroup_set"
    connection_type_query = "group"
    mapper: PropertyMappingManager

    def __init__(self, provider: SCIMProvider):
@@ -47,16 +47,15 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):

    def to_schema(self, obj: Group, connection: SCIMProviderGroup) -> SCIMGroupSchema:
        """Convert authentik user into SCIM"""
        raw_scim_group = super().to_schema(obj, connection)
        raw_scim_group = super().to_schema(
            obj,
            connection,
            schemas=(SCIM_GROUP_SCHEMA,),
        )
        try:
            scim_group = SCIMGroupSchema.model_validate(delete_none_values(raw_scim_group))
        except ValidationError as exc:
            raise StopSync(exc, obj) from exc
        if SCIM_GROUP_SCHEMA not in scim_group.schemas:
            scim_group.schemas.insert(0, SCIM_GROUP_SCHEMA)
        # As this might be unset, we need to tell pydantic it's set so ensure the schemas
        # are included, even if its just the defaults
        scim_group.schemas = list(scim_group.schemas)
        if not scim_group.externalId:
            scim_group.externalId = str(obj.pk)

@@ -200,7 +199,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):
            chunk_size = len(ops)
        if len(ops) < 1:
            return
        for chunk in batched(ops, chunk_size, strict=False):
        for chunk in batched(ops, chunk_size):
            req = PatchRequest(Operations=list(chunk))
            self._request(
                "PATCH",

@@ -18,7 +18,7 @@ class SCIMUserClient(SCIMClient[User, SCIMProviderUser, SCIMUserSchema]):
    """SCIM client for users"""

    connection_type = SCIMProviderUser
    connection_attr = "scimprovideruser_set"
    connection_type_query = "user"
    mapper: PropertyMappingManager

    def __init__(self, provider: SCIMProvider):
@@ -31,16 +31,15 @@ class SCIMUserClient(SCIMClient[User, SCIMProviderUser, SCIMUserSchema]):

    def to_schema(self, obj: User, connection: SCIMProviderUser) -> SCIMUserSchema:
        """Convert authentik user into SCIM"""
        raw_scim_user = super().to_schema(obj, connection)
        raw_scim_user = super().to_schema(
            obj,
            connection,
            schemas=(SCIM_USER_SCHEMA,),
        )
        try:
            scim_user = SCIMUserSchema.model_validate(delete_none_values(raw_scim_user))
        except ValidationError as exc:
            raise StopSync(exc, obj) from exc
        if SCIM_USER_SCHEMA not in scim_user.schemas:
            scim_user.schemas.insert(0, SCIM_USER_SCHEMA)
        # As this might be unset, we need to tell pydantic it's set so ensure the schemas
        # are included, even if its just the defaults
        scim_user.schemas = list(scim_user.schemas)
        if not scim_user.externalId:
            scim_user.externalId = str(obj.uid)
        return scim_user

@@ -116,7 +116,7 @@ class SCIMProvider(OutgoingSyncProvider, BackchannelProvider):
        if type == User:
            # Get queryset of all users with consistent ordering
            # according to the provider's settings
            base = User.objects.prefetch_related("scimprovideruser_set").all().exclude_anonymous()
            base = User.objects.all().exclude_anonymous()
            if self.exclude_users_service_account:
                base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude(
                    type=UserTypes.INTERNAL_SERVICE_ACCOUNT
@@ -126,7 +126,7 @@ class SCIMProvider(OutgoingSyncProvider, BackchannelProvider):
            return base.order_by("pk")
        if type == Group:
            # Get queryset of all groups with consistent ordering
            return Group.objects.prefetch_related("scimprovidergroup_set").all().order_by("pk")
            return Group.objects.all().order_by("pk")
        raise ValueError(f"Invalid type {type}")

    @property

@@ -91,57 +91,6 @@ class SCIMUserTests(TestCase):
            },
        )

    @Mocker()
    def test_user_create_custom_schema(self, mock: Mocker):
        """Test user creation with custom schema"""
        schema = SCIMMapping.objects.create(
            name="custom_schema",
            expression="""return {"schemas": ["foo"]}""",
        )
        self.provider.property_mappings.add(schema)
        scim_id = generate_id()
        mock.get(
            "https://localhost/ServiceProviderConfig",
            json={},
        )
        mock.post(
            "https://localhost/Users",
            json={
                "id": scim_id,
            },
        )
        uid = generate_id()
        user = User.objects.create(
            username=uid,
            name=f"{uid} {uid}",
            email=f"{uid}@goauthentik.io",
        )
        self.assertEqual(mock.call_count, 2)
        self.assertEqual(mock.request_history[0].method, "GET")
        self.assertEqual(mock.request_history[1].method, "POST")
        self.assertJSONEqual(
            mock.request_history[1].body,
            {
                "schemas": ["urn:ietf:params:scim:schemas:core:2.0:User", "foo"],
                "active": True,
                "emails": [
                    {
                        "primary": True,
                        "type": "other",
                        "value": f"{uid}@goauthentik.io",
                    }
                ],
                "externalId": user.uid,
                "name": {
                    "familyName": uid,
                    "formatted": f"{uid} {uid}",
                    "givenName": uid,
                },
                "displayName": f"{uid} {uid}",
                "userName": uid,
            },
        )

    @Mocker()
    def test_user_create_different_provider_same_id(self, mock: Mocker):
        """Test user creation with multiple providers that happen
@@ -435,7 +384,7 @@ class SCIMUserTests(TestCase):
                self.assertIn(request.method, SAFE_METHODS)
        task = SystemTask.objects.filter(uid=slugify(self.provider.name)).first()
        self.assertIsNotNone(task)
        drop_msg = task.messages[3]
        drop_msg = task.messages[2]
        self.assertEqual(drop_msg["event"], "Dropping mutating request due to dry run")
        self.assertIsNotNone(drop_msg["attributes"]["url"])
        self.assertIsNotNone(drop_msg["attributes"]["body"])

@@ -99,7 +99,6 @@ class RBACPermissionViewSet(ReadOnlyModelViewSet):
    filterset_class = PermissionFilter
    permission_classes = [IsAuthenticated]
    search_fields = [
        "name",
        "codename",
        "content_type__model",
        "content_type__app_label",

@@ -132,7 +132,7 @@ TENANT_CREATION_FAKES_MIGRATIONS = True
TENANT_BASE_SCHEMA = "template"
PUBLIC_SCHEMA_NAME = CONFIG.get("postgresql.default_schema")

GUARDIAN_MONKEY_PATCH_USER = False
GUARDIAN_MONKEY_PATCH = False

SPECTACULAR_SETTINGS = {
    "TITLE": "authentik",
@@ -424,7 +424,7 @@ else:
        "BACKEND": "authentik.root.storages.FileStorage",
        "OPTIONS": {
            "location": Path(CONFIG.get("storage.media.file.path")),
            "base_url": CONFIG.get("web.path", "/") + "media/",
            "base_url": "/media/",
        },
    }
    # Compatibility for apps not supporting top-level STORAGES

@ -3,46 +3,25 @@
 | 
			
		||||
import os
 | 
			
		||||
from argparse import ArgumentParser
 | 
			
		||||
from unittest import TestCase
 | 
			
		||||
from unittest.mock import patch
 | 
			
		||||
 | 
			
		||||
import pytest
 | 
			
		||||
from django.conf import settings
 | 
			
		||||
from django.contrib.contenttypes.models import ContentType
 | 
			
		||||
from django.test.runner import DiscoverRunner
 | 
			
		||||
from structlog.stdlib import get_logger
 | 
			
		||||
 | 
			
		||||
from authentik.events.context_processors.asn import ASN_CONTEXT_PROCESSOR
 | 
			
		||||
from authentik.events.context_processors.geoip import GEOIP_CONTEXT_PROCESSOR
 | 
			
		||||
from authentik.lib.config import CONFIG
 | 
			
		||||
from authentik.lib.sentry import sentry_init
 | 
			
		||||
from authentik.root.signals import post_startup, pre_startup, startup
 | 
			
		||||
from tests.e2e.utils import get_docker_tag
 | 
			
		||||
 | 
			
		||||
# globally set maxDiff to none to show full assert error
 | 
			
		||||
TestCase.maxDiff = None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_docker_tag() -> str:
    """Get docker-tag based off of CI variables"""
    env_pr_branch = "GITHUB_HEAD_REF"
    default_branch = "GITHUB_REF"
    branch_name = os.environ.get(default_branch, "main")
    if os.environ.get(env_pr_branch, "") != "":
        branch_name = os.environ[env_pr_branch]
    branch_name = branch_name.replace("refs/heads/", "").replace("/", "-")
    return f"gh-{branch_name}"


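For context, the get_docker_tag helper above only inspects GitHub Actions environment variables; a minimal sketch of the tags it yields, using hypothetical environment values that are not part of the diff:

    import os

    # Push build: only GITHUB_REF is set (hypothetical value)
    os.environ["GITHUB_REF"] = "refs/heads/main"
    print(get_docker_tag())  # -> "gh-main"

    # Pull request build: GITHUB_HEAD_REF takes precedence (hypothetical value)
    os.environ["GITHUB_HEAD_REF"] = "feature/ldap-sync"
    print(get_docker_tag())  # -> "gh-feature-ldap-sync"
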
def patched__get_ct_cached(app_label, codename):
    """Caches `ContentType` instances like its `QuerySet` does."""
    return ContentType.objects.get(app_label=app_label, permission__codename=codename)


class PytestTestRunner(DiscoverRunner):  # pragma: no cover
    """Runs pytest to discover and run tests."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.logger = get_logger().bind(runner="pytest")

        self.args = []
        if self.failfast:
@ -52,8 +31,6 @@ class PytestTestRunner(DiscoverRunner):  # pragma: no cover

        if kwargs.get("randomly_seed", None):
            self.args.append(f"--randomly-seed={kwargs['randomly_seed']}")
        if kwargs.get("no_capture", False):
            self.args.append("--capture=no")

        settings.TEST = True
        settings.CELERY["task_always_eager"] = True
@ -69,10 +46,6 @@ class PytestTestRunner(DiscoverRunner):  # pragma: no cover
        CONFIG.set("error_reporting.sample_rate", 0)
        CONFIG.set("error_reporting.environment", "testing")
        CONFIG.set("error_reporting.send_pii", True)

        ASN_CONTEXT_PROCESSOR.load()
        GEOIP_CONTEXT_PROCESSOR.load()

        sentry_init()

        pre_startup.send(sender=self, mode="test")
@ -91,11 +64,6 @@ class PytestTestRunner(DiscoverRunner):  # pragma: no cover
            "Default behaviour: use random.Random().getrandbits(32), so the seed is"
            "different on each run.",
        )
        parser.add_argument(
            "--no-capture",
            action="store_true",
            help="Disable any capturing of stdout/stderr during tests.",
        )

    def run_tests(self, test_labels, extra_tests=None, **kwargs):
        """Run pytest and return the exitcode.
@ -138,10 +106,4 @@ class PytestTestRunner(DiscoverRunner):  # pragma: no cover
                    f"path instead."
                )

        self.logger.info("Running tests", test_files=self.args)
        with patch("guardian.shortcuts._get_ct_cached", patched__get_ct_cached):
            try:
        return pytest.main(self.args)
            except Exception as e:
                self.logger.error("Error running tests", error=str(e), test_files=self.args)
                return 1

@ -317,7 +317,7 @@ class KerberosSource(Source):
                usage="accept", name=name, store=self.get_gssapi_store()
            )
        except gssapi.exceptions.GSSError as exc:
            LOGGER.warning("GSSAPI credentials failure", exc=exc)
            LOGGER.warn("GSSAPI credentials failure", exc=exc)
            return None


@ -103,7 +103,6 @@ class LDAPSourceSerializer(SourceSerializer):
            "user_object_filter",
            "group_object_filter",
            "group_membership_field",
            "user_membership_attribute",
            "object_uniqueness_field",
            "password_login_update_internal_password",
            "sync_users",
@ -112,7 +111,6 @@ class LDAPSourceSerializer(SourceSerializer):
            "sync_parent_group",
            "connectivity",
            "lookup_groups_from_user",
            "delete_not_found_objects",
        ]
        extra_kwargs = {"bind_password": {"write_only": True}}

@ -140,7 +138,6 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
        "user_object_filter",
        "group_object_filter",
        "group_membership_field",
        "user_membership_attribute",
        "object_uniqueness_field",
        "password_login_update_internal_password",
        "sync_users",
@ -150,7 +147,6 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
        "user_property_mappings",
        "group_property_mappings",
        "lookup_groups_from_user",
        "delete_not_found_objects",
    ]
    search_fields = ["name", "slug"]
    ordering = ["name"]

@ -1,48 +0,0 @@
# Generated by Django 5.1.9 on 2025-05-28 08:15

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0048_delete_oldauthenticatedsession_content_type"),
        ("authentik_sources_ldap", "0008_groupldapsourceconnection_userldapsourceconnection"),
    ]

    operations = [
        migrations.AddField(
            model_name="groupldapsourceconnection",
            name="validated_by",
            field=models.UUIDField(
                blank=True,
                help_text="Unique ID used while checking if this object still exists in the directory.",
                null=True,
            ),
        ),
        migrations.AddField(
            model_name="ldapsource",
            name="delete_not_found_objects",
            field=models.BooleanField(
                default=False,
                help_text="Delete authentik users and groups which were previously supplied by this source, but are now missing from it.",
            ),
        ),
        migrations.AddField(
            model_name="userldapsourceconnection",
            name="validated_by",
            field=models.UUIDField(
                blank=True,
                help_text="Unique ID used while checking if this object still exists in the directory.",
                null=True,
            ),
        ),
        migrations.AddIndex(
            model_name="groupldapsourceconnection",
            index=models.Index(fields=["validated_by"], name="authentik_s_validat_b70447_idx"),
        ),
        migrations.AddIndex(
            model_name="userldapsourceconnection",
            index=models.Index(fields=["validated_by"], name="authentik_s_validat_ff2ebc_idx"),
        ),
    ]
@ -1,32 +0,0 @@
# Generated by Django 5.1.9 on 2025-05-29 11:22

from django.apps.registry import Apps
from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor


def set_user_membership_attribute(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
    LDAPSource = apps.get_model("authentik_sources_ldap", "LDAPSource")
    db_alias = schema_editor.connection.alias

    LDAPSource.objects.using(db_alias).filter(group_membership_field="memberUid").all().update(
        user_membership_attribute="ldap_uniq"
    )


class Migration(migrations.Migration):
    dependencies = [
        ("authentik_sources_ldap", "0009_groupldapsourceconnection_validated_by_and_more"),
    ]

    operations = [
        migrations.AddField(
            model_name="ldapsource",
            name="user_membership_attribute",
            field=models.TextField(
                default="distinguishedName",
                help_text="Attribute which matches the value of `group_membership_field`.",
            ),
        ),
        migrations.RunPython(set_user_membership_attribute, migrations.RunPython.noop),
    ]
@ -100,10 +100,6 @@ class LDAPSource(Source):
        default="(objectClass=person)",
        help_text=_("Consider Objects matching this filter to be Users."),
    )
    user_membership_attribute = models.TextField(
        default=LDAP_DISTINGUISHED_NAME,
        help_text=_("Attribute which matches the value of `group_membership_field`."),
    )
    group_membership_field = models.TextField(
        default="member", help_text=_("Field which contains members of a group.")
    )
@ -141,14 +137,6 @@ class LDAPSource(Source):
        ),
    )

    delete_not_found_objects = models.BooleanField(
        default=False,
        help_text=_(
            "Delete authentik users and groups which were previously supplied by this source, "
            "but are now missing from it."
        ),
    )

    @property
    def component(self) -> str:
        return "ak-source-ldap-form"
@ -333,12 +321,6 @@ class LDAPSourcePropertyMapping(PropertyMapping):


class UserLDAPSourceConnection(UserSourceConnection):
    validated_by = models.UUIDField(
        null=True,
        blank=True,
        help_text=_("Unique ID used while checking if this object still exists in the directory."),
    )

    @property
    def serializer(self) -> type[Serializer]:
        from authentik.sources.ldap.api import (
@ -350,18 +332,9 @@ class UserLDAPSourceConnection(UserSourceConnection):
    class Meta:
        verbose_name = _("User LDAP Source Connection")
        verbose_name_plural = _("User LDAP Source Connections")
        indexes = [
            models.Index(fields=["validated_by"]),
        ]


class GroupLDAPSourceConnection(GroupSourceConnection):
    validated_by = models.UUIDField(
        null=True,
        blank=True,
        help_text=_("Unique ID used while checking if this object still exists in the directory."),
    )

    @property
    def serializer(self) -> type[Serializer]:
        from authentik.sources.ldap.api import (
@ -373,6 +346,3 @@ class GroupLDAPSourceConnection(GroupSourceConnection):
    class Meta:
        verbose_name = _("Group LDAP Source Connection")
        verbose_name_plural = _("Group LDAP Source Connections")
        indexes = [
            models.Index(fields=["validated_by"]),
        ]

@ -9,7 +9,7 @@ from structlog.stdlib import BoundLogger, get_logger
from authentik.core.sources.mapper import SourceMapper
from authentik.lib.config import CONFIG
from authentik.lib.sync.mapper import PropertyMappingManager
from authentik.sources.ldap.models import LDAPSource, flatten
from authentik.sources.ldap.models import LDAPSource


class BaseLDAPSynchronizer:
@ -77,16 +77,6 @@ class BaseLDAPSynchronizer:
        """Get objects from LDAP, implemented in subclass"""
        raise NotImplementedError()

    def get_attributes(self, object):
        if "attributes" not in object:
            return
        return object.get("attributes", {})

    def get_identifier(self, attributes: dict):
        if not attributes.get(self._source.object_uniqueness_field):
            return
        return flatten(attributes[self._source.object_uniqueness_field])

    def search_paginator(  # noqa: PLR0913
        self,
        search_base,

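The get_attributes/get_identifier helpers removed in the hunk above resolve a raw search entry down to the source's uniqueness value; a minimal usage sketch, assuming a hypothetical synchronizer instance and an ldap3-style entry dict, and assuming flatten (imported alongside LDAPSource in the removed import line) collapses list-valued attributes to a single value:

    # Hypothetical ldap3-style search result entry, for illustration only
    entry = {
        "dn": "cn=jdoe,ou=users,dc=example,dc=org",
        "attributes": {"objectSid": "S-1-5-21-1111", "cn": "jdoe"},
    }
    attributes = synchronizer.get_attributes(entry)
    # attributes is entry["attributes"]; entries without that key yield None
    identifier = synchronizer.get_identifier(attributes)
    # identifier is the flattened value of the source's object_uniqueness_field,
    # e.g. "S-1-5-21-1111" when that field is "objectSid"
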
Some files were not shown because too many files have changed in this diff