Compare commits
	
		
			1 Commits
		
	
	
		
			policies/o
			...
			safari-loc
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| 9deed34479 | 
| @ -1,16 +1,16 @@ | ||||
| [bumpversion] | ||||
| current_version = 2025.6.0 | ||||
| current_version = 2025.2.4 | ||||
| tag = True | ||||
| commit = True | ||||
| parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? | ||||
| serialize = | ||||
| serialize =  | ||||
| 	{major}.{minor}.{patch}-{rc_t}{rc_n} | ||||
| 	{major}.{minor}.{patch} | ||||
| message = release: {new_version} | ||||
| tag_name = version/{new_version} | ||||
|  | ||||
| [bumpversion:part:rc_t] | ||||
| values = | ||||
| values =  | ||||
| 	rc | ||||
| 	final | ||||
| optional_value = final | ||||
|  | ||||
| @ -5,7 +5,6 @@ dist/** | ||||
| build/** | ||||
| build_docs/** | ||||
| *Dockerfile | ||||
| **/*Dockerfile | ||||
| blueprints/local | ||||
| .git | ||||
| !gen-ts-api/node_modules | ||||
|  | ||||
							
								
								
									
										2
									
								
								.github/actions/setup/action.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/actions/setup/action.yml
									
									
									
									
										vendored
									
									
								
							| @ -36,7 +36,7 @@ runs: | ||||
|       with: | ||||
|         go-version-file: "go.mod" | ||||
|     - name: Setup docker cache | ||||
|       uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7 | ||||
|       uses: ScribeMD/docker-cache@0.5.0 | ||||
|       with: | ||||
|         key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }} | ||||
|     - name: Setup dependencies | ||||
|  | ||||
							
								
								
									
										26
									
								
								.github/dependabot.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										26
									
								
								.github/dependabot.yml
									
									
									
									
										vendored
									
									
								
							| @ -23,13 +23,7 @@ updates: | ||||
|   - package-ecosystem: npm | ||||
|     directories: | ||||
|       - "/web" | ||||
|       - "/web/packages/sfe" | ||||
|       - "/web/packages/core" | ||||
|       - "/web/packages/esbuild-plugin-live-reload" | ||||
|       - "/packages/prettier-config" | ||||
|       - "/packages/tsconfig" | ||||
|       - "/packages/docusaurus-config" | ||||
|       - "/packages/eslint-config" | ||||
|       - "/web/sfe" | ||||
|     schedule: | ||||
|       interval: daily | ||||
|       time: "04:00" | ||||
| @ -74,9 +68,6 @@ updates: | ||||
|       wdio: | ||||
|         patterns: | ||||
|           - "@wdio/*" | ||||
|       goauthentik: | ||||
|         patterns: | ||||
|           - "@goauthentik/*" | ||||
|   - package-ecosystem: npm | ||||
|     directory: "/website" | ||||
|     schedule: | ||||
| @ -97,9 +88,6 @@ updates: | ||||
|           - "swc-*" | ||||
|           - "lightningcss*" | ||||
|           - "@rspack/binding*" | ||||
|       goauthentik: | ||||
|         patterns: | ||||
|           - "@goauthentik/*" | ||||
|   - package-ecosystem: npm | ||||
|     directory: "/lifecycle/aws" | ||||
|     schedule: | ||||
| @ -130,15 +118,3 @@ updates: | ||||
|       prefix: "core:" | ||||
|     labels: | ||||
|       - dependencies | ||||
|   - package-ecosystem: docker-compose | ||||
|     directories: | ||||
|       # - /scripts # Maybe | ||||
|       - /tests/e2e | ||||
|     schedule: | ||||
|       interval: daily | ||||
|       time: "04:00" | ||||
|     open-pull-requests-limit: 10 | ||||
|     commit-message: | ||||
|       prefix: "core:" | ||||
|     labels: | ||||
|       - dependencies | ||||
|  | ||||
							
								
								
									
										1
									
								
								.github/workflows/api-ts-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.github/workflows/api-ts-publish.yml
									
									
									
									
										vendored
									
									
								
							| @ -53,7 +53,6 @@ jobs: | ||||
|           signoff: true | ||||
|           # ID from https://api.github.com/users/authentik-automation[bot] | ||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> | ||||
|           labels: dependencies | ||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||
|         with: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
|  | ||||
							
								
								
									
										17
									
								
								.github/workflows/ci-main.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										17
									
								
								.github/workflows/ci-main.yml
									
									
									
									
										vendored
									
									
								
							| @ -62,7 +62,6 @@ jobs: | ||||
|         psql: | ||||
|           - 15-alpine | ||||
|           - 16-alpine | ||||
|           - 17-alpine | ||||
|         run_id: [1, 2, 3, 4, 5] | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
| @ -71,18 +70,22 @@ jobs: | ||||
|       - name: checkout stable | ||||
|         run: | | ||||
|           # Copy current, latest config to local | ||||
|           # Temporarly comment the .github backup while migrating to uv | ||||
|           cp authentik/lib/default.yml local.env.yml | ||||
|           cp -R .github .. | ||||
|           # cp -R .github .. | ||||
|           cp -R scripts .. | ||||
|           git checkout $(git tag --sort=version:refname | grep '^version/' | grep -vE -- '-rc[0-9]+$' | tail -n1) | ||||
|           rm -rf .github/ scripts/ | ||||
|           mv ../.github ../scripts . | ||||
|           # rm -rf .github/ scripts/ | ||||
|           # mv ../.github ../scripts . | ||||
|           rm -rf scripts/ | ||||
|           mv ../scripts . | ||||
|       - name: Setup authentik env (stable) | ||||
|         uses: ./.github/actions/setup | ||||
|         with: | ||||
|           postgresql_version: ${{ matrix.psql }} | ||||
|         continue-on-error: true | ||||
|       - name: run migrations to stable | ||||
|         run: uv run python -m lifecycle.migrate | ||||
|         run: poetry run python -m lifecycle.migrate | ||||
|       - name: checkout current code | ||||
|         run: | | ||||
|           set -x | ||||
| @ -117,7 +120,6 @@ jobs: | ||||
|         psql: | ||||
|           - 15-alpine | ||||
|           - 16-alpine | ||||
|           - 17-alpine | ||||
|         run_id: [1, 2, 3, 4, 5] | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
| @ -202,7 +204,7 @@ jobs: | ||||
|         uses: actions/cache@v4 | ||||
|         with: | ||||
|           path: web/dist | ||||
|           key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b | ||||
|           key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }} | ||||
|       - name: prepare web ui | ||||
|         if: steps.cache-web.outputs.cache-hit != 'true' | ||||
|         working-directory: web | ||||
| @ -210,7 +212,6 @@ jobs: | ||||
|           npm ci | ||||
|           make -C .. gen-client-ts | ||||
|           npm run build | ||||
|           npm run build:sfe | ||||
|       - name: run e2e | ||||
|         run: | | ||||
|           uv run coverage run manage.py test ${{ matrix.job.glob }} | ||||
|  | ||||
							
								
								
									
										2
									
								
								.github/workflows/ci-outpost.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/workflows/ci-outpost.yml
									
									
									
									
										vendored
									
									
								
							| @ -29,7 +29,7 @@ jobs: | ||||
|       - name: Generate API | ||||
|         run: make gen-client-go | ||||
|       - name: golangci-lint | ||||
|         uses: golangci/golangci-lint-action@v8 | ||||
|         uses: golangci/golangci-lint-action@v7 | ||||
|         with: | ||||
|           version: latest | ||||
|           args: --timeout 5000s --verbose | ||||
|  | ||||
							
								
								
									
										62
									
								
								.github/workflows/ci-website.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										62
									
								
								.github/workflows/ci-website.yml
									
									
									
									
										vendored
									
									
								
							| @ -41,60 +41,32 @@ jobs: | ||||
|       - name: test | ||||
|         working-directory: website/ | ||||
|         run: npm test | ||||
|   build-container: | ||||
|   build: | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
|       # Needed to upload container images to ghcr.io | ||||
|       packages: write | ||||
|       # Needed for attestation | ||||
|       id-token: write | ||||
|       attestations: write | ||||
|     name: ${{ matrix.job }} | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         job: | ||||
|           - build | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: actions/setup-node@v4 | ||||
|         with: | ||||
|           ref: ${{ github.event.pull_request.head.sha }} | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.6.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/dev-docs | ||||
|       - name: Login to Container Registry | ||||
|         if: ${{ steps.ev.outputs.shouldPush == 'true' }} | ||||
|         uses: docker/login-action@v3 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
|           password: ${{ secrets.GITHUB_TOKEN }} | ||||
|       - name: Build Docker Image | ||||
|         id: push | ||||
|         uses: docker/build-push-action@v6 | ||||
|         with: | ||||
|           tags: ${{ steps.ev.outputs.imageTags }} | ||||
|           file: website/Dockerfile | ||||
|           push: ${{ steps.ev.outputs.shouldPush == 'true' }} | ||||
|           platforms: linux/amd64,linux/arm64 | ||||
|           context: . | ||||
|           cache-from: type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache | ||||
|           cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache,mode=max' || '' }} | ||||
|       - uses: actions/attest-build-provenance@v2 | ||||
|         id: attest | ||||
|         if: ${{ steps.ev.outputs.shouldPush == 'true' }} | ||||
|         with: | ||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} | ||||
|           subject-digest: ${{ steps.push.outputs.digest }} | ||||
|           push-to-registry: true | ||||
|           node-version-file: website/package.json | ||||
|           cache: "npm" | ||||
|           cache-dependency-path: website/package-lock.json | ||||
|       - working-directory: website/ | ||||
|         run: npm ci | ||||
|       - name: build | ||||
|         working-directory: website/ | ||||
|         run: npm run ${{ matrix.job }} | ||||
|   ci-website-mark: | ||||
|     if: always() | ||||
|     needs: | ||||
|       - lint | ||||
|       - test | ||||
|       - build-container | ||||
|       - build | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: re-actors/alls-green@release/v1 | ||||
|  | ||||
| @ -37,7 +37,6 @@ jobs: | ||||
|           signoff: true | ||||
|           # ID from https://api.github.com/users/authentik-automation[bot] | ||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> | ||||
|           labels: dependencies | ||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||
|         with: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
|  | ||||
							
								
								
									
										1
									
								
								.github/workflows/image-compress.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.github/workflows/image-compress.yml
									
									
									
									
										vendored
									
									
								
							| @ -53,7 +53,6 @@ jobs: | ||||
|           body: ${{ steps.compress.outputs.markdown }} | ||||
|           delete-branch: true | ||||
|           signoff: true | ||||
|           labels: dependencies | ||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||
|         if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}" | ||||
|         with: | ||||
|  | ||||
							
								
								
									
										24
									
								
								.github/workflows/packages-npm-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										24
									
								
								.github/workflows/packages-npm-publish.yml
									
									
									
									
										vendored
									
									
								
							| @ -3,11 +3,10 @@ on: | ||||
|   push: | ||||
|     branches: [main] | ||||
|     paths: | ||||
|       - packages/docusaurus-config/** | ||||
|       - packages/eslint-config/** | ||||
|       - packages/prettier-config/** | ||||
|       - packages/tsconfig/** | ||||
|       - web/packages/esbuild-plugin-live-reload/** | ||||
|       - packages/docusaurus-config | ||||
|       - packages/eslint-config | ||||
|       - packages/prettier-config | ||||
|       - packages/tsconfig | ||||
|   workflow_dispatch: | ||||
| jobs: | ||||
|   publish: | ||||
| @ -17,28 +16,27 @@ jobs: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         package: | ||||
|           - packages/docusaurus-config | ||||
|           - packages/eslint-config | ||||
|           - packages/prettier-config | ||||
|           - packages/tsconfig | ||||
|           - web/packages/esbuild-plugin-live-reload | ||||
|           - docusaurus-config | ||||
|           - eslint-config | ||||
|           - prettier-config | ||||
|           - tsconfig | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           fetch-depth: 2 | ||||
|       - uses: actions/setup-node@v4 | ||||
|         with: | ||||
|           node-version-file: ${{ matrix.package }}/package.json | ||||
|           node-version-file: packages/${{ matrix.package }}/package.json | ||||
|           registry-url: "https://registry.npmjs.org" | ||||
|       - name: Get changed files | ||||
|         id: changed-files | ||||
|         uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c | ||||
|         with: | ||||
|           files: | | ||||
|             ${{ matrix.package }}/package.json | ||||
|             packages/${{ matrix.package }}/package.json | ||||
|       - name: Publish package | ||||
|         if: steps.changed-files.outputs.any_changed == 'true' | ||||
|         working-directory: ${{ matrix.package }} | ||||
|         working-directory: packages/${{ matrix.package}} | ||||
|         run: | | ||||
|           npm ci | ||||
|           npm run build | ||||
|  | ||||
							
								
								
									
										45
									
								
								.github/workflows/release-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										45
									
								
								.github/workflows/release-publish.yml
									
									
									
									
										vendored
									
									
								
							| @ -20,49 +20,6 @@ jobs: | ||||
|       release: true | ||||
|       registry_dockerhub: true | ||||
|       registry_ghcr: true | ||||
|   build-docs: | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
|       # Needed to upload container images to ghcr.io | ||||
|       packages: write | ||||
|       # Needed for attestation | ||||
|       id-token: write | ||||
|       attestations: write | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.6.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/docs | ||||
|       - name: Login to GitHub Container Registry | ||||
|         uses: docker/login-action@v3 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
|           password: ${{ secrets.GITHUB_TOKEN }} | ||||
|       - name: Build Docker Image | ||||
|         id: push | ||||
|         uses: docker/build-push-action@v6 | ||||
|         with: | ||||
|           tags: ${{ steps.ev.outputs.imageTags }} | ||||
|           file: website/Dockerfile | ||||
|           push: true | ||||
|           platforms: linux/amd64,linux/arm64 | ||||
|           context: . | ||||
|       - uses: actions/attest-build-provenance@v2 | ||||
|         id: attest | ||||
|         if: true | ||||
|         with: | ||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} | ||||
|           subject-digest: ${{ steps.push.outputs.digest }} | ||||
|           push-to-registry: true | ||||
|   build-outpost: | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
| @ -236,6 +193,6 @@ jobs: | ||||
|           SENTRY_ORG: authentik-security-inc | ||||
|           SENTRY_PROJECT: authentik | ||||
|         with: | ||||
|           release: authentik@${{ steps.ev.outputs.version }} | ||||
|           version: authentik@${{ steps.ev.outputs.version }} | ||||
|           sourcemaps: "./web/dist" | ||||
|           url_prefix: "~/static/dist" | ||||
|  | ||||
| @ -52,6 +52,3 @@ jobs: | ||||
|           body: "core, web: update translations" | ||||
|           delete-branch: true | ||||
|           signoff: true | ||||
|           labels: dependencies | ||||
|           # ID from https://api.github.com/users/authentik-automation[bot] | ||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> | ||||
|  | ||||
							
								
								
									
										15
									
								
								.github/workflows/translation-rename.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										15
									
								
								.github/workflows/translation-rename.yml
									
									
									
									
										vendored
									
									
								
							| @ -15,7 +15,6 @@ jobs: | ||||
|     runs-on: ubuntu-latest | ||||
|     if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}} | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - id: generate_token | ||||
|         uses: tibdex/github-app-token@v2 | ||||
|         with: | ||||
| @ -26,13 +25,23 @@ jobs: | ||||
|         env: | ||||
|           GH_TOKEN: ${{ steps.generate_token.outputs.token }} | ||||
|         run: | | ||||
|           title=$(gh pr view ${{ github.event.pull_request.number }} --json  "title" -q ".title") | ||||
|           title=$(curl -q -L \ | ||||
|             -H "Accept: application/vnd.github+json" \ | ||||
|             -H "Authorization: Bearer ${GH_TOKEN}" \ | ||||
|             -H "X-GitHub-Api-Version: 2022-11-28" \ | ||||
|             https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} | jq -r .title) | ||||
|           echo "title=${title}" >> "$GITHUB_OUTPUT" | ||||
|       - name: Rename | ||||
|         env: | ||||
|           GH_TOKEN: ${{ steps.generate_token.outputs.token }} | ||||
|         run: | | ||||
|           gh pr edit ${{ github.event.pull_request.number }} -t "translate: ${{ steps.title.outputs.title }}" --add-label dependencies | ||||
|           curl -L \ | ||||
|             -X PATCH \ | ||||
|             -H "Accept: application/vnd.github+json" \ | ||||
|             -H "Authorization: Bearer ${GH_TOKEN}" \ | ||||
|             -H "X-GitHub-Api-Version: 2022-11-28" \ | ||||
|             https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} \ | ||||
|             -d "{\"title\":\"translate: ${{ steps.title.outputs.title }}\"}" | ||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||
|         with: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
|  | ||||
							
								
								
									
										6
									
								
								.vscode/settings.json
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										6
									
								
								.vscode/settings.json
									
									
									
									
										vendored
									
									
								
							| @ -16,7 +16,7 @@ | ||||
|     ], | ||||
|     "typescript.preferences.importModuleSpecifier": "non-relative", | ||||
|     "typescript.preferences.importModuleSpecifierEnding": "index", | ||||
|     "typescript.tsdk": "./node_modules/typescript/lib", | ||||
|     "typescript.tsdk": "./web/node_modules/typescript/lib", | ||||
|     "typescript.enablePromptUseWorkspaceTsdk": true, | ||||
|     "yaml.schemas": { | ||||
|         "./blueprints/schema.json": "blueprints/**/*.yaml" | ||||
| @ -30,5 +30,7 @@ | ||||
|         } | ||||
|     ], | ||||
|     "go.testFlags": ["-count=1"], | ||||
|     "github-actions.workflows.pinned.workflows": [".github/workflows/ci-main.yml"] | ||||
|     "github-actions.workflows.pinned.workflows": [ | ||||
|         ".github/workflows/ci-main.yml" | ||||
|     ] | ||||
| } | ||||
|  | ||||
							
								
								
									
										56
									
								
								Dockerfile
									
									
									
									
									
								
							
							
						
						
									
										56
									
								
								Dockerfile
									
									
									
									
									
								
							| @ -1,7 +1,26 @@ | ||||
| # syntax=docker/dockerfile:1 | ||||
|  | ||||
| # Stage 1: Build webui | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:24-slim AS node-builder | ||||
| # Stage 1: Build website | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS website-builder | ||||
|  | ||||
| ENV NODE_ENV=production | ||||
|  | ||||
| WORKDIR /work/website | ||||
|  | ||||
| RUN --mount=type=bind,target=/work/website/package.json,src=./website/package.json \ | ||||
|     --mount=type=bind,target=/work/website/package-lock.json,src=./website/package-lock.json \ | ||||
|     --mount=type=cache,id=npm-website,sharing=shared,target=/root/.npm \ | ||||
|     npm ci --include=dev | ||||
|  | ||||
| COPY ./website /work/website/ | ||||
| COPY ./blueprints /work/blueprints/ | ||||
| COPY ./schema.yml /work/ | ||||
| COPY ./SECURITY.md /work/ | ||||
|  | ||||
| RUN npm run build-bundled | ||||
|  | ||||
| # Stage 2: Build webui | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS web-builder | ||||
|  | ||||
| ARG GIT_BUILD_HASH | ||||
| ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ||||
| @ -13,7 +32,7 @@ RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \ | ||||
|     --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \ | ||||
|     --mount=type=bind,target=/work/web/packages/sfe/package.json,src=./web/packages/sfe/package.json \ | ||||
|     --mount=type=bind,target=/work/web/scripts,src=./web/scripts \ | ||||
|     --mount=type=cache,id=npm-ak,sharing=shared,target=/root/.npm \ | ||||
|     --mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \ | ||||
|     npm ci --include=dev | ||||
|  | ||||
| COPY ./package.json /work | ||||
| @ -21,10 +40,9 @@ COPY ./web /work/web/ | ||||
| COPY ./website /work/website/ | ||||
| COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api | ||||
|  | ||||
| RUN npm run build && \ | ||||
|     npm run build:sfe | ||||
| RUN npm run build | ||||
|  | ||||
| # Stage 2: Build go proxy | ||||
| # Stage 3: Build go proxy | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.24-bookworm AS go-builder | ||||
|  | ||||
| ARG TARGETOS | ||||
| @ -49,8 +67,8 @@ RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \ | ||||
| COPY ./cmd /go/src/goauthentik.io/cmd | ||||
| COPY ./authentik/lib /go/src/goauthentik.io/authentik/lib | ||||
| COPY ./web/static.go /go/src/goauthentik.io/web/static.go | ||||
| COPY --from=node-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt | ||||
| COPY --from=node-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt | ||||
| COPY --from=web-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt | ||||
| COPY --from=web-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt | ||||
| COPY ./internal /go/src/goauthentik.io/internal | ||||
| COPY ./go.mod /go/src/goauthentik.io/go.mod | ||||
| COPY ./go.sum /go/src/goauthentik.io/go.sum | ||||
| @ -61,23 +79,24 @@ RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \ | ||||
|     CGO_ENABLED=1 GOFIPS140=latest GOARM="${TARGETVARIANT#v}" \ | ||||
|     go build -o /go/authentik ./cmd/server | ||||
|  | ||||
| # Stage 3: MaxMind GeoIP | ||||
| # Stage 4: MaxMind GeoIP | ||||
| FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip | ||||
|  | ||||
| ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN" | ||||
| ENV GEOIPUPDATE_VERBOSE="1" | ||||
| ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID" | ||||
| ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY" | ||||
|  | ||||
| USER root | ||||
| RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ | ||||
|     --mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \ | ||||
|     mkdir -p /usr/share/GeoIP && \ | ||||
|     /bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" | ||||
|     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" | ||||
|  | ||||
| # Stage 4: Download uv | ||||
| FROM ghcr.io/astral-sh/uv:0.7.11 AS uv | ||||
| # Stage 5: Base python image | ||||
| FROM ghcr.io/goauthentik/fips-python:3.13.3-slim-bookworm-fips AS python-base | ||||
| # Stage 5: Download uv | ||||
| FROM ghcr.io/astral-sh/uv:0.6.16 AS uv | ||||
| # Stage 6: Base python image | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.10-slim-bookworm-fips AS python-base | ||||
|  | ||||
| ENV VENV_PATH="/ak-root/.venv" \ | ||||
|     PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \ | ||||
| @ -90,7 +109,7 @@ WORKDIR /ak-root/ | ||||
|  | ||||
| COPY --from=uv /uv /uvx /bin/ | ||||
|  | ||||
| # Stage 6: Python dependencies | ||||
| # Stage 7: Python dependencies | ||||
| FROM python-base AS python-deps | ||||
|  | ||||
| ARG TARGETARCH | ||||
| @ -125,7 +144,7 @@ RUN --mount=type=bind,target=pyproject.toml,src=pyproject.toml \ | ||||
|     --mount=type=cache,target=/root/.cache/uv \ | ||||
|     uv sync --frozen --no-install-project --no-dev | ||||
|  | ||||
| # Stage 7: Run | ||||
| # Stage 8: Run | ||||
| FROM python-base AS final-image | ||||
|  | ||||
| ARG VERSION | ||||
| @ -168,8 +187,9 @@ COPY ./lifecycle/ /lifecycle | ||||
| COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf | ||||
| COPY --from=go-builder /go/authentik /bin/authentik | ||||
| COPY --from=python-deps /ak-root/.venv /ak-root/.venv | ||||
| COPY --from=node-builder /work/web/dist/ /web/dist/ | ||||
| COPY --from=node-builder /work/web/authentik/ /web/authentik/ | ||||
| COPY --from=web-builder /work/web/dist/ /web/dist/ | ||||
| COPY --from=web-builder /work/web/authentik/ /web/authentik/ | ||||
| COPY --from=website-builder /work/website/build/ /website/help/ | ||||
| COPY --from=geoip /usr/share/GeoIP /geoip | ||||
|  | ||||
| USER 1000 | ||||
|  | ||||
							
								
								
									
										51
									
								
								Makefile
									
									
									
									
									
								
							
							
						
						
									
										51
									
								
								Makefile
									
									
									
									
									
								
							| @ -1,7 +1,6 @@ | ||||
| .PHONY: gen dev-reset all clean test web website | ||||
|  | ||||
| SHELL := /usr/bin/env bash | ||||
| .SHELLFLAGS += ${SHELLFLAGS} -e -o pipefail | ||||
| .SHELLFLAGS += ${SHELLFLAGS} -e | ||||
| PWD = $(shell pwd) | ||||
| UID = $(shell id -u) | ||||
| GID = $(shell id -g) | ||||
| @ -9,9 +8,9 @@ NPM_VERSION = $(shell python -m scripts.generate_semver) | ||||
| PY_SOURCES = authentik tests scripts lifecycle .github | ||||
| DOCKER_IMAGE ?= "authentik:test" | ||||
|  | ||||
| GEN_API_TS = gen-ts-api | ||||
| GEN_API_PY = gen-py-api | ||||
| GEN_API_GO = gen-go-api | ||||
| GEN_API_TS = "gen-ts-api" | ||||
| GEN_API_PY = "gen-py-api" | ||||
| GEN_API_GO = "gen-go-api" | ||||
|  | ||||
| pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null) | ||||
| pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null) | ||||
| @ -118,19 +117,14 @@ gen-diff:  ## (Release) generate the changelog diff between the current schema a | ||||
| 	npx prettier --write diff.md | ||||
|  | ||||
| gen-clean-ts:  ## Remove generated API client for Typescript | ||||
| 	rm -rf ${PWD}/${GEN_API_TS}/ | ||||
| 	rm -rf ${PWD}/web/node_modules/@goauthentik/api/ | ||||
| 	rm -rf ./${GEN_API_TS}/ | ||||
| 	rm -rf ./web/node_modules/@goauthentik/api/ | ||||
|  | ||||
| gen-clean-go:  ## Remove generated API client for Go | ||||
| 	mkdir -p ${PWD}/${GEN_API_GO} | ||||
| ifneq ($(wildcard ${PWD}/${GEN_API_GO}/.*),) | ||||
| 	make -C ${PWD}/${GEN_API_GO} clean | ||||
| else | ||||
| 	rm -rf ${PWD}/${GEN_API_GO} | ||||
| endif | ||||
| 	rm -rf ./${GEN_API_GO}/ | ||||
|  | ||||
| gen-clean-py:  ## Remove generated API client for Python | ||||
| 	rm -rf ${PWD}/${GEN_API_PY}/ | ||||
| 	rm -rf ./${GEN_API_PY}/ | ||||
|  | ||||
| gen-clean: gen-clean-ts gen-clean-go gen-clean-py  ## Remove generated API clients | ||||
|  | ||||
| @ -147,8 +141,8 @@ gen-client-ts: gen-clean-ts  ## Build and install the authentik API for Typescri | ||||
| 		--git-repo-id authentik \ | ||||
| 		--git-user-id goauthentik | ||||
| 	mkdir -p web/node_modules/@goauthentik/api | ||||
| 	cd ${PWD}/${GEN_API_TS} && npm i | ||||
| 	\cp -rf ${PWD}/${GEN_API_TS}/* web/node_modules/@goauthentik/api | ||||
| 	cd ./${GEN_API_TS} && npm i | ||||
| 	\cp -rf ./${GEN_API_TS}/* web/node_modules/@goauthentik/api | ||||
|  | ||||
| gen-client-py: gen-clean-py ## Build and install the authentik API for Python | ||||
| 	docker run \ | ||||
| @ -162,17 +156,24 @@ gen-client-py: gen-clean-py ## Build and install the authentik API for Python | ||||
| 		--additional-properties=packageVersion=${NPM_VERSION} \ | ||||
| 		--git-repo-id authentik \ | ||||
| 		--git-user-id goauthentik | ||||
| 	pip install ./${GEN_API_PY} | ||||
|  | ||||
| gen-client-go: gen-clean-go  ## Build and install the authentik API for Golang | ||||
| 	mkdir -p ${PWD}/${GEN_API_GO} | ||||
| ifeq ($(wildcard ${PWD}/${GEN_API_GO}/.*),) | ||||
| 	git clone --depth 1 https://github.com/goauthentik/client-go.git ${PWD}/${GEN_API_GO} | ||||
| else | ||||
| 	cd ${PWD}/${GEN_API_GO} && git pull | ||||
| endif | ||||
| 	cp ${PWD}/schema.yml ${PWD}/${GEN_API_GO} | ||||
| 	make -C ${PWD}/${GEN_API_GO} build | ||||
| 	mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates | ||||
| 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./${GEN_API_GO}/config.yaml | ||||
| 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./${GEN_API_GO}/templates/README.mustache | ||||
| 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./${GEN_API_GO}/templates/go.mod.mustache | ||||
| 	cp schema.yml ./${GEN_API_GO}/ | ||||
| 	docker run \ | ||||
| 		--rm -v ${PWD}/${GEN_API_GO}:/local \ | ||||
| 		--user ${UID}:${GID} \ | ||||
| 		docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \ | ||||
| 		-i /local/schema.yml \ | ||||
| 		-g go \ | ||||
| 		-o /local/ \ | ||||
| 		-c /local/config.yaml | ||||
| 	go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO} | ||||
| 	rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/ | ||||
|  | ||||
| gen-dev-config:  ## Generate a local development config file | ||||
| 	uv run scripts/generate_config.py | ||||
| @ -243,7 +244,7 @@ docker:  ## Build a docker image of the current source tree | ||||
| 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} | ||||
|  | ||||
| test-docker: | ||||
| 	BUILD=true ${PWD}/scripts/test_docker.sh | ||||
| 	BUILD=true ./scripts/test_docker.sh | ||||
|  | ||||
| ######################### | ||||
| ## CI | ||||
|  | ||||
| @ -42,4 +42,4 @@ See [SECURITY.md](SECURITY.md) | ||||
|  | ||||
| ## Adoption and Contributions | ||||
|  | ||||
| Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [contribution guide](https://docs.goauthentik.io/docs/developer-docs?utm_source=github). | ||||
| Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md). | ||||
|  | ||||
| @ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni | ||||
|  | ||||
| | Version   | Supported | | ||||
| | --------- | --------- | | ||||
| | 2025.4.x  | ✅        | | ||||
| | 2025.6.x  | ✅        | | ||||
| | 2024.12.x | ✅        | | ||||
| | 2025.2.x  | ✅        | | ||||
|  | ||||
| ## Reporting a Vulnerability | ||||
|  | ||||
|  | ||||
| @ -2,7 +2,7 @@ | ||||
|  | ||||
| from os import environ | ||||
|  | ||||
| __version__ = "2025.6.0" | ||||
| __version__ = "2025.2.4" | ||||
| ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -1,7 +1,6 @@ | ||||
| """authentik administration overview""" | ||||
|  | ||||
| from django.core.cache import cache | ||||
| from django_tenants.utils import get_public_schema_name | ||||
| from drf_spectacular.utils import extend_schema | ||||
| from packaging.version import parse | ||||
| from rest_framework.fields import SerializerMethodField | ||||
| @ -14,7 +13,6 @@ from authentik import __version__, get_build_hash | ||||
| from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version | ||||
| from authentik.core.api.utils import PassiveSerializer | ||||
| from authentik.outposts.models import Outpost | ||||
| from authentik.tenants.utils import get_current_tenant | ||||
|  | ||||
|  | ||||
| class VersionSerializer(PassiveSerializer): | ||||
| @ -37,8 +35,6 @@ class VersionSerializer(PassiveSerializer): | ||||
|  | ||||
|     def get_version_latest(self, _) -> str: | ||||
|         """Get latest version from cache""" | ||||
|         if get_current_tenant().schema_name == get_public_schema_name(): | ||||
|             return __version__ | ||||
|         version_in_cache = cache.get(VERSION_CACHE_KEY) | ||||
|         if not version_in_cache:  # pragma: no cover | ||||
|             update_latest_version.delay() | ||||
|  | ||||
| @ -14,19 +14,3 @@ class AuthentikAdminConfig(ManagedAppConfig): | ||||
|     label = "authentik_admin" | ||||
|     verbose_name = "authentik Admin" | ||||
|     default = True | ||||
|  | ||||
|     @ManagedAppConfig.reconcile_global | ||||
|     def clear_update_notifications(self): | ||||
|         """Clear update notifications on startup if the notification was for the version | ||||
|         we're running now.""" | ||||
|         from packaging.version import parse | ||||
|  | ||||
|         from authentik.admin.tasks import LOCAL_VERSION | ||||
|         from authentik.events.models import EventAction, Notification | ||||
|  | ||||
|         for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE): | ||||
|             if "new_version" not in notification.event.context: | ||||
|                 continue | ||||
|             notification_version = notification.event.context["new_version"] | ||||
|             if LOCAL_VERSION >= parse(notification_version): | ||||
|                 notification.delete() | ||||
|  | ||||
| @ -1,7 +1,6 @@ | ||||
| """authentik admin settings""" | ||||
|  | ||||
| from celery.schedules import crontab | ||||
| from django_tenants.utils import get_public_schema_name | ||||
|  | ||||
| from authentik.lib.utils.time import fqdn_rand | ||||
|  | ||||
| @ -9,7 +8,6 @@ CELERY_BEAT_SCHEDULE = { | ||||
|     "admin_latest_version": { | ||||
|         "task": "authentik.admin.tasks.update_latest_version", | ||||
|         "schedule": crontab(minute=fqdn_rand("admin_latest_version"), hour="*"), | ||||
|         "tenant_schemas": [get_public_schema_name()], | ||||
|         "options": {"queue": "authentik_scheduled"}, | ||||
|     } | ||||
| } | ||||
|  | ||||
| @ -1,6 +1,7 @@ | ||||
| """authentik admin tasks""" | ||||
|  | ||||
| from django.core.cache import cache | ||||
| from django.db import DatabaseError, InternalError, ProgrammingError | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from packaging.version import parse | ||||
| from requests import RequestException | ||||
| @ -8,7 +9,7 @@ from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik import __version__, get_build_hash | ||||
| from authentik.admin.apps import PROM_INFO | ||||
| from authentik.events.models import Event, EventAction | ||||
| from authentik.events.models import Event, EventAction, Notification | ||||
| from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.lib.utils.http import get_http_session | ||||
| @ -32,6 +33,20 @@ def _set_prom_info(): | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @CELERY_APP.task( | ||||
|     throws=(DatabaseError, ProgrammingError, InternalError), | ||||
| ) | ||||
| def clear_update_notifications(): | ||||
|     """Clear update notifications on startup if the notification was for the version | ||||
|     we're running now.""" | ||||
|     for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE): | ||||
|         if "new_version" not in notification.event.context: | ||||
|             continue | ||||
|         notification_version = notification.event.context["new_version"] | ||||
|         if LOCAL_VERSION >= parse(notification_version): | ||||
|             notification.delete() | ||||
|  | ||||
|  | ||||
| @CELERY_APP.task(bind=True, base=SystemTask) | ||||
| @prefill_task | ||||
| def update_latest_version(self: SystemTask): | ||||
|  | ||||
| @ -1,12 +1,12 @@ | ||||
| """test admin tasks""" | ||||
|  | ||||
| from django.apps import apps | ||||
| from django.core.cache import cache | ||||
| from django.test import TestCase | ||||
| from requests_mock import Mocker | ||||
|  | ||||
| from authentik.admin.tasks import ( | ||||
|     VERSION_CACHE_KEY, | ||||
|     clear_update_notifications, | ||||
|     update_latest_version, | ||||
| ) | ||||
| from authentik.events.models import Event, EventAction | ||||
| @ -72,13 +72,12 @@ class TestAdminTasks(TestCase): | ||||
|  | ||||
|     def test_clear_update_notifications(self): | ||||
|         """Test clear of previous notification""" | ||||
|         admin_config = apps.get_app_config("authentik_admin") | ||||
|         Event.objects.create( | ||||
|             action=EventAction.UPDATE_AVAILABLE, context={"new_version": "99999999.9999999.9999999"} | ||||
|         ) | ||||
|         Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={"new_version": "1.1.1"}) | ||||
|         Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={}) | ||||
|         admin_config.clear_update_notifications() | ||||
|         clear_update_notifications() | ||||
|         self.assertFalse( | ||||
|             Event.objects.filter( | ||||
|                 action=EventAction.UPDATE_AVAILABLE, context__new_version="1.1" | ||||
|  | ||||
| @ -1,13 +1,12 @@ | ||||
| """authentik API AppConfig""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikAPIConfig(ManagedAppConfig): | ||||
| class AuthentikAPIConfig(AppConfig): | ||||
|     """authentik API Config""" | ||||
|  | ||||
|     name = "authentik.api" | ||||
|     label = "authentik_api" | ||||
|     mountpoint = "api/" | ||||
|     verbose_name = "authentik API" | ||||
|     default = True | ||||
|  | ||||
| @ -1,12 +1,9 @@ | ||||
| """API Authentication""" | ||||
|  | ||||
| from hmac import compare_digest | ||||
| from pathlib import Path | ||||
| from tempfile import gettempdir | ||||
| from typing import Any | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.contrib.auth.models import AnonymousUser | ||||
| from drf_spectacular.extensions import OpenApiAuthenticationExtension | ||||
| from rest_framework.authentication import BaseAuthentication, get_authorization_header | ||||
| from rest_framework.exceptions import AuthenticationFailed | ||||
| @ -14,17 +11,11 @@ from rest_framework.request import Request | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.core.middleware import CTX_AUTH_VIA | ||||
| from authentik.core.models import Token, TokenIntents, User, UserTypes | ||||
| from authentik.core.models import Token, TokenIntents, User | ||||
| from authentik.outposts.models import Outpost | ||||
| from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API | ||||
|  | ||||
| LOGGER = get_logger() | ||||
| _tmp = Path(gettempdir()) | ||||
| try: | ||||
|     with open(_tmp / "authentik-core-ipc.key") as _f: | ||||
|         ipc_key = _f.read() | ||||
| except OSError: | ||||
|     ipc_key = None | ||||
|  | ||||
|  | ||||
| def validate_auth(header: bytes) -> str | None: | ||||
| @ -82,11 +73,6 @@ def auth_user_lookup(raw_header: bytes) -> User | None: | ||||
|     if user: | ||||
|         CTX_AUTH_VIA.set("secret_key") | ||||
|         return user | ||||
|     # then try to auth via secret key (for embedded outpost/etc) | ||||
|     user = token_ipc(auth_credentials) | ||||
|     if user: | ||||
|         CTX_AUTH_VIA.set("ipc") | ||||
|         return user | ||||
|     raise AuthenticationFailed("Token invalid/expired") | ||||
|  | ||||
|  | ||||
| @ -104,43 +90,6 @@ def token_secret_key(value: str) -> User | None: | ||||
|     return outpost.user | ||||
|  | ||||
|  | ||||
| class IPCUser(AnonymousUser): | ||||
|     """'Virtual' user for IPC communication between authentik core and the authentik router""" | ||||
|  | ||||
|     username = "authentik:system" | ||||
|     is_active = True | ||||
|     is_superuser = True | ||||
|  | ||||
|     @property | ||||
|     def type(self): | ||||
|         return UserTypes.INTERNAL_SERVICE_ACCOUNT | ||||
|  | ||||
|     def has_perm(self, perm, obj=None): | ||||
|         return True | ||||
|  | ||||
|     def has_perms(self, perm_list, obj=None): | ||||
|         return True | ||||
|  | ||||
|     def has_module_perms(self, module): | ||||
|         return True | ||||
|  | ||||
|     @property | ||||
|     def is_anonymous(self): | ||||
|         return False | ||||
|  | ||||
|     @property | ||||
|     def is_authenticated(self): | ||||
|         return True | ||||
|  | ||||
|  | ||||
| def token_ipc(value: str) -> User | None: | ||||
|     """Check if the token is the secret key | ||||
|     and return the service account for the managed outpost""" | ||||
|     if not ipc_key or not compare_digest(value, ipc_key): | ||||
|         return None | ||||
|     return IPCUser() | ||||
|  | ||||
|  | ||||
| class TokenAuthentication(BaseAuthentication): | ||||
|     """Token-based authentication using HTTP Bearer authentication""" | ||||
|  | ||||
|  | ||||
| @ -54,7 +54,7 @@ def create_component(generator: SchemaGenerator, name, schema, type_=ResolvedCom | ||||
|     return component | ||||
|  | ||||
|  | ||||
| def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs): | ||||
| def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):  # noqa: W0613 | ||||
|     """Workaround to set a default response for endpoints. | ||||
|     Workaround suggested at | ||||
|     <https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357> | ||||
|  | ||||
| @ -1,14 +0,0 @@ | ||||
| from django.test import TestCase | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from authentik.enterprise.apps import EnterpriseConfig | ||||
| from authentik.lib.utils.reflection import get_apps | ||||
|  | ||||
|  | ||||
| class TestManagedAppConfig(TestCase): | ||||
|     def test_apps_use_managed_app_config(self): | ||||
|         for app in get_apps(): | ||||
|             if app.name.startswith("authentik.enterprise"): | ||||
|                 self.assertIn(EnterpriseConfig, app.__class__.__bases__) | ||||
|             else: | ||||
|                 self.assertIn(ManagedAppConfig, app.__class__.__bases__) | ||||
| @ -164,7 +164,9 @@ class BlueprintEntry: | ||||
|         """Get the blueprint model, with yaml tags resolved if present""" | ||||
|         return str(self.tag_resolver(self.model, blueprint)) | ||||
|  | ||||
|     def get_permissions(self, blueprint: "Blueprint") -> Generator[BlueprintEntryPermission]: | ||||
|     def get_permissions( | ||||
|         self, blueprint: "Blueprint" | ||||
|     ) -> Generator[BlueprintEntryPermission, None, None]: | ||||
|         """Get permissions of this entry, with all yaml tags resolved""" | ||||
|         for perm in self.permissions: | ||||
|             yield BlueprintEntryPermission( | ||||
|  | ||||
| @ -59,7 +59,6 @@ class BrandSerializer(ModelSerializer): | ||||
|             "flow_device_code", | ||||
|             "default_application", | ||||
|             "web_certificate", | ||||
|             "client_certificates", | ||||
|             "attributes", | ||||
|         ] | ||||
|         extra_kwargs = { | ||||
| @ -121,7 +120,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet): | ||||
|         "domain", | ||||
|         "branding_title", | ||||
|         "web_certificate__name", | ||||
|         "client_certificates__name", | ||||
|     ] | ||||
|     filterset_fields = [ | ||||
|         "brand_uuid", | ||||
| @ -138,7 +136,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet): | ||||
|         "flow_user_settings", | ||||
|         "flow_device_code", | ||||
|         "web_certificate", | ||||
|         "client_certificates", | ||||
|     ] | ||||
|     ordering = ["domain"] | ||||
|  | ||||
|  | ||||
| @ -1,9 +1,9 @@ | ||||
| """authentik brands app""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikBrandsConfig(ManagedAppConfig): | ||||
| class AuthentikBrandsConfig(AppConfig): | ||||
|     """authentik Brand app""" | ||||
|  | ||||
|     name = "authentik.brands" | ||||
| @ -12,4 +12,3 @@ class AuthentikBrandsConfig(ManagedAppConfig): | ||||
|     mountpoints = { | ||||
|         "authentik.brands.urls_root": "", | ||||
|     } | ||||
|     default = True | ||||
|  | ||||
| @ -16,7 +16,7 @@ def migrate_custom_css(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||
|     if not path.exists(): | ||||
|         return | ||||
|     css = path.read_text() | ||||
|     Brand.objects.using(db_alias).all().update(branding_custom_css=css) | ||||
|     Brand.objects.using(db_alias).update(branding_custom_css=css) | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
| @ -1,37 +0,0 @@ | ||||
| # Generated by Django 5.1.9 on 2025-05-19 15:09 | ||||
|  | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_brands", "0009_brand_branding_default_flow_background"), | ||||
|         ("authentik_crypto", "0004_alter_certificatekeypair_name"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="brand", | ||||
|             name="client_certificates", | ||||
|             field=models.ManyToManyField( | ||||
|                 blank=True, | ||||
|                 default=None, | ||||
|                 help_text="Certificates used for client authentication.", | ||||
|                 to="authentik_crypto.certificatekeypair", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="brand", | ||||
|             name="web_certificate", | ||||
|             field=models.ForeignKey( | ||||
|                 default=None, | ||||
|                 help_text="Web Certificate used by the authentik Core webserver.", | ||||
|                 null=True, | ||||
|                 on_delete=django.db.models.deletion.SET_DEFAULT, | ||||
|                 related_name="+", | ||||
|                 to="authentik_crypto.certificatekeypair", | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @ -73,13 +73,6 @@ class Brand(SerializerModel): | ||||
|         default=None, | ||||
|         on_delete=models.SET_DEFAULT, | ||||
|         help_text=_("Web Certificate used by the authentik Core webserver."), | ||||
|         related_name="+", | ||||
|     ) | ||||
|     client_certificates = models.ManyToManyField( | ||||
|         CertificateKeyPair, | ||||
|         default=None, | ||||
|         blank=True, | ||||
|         help_text=_("Certificates used for client authentication."), | ||||
|     ) | ||||
|     attributes = models.JSONField(default=dict, blank=True) | ||||
|  | ||||
|  | ||||
| @ -5,10 +5,10 @@ from typing import Any | ||||
| from django.db.models import F, Q | ||||
| from django.db.models import Value as V | ||||
| from django.http.request import HttpRequest | ||||
| from sentry_sdk import get_current_span | ||||
|  | ||||
| from authentik import get_full_version | ||||
| from authentik.brands.models import Brand | ||||
| from authentik.lib.sentry import get_http_meta | ||||
| from authentik.tenants.models import Tenant | ||||
|  | ||||
| _q_default = Q(default=True) | ||||
| @ -32,9 +32,13 @@ def context_processor(request: HttpRequest) -> dict[str, Any]: | ||||
|     """Context Processor that injects brand object into every template""" | ||||
|     brand = getattr(request, "brand", DEFAULT_BRAND) | ||||
|     tenant = getattr(request, "tenant", Tenant()) | ||||
|     trace = "" | ||||
|     span = get_current_span() | ||||
|     if span: | ||||
|         trace = span.to_traceparent() | ||||
|     return { | ||||
|         "brand": brand, | ||||
|         "footer_links": tenant.footer_links, | ||||
|         "html_meta": {**get_http_meta()}, | ||||
|         "sentry_trace": trace, | ||||
|         "version": get_full_version(), | ||||
|     } | ||||
|  | ||||
| @ -153,10 +153,10 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet): | ||||
|         return applications | ||||
|  | ||||
|     def _filter_applications_with_launch_url( | ||||
|         self, paginated_apps: Iterator[Application] | ||||
|         self, pagined_apps: Iterator[Application] | ||||
|     ) -> list[Application]: | ||||
|         applications = [] | ||||
|         for app in paginated_apps: | ||||
|         for app in pagined_apps: | ||||
|             if app.get_launch_url(): | ||||
|                 applications.append(app) | ||||
|         return applications | ||||
|  | ||||
| @ -99,17 +99,18 @@ class GroupSerializer(ModelSerializer): | ||||
|             if superuser | ||||
|             else "authentik_core.disable_group_superuser" | ||||
|         ) | ||||
|         if self.instance or superuser: | ||||
|             has_perm = user.has_perm(perm) or user.has_perm(perm, self.instance) | ||||
|             if not has_perm: | ||||
|                 raise ValidationError( | ||||
|                     _( | ||||
|                         ( | ||||
|                             "User does not have permission to set " | ||||
|                             "superuser status to {superuser_status}." | ||||
|                         ).format_map({"superuser_status": superuser}) | ||||
|                     ) | ||||
|         has_perm = user.has_perm(perm) | ||||
|         if self.instance and not has_perm: | ||||
|             has_perm = user.has_perm(perm, self.instance) | ||||
|         if not has_perm: | ||||
|             raise ValidationError( | ||||
|                 _( | ||||
|                     ( | ||||
|                         "User does not have permission to set " | ||||
|                         "superuser status to {superuser_status}." | ||||
|                     ).format_map({"superuser_status": superuser}) | ||||
|                 ) | ||||
|             ) | ||||
|         return superuser | ||||
|  | ||||
|     class Meta: | ||||
|  | ||||
| @ -84,7 +84,6 @@ from authentik.flows.views.executor import QS_KEY_TOKEN | ||||
| from authentik.lib.avatars import get_avatar | ||||
| from authentik.rbac.decorators import permission_required | ||||
| from authentik.rbac.models import get_permission_choices | ||||
| from authentik.stages.email.flow import pickle_flow_token_for_email | ||||
| from authentik.stages.email.models import EmailStage | ||||
| from authentik.stages.email.tasks import send_mails | ||||
| from authentik.stages.email.utils import TemplateEmailMessage | ||||
| @ -452,7 +451,7 @@ class UserViewSet(UsedByMixin, ModelViewSet): | ||||
|     def list(self, request, *args, **kwargs): | ||||
|         return super().list(request, *args, **kwargs) | ||||
|  | ||||
|     def _create_recovery_link(self, for_email=False) -> tuple[str, Token]: | ||||
|     def _create_recovery_link(self) -> tuple[str, Token]: | ||||
|         """Create a recovery link (when the current brand has a recovery flow set), | ||||
|         that can either be shown to an admin or sent to the user directly""" | ||||
|         brand: Brand = self.request._request.brand | ||||
| @ -474,16 +473,12 @@ class UserViewSet(UsedByMixin, ModelViewSet): | ||||
|             raise ValidationError( | ||||
|                 {"non_field_errors": "Recovery flow not applicable to user"} | ||||
|             ) from None | ||||
|         _plan = FlowToken.pickle(plan) | ||||
|         if for_email: | ||||
|             _plan = pickle_flow_token_for_email(plan) | ||||
|         token, __ = FlowToken.objects.update_or_create( | ||||
|             identifier=f"{user.uid}-password-reset", | ||||
|             defaults={ | ||||
|                 "user": user, | ||||
|                 "flow": flow, | ||||
|                 "_plan": _plan, | ||||
|                 "revoke_on_execution": not for_email, | ||||
|                 "_plan": FlowToken.pickle(plan), | ||||
|             }, | ||||
|         ) | ||||
|         querystring = urlencode({QS_KEY_TOKEN: token.key}) | ||||
| @ -653,7 +648,7 @@ class UserViewSet(UsedByMixin, ModelViewSet): | ||||
|         if for_user.email == "": | ||||
|             LOGGER.debug("User doesn't have an email address") | ||||
|             raise ValidationError({"non_field_errors": "User does not have an email address set."}) | ||||
|         link, token = self._create_recovery_link(for_email=True) | ||||
|         link, token = self._create_recovery_link() | ||||
|         # Lookup the email stage to assure the current user can access it | ||||
|         stages = get_objects_for_user( | ||||
|             request.user, "authentik_stages_email.view_emailstage" | ||||
|  | ||||
| @ -2,7 +2,6 @@ | ||||
|  | ||||
| from django.apps import apps | ||||
| from django.contrib.auth.management import create_permissions | ||||
| from django.core.management import call_command | ||||
| from django.core.management.base import BaseCommand, no_translations | ||||
| from guardian.management import create_anonymous_user | ||||
|  | ||||
| @ -17,10 +16,6 @@ class Command(BaseCommand): | ||||
|         """Check permissions for all apps""" | ||||
|         for tenant in Tenant.objects.filter(ready=True): | ||||
|             with tenant: | ||||
|                 # See https://code.djangoproject.com/ticket/28417 | ||||
|                 # Remove potential lingering old permissions | ||||
|                 call_command("remove_stale_contenttypes", "--no-input") | ||||
|  | ||||
|                 for app in apps.get_app_configs(): | ||||
|                     self.stdout.write(f"Checking app {app.name} ({app.label})\n") | ||||
|                     create_permissions(app, verbosity=0) | ||||
|  | ||||
| @ -31,10 +31,7 @@ class PickleSerializer: | ||||
|  | ||||
|     def loads(self, data): | ||||
|         """Unpickle data to be loaded from redis""" | ||||
|         try: | ||||
|             return pickle.loads(data)  # nosec | ||||
|         except Exception: | ||||
|             return {} | ||||
|         return pickle.loads(data)  # nosec | ||||
|  | ||||
|  | ||||
| def _migrate_session( | ||||
| @ -79,7 +76,6 @@ def _migrate_session( | ||||
|         AuthenticatedSession.objects.using(db_alias).create( | ||||
|             session=session, | ||||
|             user=old_auth_session.user, | ||||
|             uuid=old_auth_session.uuid, | ||||
|         ) | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -1,103 +0,0 @@ | ||||
| # Generated by Django 5.1.9 on 2025-05-14 11:15 | ||||
|  | ||||
| from django.apps.registry import Apps, apps as global_apps | ||||
| from django.db import migrations | ||||
| from django.contrib.contenttypes.management import create_contenttypes | ||||
| from django.contrib.auth.management import create_permissions | ||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||
|  | ||||
|  | ||||
| def migrate_authenticated_session_permissions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||
|     """Migrate permissions from OldAuthenticatedSession to AuthenticatedSession""" | ||||
|     db_alias = schema_editor.connection.alias | ||||
|  | ||||
|     # `apps` here is just an instance of `django.db.migrations.state.AppConfigStub`, we need the | ||||
|     # real config for creating permissions and content types | ||||
|     authentik_core_config = global_apps.get_app_config("authentik_core") | ||||
|     # These are only ran by django after all migrations, but we need them right now. | ||||
|     # `global_apps` is needed, | ||||
|     create_permissions(authentik_core_config, using=db_alias, verbosity=1) | ||||
|     create_contenttypes(authentik_core_config, using=db_alias, verbosity=1) | ||||
|  | ||||
|     # But from now on, this is just a regular migration, so use `apps` | ||||
|     Permission = apps.get_model("auth", "Permission") | ||||
|     ContentType = apps.get_model("contenttypes", "ContentType") | ||||
|  | ||||
|     try: | ||||
|         old_ct = ContentType.objects.using(db_alias).get( | ||||
|             app_label="authentik_core", model="oldauthenticatedsession" | ||||
|         ) | ||||
|         new_ct = ContentType.objects.using(db_alias).get( | ||||
|             app_label="authentik_core", model="authenticatedsession" | ||||
|         ) | ||||
|     except ContentType.DoesNotExist: | ||||
|         # This should exist at this point, but if not, let's cut our losses | ||||
|         return | ||||
|  | ||||
|     # Get all permissions for the old content type | ||||
|     old_perms = Permission.objects.using(db_alias).filter(content_type=old_ct) | ||||
|  | ||||
|     # Create equivalent permissions for the new content type | ||||
|     for old_perm in old_perms: | ||||
|         new_perm = ( | ||||
|             Permission.objects.using(db_alias) | ||||
|             .filter( | ||||
|                 content_type=new_ct, | ||||
|                 codename=old_perm.codename, | ||||
|             ) | ||||
|             .first() | ||||
|         ) | ||||
|         if not new_perm: | ||||
|             # This should exist at this point, but if not, let's cut our losses | ||||
|             continue | ||||
|  | ||||
|         # Global user permissions | ||||
|         User = apps.get_model("authentik_core", "User") | ||||
|         User.user_permissions.through.objects.using(db_alias).filter( | ||||
|             permission=old_perm | ||||
|         ).all().update(permission=new_perm) | ||||
|  | ||||
|         # Global role permissions | ||||
|         DjangoGroup = apps.get_model("auth", "Group") | ||||
|         DjangoGroup.permissions.through.objects.using(db_alias).filter( | ||||
|             permission=old_perm | ||||
|         ).all().update(permission=new_perm) | ||||
|  | ||||
|         # Object user permissions | ||||
|         UserObjectPermission = apps.get_model("guardian", "UserObjectPermission") | ||||
|         UserObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update( | ||||
|             permission=new_perm, content_type=new_ct | ||||
|         ) | ||||
|  | ||||
|         # Object role permissions | ||||
|         GroupObjectPermission = apps.get_model("guardian", "GroupObjectPermission") | ||||
|         GroupObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update( | ||||
|             permission=new_perm, content_type=new_ct | ||||
|         ) | ||||
|  | ||||
|  | ||||
| def remove_old_authenticated_session_content_type( | ||||
|     apps: Apps, schema_editor: BaseDatabaseSchemaEditor | ||||
| ): | ||||
|     db_alias = schema_editor.connection.alias | ||||
|     ContentType = apps.get_model("contenttypes", "ContentType") | ||||
|  | ||||
|     ContentType.objects.using(db_alias).filter(model="oldauthenticatedsession").delete() | ||||
|  | ||||
|  | ||||
class Migration(migrations.Migration):
    """Re-point session permissions at the new model and drop the stale content type.

    Runs after the OldAuthenticatedSession model removal (see dependency below);
    both steps are data-only, hence the no-op reverse.
    """

    dependencies = [
        ("authentik_core", "0047_delete_oldauthenticatedsession"),
    ]

    operations = [
        # Move user/role/object permission assignments over to the new
        # AuthenticatedSession permission rows.
        migrations.RunPython(
            code=migrate_authenticated_session_permissions,
            reverse_code=migrations.RunPython.noop,
        ),
        # Remove the now-orphaned ContentType entry for the deleted model.
        migrations.RunPython(
            code=remove_old_authenticated_session_content_type,
            reverse_code=migrations.RunPython.noop,
        ),
    ]
| @ -21,9 +21,7 @@ | ||||
|         <script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script> | ||||
|         {% block head %} | ||||
|         {% endblock %} | ||||
|         {% for key, value in html_meta.items %} | ||||
|         <meta name="{{key}}" content="{{ value }}" /> | ||||
|         {% endfor %} | ||||
|         <meta name="sentry-trace" content="{{ sentry_trace }}" /> | ||||
|     </head> | ||||
|     <body> | ||||
|         {% block body %} | ||||
|  | ||||
| @ -10,7 +10,7 @@ | ||||
| {% endblock %} | ||||
|  | ||||
| {% block body %} | ||||
| <ak-message-container alignment="bottom"></ak-message-container> | ||||
| <ak-message-container></ak-message-container> | ||||
| <ak-interface-admin> | ||||
|     <ak-loading></ak-loading> | ||||
| </ak-interface-admin> | ||||
|  | ||||
| @ -124,16 +124,6 @@ class TestGroupsAPI(APITestCase): | ||||
|             {"is_superuser": ["User does not have permission to set superuser status to True."]}, | ||||
|         ) | ||||
|  | ||||
|     def test_superuser_no_perm_no_superuser(self): | ||||
|         """Test creating a group without permission and without superuser flag""" | ||||
|         assign_perm("authentik_core.add_group", self.login_user) | ||||
|         self.client.force_login(self.login_user) | ||||
|         res = self.client.post( | ||||
|             reverse("authentik_api:group-list"), | ||||
|             data={"name": generate_id(), "is_superuser": False}, | ||||
|         ) | ||||
|         self.assertEqual(res.status_code, 201) | ||||
|  | ||||
|     def test_superuser_update_no_perm(self): | ||||
|         """Test updating a superuser group without permission""" | ||||
|         group = Group.objects.create(name=generate_id(), is_superuser=True) | ||||
|  | ||||
| @ -30,7 +30,6 @@ from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import ModelSerializer, PassiveSerializer | ||||
| from authentik.core.models import UserTypes | ||||
| from authentik.crypto.apps import MANAGED_KEY | ||||
| from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg | ||||
| from authentik.crypto.models import CertificateKeyPair | ||||
| @ -273,12 +272,11 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet): | ||||
|     def view_certificate(self, request: Request, pk: str) -> Response: | ||||
|         """Return certificate-key pairs certificate and log access""" | ||||
|         certificate: CertificateKeyPair = self.get_object() | ||||
|         if request.user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT: | ||||
|             Event.new(  # noqa # nosec | ||||
|                 EventAction.SECRET_VIEW, | ||||
|                 secret=certificate, | ||||
|                 type="certificate", | ||||
|             ).from_http(request) | ||||
|         Event.new(  # noqa # nosec | ||||
|             EventAction.SECRET_VIEW, | ||||
|             secret=certificate, | ||||
|             type="certificate", | ||||
|         ).from_http(request) | ||||
|         if "download" in request.query_params: | ||||
|             # Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html | ||||
|             response = HttpResponse( | ||||
| @ -304,12 +302,11 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet): | ||||
|     def view_private_key(self, request: Request, pk: str) -> Response: | ||||
|         """Return certificate-key pairs private key and log access""" | ||||
|         certificate: CertificateKeyPair = self.get_object() | ||||
|         if request.user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT: | ||||
|             Event.new(  # noqa # nosec | ||||
|                 EventAction.SECRET_VIEW, | ||||
|                 secret=certificate, | ||||
|                 type="private_key", | ||||
|             ).from_http(request) | ||||
|         Event.new(  # noqa # nosec | ||||
|             EventAction.SECRET_VIEW, | ||||
|             secret=certificate, | ||||
|             type="private_key", | ||||
|         ).from_http(request) | ||||
|         if "download" in request.query_params: | ||||
|             # Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html | ||||
|             response = HttpResponse(certificate.key_data, content_type="application/x-pem-file") | ||||
|  | ||||
| @ -132,14 +132,13 @@ class LicenseKey: | ||||
|         """Get a summarized version of all (not expired) licenses""" | ||||
|         total = LicenseKey(get_license_aud(), 0, "Summarized license", 0, 0) | ||||
|         for lic in License.objects.all(): | ||||
|             if lic.is_valid: | ||||
|                 total.internal_users += lic.internal_users | ||||
|                 total.external_users += lic.external_users | ||||
|                 total.license_flags.extend(lic.status.license_flags) | ||||
|             total.internal_users += lic.internal_users | ||||
|             total.external_users += lic.external_users | ||||
|             exp_ts = int(mktime(lic.expiry.timetuple())) | ||||
|             if total.exp == 0: | ||||
|                 total.exp = exp_ts | ||||
|             total.exp = max(total.exp, exp_ts) | ||||
|             total.license_flags.extend(lic.status.license_flags) | ||||
|         return total | ||||
|  | ||||
|     @staticmethod | ||||
|  | ||||
| @ -39,10 +39,6 @@ class License(SerializerModel): | ||||
|     internal_users = models.BigIntegerField() | ||||
|     external_users = models.BigIntegerField() | ||||
|  | ||||
|     @property | ||||
|     def is_valid(self) -> bool: | ||||
|         return self.expiry >= now() | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> type[BaseSerializer]: | ||||
|         from authentik.enterprise.api import LicenseSerializer | ||||
|  | ||||
| @ -25,7 +25,7 @@ class GoogleWorkspaceGroupClient( | ||||
|     """Google client for groups""" | ||||
|  | ||||
|     connection_type = GoogleWorkspaceProviderGroup | ||||
|     connection_attr = "googleworkspaceprovidergroup_set" | ||||
|     connection_type_query = "group" | ||||
|     can_discover = True | ||||
|  | ||||
|     def __init__(self, provider: GoogleWorkspaceProvider) -> None: | ||||
|  | ||||
| @ -20,7 +20,7 @@ class GoogleWorkspaceUserClient(GoogleWorkspaceSyncClient[User, GoogleWorkspaceP | ||||
|     """Sync authentik users into google workspace""" | ||||
|  | ||||
|     connection_type = GoogleWorkspaceProviderUser | ||||
|     connection_attr = "googleworkspaceprovideruser_set" | ||||
|     connection_type_query = "user" | ||||
|     can_discover = True | ||||
|  | ||||
|     def __init__(self, provider: GoogleWorkspaceProvider) -> None: | ||||
|  | ||||
| @ -132,11 +132,7 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider): | ||||
|         if type == User: | ||||
|             # Get queryset of all users with consistent ordering | ||||
|             # according to the provider's settings | ||||
|             base = ( | ||||
|                 User.objects.prefetch_related("googleworkspaceprovideruser_set") | ||||
|                 .all() | ||||
|                 .exclude_anonymous() | ||||
|             ) | ||||
|             base = User.objects.all().exclude_anonymous() | ||||
|             if self.exclude_users_service_account: | ||||
|                 base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude( | ||||
|                     type=UserTypes.INTERNAL_SERVICE_ACCOUNT | ||||
| @ -146,11 +142,7 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider): | ||||
|             return base.order_by("pk") | ||||
|         if type == Group: | ||||
|             # Get queryset of all groups with consistent ordering | ||||
|             return ( | ||||
|                 Group.objects.prefetch_related("googleworkspaceprovidergroup_set") | ||||
|                 .all() | ||||
|                 .order_by("pk") | ||||
|             ) | ||||
|             return Group.objects.all().order_by("pk") | ||||
|         raise ValueError(f"Invalid type {type}") | ||||
|  | ||||
|     def google_credentials(self): | ||||
|  | ||||
| @ -29,7 +29,7 @@ class MicrosoftEntraGroupClient( | ||||
|     """Microsoft client for groups""" | ||||
|  | ||||
|     connection_type = MicrosoftEntraProviderGroup | ||||
|     connection_attr = "microsoftentraprovidergroup_set" | ||||
|     connection_type_query = "group" | ||||
|     can_discover = True | ||||
|  | ||||
|     def __init__(self, provider: MicrosoftEntraProvider) -> None: | ||||
|  | ||||
| @ -24,7 +24,7 @@ class MicrosoftEntraUserClient(MicrosoftEntraSyncClient[User, MicrosoftEntraProv | ||||
|     """Sync authentik users into microsoft entra""" | ||||
|  | ||||
|     connection_type = MicrosoftEntraProviderUser | ||||
|     connection_attr = "microsoftentraprovideruser_set" | ||||
|     connection_type_query = "user" | ||||
|     can_discover = True | ||||
|  | ||||
|     def __init__(self, provider: MicrosoftEntraProvider) -> None: | ||||
|  | ||||
| @ -121,11 +121,7 @@ class MicrosoftEntraProvider(OutgoingSyncProvider, BackchannelProvider): | ||||
|         if type == User: | ||||
|             # Get queryset of all users with consistent ordering | ||||
|             # according to the provider's settings | ||||
|             base = ( | ||||
|                 User.objects.prefetch_related("microsoftentraprovideruser_set") | ||||
|                 .all() | ||||
|                 .exclude_anonymous() | ||||
|             ) | ||||
|             base = User.objects.all().exclude_anonymous() | ||||
|             if self.exclude_users_service_account: | ||||
|                 base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude( | ||||
|                     type=UserTypes.INTERNAL_SERVICE_ACCOUNT | ||||
| @ -135,11 +131,7 @@ class MicrosoftEntraProvider(OutgoingSyncProvider, BackchannelProvider): | ||||
|             return base.order_by("pk") | ||||
|         if type == Group: | ||||
|             # Get queryset of all groups with consistent ordering | ||||
|             return ( | ||||
|                 Group.objects.prefetch_related("microsoftentraprovidergroup_set") | ||||
|                 .all() | ||||
|                 .order_by("pk") | ||||
|             ) | ||||
|             return Group.objects.all().order_by("pk") | ||||
|         raise ValueError(f"Invalid type {type}") | ||||
|  | ||||
|     def microsoft_credentials(self): | ||||
|  | ||||
| @ -19,7 +19,6 @@ TENANT_APPS = [ | ||||
|     "authentik.enterprise.providers.microsoft_entra", | ||||
|     "authentik.enterprise.providers.ssf", | ||||
|     "authentik.enterprise.stages.authenticator_endpoint_gdtc", | ||||
|     "authentik.enterprise.stages.mtls", | ||||
|     "authentik.enterprise.stages.source", | ||||
| ] | ||||
|  | ||||
|  | ||||
| @ -1,31 +0,0 @@ | ||||
| """Mutual TLS Stage API Views""" | ||||
|  | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
|  | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.enterprise.api import EnterpriseRequiredMixin | ||||
| from authentik.enterprise.stages.mtls.models import MutualTLSStage | ||||
| from authentik.flows.api.stages import StageSerializer | ||||
|  | ||||
|  | ||||
class MutualTLSStageSerializer(EnterpriseRequiredMixin, StageSerializer):
    """MutualTLSStage Serializer.

    EnterpriseRequiredMixin gates writes behind a valid enterprise license.
    """

    class Meta:
        model = MutualTLSStage
        # mTLS-specific settings on top of the fields inherited from
        # StageSerializer.
        fields = StageSerializer.Meta.fields + [
            "mode",
            "certificate_authorities",
            "cert_attribute",
            "user_attribute",
        ]
|  | ||||
|  | ||||
class MutualTLSStageViewSet(UsedByMixin, ModelViewSet):
    """MutualTLSStage Viewset (CRUD API for Mutual TLS stages)."""

    queryset = MutualTLSStage.objects.all()
    serializer_class = MutualTLSStageSerializer
    # Every model field is filterable; listing is ordered and searchable
    # by the stage name.
    filterset_fields = "__all__"
    ordering = ["name"]
    search_fields = ["name"]
| @ -1,12 +0,0 @@ | ||||
| """authentik stage app config""" | ||||
|  | ||||
| from authentik.enterprise.apps import EnterpriseConfig | ||||
|  | ||||
|  | ||||
class AuthentikEnterpriseStageMTLSConfig(EnterpriseConfig):
    """authentik MTLS stage config"""

    name = "authentik.enterprise.stages.mtls"
    # App label; permission strings reference it, e.g.
    # "authentik_stages_mtls.pass_outpost_certificate".
    label = "authentik_stages_mtls"
    verbose_name = "authentik Enterprise.Stages.MTLS"
    default = True
| @ -1,68 +0,0 @@ | ||||
| # Generated by Django 5.1.9 on 2025-05-19 18:29 | ||||
|  | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
class Migration(migrations.Migration):
    """Initial schema for the Mutual TLS stage (auto-generated by Django).

    Creates MutualTLSStage as a multi-table-inheritance child of
    authentik_flows.Stage, including the custom `pass_outpost_certificate`
    permission used to authorize outposts forwarding client certificates.
    Keep in sync with the model definition; do not hand-edit field options.
    """

    initial = True

    dependencies = [
        ("authentik_crypto", "0004_alter_certificatekeypair_name"),
        ("authentik_flows", "0027_auto_20231028_1424"),
    ]

    operations = [
        migrations.CreateModel(
            name="MutualTLSStage",
            fields=[
                (
                    # Parent link for MTI onto the base Stage table.
                    "stage_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="authentik_flows.stage",
                    ),
                ),
                (
                    "mode",
                    models.TextField(choices=[("optional", "Optional"), ("required", "Required")]),
                ),
                (
                    "cert_attribute",
                    models.TextField(
                        choices=[
                            ("subject", "Subject"),
                            ("common_name", "Common Name"),
                            ("email", "Email"),
                        ]
                    ),
                ),
                (
                    "user_attribute",
                    models.TextField(choices=[("username", "Username"), ("email", "Email")]),
                ),
                (
                    "certificate_authorities",
                    models.ManyToManyField(
                        blank=True,
                        default=None,
                        help_text="Configure certificate authorities to validate the certificate against. This option has a higher priority than the `client_certificate` option on `Brand`.",
                        to="authentik_crypto.certificatekeypair",
                    ),
                ),
            ],
            options={
                "verbose_name": "Mutual TLS Stage",
                "verbose_name_plural": "Mutual TLS Stages",
                "permissions": [
                    ("pass_outpost_certificate", "Permissions to pass Certificates for outposts.")
                ],
            },
            bases=("authentik_flows.stage",),
        ),
    ]
| @ -1,71 +0,0 @@ | ||||
| from django.db import models | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from rest_framework.serializers import Serializer | ||||
|  | ||||
| from authentik.crypto.models import CertificateKeyPair | ||||
| from authentik.flows.models import Stage | ||||
| from authentik.flows.stage import StageView | ||||
|  | ||||
|  | ||||
class TLSMode(models.TextChoices):
    """Modes the TLS Stage can operate in"""

    # Flow continues without a certificate when none is presented or valid.
    OPTIONAL = "optional"
    # Flow is denied unless a valid client certificate is presented.
    REQUIRED = "required"
|  | ||||
|  | ||||
class CertAttributes(models.TextChoices):
    """Certificate attribute used for user matching"""

    # Full certificate subject as an RFC 4514 string.
    SUBJECT = "subject"
    # CN (Common Name) component of the subject.
    COMMON_NAME = "common_name"
    # Email-address component of the subject.
    EMAIL = "email"
|  | ||||
|  | ||||
class UserAttributes(models.TextChoices):
    """User attribute for user matching"""

    # Match against User.username.
    USERNAME = "username"
    # Match against User.email.
    EMAIL = "email"
|  | ||||
|  | ||||
class MutualTLSStage(Stage):
    """Authenticate/enroll users using a client-certificate."""

    # Whether presenting a certificate is optional or required (see TLSMode).
    mode = models.TextField(choices=TLSMode.choices)

    # CAs to validate presented certificates against; when empty, the
    # brand's client certificates are used as the fallback.
    certificate_authorities = models.ManyToManyField(
        CertificateKeyPair,
        default=None,
        blank=True,
        help_text=_(
            "Configure certificate authorities to validate the certificate against. "
            "This option has a higher priority than the `client_certificate` option on `Brand`."
        ),
    )

    # Certificate attribute compared against `user_attribute` to match a
    # presented certificate to an existing user.
    cert_attribute = models.TextField(choices=CertAttributes.choices)
    user_attribute = models.TextField(choices=UserAttributes.choices)

    @property
    def view(self) -> type[StageView]:
        # Imported lazily — presumably to avoid an import cycle with the
        # stage module; confirm before hoisting to module level.
        from authentik.enterprise.stages.mtls.stage import MTLSStageView

        return MTLSStageView

    @property
    def serializer(self) -> type[Serializer]:
        # Lazy import, same rationale as `view` above.
        from authentik.enterprise.stages.mtls.api import MutualTLSStageSerializer

        return MutualTLSStageSerializer

    @property
    def component(self) -> str:
        # Identifier of the web component rendering this stage's admin form.
        return "ak-stage-mtls-form"

    class Meta:
        verbose_name = _("Mutual TLS Stage")
        verbose_name_plural = _("Mutual TLS Stages")
        permissions = [
            ("pass_outpost_certificate", _("Permissions to pass Certificates for outposts.")),
        ]
| @ -1,230 +0,0 @@ | ||||
| from binascii import hexlify | ||||
| from urllib.parse import unquote_plus | ||||
|  | ||||
| from cryptography.exceptions import InvalidSignature | ||||
| from cryptography.hazmat.primitives import hashes | ||||
| from cryptography.x509 import ( | ||||
|     Certificate, | ||||
|     NameOID, | ||||
|     ObjectIdentifier, | ||||
|     UnsupportedGeneralNameType, | ||||
|     load_pem_x509_certificate, | ||||
| ) | ||||
| from cryptography.x509.verification import PolicyBuilder, Store, VerificationError | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
|  | ||||
| from authentik.brands.models import Brand | ||||
| from authentik.core.models import User | ||||
| from authentik.crypto.models import CertificateKeyPair | ||||
| from authentik.enterprise.stages.mtls.models import ( | ||||
|     CertAttributes, | ||||
|     MutualTLSStage, | ||||
|     TLSMode, | ||||
|     UserAttributes, | ||||
| ) | ||||
| from authentik.flows.challenge import AccessDeniedChallenge | ||||
| from authentik.flows.models import FlowDesignation | ||||
| from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER | ||||
| from authentik.flows.stage import ChallengeStageView | ||||
| from authentik.root.middleware import ClientIPMiddleware | ||||
| from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS | ||||
| from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT | ||||
|  | ||||
| # All of these headers must only be accepted from "trusted" reverse proxies | ||||
| # See internal/web/proxy.go:39 | ||||
| HEADER_PROXY_FORWARDED = "X-Forwarded-Client-Cert" | ||||
| HEADER_NGINX_FORWARDED = "SSL-Client-Cert" | ||||
| HEADER_TRAEFIK_FORWARDED = "X-Forwarded-TLS-Client-Cert" | ||||
| HEADER_OUTPOST_FORWARDED = "X-Authentik-Outpost-Certificate" | ||||
|  | ||||
|  | ||||
| PLAN_CONTEXT_CERTIFICATE = "certificate" | ||||
|  | ||||
|  | ||||
| class MTLSStageView(ChallengeStageView): | ||||
|  | ||||
|     def __parse_single_cert(self, raw: str | None) -> list[Certificate]: | ||||
|         """Helper to parse a single certificate""" | ||||
|         if not raw: | ||||
|             return [] | ||||
|         try: | ||||
|             cert = load_pem_x509_certificate(unquote_plus(raw).encode()) | ||||
|             return [cert] | ||||
|         except ValueError as exc: | ||||
|             self.logger.info("Failed to parse certificate", exc=exc) | ||||
|             return [] | ||||
|  | ||||
|     def _parse_cert_xfcc(self) -> list[Certificate]: | ||||
|         """Parse certificates in the format given to us in | ||||
|         the format of the authentik router/envoy""" | ||||
|         xfcc_raw = self.request.headers.get(HEADER_PROXY_FORWARDED) | ||||
|         if not xfcc_raw: | ||||
|             return [] | ||||
|         certs = [] | ||||
|         for r_cert in xfcc_raw.split(","): | ||||
|             el = r_cert.split(";") | ||||
|             raw_cert = {k.split("=")[0]: k.split("=")[1] for k in el} | ||||
|             if "Cert" not in raw_cert: | ||||
|                 continue | ||||
|             certs.extend(self.__parse_single_cert(raw_cert["Cert"])) | ||||
|         return certs | ||||
|  | ||||
|     def _parse_cert_nginx(self) -> list[Certificate]: | ||||
|         """Parse certificates in the format nginx-ingress gives to us""" | ||||
|         sslcc_raw = self.request.headers.get(HEADER_NGINX_FORWARDED) | ||||
|         return self.__parse_single_cert(sslcc_raw) | ||||
|  | ||||
|     def _parse_cert_traefik(self) -> list[Certificate]: | ||||
|         """Parse certificates in the format traefik gives to us""" | ||||
|         ftcc_raw = self.request.headers.get(HEADER_TRAEFIK_FORWARDED) | ||||
|         return self.__parse_single_cert(ftcc_raw) | ||||
|  | ||||
|     def _parse_cert_outpost(self) -> list[Certificate]: | ||||
|         """Parse certificates in the format outposts give to us. Also authenticates | ||||
|         the outpost to ensure it has the permission to do so""" | ||||
|         user = ClientIPMiddleware.get_outpost_user(self.request) | ||||
|         if not user: | ||||
|             return [] | ||||
|         if not user.has_perm( | ||||
|             "pass_outpost_certificate", self.executor.current_stage | ||||
|         ) and not user.has_perm("authentik_stages_mtls.pass_outpost_certificate"): | ||||
|             return [] | ||||
|         outpost_raw = self.request.headers.get(HEADER_OUTPOST_FORWARDED) | ||||
|         return self.__parse_single_cert(outpost_raw) | ||||
|  | ||||
|     def get_authorities(self) -> list[CertificateKeyPair] | None: | ||||
|         # We can't access `certificate_authorities` on `self.executor.current_stage`, as that would | ||||
|         # load the certificate into the directly referenced foreign key, which we have to pickle | ||||
|         # as part of the flow plan, and cryptography certs can't be pickled | ||||
|         stage: MutualTLSStage = ( | ||||
|             MutualTLSStage.objects.filter(pk=self.executor.current_stage.pk) | ||||
|             .prefetch_related("certificate_authorities") | ||||
|             .first() | ||||
|         ) | ||||
|         if stage.certificate_authorities.exists(): | ||||
|             return stage.certificate_authorities.order_by("name") | ||||
|         brand: Brand = self.request.brand | ||||
|         if brand.client_certificates.exists(): | ||||
|             return brand.client_certificates.order_by("name") | ||||
|         return None | ||||
|  | ||||
|     def validate_cert(self, authorities: list[CertificateKeyPair], certs: list[Certificate]): | ||||
|         authorities_cert = [x.certificate for x in authorities] | ||||
|         for _cert in certs: | ||||
|             try: | ||||
|                 PolicyBuilder().store(Store(authorities_cert)).build_client_verifier().verify( | ||||
|                     _cert, [] | ||||
|                 ) | ||||
|                 return _cert | ||||
|             except ( | ||||
|                 InvalidSignature, | ||||
|                 TypeError, | ||||
|                 ValueError, | ||||
|                 VerificationError, | ||||
|                 UnsupportedGeneralNameType, | ||||
|             ) as exc: | ||||
|                 self.logger.warning("Discarding invalid certificate", cert=_cert, exc=exc) | ||||
|                 continue | ||||
|         return None | ||||
|  | ||||
|     def check_if_user(self, cert: Certificate): | ||||
|         stage: MutualTLSStage = self.executor.current_stage | ||||
|         cert_attr = None | ||||
|         user_attr = None | ||||
|         match stage.cert_attribute: | ||||
|             case CertAttributes.SUBJECT: | ||||
|                 cert_attr = cert.subject.rfc4514_string() | ||||
|             case CertAttributes.COMMON_NAME: | ||||
|                 cert_attr = self.get_cert_attribute(cert, NameOID.COMMON_NAME) | ||||
|             case CertAttributes.EMAIL: | ||||
|                 cert_attr = self.get_cert_attribute(cert, NameOID.EMAIL_ADDRESS) | ||||
|         match stage.user_attribute: | ||||
|             case UserAttributes.USERNAME: | ||||
|                 user_attr = "username" | ||||
|             case UserAttributes.EMAIL: | ||||
|                 user_attr = "email" | ||||
|         if not user_attr or not cert_attr: | ||||
|             return None | ||||
|         return User.objects.filter(**{user_attr: cert_attr}).first() | ||||
|  | ||||
|     def _cert_to_dict(self, cert: Certificate) -> dict: | ||||
|         """Represent a certificate in a dictionary, as certificate objects cannot be pickled""" | ||||
|         return { | ||||
|             "serial_number": str(cert.serial_number), | ||||
|             "subject": cert.subject.rfc4514_string(), | ||||
|             "issuer": cert.issuer.rfc4514_string(), | ||||
|             "fingerprint_sha256": hexlify(cert.fingerprint(hashes.SHA256()), ":").decode("utf-8"), | ||||
|             "fingerprint_sha1": hexlify(cert.fingerprint(hashes.SHA1()), ":").decode(  # nosec | ||||
|                 "utf-8" | ||||
|             ), | ||||
|         } | ||||
|  | ||||
|     def auth_user(self, user: User, cert: Certificate): | ||||
|         self.executor.plan.context[PLAN_CONTEXT_PENDING_USER] = user | ||||
|         self.executor.plan.context.setdefault(PLAN_CONTEXT_METHOD, "mtls") | ||||
|         self.executor.plan.context.setdefault(PLAN_CONTEXT_METHOD_ARGS, {}) | ||||
|         self.executor.plan.context[PLAN_CONTEXT_METHOD_ARGS].update( | ||||
|             {"certificate": self._cert_to_dict(cert)} | ||||
|         ) | ||||
|  | ||||
|     def enroll_prepare_user(self, cert: Certificate): | ||||
|         self.executor.plan.context.setdefault(PLAN_CONTEXT_PROMPT, {}) | ||||
|         self.executor.plan.context[PLAN_CONTEXT_PROMPT].update( | ||||
|             { | ||||
|                 "email": self.get_cert_attribute(cert, NameOID.EMAIL_ADDRESS), | ||||
|                 "name": self.get_cert_attribute(cert, NameOID.COMMON_NAME), | ||||
|             } | ||||
|         ) | ||||
|         self.executor.plan.context[PLAN_CONTEXT_CERTIFICATE] = self._cert_to_dict(cert) | ||||
|  | ||||
|     def get_cert_attribute(self, cert: Certificate, oid: ObjectIdentifier) -> str | None: | ||||
|         attr = cert.subject.get_attributes_for_oid(oid) | ||||
|         if len(attr) < 1: | ||||
|             return None | ||||
|         return str(attr[0].value) | ||||
|  | ||||
|     def dispatch(self, request, *args, **kwargs): | ||||
|         stage: MutualTLSStage = self.executor.current_stage | ||||
|         certs = [ | ||||
|             *self._parse_cert_xfcc(), | ||||
|             *self._parse_cert_nginx(), | ||||
|             *self._parse_cert_traefik(), | ||||
|             *self._parse_cert_outpost(), | ||||
|         ] | ||||
|         authorities = self.get_authorities() | ||||
|         if not authorities: | ||||
|             self.logger.warning("No Certificate authority found") | ||||
|             if stage.mode == TLSMode.OPTIONAL: | ||||
|                 return self.executor.stage_ok() | ||||
|             if stage.mode == TLSMode.REQUIRED: | ||||
|                 return super().dispatch(request, *args, **kwargs) | ||||
|         cert = self.validate_cert(authorities, certs) | ||||
|         if not cert and stage.mode == TLSMode.REQUIRED: | ||||
|             self.logger.warning("Client certificate required but no certificates given") | ||||
|             return super().dispatch( | ||||
|                 request, | ||||
|                 *args, | ||||
|                 error_message=_("Certificate required but no certificate was given."), | ||||
|                 **kwargs, | ||||
|             ) | ||||
|         if not cert and stage.mode == TLSMode.OPTIONAL: | ||||
|             self.logger.info("No certificate given, continuing") | ||||
|             return self.executor.stage_ok() | ||||
|         existing_user = self.check_if_user(cert) | ||||
|         if self.executor.flow.designation == FlowDesignation.ENROLLMENT: | ||||
|             self.enroll_prepare_user(cert) | ||||
|         elif existing_user: | ||||
|             self.auth_user(existing_user, cert) | ||||
|         else: | ||||
|             return super().dispatch( | ||||
|                 request, *args, error_message=_("No user found for certificate."), **kwargs | ||||
|             ) | ||||
|         return self.executor.stage_ok() | ||||
|  | ||||
|     def get_challenge(self, *args, error_message: str | None = None, **kwargs): | ||||
|         return AccessDeniedChallenge( | ||||
|             data={ | ||||
|                 "component": "ak-stage-access-denied", | ||||
|                 "error_message": str(error_message or "Unknown error"), | ||||
|             } | ||||
|         ) | ||||
| @ -1,31 +0,0 @@ | ||||
| -----BEGIN CERTIFICATE----- | ||||
| MIIFXDCCA0SgAwIBAgIUBmV7zREyC1SPr72/75/L9zpwV18wDQYJKoZIhvcNAQEL | ||||
| BQAwRjEaMBgGA1UEAwwRYXV0aGVudGlrIFRlc3QgQ0ExEjAQBgNVBAoMCWF1dGhl | ||||
| bnRpazEUMBIGA1UECwwLU2VsZi1zaWduZWQwHhcNMjUwNDI3MTgzMDUwWhcNMzUw | ||||
| MzA3MTgzMDUwWjBGMRowGAYDVQQDDBFhdXRoZW50aWsgVGVzdCBDQTESMBAGA1UE | ||||
| CgwJYXV0aGVudGlrMRQwEgYDVQQLDAtTZWxmLXNpZ25lZDCCAiIwDQYJKoZIhvcN | ||||
| AQEBBQADggIPADCCAgoCggIBAMc0NxZj7j1mPu0aRToo8oMPdC3T99xgxnqdr18x | ||||
| LV4pWyi/YLghgZHqNQY2xNP6JIlSeUZD6KFUYT2sPL4Av/zSg5zO8bl+/lf7ckje | ||||
| O1/Bt5A8xtL0CpmpMDGiI6ibdDElaywM6AohisbxrV29pygSKGq2wugF/urqGtE+ | ||||
| 5z4y5Kt6qMdKkd0iXT+WagbQTIUlykFKgB0+qqTLzDl01lVDa/DoLl8Hqp45mVx2 | ||||
| pqrGsSa3TCErLIv9hUlZklF7A8UV4ZB4JL20UKcP8dKzQClviNie17tpsUpOuy3A | ||||
| SQ6+guWTHTLJNCSdLn1xIqc5q+f5wd2dIDf8zXCTHj+Xp0bJE3Vgaq5R31K9+b+1 | ||||
| 2dDWz1KcNJaLEnw2+b0O8M64wTMLxhqOv7QfLUr6Pmg1ZymghjLcZ6bnU9e31Vza | ||||
| hlPKhxjqYQUC4Kq+oaYF6qdUeJy+dsYf0iDv5tTC+eReZDWIjxTPrNpwA773ZwT7 | ||||
| WVmL7ULGpuP2g9rNvFBcZiN+i6d7CUoN+jd/iRdo79lrI0dfXiyy4bYgW/2HeZfF | ||||
| HaOsc1xsoqnJdWbWkX/ooyaCjAfm07kS3HiOzz4q3QW4wgGrwV8lEraLPxYYeOQu | ||||
| YcGMOM8NfnVkjc8gmyXUxedCje5Vz/Tu5fKrQEInnCmXxVsWbwr/LzEjMKAM/ivY | ||||
| 0TXxAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0G | ||||
| A1UdDgQWBBTa+Ns6QzqlNvnTGszkouQQtZnVJDANBgkqhkiG9w0BAQsFAAOCAgEA | ||||
| NpJEDMXjuEIzSzafkxSshvjnt5sMYmzmvjNoRlkxgN2YcWvPoxbalGAYzcpyggT2 | ||||
| 6xZY8R4tvB1oNTCArqwf860kkofUoJCr88D/pU3Cv4JhjCWs4pmXTsvSqlBSlJbo | ||||
| +jPBZwbn6it/6jcit6Be3rW2PtHe8tASd9Lf8/2r1ZvupXwPzcR84R4Z10ve2lqV | ||||
| xxcWlMmBh51CaYI0b1/WTe9Ua+wgkCVkxbf9zNcDQXjxw2ICWK+nR/4ld4nmqVm2 | ||||
| C7nhvXwU8FAHl7ZgR2Z3PLrwPuhd+kd6NXQqNkS9A+n+1vSRLbRjmV8pwIPpdPEq | ||||
| nslUAGJJBHDUBArxC3gOJSB+WtmaCfzDu2gepMf9Ng1H2ZhwSF/FH3v3fsJqZkzz | ||||
| NBstT9KuNGQRYiCmAPJaoVAc9BoLa+BFML1govtWtpdmbFk8PZEcuUsP7iAZqFF1 | ||||
| uuldPyZ8huGpQSR6Oq2bILRHowfGY0npTZAyxg0Vs8UMy1HTwNOp9OuRtArMZmsJ | ||||
| jFIx1QzRf9S1i6bYpOzOudoXj4ARkS1KmVExGjJFcIT0xlFSSERie2fEKSeEYOyG | ||||
| G+PA2qRt/F51FGOMm1ZscjPXqk2kt3C4BFbz6Vvxsq7D3lmhvFLn4jVA8+OidsM0 | ||||
| YUrVMtWET/RkjEIbADbgRXxNUNo+jtQZDU9C1IiAdfk= | ||||
| -----END CERTIFICATE----- | ||||
| @ -1,31 +0,0 @@ | ||||
| -----BEGIN CERTIFICATE----- | ||||
| MIIFWTCCA0GgAwIBAgIUDEnKCSmIXG/akySGes7bhOGrN/8wDQYJKoZIhvcNAQEL | ||||
| BQAwRjEaMBgGA1UEAwwRYXV0aGVudGlrIFRlc3QgQ0ExEjAQBgNVBAoMCWF1dGhl | ||||
| bnRpazEUMBIGA1UECwwLU2VsZi1zaWduZWQwHhcNMjUwNTE5MTIzODQ2WhcNMjYw | ||||
| NTE1MTIzODQ2WjARMQ8wDQYDVQQDDAZjbGllbnQwggIiMA0GCSqGSIb3DQEBAQUA | ||||
| A4ICDwAwggIKAoICAQCkPkS1V6l0gj0ulxMznkxkgrw4p9Tjd8teSsGZt02A2Eo6 | ||||
| 7D8FbJ7pp3d5fYW/TWuEKVBLWTID6rijW5EGcdgTM5Jxf/QR+aZTEK6umQxUd4yO | ||||
| mOtp+xVS3KlcsSej2dFpeE5h5VkZizHpvh5xkoAP8W5VtQLOVF0hIeumHnJmaeLj | ||||
| +mhK9PBFpO7k9SFrYYhd/uLrYbIdANihbIO2Q74rNEJHewhFNM7oNSjjEWzRd/7S | ||||
| qNdQij9JGrVG7u8YJJscEQHqyHMYFVCEMjxmsge5BO6Vx5OWmUE3wXPzb5TbyTS4 | ||||
| +yg88g9rYTUXrzz+poCyKpaur45qBsdw35lJ8nq69VJj2xJLGQDwoTgGSXRuPciC | ||||
| 3OilQI+Ma+j8qQGJxJ8WJxISlf1cuhp+V4ZUd1lawlM5hAXyXmHRlH4pun4y+g7O | ||||
| O34+fE3pK25JjVCicMT/rC2A/sb95j/fHTzzJpbB70U0I50maTcIsOkyw6aiF//E | ||||
| 0ShTDz14x22SCMolUc6hxTDZvBB6yrcJHd7d9CCnFH2Sgo13QrtNJ/atXgm13HGh | ||||
| wBzRwK38XUGl/J4pJaxAupTVCPriStUM3m0EYHNelRRUE91pbyeGT0rvOuv00uLw | ||||
| Rj7K7hJZR8avTKWmKrVBVpq+gSojGW1DwBS0NiDNkZs0d/IjB1wkzczEgdZjXwID | ||||
| AQABo3QwcjAfBgNVHSMEGDAWgBTa+Ns6QzqlNvnTGszkouQQtZnVJDAdBgNVHSUE | ||||
| FjAUBggrBgEFBQcDAgYIKwYBBQUHAwEwEQYDVR0RBAowCIIGY2xpZW50MB0GA1Ud | ||||
| DgQWBBT1xg5sXkypRBwvCxBuyfoanaiZ5jANBgkqhkiG9w0BAQsFAAOCAgEAvUAz | ||||
| YwIjxY/0KHZDU8owdILVqKChzfLcy9OHNPyEI3TSOI8X6gNtBO+HE6r8aWGcC9vw | ||||
| zzeIsNQ3UEjvRWi2r+vUVbiPTbFdZboNDSZv6ZmGHxwd85VsjXRGoXV6koCT/9zi | ||||
| 9/lCM1DwqwYSwBphMJdRVFRUMluSYk1oHflGeA18xgGuts4eFivJwhabGm1AdVVQ | ||||
| /CYvqCuTxd/DCzWZBdyxYpDru64i/kyeJCt1pThKEFDWmpumFdBI4CxJ0OhxVSGp | ||||
| dOXzK+Y6ULepxCvi6/OpSog52jQ6PnNd1ghiYtq7yO1T4GQz65M1vtHHVvQ3gfBE | ||||
| AuKYQp6io7ypitRx+LpjsBQenyP4FFGfrq7pm90nLluOBOArfSdF0N+CP2wo/YFV | ||||
| 9BGf89OtvRi3BXCm2NXkE/Sc4We26tY8x7xNLOmNs8YOT0O3r/EQ690W9GIwRMx0 | ||||
| m0r/RXWn5V3o4Jib9r8eH9NzaDstD8g9dECcGfM4fHoM/DAGFaRrNcjMsS1APP3L | ||||
| jp7+BfBSXtrz9V6rVJ3CBLXlLK0AuSm7bqd1MJsGA9uMLpsVZIUA+KawcmPGdPU+ | ||||
| NxdpBCtzyurQSUyaTLtVqSeP35gMAwaNzUDph8Uh+vHz+kRwgXS19OQvTaud5LJu | ||||
| nQe4JNS+u5e2VDEBWUxt8NTpu6eShDN0iIEHtxA= | ||||
| -----END CERTIFICATE----- | ||||
| @ -1,228 +0,0 @@ | ||||
| from unittest.mock import MagicMock, patch | ||||
| from urllib.parse import quote_plus | ||||
|  | ||||
| from django.urls import reverse | ||||
| from guardian.shortcuts import assign_perm | ||||
|  | ||||
| from authentik.core.models import User | ||||
| from authentik.core.tests.utils import ( | ||||
|     create_test_brand, | ||||
|     create_test_cert, | ||||
|     create_test_flow, | ||||
|     create_test_user, | ||||
| ) | ||||
| from authentik.crypto.models import CertificateKeyPair | ||||
| from authentik.enterprise.stages.mtls.models import ( | ||||
|     CertAttributes, | ||||
|     MutualTLSStage, | ||||
|     TLSMode, | ||||
|     UserAttributes, | ||||
| ) | ||||
| from authentik.enterprise.stages.mtls.stage import PLAN_CONTEXT_CERTIFICATE | ||||
| from authentik.flows.models import FlowDesignation, FlowStageBinding | ||||
| from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER | ||||
| from authentik.flows.tests import FlowTestCase | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.lib.tests.utils import load_fixture | ||||
| from authentik.outposts.models import Outpost, OutpostType | ||||
| from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT | ||||
|  | ||||
|  | ||||
| class MTLSStageTests(FlowTestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         super().setUp() | ||||
|         self.flow = create_test_flow(FlowDesignation.AUTHENTICATION) | ||||
|         self.ca = CertificateKeyPair.objects.create( | ||||
|             name=generate_id(), | ||||
|             certificate_data=load_fixture("fixtures/ca.pem"), | ||||
|         ) | ||||
|         self.stage = MutualTLSStage.objects.create( | ||||
|             name=generate_id(), | ||||
|             mode=TLSMode.REQUIRED, | ||||
|             cert_attribute=CertAttributes.COMMON_NAME, | ||||
|             user_attribute=UserAttributes.USERNAME, | ||||
|         ) | ||||
|  | ||||
|         self.stage.certificate_authorities.add(self.ca) | ||||
|         self.binding = FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=0) | ||||
|         self.client_cert = load_fixture("fixtures/cert_client.pem") | ||||
|         # User matching the certificate | ||||
|         User.objects.filter(username="client").delete() | ||||
|         self.cert_user = create_test_user(username="client") | ||||
|  | ||||
|     def test_parse_xfcc(self): | ||||
|         """Test authentik Proxy/Envoy's XFCC format""" | ||||
|         with self.assertFlowFinishes() as plan: | ||||
|             res = self.client.get( | ||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|                 headers={"X-Forwarded-Client-Cert": f"Cert={quote_plus(self.client_cert)}"}, | ||||
|             ) | ||||
|             self.assertEqual(res.status_code, 200) | ||||
|             self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) | ||||
|         self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user) | ||||
|  | ||||
|     def test_parse_nginx(self): | ||||
|         """Test nginx's format""" | ||||
|         with self.assertFlowFinishes() as plan: | ||||
|             res = self.client.get( | ||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|                 headers={"SSL-Client-Cert": quote_plus(self.client_cert)}, | ||||
|             ) | ||||
|             self.assertEqual(res.status_code, 200) | ||||
|             self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) | ||||
|         self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user) | ||||
|  | ||||
|     def test_parse_traefik(self): | ||||
|         """Test traefik's format""" | ||||
|         with self.assertFlowFinishes() as plan: | ||||
|             res = self.client.get( | ||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|                 headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)}, | ||||
|             ) | ||||
|             self.assertEqual(res.status_code, 200) | ||||
|             self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) | ||||
|         self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user) | ||||
|  | ||||
|     def test_parse_outpost_object(self): | ||||
|         """Test outposts's format""" | ||||
|         outpost = Outpost.objects.create(name=generate_id(), type=OutpostType.PROXY) | ||||
|         assign_perm("pass_outpost_certificate", outpost.user, self.stage) | ||||
|         with patch( | ||||
|             "authentik.root.middleware.ClientIPMiddleware.get_outpost_user", | ||||
|             MagicMock(return_value=outpost.user), | ||||
|         ): | ||||
|             with self.assertFlowFinishes() as plan: | ||||
|                 res = self.client.get( | ||||
|                     reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|                     headers={"X-Authentik-Outpost-Certificate": quote_plus(self.client_cert)}, | ||||
|                 ) | ||||
|                 self.assertEqual(res.status_code, 200) | ||||
|                 self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) | ||||
|             self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user) | ||||
|  | ||||
|     def test_parse_outpost_global(self): | ||||
|         """Test outposts's format""" | ||||
|         outpost = Outpost.objects.create(name=generate_id(), type=OutpostType.PROXY) | ||||
|         assign_perm("authentik_stages_mtls.pass_outpost_certificate", outpost.user) | ||||
|         with patch( | ||||
|             "authentik.root.middleware.ClientIPMiddleware.get_outpost_user", | ||||
|             MagicMock(return_value=outpost.user), | ||||
|         ): | ||||
|             with self.assertFlowFinishes() as plan: | ||||
|                 res = self.client.get( | ||||
|                     reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|                     headers={"X-Authentik-Outpost-Certificate": quote_plus(self.client_cert)}, | ||||
|                 ) | ||||
|                 self.assertEqual(res.status_code, 200) | ||||
|                 self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) | ||||
|             self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user) | ||||
|  | ||||
|     def test_parse_outpost_no_perm(self): | ||||
|         """Test outposts's format""" | ||||
|         outpost = Outpost.objects.create(name=generate_id(), type=OutpostType.PROXY) | ||||
|         with patch( | ||||
|             "authentik.root.middleware.ClientIPMiddleware.get_outpost_user", | ||||
|             MagicMock(return_value=outpost.user), | ||||
|         ): | ||||
|             res = self.client.get( | ||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|                 headers={"X-Authentik-Outpost-Certificate": quote_plus(self.client_cert)}, | ||||
|             ) | ||||
|             self.assertEqual(res.status_code, 200) | ||||
|             self.assertStageResponse(res, self.flow, component="ak-stage-access-denied") | ||||
|  | ||||
|     def test_invalid_cert(self): | ||||
|         """Test invalid certificate""" | ||||
|         cert = create_test_cert() | ||||
|         with self.assertFlowFinishes() as plan: | ||||
|             res = self.client.get( | ||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|                 headers={"X-Forwarded-TLS-Client-Cert": quote_plus(cert.certificate_data)}, | ||||
|             ) | ||||
|             self.assertEqual(res.status_code, 200) | ||||
|             self.assertStageResponse(res, self.flow, component="ak-stage-access-denied") | ||||
|         self.assertNotIn(PLAN_CONTEXT_PENDING_USER, plan().context) | ||||
|  | ||||
|     def test_auth_no_user(self): | ||||
|         """Test auth with no user""" | ||||
|         User.objects.filter(username="client").delete() | ||||
|         res = self.client.get( | ||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|             headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)}, | ||||
|         ) | ||||
|         self.assertEqual(res.status_code, 200) | ||||
|         self.assertStageResponse(res, self.flow, component="ak-stage-access-denied") | ||||
|  | ||||
|     def test_brand_ca(self): | ||||
|         """Test using a CA from the brand""" | ||||
|         self.stage.certificate_authorities.clear() | ||||
|  | ||||
|         brand = create_test_brand() | ||||
|         brand.client_certificates.add(self.ca) | ||||
|         with self.assertFlowFinishes() as plan: | ||||
|             res = self.client.get( | ||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|                 headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)}, | ||||
|             ) | ||||
|             self.assertEqual(res.status_code, 200) | ||||
|             self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) | ||||
|         self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user) | ||||
|  | ||||
|     def test_no_ca_optional(self): | ||||
|         """Test using no CA Set""" | ||||
|         self.stage.mode = TLSMode.OPTIONAL | ||||
|         self.stage.certificate_authorities.clear() | ||||
|         self.stage.save() | ||||
|         res = self.client.get( | ||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|             headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)}, | ||||
|         ) | ||||
|         self.assertEqual(res.status_code, 200) | ||||
|         self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) | ||||
|  | ||||
|     def test_no_ca_required(self): | ||||
|         """Test using no CA Set""" | ||||
|         self.stage.certificate_authorities.clear() | ||||
|         self.stage.save() | ||||
|         res = self.client.get( | ||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|             headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)}, | ||||
|         ) | ||||
|         self.assertEqual(res.status_code, 200) | ||||
|         self.assertStageResponse(res, self.flow, component="ak-stage-access-denied") | ||||
|  | ||||
|     def test_no_cert_optional(self): | ||||
|         """Test using no cert Set""" | ||||
|         self.stage.mode = TLSMode.OPTIONAL | ||||
|         self.stage.save() | ||||
|         res = self.client.get( | ||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|         ) | ||||
|         self.assertEqual(res.status_code, 200) | ||||
|         self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) | ||||
|  | ||||
|     def test_enroll(self): | ||||
|         """Test Enrollment flow""" | ||||
|         self.flow.designation = FlowDesignation.ENROLLMENT | ||||
|         self.flow.save() | ||||
|         with self.assertFlowFinishes() as plan: | ||||
|             res = self.client.get( | ||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), | ||||
|                 headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)}, | ||||
|             ) | ||||
|             self.assertEqual(res.status_code, 200) | ||||
|             self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) | ||||
|         self.assertEqual(plan().context[PLAN_CONTEXT_PROMPT], {"email": None, "name": "client"}) | ||||
|         self.assertEqual( | ||||
|             plan().context[PLAN_CONTEXT_CERTIFICATE], | ||||
|             { | ||||
|                 "fingerprint_sha1": "52:39:ca:1e:3a:1f:78:3a:9f:26:3b:c2:84:99:48:68:99:99:81:8a", | ||||
|                 "fingerprint_sha256": ( | ||||
|                     "c1:07:8b:7c:e9:02:57:87:1e:92:e5:81:83:21:bc:92:c7:47:65:e3:97:fb:05:97:6f:36:9e:b5:31:77:98:b7" | ||||
|                 ), | ||||
|                 "issuer": "OU=Self-signed,O=authentik,CN=authentik Test CA", | ||||
|                 "serial_number": "70153443448884702681996102271549704759327537151", | ||||
|                 "subject": "CN=client", | ||||
|             }, | ||||
|         ) | ||||
| @ -1,5 +0,0 @@ | ||||
| """API URLs""" | ||||
|  | ||||
| from authentik.enterprise.stages.mtls.api import MutualTLSStageViewSet | ||||
|  | ||||
| api_urlpatterns = [("stages/mtls", MutualTLSStageViewSet)] | ||||
| @ -8,7 +8,6 @@ from django.test import TestCase | ||||
| from django.utils.timezone import now | ||||
| from rest_framework.exceptions import ValidationError | ||||
|  | ||||
| from authentik.core.models import User | ||||
| from authentik.enterprise.license import LicenseKey | ||||
| from authentik.enterprise.models import ( | ||||
|     THRESHOLD_READ_ONLY_WEEKS, | ||||
| @ -72,9 +71,9 @@ class TestEnterpriseLicense(TestCase): | ||||
|     ) | ||||
|     def test_valid_multiple(self): | ||||
|         """Check license verification""" | ||||
|         lic = License.objects.create(key=generate_id(), expiry=expiry_valid) | ||||
|         lic = License.objects.create(key=generate_id()) | ||||
|         self.assertTrue(lic.status.status().is_valid) | ||||
|         lic2 = License.objects.create(key=generate_id(), expiry=expiry_valid) | ||||
|         lic2 = License.objects.create(key=generate_id()) | ||||
|         self.assertTrue(lic2.status.status().is_valid) | ||||
|         total = LicenseKey.get_total() | ||||
|         self.assertEqual(total.internal_users, 200) | ||||
| @ -233,9 +232,7 @@ class TestEnterpriseLicense(TestCase): | ||||
|     ) | ||||
|     def test_expiry_expired(self): | ||||
|         """Check license verification""" | ||||
|         User.objects.all().delete() | ||||
|         License.objects.all().delete() | ||||
|         License.objects.create(key=generate_id(), expiry=expiry_expired) | ||||
|         License.objects.create(key=generate_id()) | ||||
|         self.assertEqual(LicenseKey.get_total().summary().status, LicenseUsageStatus.EXPIRED) | ||||
|  | ||||
|     @patch( | ||||
|  | ||||
| @ -57,7 +57,7 @@ class LogEventSerializer(PassiveSerializer): | ||||
|  | ||||
|  | ||||
| @contextmanager | ||||
| def capture_logs(log_default_output=True) -> Generator[list[LogEvent]]: | ||||
| def capture_logs(log_default_output=True) -> Generator[list[LogEvent], None, None]: | ||||
|     """Capture log entries created""" | ||||
|     logs = [] | ||||
|     cap = LogCapture() | ||||
|  | ||||
| @ -1,18 +0,0 @@ | ||||
| # Generated by Django 5.1.9 on 2025-05-27 12:52 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_flows", "0027_auto_20231028_1424"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="flowtoken", | ||||
|             name="revoke_on_execution", | ||||
|             field=models.BooleanField(default=True), | ||||
|         ), | ||||
|     ] | ||||
| @ -303,10 +303,9 @@ class FlowToken(Token): | ||||
|  | ||||
|     flow = models.ForeignKey(Flow, on_delete=models.CASCADE) | ||||
|     _plan = models.TextField() | ||||
|     revoke_on_execution = models.BooleanField(default=True) | ||||
|  | ||||
|     @staticmethod | ||||
|     def pickle(plan: "FlowPlan") -> str: | ||||
|     def pickle(plan) -> str: | ||||
|         """Pickle into string""" | ||||
|         data = dumps(plan) | ||||
|         return b64encode(data).decode() | ||||
|  | ||||
| @ -99,10 +99,9 @@ class ChallengeStageView(StageView): | ||||
|             self.logger.debug("Got StageInvalidException", exc=exc) | ||||
|             return self.executor.stage_invalid() | ||||
|         if not challenge.is_valid(): | ||||
|             self.logger.error( | ||||
|             self.logger.warning( | ||||
|                 "f(ch): Invalid challenge", | ||||
|                 errors=challenge.errors, | ||||
|                 challenge=challenge.data, | ||||
|             ) | ||||
|         return HttpChallengeResponse(challenge) | ||||
|  | ||||
|  | ||||
| @ -15,7 +15,6 @@ | ||||
|         {% endblock %} | ||||
|         <link rel="stylesheet" type="text/css" href="{% static 'dist/sfe/bootstrap.min.css' %}"> | ||||
|         <meta name="sentry-trace" content="{{ sentry_trace }}" /> | ||||
|         <link rel="prefetch" href="{{ flow_background_url }}" /> | ||||
|         {% include "base/header_js.html" %} | ||||
|         <style> | ||||
|           html, | ||||
| @ -23,7 +22,7 @@ | ||||
|             height: 100%; | ||||
|           } | ||||
|           body { | ||||
|             background-image: url("{{ flow_background_url }}"); | ||||
|             background-image: url("{{ flow.background_url }}"); | ||||
|             background-repeat: no-repeat; | ||||
|             background-size: cover; | ||||
|           } | ||||
|  | ||||
| @ -5,9 +5,9 @@ | ||||
|  | ||||
| {% block head_before %} | ||||
| {{ block.super }} | ||||
| <link rel="prefetch" href="{{ flow_background_url }}" /> | ||||
| <link rel="prefetch" href="{{ flow.background_url }}" /> | ||||
| {% if flow.compatibility_mode and not inspector %} | ||||
| <script>ShadyDOM = { force: true };</script> | ||||
| <script>ShadyDOM = { force: !navigator.webdriver };</script> | ||||
| {% endif %} | ||||
| {% include "base/header_js.html" %} | ||||
| <script> | ||||
| @ -21,7 +21,7 @@ window.authentik.flow = { | ||||
| <script src="{% versioned_script 'dist/flow/FlowInterface-%v.js' %}" type="module"></script> | ||||
| <style> | ||||
| :root { | ||||
|     --ak-flow-background: url("{{ flow_background_url }}"); | ||||
|     --ak-flow-background: url("{{ flow.background_url }}"); | ||||
| } | ||||
| </style> | ||||
| {% endblock %} | ||||
|  | ||||
| @ -1,10 +1,7 @@ | ||||
| """Test helpers""" | ||||
|  | ||||
| from collections.abc import Callable, Generator | ||||
| from contextlib import contextmanager | ||||
| from json import loads | ||||
| from typing import Any | ||||
| from unittest.mock import MagicMock, patch | ||||
|  | ||||
| from django.http.response import HttpResponse | ||||
| from django.urls.base import reverse | ||||
| @ -12,8 +9,6 @@ from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import User | ||||
| from authentik.flows.models import Flow | ||||
| from authentik.flows.planner import FlowPlan | ||||
| from authentik.flows.views.executor import SESSION_KEY_PLAN | ||||
|  | ||||
|  | ||||
| class FlowTestCase(APITestCase): | ||||
| @ -49,12 +44,3 @@ class FlowTestCase(APITestCase): | ||||
|     def assertStageRedirects(self, response: HttpResponse, to: str) -> dict[str, Any]: | ||||
|         """Wrapper around assertStageResponse that checks for a redirect""" | ||||
|         return self.assertStageResponse(response, component="xak-flow-redirect", to=to) | ||||
|  | ||||
|     @contextmanager | ||||
|     def assertFlowFinishes(self) -> Generator[Callable[[], FlowPlan]]: | ||||
|         """Capture the flow plan before the flow finishes and return it""" | ||||
|         try: | ||||
|             with patch("authentik.flows.views.executor.FlowExecutorView.cancel", MagicMock()): | ||||
|                 yield lambda: self.client.session.get(SESSION_KEY_PLAN) | ||||
|         finally: | ||||
|             pass | ||||
|  | ||||
| @ -146,8 +146,7 @@ class FlowExecutorView(APIView): | ||||
|         except (AttributeError, EOFError, ImportError, IndexError) as exc: | ||||
|             LOGGER.warning("f(exec): Failed to restore token plan", exc=exc) | ||||
|         finally: | ||||
|             if token.revoke_on_execution: | ||||
|                 token.delete() | ||||
|             token.delete() | ||||
|         if not isinstance(plan, FlowPlan): | ||||
|             return None | ||||
|         plan.context[PLAN_CONTEXT_IS_RESTORED] = token | ||||
|  | ||||
| @ -13,9 +13,7 @@ class FlowInterfaceView(InterfaceView): | ||||
|     """Flow interface""" | ||||
|  | ||||
|     def get_context_data(self, **kwargs: Any) -> dict[str, Any]: | ||||
|         flow = get_object_or_404(Flow, slug=self.kwargs.get("flow_slug")) | ||||
|         kwargs["flow"] = flow | ||||
|         kwargs["flow_background_url"] = flow.background_url(self.request) | ||||
|         kwargs["flow"] = get_object_or_404(Flow, slug=self.kwargs.get("flow_slug")) | ||||
|         kwargs["inspector"] = "inspector" in self.request.GET | ||||
|         return super().get_context_data(**kwargs) | ||||
|  | ||||
|  | ||||
| @ -363,9 +363,6 @@ def django_db_config(config: ConfigLoader | None = None) -> dict: | ||||
|         pool_options = config.get_dict_from_b64_json("postgresql.pool_options", True) | ||||
|         if not pool_options: | ||||
|             pool_options = True | ||||
|     # FIXME: Temporarily force pool to be deactivated. | ||||
|     # See https://github.com/goauthentik/authentik/issues/14320 | ||||
|     pool_options = False | ||||
|  | ||||
|     db = { | ||||
|         "default": { | ||||
|  | ||||
| @ -81,6 +81,7 @@ debugger: false | ||||
|  | ||||
| log_level: info | ||||
|  | ||||
| session_storage: cache | ||||
| sessions: | ||||
|   unauthenticated_age: days=1 | ||||
|  | ||||
|  | ||||
| @ -17,7 +17,7 @@ from ldap3.core.exceptions import LDAPException | ||||
| from redis.exceptions import ConnectionError as RedisConnectionError | ||||
| from redis.exceptions import RedisError, ResponseError | ||||
| from rest_framework.exceptions import APIException | ||||
| from sentry_sdk import HttpTransport, get_current_scope | ||||
| from sentry_sdk import HttpTransport | ||||
| from sentry_sdk import init as sentry_sdk_init | ||||
| from sentry_sdk.api import set_tag | ||||
| from sentry_sdk.integrations.argv import ArgvIntegration | ||||
| @ -27,7 +27,6 @@ from sentry_sdk.integrations.redis import RedisIntegration | ||||
| from sentry_sdk.integrations.socket import SocketIntegration | ||||
| from sentry_sdk.integrations.stdlib import StdlibIntegration | ||||
| from sentry_sdk.integrations.threading import ThreadingIntegration | ||||
| from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME | ||||
| from structlog.stdlib import get_logger | ||||
| from websockets.exceptions import WebSocketException | ||||
|  | ||||
| @ -96,8 +95,6 @@ def traces_sampler(sampling_context: dict) -> float: | ||||
|         return 0 | ||||
|     if _type == "websocket": | ||||
|         return 0 | ||||
|     if CONFIG.get_bool("debug"): | ||||
|         return 1 | ||||
|     return float(CONFIG.get("error_reporting.sample_rate", 0.1)) | ||||
|  | ||||
|  | ||||
| @ -170,14 +167,3 @@ def before_send(event: dict, hint: dict) -> dict | None: | ||||
|     if settings.DEBUG: | ||||
|         return None | ||||
|     return event | ||||
|  | ||||
|  | ||||
| def get_http_meta(): | ||||
|     """Get sentry-related meta key-values""" | ||||
|     scope = get_current_scope() | ||||
|     meta = { | ||||
|         SENTRY_TRACE_HEADER_NAME: scope.get_traceparent() or "", | ||||
|     } | ||||
|     if bag := scope.get_baggage(): | ||||
|         meta[BAGGAGE_HEADER_NAME] = bag.serialize() | ||||
|     return meta | ||||
|  | ||||
| @ -59,7 +59,7 @@ class PropertyMappingManager: | ||||
|         request: HttpRequest | None, | ||||
|         return_mapping: bool = False, | ||||
|         **kwargs, | ||||
|     ) -> Generator[tuple[dict, PropertyMapping]]: | ||||
|     ) -> Generator[tuple[dict, PropertyMapping], None]: | ||||
|         """Iterate over all mappings that were pre-compiled and | ||||
|         execute all of them with the given context""" | ||||
|         if not self.__has_compiled: | ||||
|  | ||||
| @ -23,6 +23,7 @@ if TYPE_CHECKING: | ||||
|  | ||||
|  | ||||
| class Direction(StrEnum): | ||||
|  | ||||
|     add = "add" | ||||
|     remove = "remove" | ||||
|  | ||||
| @ -36,16 +37,13 @@ SAFE_METHODS = [ | ||||
|  | ||||
|  | ||||
| class BaseOutgoingSyncClient[ | ||||
|     TModel: "Model", | ||||
|     TConnection: "Model", | ||||
|     TSchema: dict, | ||||
|     TProvider: "OutgoingSyncProvider", | ||||
|     TModel: "Model", TConnection: "Model", TSchema: dict, TProvider: "OutgoingSyncProvider" | ||||
| ]: | ||||
|     """Basic Outgoing sync client Client""" | ||||
|  | ||||
|     provider: TProvider | ||||
|     connection_type: type[TConnection] | ||||
|     connection_attr: str | ||||
|     connection_type_query: str | ||||
|     mapper: PropertyMappingManager | ||||
|  | ||||
|     can_discover = False | ||||
| @ -65,7 +63,9 @@ class BaseOutgoingSyncClient[ | ||||
|     def write(self, obj: TModel) -> tuple[TConnection, bool]: | ||||
|         """Write object to destination. Uses self.create and self.update, but | ||||
|         can be overwritten for further logic""" | ||||
|         connection = getattr(obj, self.connection_attr).filter(provider=self.provider).first() | ||||
|         connection = self.connection_type.objects.filter( | ||||
|             provider=self.provider, **{self.connection_type_query: obj} | ||||
|         ).first() | ||||
|         try: | ||||
|             if not connection: | ||||
|                 connection = self.create(obj) | ||||
|  | ||||
| @ -1,7 +1,6 @@ | ||||
| from collections.abc import Callable | ||||
| from dataclasses import asdict | ||||
|  | ||||
| from celery import group | ||||
| from celery.exceptions import Retry | ||||
| from celery.result import allow_join_result | ||||
| from django.core.paginator import Paginator | ||||
| @ -83,41 +82,21 @@ class SyncTasks: | ||||
|                 self.logger.debug("Failed to acquire sync lock, skipping", provider=provider.name) | ||||
|                 return | ||||
|             try: | ||||
|                 messages.append(_("Syncing users")) | ||||
|                 user_results = ( | ||||
|                     group( | ||||
|                         [ | ||||
|                             sync_objects.signature( | ||||
|                                 args=(class_to_path(User), page, provider_pk), | ||||
|                                 time_limit=PAGE_TIMEOUT, | ||||
|                                 soft_time_limit=PAGE_TIMEOUT, | ||||
|                             ) | ||||
|                             for page in users_paginator.page_range | ||||
|                         ] | ||||
|                     ) | ||||
|                     .apply_async() | ||||
|                     .get() | ||||
|                 ) | ||||
|                 for result in user_results: | ||||
|                     for msg in result: | ||||
|                 for page in users_paginator.page_range: | ||||
|                     messages.append(_("Syncing page {page} of users".format(page=page))) | ||||
|                     for msg in sync_objects.apply_async( | ||||
|                         args=(class_to_path(User), page, provider_pk), | ||||
|                         time_limit=PAGE_TIMEOUT, | ||||
|                         soft_time_limit=PAGE_TIMEOUT, | ||||
|                     ).get(): | ||||
|                         messages.append(LogEvent(**msg)) | ||||
|                 messages.append(_("Syncing groups")) | ||||
|                 group_results = ( | ||||
|                     group( | ||||
|                         [ | ||||
|                             sync_objects.signature( | ||||
|                                 args=(class_to_path(Group), page, provider_pk), | ||||
|                                 time_limit=PAGE_TIMEOUT, | ||||
|                                 soft_time_limit=PAGE_TIMEOUT, | ||||
|                             ) | ||||
|                             for page in groups_paginator.page_range | ||||
|                         ] | ||||
|                     ) | ||||
|                     .apply_async() | ||||
|                     .get() | ||||
|                 ) | ||||
|                 for result in group_results: | ||||
|                     for msg in result: | ||||
|                 for page in groups_paginator.page_range: | ||||
|                     messages.append(_("Syncing page {page} of groups".format(page=page))) | ||||
|                     for msg in sync_objects.apply_async( | ||||
|                         args=(class_to_path(Group), page, provider_pk), | ||||
|                         time_limit=PAGE_TIMEOUT, | ||||
|                         soft_time_limit=PAGE_TIMEOUT, | ||||
|                     ).get(): | ||||
|                         messages.append(LogEvent(**msg)) | ||||
|             except TransientSyncException as exc: | ||||
|                 self.logger.warning("transient sync exception", exc=exc) | ||||
| @ -130,7 +109,7 @@ class SyncTasks: | ||||
|     def sync_objects( | ||||
|         self, object_type: str, page: int, provider_pk: int, override_dry_run=False, **filter | ||||
|     ): | ||||
|         _object_type: type[Model] = path_to_class(object_type) | ||||
|         _object_type = path_to_class(object_type) | ||||
|         self.logger = get_logger().bind( | ||||
|             provider_type=class_to_path(self._provider_model), | ||||
|             provider_pk=provider_pk, | ||||
| @ -153,19 +132,6 @@ class SyncTasks: | ||||
|             self.logger.debug("starting discover") | ||||
|             client.discover() | ||||
|         self.logger.debug("starting sync for page", page=page) | ||||
|         messages.append( | ||||
|             asdict( | ||||
|                 LogEvent( | ||||
|                     _( | ||||
|                         "Syncing page {page} of {object_type}".format( | ||||
|                             page=page, object_type=_object_type._meta.verbose_name_plural | ||||
|                         ) | ||||
|                     ), | ||||
|                     log_level="info", | ||||
|                     logger=f"{provider._meta.verbose_name}@{object_type}", | ||||
|                 ) | ||||
|             ) | ||||
|         ) | ||||
|         for obj in paginator.page(page).object_list: | ||||
|             obj: Model | ||||
|             try: | ||||
|  | ||||
| @ -494,88 +494,86 @@ class TestConfig(TestCase): | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     # FIXME: Temporarily force pool to be deactivated. | ||||
|     # See https://github.com/goauthentik/authentik/issues/14320 | ||||
|     # def test_db_pool(self): | ||||
|     #     """Test DB Config with pool""" | ||||
|     #     config = ConfigLoader() | ||||
|     #     config.set("postgresql.host", "foo") | ||||
|     #     config.set("postgresql.name", "foo") | ||||
|     #     config.set("postgresql.user", "foo") | ||||
|     #     config.set("postgresql.password", "foo") | ||||
|     #     config.set("postgresql.port", "foo") | ||||
|     #     config.set("postgresql.test.name", "foo") | ||||
|     #     config.set("postgresql.use_pool", True) | ||||
|     #     conf = django_db_config(config) | ||||
|     #     self.assertEqual( | ||||
|     #         conf, | ||||
|     #         { | ||||
|     #             "default": { | ||||
|     #                 "ENGINE": "authentik.root.db", | ||||
|     #                 "HOST": "foo", | ||||
|     #                 "NAME": "foo", | ||||
|     #                 "OPTIONS": { | ||||
|     #                     "pool": True, | ||||
|     #                     "sslcert": None, | ||||
|     #                     "sslkey": None, | ||||
|     #                     "sslmode": None, | ||||
|     #                     "sslrootcert": None, | ||||
|     #                 }, | ||||
|     #                 "PASSWORD": "foo", | ||||
|     #                 "PORT": "foo", | ||||
|     #                 "TEST": {"NAME": "foo"}, | ||||
|     #                 "USER": "foo", | ||||
|     #                 "CONN_MAX_AGE": 0, | ||||
|     #                 "CONN_HEALTH_CHECKS": False, | ||||
|     #                 "DISABLE_SERVER_SIDE_CURSORS": False, | ||||
|     #             } | ||||
|     #         }, | ||||
|     #     ) | ||||
|     def test_db_pool(self): | ||||
|         """Test DB Config with pool""" | ||||
|         config = ConfigLoader() | ||||
|         config.set("postgresql.host", "foo") | ||||
|         config.set("postgresql.name", "foo") | ||||
|         config.set("postgresql.user", "foo") | ||||
|         config.set("postgresql.password", "foo") | ||||
|         config.set("postgresql.port", "foo") | ||||
|         config.set("postgresql.test.name", "foo") | ||||
|         config.set("postgresql.use_pool", True) | ||||
|         conf = django_db_config(config) | ||||
|         self.assertEqual( | ||||
|             conf, | ||||
|             { | ||||
|                 "default": { | ||||
|                     "ENGINE": "authentik.root.db", | ||||
|                     "HOST": "foo", | ||||
|                     "NAME": "foo", | ||||
|                     "OPTIONS": { | ||||
|                         "pool": True, | ||||
|                         "sslcert": None, | ||||
|                         "sslkey": None, | ||||
|                         "sslmode": None, | ||||
|                         "sslrootcert": None, | ||||
|                     }, | ||||
|                     "PASSWORD": "foo", | ||||
|                     "PORT": "foo", | ||||
|                     "TEST": {"NAME": "foo"}, | ||||
|                     "USER": "foo", | ||||
|                     "CONN_MAX_AGE": 0, | ||||
|                     "CONN_HEALTH_CHECKS": False, | ||||
|                     "DISABLE_SERVER_SIDE_CURSORS": False, | ||||
|                 } | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     # def test_db_pool_options(self): | ||||
|     #     """Test DB Config with pool""" | ||||
|     #     config = ConfigLoader() | ||||
|     #     config.set("postgresql.host", "foo") | ||||
|     #     config.set("postgresql.name", "foo") | ||||
|     #     config.set("postgresql.user", "foo") | ||||
|     #     config.set("postgresql.password", "foo") | ||||
|     #     config.set("postgresql.port", "foo") | ||||
|     #     config.set("postgresql.test.name", "foo") | ||||
|     #     config.set("postgresql.use_pool", True) | ||||
|     #     config.set( | ||||
|     #         "postgresql.pool_options", | ||||
|     #         base64.b64encode( | ||||
|     #             dumps( | ||||
|     #                 { | ||||
|     #                     "max_size": 15, | ||||
|     #                 } | ||||
|     #             ).encode() | ||||
|     #         ).decode(), | ||||
|     #     ) | ||||
|     #     conf = django_db_config(config) | ||||
|     #     self.assertEqual( | ||||
|     #         conf, | ||||
|     #         { | ||||
|     #             "default": { | ||||
|     #                 "ENGINE": "authentik.root.db", | ||||
|     #                 "HOST": "foo", | ||||
|     #                 "NAME": "foo", | ||||
|     #                 "OPTIONS": { | ||||
|     #                     "pool": { | ||||
|     #                         "max_size": 15, | ||||
|     #                     }, | ||||
|     #                     "sslcert": None, | ||||
|     #                     "sslkey": None, | ||||
|     #                     "sslmode": None, | ||||
|     #                     "sslrootcert": None, | ||||
|     #                 }, | ||||
|     #                 "PASSWORD": "foo", | ||||
|     #                 "PORT": "foo", | ||||
|     #                 "TEST": {"NAME": "foo"}, | ||||
|     #                 "USER": "foo", | ||||
|     #                 "CONN_MAX_AGE": 0, | ||||
|     #                 "CONN_HEALTH_CHECKS": False, | ||||
|     #                 "DISABLE_SERVER_SIDE_CURSORS": False, | ||||
|     #             } | ||||
|     #         }, | ||||
|     #     ) | ||||
|     def test_db_pool_options(self): | ||||
|         """Test DB Config with pool""" | ||||
|         config = ConfigLoader() | ||||
|         config.set("postgresql.host", "foo") | ||||
|         config.set("postgresql.name", "foo") | ||||
|         config.set("postgresql.user", "foo") | ||||
|         config.set("postgresql.password", "foo") | ||||
|         config.set("postgresql.port", "foo") | ||||
|         config.set("postgresql.test.name", "foo") | ||||
|         config.set("postgresql.use_pool", True) | ||||
|         config.set( | ||||
|             "postgresql.pool_options", | ||||
|             base64.b64encode( | ||||
|                 dumps( | ||||
|                     { | ||||
|                         "max_size": 15, | ||||
|                     } | ||||
|                 ).encode() | ||||
|             ).decode(), | ||||
|         ) | ||||
|         conf = django_db_config(config) | ||||
|         self.assertEqual( | ||||
|             conf, | ||||
|             { | ||||
|                 "default": { | ||||
|                     "ENGINE": "authentik.root.db", | ||||
|                     "HOST": "foo", | ||||
|                     "NAME": "foo", | ||||
|                     "OPTIONS": { | ||||
|                         "pool": { | ||||
|                             "max_size": 15, | ||||
|                         }, | ||||
|                         "sslcert": None, | ||||
|                         "sslkey": None, | ||||
|                         "sslmode": None, | ||||
|                         "sslrootcert": None, | ||||
|                     }, | ||||
|                     "PASSWORD": "foo", | ||||
|                     "PORT": "foo", | ||||
|                     "TEST": {"NAME": "foo"}, | ||||
|                     "USER": "foo", | ||||
|                     "CONN_MAX_AGE": 0, | ||||
|                     "CONN_HEALTH_CHECKS": False, | ||||
|                     "DISABLE_SERVER_SIDE_CURSORS": False, | ||||
|                 } | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
| @ -74,8 +74,6 @@ class OutpostConfig: | ||||
|     kubernetes_ingress_annotations: dict[str, str] = field(default_factory=dict) | ||||
|     kubernetes_ingress_secret_name: str = field(default="authentik-outpost-tls") | ||||
|     kubernetes_ingress_class_name: str | None = field(default=None) | ||||
|     kubernetes_httproute_annotations: dict[str, str] = field(default_factory=dict) | ||||
|     kubernetes_httproute_parent_refs: list[dict[str, str]] = field(default_factory=list) | ||||
|     kubernetes_service_type: str = field(default="ClusterIP") | ||||
|     kubernetes_disabled_components: list[str] = field(default_factory=list) | ||||
|     kubernetes_image_pull_secrets: list[str] = field(default_factory=list) | ||||
|  | ||||
| @ -1,11 +1,9 @@ | ||||
| """Websocket tests""" | ||||
|  | ||||
| from dataclasses import asdict | ||||
| from unittest.mock import patch | ||||
|  | ||||
| from channels.routing import URLRouter | ||||
| from channels.testing import WebsocketCommunicator | ||||
| from django.contrib.contenttypes.models import ContentType | ||||
| from django.test import TransactionTestCase | ||||
|  | ||||
| from authentik import __version__ | ||||
| @ -16,12 +14,6 @@ from authentik.providers.proxy.models import ProxyProvider | ||||
| from authentik.root import websocket | ||||
|  | ||||
|  | ||||
def patched__get_ct_cached(app_label, codename):
    """Caches `ContentType` instances like its `QuerySet` does."""
    # NOTE(review): despite the docstring, this replacement does NOT memoize —
    # every call issues a fresh DB lookup. Presumably that is deliberate so a
    # cached ContentType pk can't go stale across TransactionTestCase DB
    # flushes — confirm against the guardian `_get_ct_cached` it patches out.
    return ContentType.objects.get(app_label=app_label, permission__codename=codename)
|  | ||||
|  | ||||
| @patch("guardian.shortcuts._get_ct_cached", patched__get_ct_cached) | ||||
| class TestOutpostWS(TransactionTestCase): | ||||
|     """Websocket tests""" | ||||
|  | ||||
| @ -46,7 +38,6 @@ class TestOutpostWS(TransactionTestCase): | ||||
|         ) | ||||
|         connected, _ = await communicator.connect() | ||||
|         self.assertFalse(connected) | ||||
|         await communicator.disconnect() | ||||
|  | ||||
|     async def test_auth_valid(self): | ||||
|         """Test auth with token""" | ||||
| @ -57,7 +48,6 @@ class TestOutpostWS(TransactionTestCase): | ||||
|         ) | ||||
|         connected, _ = await communicator.connect() | ||||
|         self.assertTrue(connected) | ||||
|         await communicator.disconnect() | ||||
|  | ||||
|     async def test_send(self): | ||||
|         """Test sending of Hello""" | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """Authentik policy dummy app config""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikPolicyDummyConfig(ManagedAppConfig): | ||||
| class AuthentikPolicyDummyConfig(AppConfig): | ||||
|     """Authentik policy_dummy app config""" | ||||
|  | ||||
|     name = "authentik.policies.dummy" | ||||
|     label = "authentik_policies_dummy" | ||||
|     verbose_name = "authentik Policies.Dummy" | ||||
|     default = True | ||||
|  | ||||
| @ -1,11 +1,11 @@ | ||||
| """authentik policy engine""" | ||||
|  | ||||
| from collections.abc import Iterable | ||||
| from collections.abc import Iterator | ||||
| from multiprocessing import Pipe, current_process | ||||
| from multiprocessing.connection import Connection | ||||
| from time import perf_counter | ||||
|  | ||||
| from django.core.cache import cache | ||||
| from django.db.models import Count, Q, QuerySet | ||||
| from django.http import HttpRequest | ||||
| from sentry_sdk import start_span | ||||
| from sentry_sdk.tracing import Span | ||||
| @ -67,11 +67,14 @@ class PolicyEngine: | ||||
|         self.__processes: list[PolicyProcessInfo] = [] | ||||
|         self.use_cache = True | ||||
|         self.__expected_result_count = 0 | ||||
|         self.__static_result: PolicyResult | None = None | ||||
|  | ||||
|     def bindings(self) -> QuerySet[PolicyBinding] | Iterable[PolicyBinding]: | ||||
|     def iterate_bindings(self) -> Iterator[PolicyBinding]: | ||||
|         """Make sure all Policies are their respective classes""" | ||||
|         return PolicyBinding.objects.filter(target=self.__pbm, enabled=True).order_by("order") | ||||
|         return ( | ||||
|             PolicyBinding.objects.filter(target=self.__pbm, enabled=True) | ||||
|             .order_by("order") | ||||
|             .iterator() | ||||
|         ) | ||||
|  | ||||
|     def _check_policy_type(self, binding: PolicyBinding): | ||||
|         """Check policy type, make sure it's not the root class as that has no logic implemented""" | ||||
| @ -81,66 +84,30 @@ class PolicyEngine: | ||||
|     def _check_cache(self, binding: PolicyBinding): | ||||
|         if not self.use_cache: | ||||
|             return False | ||||
|         # It's a bit silly to time this, but | ||||
|         with HIST_POLICIES_EXECUTION_TIME.labels( | ||||
|             binding_order=binding.order, | ||||
|             binding_target_type=binding.target_type, | ||||
|             binding_target_name=binding.target_name, | ||||
|             object_pk=str(self.request.obj.pk), | ||||
|             object_type=class_to_path(self.request.obj.__class__), | ||||
|             mode="cache_retrieve", | ||||
|         ).time(): | ||||
|             key = cache_key(binding, self.request) | ||||
|             cached_policy = cache.get(key, None) | ||||
|             if not cached_policy: | ||||
|                 return False | ||||
|         before = perf_counter() | ||||
|         key = cache_key(binding, self.request) | ||||
|         cached_policy = cache.get(key, None) | ||||
|         duration = max(perf_counter() - before, 0) | ||||
|         if not cached_policy: | ||||
|             return False | ||||
|         self.logger.debug( | ||||
|             "P_ENG: Taking result from cache", | ||||
|             binding=binding, | ||||
|             cache_key=key, | ||||
|             request=self.request, | ||||
|         ) | ||||
|         HIST_POLICIES_EXECUTION_TIME.labels( | ||||
|             binding_order=binding.order, | ||||
|             binding_target_type=binding.target_type, | ||||
|             binding_target_name=binding.target_name, | ||||
|             object_pk=str(self.request.obj.pk), | ||||
|             object_type=class_to_path(self.request.obj.__class__), | ||||
|             mode="cache_retrieve", | ||||
|         ).observe(duration) | ||||
|         # It's a bit silly to time this, but | ||||
|         self.__cached_policies.append(cached_policy) | ||||
|         return True | ||||
|  | ||||
    def compute_static_bindings(self, bindings: QuerySet[PolicyBinding]):
        """Evaluate "static" bindings (user/group bindings with no policy) in a
        single aggregate query, storing the combined outcome in
        ``self.__static_result`` instead of spawning a process per binding.

        Does nothing when no static binding exists for this target.
        """
        # "total": count of enabled-or-not static bindings — bound directly to a
        # group or user, with policy=None.
        aggrs = {
            "total": Count(
                "pk", filter=Q(Q(group__isnull=False) | Q(user__isnull=False), policy=None)
            ),
        }
        # Only authenticated users (pk set) can match user/group bindings.
        if self.request.user.pk:
            all_groups = self.request.user.all_groups()
            # "passing": enabled bindings that match this user — either a
            # non-negated binding to the user/one of their groups, or a negated
            # binding to a *different* user/group (negate flips the match).
            aggrs["passing"] = Count(
                "pk",
                filter=Q(
                    Q(
                        Q(user=self.request.user) | Q(group__in=all_groups),
                        negate=False,
                    )
                    | Q(
                        Q(~Q(user=self.request.user), user__isnull=False)
                        | Q(~Q(group__in=all_groups), group__isnull=False),
                        negate=True,
                    ),
                    enabled=True,
                ),
            )
        matched_bindings = bindings.aggregate(**aggrs)
        passing = False
        if matched_bindings["total"] == 0 and matched_bindings.get("passing", 0) == 0:
            # If we didn't find any static bindings, do nothing
            return
        self.logger.debug("P_ENG: Found static bindings", **matched_bindings)
        if matched_bindings.get("passing", 0) > 0:
            # Any passing static binding -> passing
            passing = True
        elif matched_bindings["total"] > 0 and matched_bindings.get("passing", 0) < 1:
            # No matching static bindings but at least one is configured -> not passing
            passing = False
        self.__static_result = PolicyResult(passing)
|  | ||||
|     def build(self) -> "PolicyEngine": | ||||
|         """Build wrapper which monitors performance""" | ||||
|         with ( | ||||
| @ -156,12 +123,7 @@ class PolicyEngine: | ||||
|             span: Span | ||||
|             span.set_data("pbm", self.__pbm) | ||||
|             span.set_data("request", self.request) | ||||
|             bindings = self.bindings() | ||||
|             policy_bindings = bindings | ||||
|             if isinstance(bindings, QuerySet): | ||||
|                 self.compute_static_bindings(bindings) | ||||
|                 policy_bindings = [x for x in bindings if x.policy] | ||||
|             for binding in policy_bindings: | ||||
|             for binding in self.iterate_bindings(): | ||||
|                 self.__expected_result_count += 1 | ||||
|  | ||||
|                 self._check_policy_type(binding) | ||||
| @ -191,13 +153,10 @@ class PolicyEngine: | ||||
|     @property | ||||
|     def result(self) -> PolicyResult: | ||||
|         """Get policy-checking result""" | ||||
|         self.__processes.sort(key=lambda x: x.binding.order) | ||||
|         process_results: list[PolicyResult] = [x.result for x in self.__processes if x.result] | ||||
|         all_results = list(process_results + self.__cached_policies) | ||||
|         if len(all_results) < self.__expected_result_count:  # pragma: no cover | ||||
|             raise AssertionError("Got less results than polices") | ||||
|         if self.__static_result: | ||||
|             all_results.append(self.__static_result) | ||||
|         # No results, no policies attached -> passing | ||||
|         if len(all_results) == 0: | ||||
|             return PolicyResult(self.empty_result) | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """authentik Event Matcher policy app config""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikPoliciesEventMatcherConfig(ManagedAppConfig): | ||||
| class AuthentikPoliciesEventMatcherConfig(AppConfig): | ||||
|     """authentik Event Matcher policy app config""" | ||||
|  | ||||
|     name = "authentik.policies.event_matcher" | ||||
|     label = "authentik_policies_event_matcher" | ||||
|     verbose_name = "authentik Policies.Event Matcher" | ||||
|     default = True | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """Authentik policy_expiry app config""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikPolicyExpiryConfig(ManagedAppConfig): | ||||
| class AuthentikPolicyExpiryConfig(AppConfig): | ||||
|     """Authentik policy_expiry app config""" | ||||
|  | ||||
|     name = "authentik.policies.expiry" | ||||
|     label = "authentik_policies_expiry" | ||||
|     verbose_name = "authentik Policies.Expiry" | ||||
|     default = True | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """Authentik policy_expression app config""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikPolicyExpressionConfig(ManagedAppConfig): | ||||
| class AuthentikPolicyExpressionConfig(AppConfig): | ||||
|     """Authentik policy_expression app config""" | ||||
|  | ||||
|     name = "authentik.policies.expression" | ||||
|     label = "authentik_policies_expression" | ||||
|     verbose_name = "authentik Policies.Expression" | ||||
|     default = True | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """Authentik policy geoip app config""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikPolicyGeoIPConfig(ManagedAppConfig): | ||||
| class AuthentikPolicyGeoIPConfig(AppConfig): | ||||
|     """Authentik policy_geoip app config""" | ||||
|  | ||||
|     name = "authentik.policies.geoip" | ||||
|     label = "authentik_policies_geoip" | ||||
|     verbose_name = "authentik Policies.GeoIP" | ||||
|     default = True | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """authentik Password policy app config""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikPoliciesPasswordConfig(ManagedAppConfig): | ||||
| class AuthentikPoliciesPasswordConfig(AppConfig): | ||||
|     """authentik Password policy app config""" | ||||
|  | ||||
|     name = "authentik.policies.password" | ||||
|     label = "authentik_policies_password" | ||||
|     verbose_name = "authentik Policies.Password" | ||||
|     default = True | ||||
|  | ||||
| @ -1,12 +1,9 @@ | ||||
| """policy engine tests""" | ||||
|  | ||||
| from django.core.cache import cache | ||||
| from django.db import connections | ||||
| from django.test import TestCase | ||||
| from django.test.utils import CaptureQueriesContext | ||||
|  | ||||
| from authentik.core.models import Group | ||||
| from authentik.core.tests.utils import create_test_user | ||||
| from authentik.core.tests.utils import create_test_admin_user | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.policies.dummy.models import DummyPolicy | ||||
| from authentik.policies.engine import PolicyEngine | ||||
| @ -22,7 +19,7 @@ class TestPolicyEngine(TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         clear_policy_cache() | ||||
|         self.user = create_test_user() | ||||
|         self.user = create_test_admin_user() | ||||
|         self.policy_false = DummyPolicy.objects.create( | ||||
|             name=generate_id(), result=False, wait_min=0, wait_max=1 | ||||
|         ) | ||||
| @ -130,43 +127,3 @@ class TestPolicyEngine(TestCase): | ||||
|         self.assertEqual(len(cache.keys(f"{CACHE_PREFIX}{binding.policy_binding_uuid.hex}*")), 1) | ||||
|         self.assertEqual(engine.build().passing, False) | ||||
|         self.assertEqual(len(cache.keys(f"{CACHE_PREFIX}{binding.policy_binding_uuid.hex}*")), 1) | ||||
|  | ||||
|     def test_engine_static_bindings(self): | ||||
|         """Test static bindings""" | ||||
|         group_a = Group.objects.create(name=generate_id()) | ||||
|         group_b = Group.objects.create(name=generate_id()) | ||||
|         group_b.users.add(self.user) | ||||
|         user = create_test_user() | ||||
|  | ||||
|         for case in [ | ||||
|             { | ||||
|                 "message": "Group, not member", | ||||
|                 "binding_args": {"group": group_a}, | ||||
|                 "passing": False, | ||||
|             }, | ||||
|             { | ||||
|                 "message": "Group, member", | ||||
|                 "binding_args": {"group": group_b}, | ||||
|                 "passing": True, | ||||
|             }, | ||||
|             { | ||||
|                 "message": "User, other", | ||||
|                 "binding_args": {"user": user}, | ||||
|                 "passing": False, | ||||
|             }, | ||||
|             { | ||||
|                 "message": "User, same", | ||||
|                 "binding_args": {"user": self.user}, | ||||
|                 "passing": True, | ||||
|             }, | ||||
|         ]: | ||||
|             with self.subTest(): | ||||
|                 pbm = PolicyBindingModel.objects.create() | ||||
|                 for x in range(1000): | ||||
|                     PolicyBinding.objects.create(target=pbm, order=x, **case["binding_args"]) | ||||
|                 engine = PolicyEngine(pbm, self.user) | ||||
|                 engine.use_cache = False | ||||
|                 with CaptureQueriesContext(connections["default"]) as ctx: | ||||
|                     engine.build() | ||||
|                 self.assertLess(ctx.final_queries, 1000) | ||||
|                 self.assertEqual(engine.result.passing, case["passing"]) | ||||
|  | ||||
| @ -29,12 +29,13 @@ class TestPolicyProcess(TestCase): | ||||
|     def setUp(self): | ||||
|         clear_policy_cache() | ||||
|         self.factory = RequestFactory() | ||||
|         self.user = User.objects.create_user(username=generate_id()) | ||||
|         self.user = User.objects.create_user(username="policyuser") | ||||
|  | ||||
|     def test_group_passing(self): | ||||
|         """Test binding to group""" | ||||
|         group = Group.objects.create(name=generate_id()) | ||||
|         group = Group.objects.create(name="test-group") | ||||
|         group.users.add(self.user) | ||||
|         group.save() | ||||
|         binding = PolicyBinding(group=group) | ||||
|  | ||||
|         request = PolicyRequest(self.user) | ||||
| @ -43,7 +44,8 @@ class TestPolicyProcess(TestCase): | ||||
|  | ||||
|     def test_group_negative(self): | ||||
|         """Test binding to group""" | ||||
|         group = Group.objects.create(name=generate_id()) | ||||
|         group = Group.objects.create(name="test-group") | ||||
|         group.save() | ||||
|         binding = PolicyBinding(group=group) | ||||
|  | ||||
|         request = PolicyRequest(self.user) | ||||
| @ -113,10 +115,8 @@ class TestPolicyProcess(TestCase): | ||||
|  | ||||
|     def test_exception(self): | ||||
|         """Test policy execution""" | ||||
|         policy = Policy.objects.create(name=generate_id()) | ||||
|         binding = PolicyBinding( | ||||
|             policy=policy, target=Application.objects.create(name=generate_id()) | ||||
|         ) | ||||
|         policy = Policy.objects.create(name="test-execution") | ||||
|         binding = PolicyBinding(policy=policy, target=Application.objects.create(name="test")) | ||||
|  | ||||
|         request = PolicyRequest(self.user) | ||||
|         response = PolicyProcess(binding, request, None).execute() | ||||
| @ -125,15 +125,13 @@ class TestPolicyProcess(TestCase): | ||||
|     def test_execution_logging(self): | ||||
|         """Test policy execution creates event""" | ||||
|         policy = DummyPolicy.objects.create( | ||||
|             name=generate_id(), | ||||
|             name="test-execution-logging", | ||||
|             result=False, | ||||
|             wait_min=0, | ||||
|             wait_max=1, | ||||
|             execution_logging=True, | ||||
|         ) | ||||
|         binding = PolicyBinding( | ||||
|             policy=policy, target=Application.objects.create(name=generate_id()) | ||||
|         ) | ||||
|         binding = PolicyBinding(policy=policy, target=Application.objects.create(name="test")) | ||||
|  | ||||
|         http_request = self.factory.get(reverse("authentik_api:user-impersonate-end")) | ||||
|         http_request.user = self.user | ||||
| @ -188,15 +186,13 @@ class TestPolicyProcess(TestCase): | ||||
|     def test_execution_logging_anonymous(self): | ||||
|         """Test policy execution creates event with anonymous user""" | ||||
|         policy = DummyPolicy.objects.create( | ||||
|             name=generate_id(), | ||||
|             name="test-execution-logging-anon", | ||||
|             result=False, | ||||
|             wait_min=0, | ||||
|             wait_max=1, | ||||
|             execution_logging=True, | ||||
|         ) | ||||
|         binding = PolicyBinding( | ||||
|             policy=policy, target=Application.objects.create(name=generate_id()) | ||||
|         ) | ||||
|         binding = PolicyBinding(policy=policy, target=Application.objects.create(name="test")) | ||||
|  | ||||
|         user = AnonymousUser() | ||||
|  | ||||
| @ -223,9 +219,9 @@ class TestPolicyProcess(TestCase): | ||||
|  | ||||
|     def test_raises(self): | ||||
|         """Test policy that raises error""" | ||||
|         policy_raises = ExpressionPolicy.objects.create(name=generate_id(), expression="{{ 0/0 }}") | ||||
|         policy_raises = ExpressionPolicy.objects.create(name="raises", expression="{{ 0/0 }}") | ||||
|         binding = PolicyBinding( | ||||
|             policy=policy_raises, target=Application.objects.create(name=generate_id()) | ||||
|             policy=policy_raises, target=Application.objects.create(name="test") | ||||
|         ) | ||||
|  | ||||
|         request = PolicyRequest(self.user) | ||||
|  | ||||
| @ -1,12 +1,11 @@ | ||||
| """authentik ldap provider app config""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikProviderLDAPConfig(ManagedAppConfig): | ||||
| class AuthentikProviderLDAPConfig(AppConfig): | ||||
|     """authentik ldap provider app config""" | ||||
|  | ||||
|     name = "authentik.providers.ldap" | ||||
|     label = "authentik_providers_ldap" | ||||
|     verbose_name = "authentik Providers.LDAP" | ||||
|     default = True | ||||
|  | ||||
| @ -7,8 +7,10 @@ from django.db import migrations | ||||
|  | ||||
|  | ||||
| def migrate_search_group(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||
|     from authentik.core.models import User | ||||
|     from django.apps import apps as real_apps | ||||
|     from django.contrib.auth.management import create_permissions | ||||
|     from guardian.shortcuts import UserObjectPermission | ||||
|  | ||||
|     db_alias = schema_editor.connection.alias | ||||
|  | ||||
|  | ||||
| @ -50,4 +50,3 @@ AMR_PASSWORD = "pwd"  # nosec | ||||
| AMR_MFA = "mfa" | ||||
| AMR_OTP = "otp" | ||||
| AMR_WEBAUTHN = "user" | ||||
| AMR_SMART_CARD = "sc" | ||||
|  | ||||
| @ -16,7 +16,6 @@ from authentik.providers.oauth2.constants import ( | ||||
|     ACR_AUTHENTIK_DEFAULT, | ||||
|     AMR_MFA, | ||||
|     AMR_PASSWORD, | ||||
|     AMR_SMART_CARD, | ||||
|     AMR_WEBAUTHN, | ||||
| ) | ||||
| from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS | ||||
| @ -140,10 +139,9 @@ class IDToken: | ||||
|                 amr.append(AMR_PASSWORD) | ||||
|             if method == "auth_webauthn_pwl": | ||||
|                 amr.append(AMR_WEBAUTHN) | ||||
|             if "certificate" in method_args: | ||||
|                 amr.append(AMR_SMART_CARD) | ||||
|             if "mfa_devices" in method_args: | ||||
|                 amr.append(AMR_MFA) | ||||
|                 if len(amr) > 0: | ||||
|                     amr.append(AMR_MFA) | ||||
|             if amr: | ||||
|                 id_token.amr = amr | ||||
|  | ||||
|  | ||||
| @ -10,11 +10,3 @@ class AuthentikProviderProxyConfig(ManagedAppConfig): | ||||
|     label = "authentik_providers_proxy" | ||||
|     verbose_name = "authentik Providers.Proxy" | ||||
|     default = True | ||||
|  | ||||
|     @ManagedAppConfig.reconcile_tenant | ||||
|     def proxy_set_defaults(self): | ||||
|         from authentik.providers.proxy.models import ProxyProvider | ||||
|  | ||||
|         for provider in ProxyProvider.objects.all(): | ||||
|             provider.set_oauth_defaults() | ||||
|             provider.save() | ||||
|  | ||||
| @ -1,234 +0,0 @@ | ||||
| from dataclasses import asdict, dataclass, field | ||||
| from typing import TYPE_CHECKING | ||||
| from urllib.parse import urlparse | ||||
|  | ||||
| from dacite.core import from_dict | ||||
| from kubernetes.client import ApiextensionsV1Api, CustomObjectsApi, V1ObjectMeta | ||||
|  | ||||
| from authentik.outposts.controllers.base import FIELD_MANAGER | ||||
| from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler | ||||
| from authentik.outposts.controllers.k8s.triggers import NeedsUpdate | ||||
| from authentik.outposts.controllers.kubernetes import KubernetesController | ||||
| from authentik.providers.proxy.models import ProxyMode, ProxyProvider | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from authentik.outposts.controllers.kubernetes import KubernetesController | ||||
|  | ||||
|  | ||||
| @dataclass(slots=True) | ||||
| class RouteBackendRef: | ||||
|     name: str | ||||
|     port: int | ||||
|  | ||||
|  | ||||
| @dataclass(slots=True) | ||||
| class RouteSpecParentRefs: | ||||
|     name: str | ||||
|     sectionName: str | None = None | ||||
|     port: int | None = None | ||||
|     namespace: str | None = None | ||||
|     kind: str = "Gateway" | ||||
|     group: str = "gateway.networking.k8s.io" | ||||
|  | ||||
|  | ||||
| @dataclass(slots=True) | ||||
| class HTTPRouteSpecRuleMatchPath: | ||||
|     type: str | ||||
|     value: str | ||||
|  | ||||
|  | ||||
| @dataclass(slots=True) | ||||
| class HTTPRouteSpecRuleMatchHeader: | ||||
|     name: str | ||||
|     value: str | ||||
|     type: str = "Exact" | ||||
|  | ||||
|  | ||||
| @dataclass(slots=True) | ||||
| class HTTPRouteSpecRuleMatch: | ||||
|     path: HTTPRouteSpecRuleMatchPath | ||||
|     headers: list[HTTPRouteSpecRuleMatchHeader] | ||||
|  | ||||
|  | ||||
| @dataclass(slots=True) | ||||
| class HTTPRouteSpecRule: | ||||
|     backendRefs: list[RouteBackendRef] | ||||
|     matches: list[HTTPRouteSpecRuleMatch] | ||||
|  | ||||
|  | ||||
| @dataclass(slots=True) | ||||
| class HTTPRouteSpec: | ||||
|     parentRefs: list[RouteSpecParentRefs] | ||||
|     hostnames: list[str] | ||||
|     rules: list[HTTPRouteSpecRule] | ||||
|  | ||||
|  | ||||
| @dataclass(slots=True) | ||||
| class HTTPRouteMetadata: | ||||
|     name: str | ||||
|     namespace: str | ||||
|     annotations: dict = field(default_factory=dict) | ||||
|     labels: dict = field(default_factory=dict) | ||||
|  | ||||
|  | ||||
| @dataclass(slots=True) | ||||
| class HTTPRoute: | ||||
|     apiVersion: str | ||||
|     kind: str | ||||
|     metadata: HTTPRouteMetadata | ||||
|     spec: HTTPRouteSpec | ||||
|  | ||||
|  | ||||
class HTTPRouteReconciler(KubernetesObjectReconciler):
    """Reconciles Gateway API HTTPRoute objects for the proxy providers of an outpost."""

    def __init__(self, controller: "KubernetesController") -> None:
        super().__init__(controller)
        self.api_ex = ApiextensionsV1Api(controller.client)
        self.api = CustomObjectsApi(controller.client)
        # Coordinates of the Gateway API HTTPRoute custom resource.
        self.crd_group = "gateway.networking.k8s.io"
        self.crd_version = "v1"
        self.crd_plural = "httproutes"

    @staticmethod
    def reconciler_name() -> str:
        return "httproute"

    @property
    def noop(self) -> bool:
        """Skip reconciliation when the CRD is absent or no parentRefs are configured."""
        if not self.crd_exists():
            self.logger.debug("CRD doesn't exist")
            return True
        if not self.controller.outpost.config.kubernetes_httproute_parent_refs:
            self.logger.debug("HTTPRoute parentRefs not set.")
            return True
        return False

    def crd_exists(self) -> bool:
        """Return True when the HTTPRoute CRD is installed in the cluster."""
        selector = f"metadata.name={self.crd_plural}.{self.crd_group}"
        found = self.api_ex.list_custom_resource_definition(field_selector=selector).items
        return len(found) > 0

    def reconcile(self, current: HTTPRoute, reference: HTTPRoute):
        """Raise NeedsUpdate on the first watched field that drifts from the desired state."""
        super().reconcile(current, reference)
        watched_pairs = (
            (current.metadata.annotations, reference.metadata.annotations),
            (current.spec.parentRefs, reference.spec.parentRefs),
            (current.spec.hostnames, reference.spec.hostnames),
            (current.spec.rules, reference.spec.rules),
        )
        for live, desired in watched_pairs:
            if live != desired:
                raise NeedsUpdate()

    def get_object_meta(self, **kwargs) -> V1ObjectMeta:
        return super().get_object_meta(**kwargs)

    def _rule_for_host(self, hostname: str, path_prefix: str) -> HTTPRouteSpecRule:
        """Build one routing rule matching *hostname* under *path_prefix*, backed by the outpost service."""
        return HTTPRouteSpecRule(
            backendRefs=[RouteBackendRef(name=self.name, port=9000)],
            matches=[
                HTTPRouteSpecRuleMatch(
                    headers=[HTTPRouteSpecRuleMatchHeader(name="Host", value=hostname)],
                    path=HTTPRouteSpecRuleMatchPath(type="PathPrefix", value=path_prefix),
                )
            ],
        )

    def get_reference_object(self) -> HTTPRoute:
        """Assemble the desired HTTPRoute covering every proxy provider of this outpost."""
        host_names: list[str] = []
        route_rules: list[HTTPRouteSpecRule] = []

        for provider in ProxyProvider.objects.filter(outpost__in=[self.controller.outpost]):
            parsed_host = urlparse(provider.external_host)
            # Forward-auth modes only route the outpost's own endpoints; plain proxy
            # mode routes the whole host through the outpost.
            if provider.mode in [ProxyMode.FORWARD_SINGLE, ProxyMode.FORWARD_DOMAIN]:
                prefix = "/outpost.goauthentik.io"
            else:
                prefix = "/"
            host_names.append(parsed_host.hostname)
            route_rules.append(self._rule_for_host(parsed_host.hostname, prefix))

        return HTTPRoute(
            apiVersion=f"{self.crd_group}/{self.crd_version}",
            kind="HTTPRoute",
            metadata=HTTPRouteMetadata(
                name=self.name,
                namespace=self.namespace,
                annotations=self.controller.outpost.config.kubernetes_httproute_annotations,
                labels=self.get_object_meta().labels,
            ),
            spec=HTTPRouteSpec(
                parentRefs=[
                    from_dict(RouteSpecParentRefs, spec)
                    for spec in self.controller.outpost.config.kubernetes_httproute_parent_refs
                ],
                hostnames=host_names,
                rules=route_rules,
            ),
        )

    def _crd_coords(self) -> dict:
        """Common identifier kwargs shared by all custom-object API calls."""
        return {
            "group": self.crd_group,
            "version": self.crd_version,
            "plural": self.crd_plural,
            "namespace": self.namespace,
        }

    def create(self, reference: HTTPRoute):
        return self.api.create_namespaced_custom_object(
            **self._crd_coords(),
            body=asdict(reference),
            field_manager=FIELD_MANAGER,
        )

    def delete(self, reference: HTTPRoute):
        return self.api.delete_namespaced_custom_object(
            **self._crd_coords(),
            name=self.name,
        )

    def retrieve(self) -> HTTPRoute:
        raw = self.api.get_namespaced_custom_object(
            **self._crd_coords(),
            name=self.name,
        )
        return from_dict(HTTPRoute, raw)

    def update(self, current: HTTPRoute, reference: HTTPRoute):
        return self.api.patch_namespaced_custom_object(
            **self._crd_coords(),
            name=self.name,
            body=asdict(reference),
            field_manager=FIELD_MANAGER,
        )
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user
	