Compare commits
	
		
			1 Commits
		
	
	
		
			providers/
			...
			policies-n
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| 3ba6b51ed7 | 
| @ -1,5 +1,5 @@ | |||||||
| [bumpversion] | [bumpversion] | ||||||
| current_version = 2025.6.1 | current_version = 2025.2.4 | ||||||
| tag = True | tag = True | ||||||
| commit = True | commit = True | ||||||
| parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? | parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? | ||||||
|  | |||||||
| @ -5,10 +5,8 @@ dist/** | |||||||
| build/** | build/** | ||||||
| build_docs/** | build_docs/** | ||||||
| *Dockerfile | *Dockerfile | ||||||
| **/*Dockerfile |  | ||||||
| blueprints/local | blueprints/local | ||||||
| .git | .git | ||||||
| !gen-ts-api/node_modules | !gen-ts-api/node_modules | ||||||
| !gen-ts-api/dist/** | !gen-ts-api/dist/** | ||||||
| !gen-go-api/ | !gen-go-api/ | ||||||
| .venv |  | ||||||
|  | |||||||
							
								
								
									
										2
									
								
								.github/actions/setup/action.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/actions/setup/action.yml
									
									
									
									
										vendored
									
									
								
							| @ -36,7 +36,7 @@ runs: | |||||||
|       with: |       with: | ||||||
|         go-version-file: "go.mod" |         go-version-file: "go.mod" | ||||||
|     - name: Setup docker cache |     - name: Setup docker cache | ||||||
|       uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7 |       uses: ScribeMD/docker-cache@0.5.0 | ||||||
|       with: |       with: | ||||||
|         key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }} |         key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }} | ||||||
|     - name: Setup dependencies |     - name: Setup dependencies | ||||||
|  | |||||||
							
								
								
									
										33
									
								
								.github/dependabot.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										33
									
								
								.github/dependabot.yml
									
									
									
									
										vendored
									
									
								
							| @ -23,13 +23,7 @@ updates: | |||||||
|   - package-ecosystem: npm |   - package-ecosystem: npm | ||||||
|     directories: |     directories: | ||||||
|       - "/web" |       - "/web" | ||||||
|       - "/web/packages/sfe" |       - "/web/sfe" | ||||||
|       - "/web/packages/core" |  | ||||||
|       - "/web/packages/esbuild-plugin-live-reload" |  | ||||||
|       - "/packages/prettier-config" |  | ||||||
|       - "/packages/tsconfig" |  | ||||||
|       - "/packages/docusaurus-config" |  | ||||||
|       - "/packages/eslint-config" |  | ||||||
|     schedule: |     schedule: | ||||||
|       interval: daily |       interval: daily | ||||||
|       time: "04:00" |       time: "04:00" | ||||||
| @ -74,9 +68,6 @@ updates: | |||||||
|       wdio: |       wdio: | ||||||
|         patterns: |         patterns: | ||||||
|           - "@wdio/*" |           - "@wdio/*" | ||||||
|       goauthentik: |  | ||||||
|         patterns: |  | ||||||
|           - "@goauthentik/*" |  | ||||||
|   - package-ecosystem: npm |   - package-ecosystem: npm | ||||||
|     directory: "/website" |     directory: "/website" | ||||||
|     schedule: |     schedule: | ||||||
| @ -97,16 +88,6 @@ updates: | |||||||
|           - "swc-*" |           - "swc-*" | ||||||
|           - "lightningcss*" |           - "lightningcss*" | ||||||
|           - "@rspack/binding*" |           - "@rspack/binding*" | ||||||
|       goauthentik: |  | ||||||
|         patterns: |  | ||||||
|           - "@goauthentik/*" |  | ||||||
|       eslint: |  | ||||||
|         patterns: |  | ||||||
|           - "@eslint/*" |  | ||||||
|           - "@typescript-eslint/*" |  | ||||||
|           - "eslint-*" |  | ||||||
|           - "eslint" |  | ||||||
|           - "typescript-eslint" |  | ||||||
|   - package-ecosystem: npm |   - package-ecosystem: npm | ||||||
|     directory: "/lifecycle/aws" |     directory: "/lifecycle/aws" | ||||||
|     schedule: |     schedule: | ||||||
| @ -137,15 +118,3 @@ updates: | |||||||
|       prefix: "core:" |       prefix: "core:" | ||||||
|     labels: |     labels: | ||||||
|       - dependencies |       - dependencies | ||||||
|   - package-ecosystem: docker-compose |  | ||||||
|     directories: |  | ||||||
|       # - /scripts # Maybe |  | ||||||
|       - /tests/e2e |  | ||||||
|     schedule: |  | ||||||
|       interval: daily |  | ||||||
|       time: "04:00" |  | ||||||
|     open-pull-requests-limit: 10 |  | ||||||
|     commit-message: |  | ||||||
|       prefix: "core:" |  | ||||||
|     labels: |  | ||||||
|       - dependencies |  | ||||||
|  | |||||||
							
								
								
									
										1
									
								
								.github/workflows/api-ts-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.github/workflows/api-ts-publish.yml
									
									
									
									
										vendored
									
									
								
							| @ -53,7 +53,6 @@ jobs: | |||||||
|           signoff: true |           signoff: true | ||||||
|           # ID from https://api.github.com/users/authentik-automation[bot] |           # ID from https://api.github.com/users/authentik-automation[bot] | ||||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> |           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> | ||||||
|           labels: dependencies |  | ||||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 |       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||||
|         with: |         with: | ||||||
|           token: ${{ steps.generate_token.outputs.token }} |           token: ${{ steps.generate_token.outputs.token }} | ||||||
|  | |||||||
							
								
								
									
										17
									
								
								.github/workflows/ci-main.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										17
									
								
								.github/workflows/ci-main.yml
									
									
									
									
										vendored
									
									
								
							| @ -62,7 +62,6 @@ jobs: | |||||||
|         psql: |         psql: | ||||||
|           - 15-alpine |           - 15-alpine | ||||||
|           - 16-alpine |           - 16-alpine | ||||||
|           - 17-alpine |  | ||||||
|         run_id: [1, 2, 3, 4, 5] |         run_id: [1, 2, 3, 4, 5] | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
| @ -71,18 +70,22 @@ jobs: | |||||||
|       - name: checkout stable |       - name: checkout stable | ||||||
|         run: | |         run: | | ||||||
|           # Copy current, latest config to local |           # Copy current, latest config to local | ||||||
|  |           # Temporarly comment the .github backup while migrating to uv | ||||||
|           cp authentik/lib/default.yml local.env.yml |           cp authentik/lib/default.yml local.env.yml | ||||||
|           cp -R .github .. |           # cp -R .github .. | ||||||
|           cp -R scripts .. |           cp -R scripts .. | ||||||
|           git checkout $(git tag --sort=version:refname | grep '^version/' | grep -vE -- '-rc[0-9]+$' | tail -n1) |           git checkout $(git tag --sort=version:refname | grep '^version/' | grep -vE -- '-rc[0-9]+$' | tail -n1) | ||||||
|           rm -rf .github/ scripts/ |           # rm -rf .github/ scripts/ | ||||||
|           mv ../.github ../scripts . |           # mv ../.github ../scripts . | ||||||
|  |           rm -rf scripts/ | ||||||
|  |           mv ../scripts . | ||||||
|       - name: Setup authentik env (stable) |       - name: Setup authentik env (stable) | ||||||
|         uses: ./.github/actions/setup |         uses: ./.github/actions/setup | ||||||
|         with: |         with: | ||||||
|           postgresql_version: ${{ matrix.psql }} |           postgresql_version: ${{ matrix.psql }} | ||||||
|  |         continue-on-error: true | ||||||
|       - name: run migrations to stable |       - name: run migrations to stable | ||||||
|         run: uv run python -m lifecycle.migrate |         run: poetry run python -m lifecycle.migrate | ||||||
|       - name: checkout current code |       - name: checkout current code | ||||||
|         run: | |         run: | | ||||||
|           set -x |           set -x | ||||||
| @ -117,7 +120,6 @@ jobs: | |||||||
|         psql: |         psql: | ||||||
|           - 15-alpine |           - 15-alpine | ||||||
|           - 16-alpine |           - 16-alpine | ||||||
|           - 17-alpine |  | ||||||
|         run_id: [1, 2, 3, 4, 5] |         run_id: [1, 2, 3, 4, 5] | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
| @ -202,7 +204,7 @@ jobs: | |||||||
|         uses: actions/cache@v4 |         uses: actions/cache@v4 | ||||||
|         with: |         with: | ||||||
|           path: web/dist |           path: web/dist | ||||||
|           key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b |           key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }} | ||||||
|       - name: prepare web ui |       - name: prepare web ui | ||||||
|         if: steps.cache-web.outputs.cache-hit != 'true' |         if: steps.cache-web.outputs.cache-hit != 'true' | ||||||
|         working-directory: web |         working-directory: web | ||||||
| @ -210,7 +212,6 @@ jobs: | |||||||
|           npm ci |           npm ci | ||||||
|           make -C .. gen-client-ts |           make -C .. gen-client-ts | ||||||
|           npm run build |           npm run build | ||||||
|           npm run build:sfe |  | ||||||
|       - name: run e2e |       - name: run e2e | ||||||
|         run: | |         run: | | ||||||
|           uv run coverage run manage.py test ${{ matrix.job.glob }} |           uv run coverage run manage.py test ${{ matrix.job.glob }} | ||||||
|  | |||||||
							
								
								
									
										2
									
								
								.github/workflows/ci-outpost.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/workflows/ci-outpost.yml
									
									
									
									
										vendored
									
									
								
							| @ -29,7 +29,7 @@ jobs: | |||||||
|       - name: Generate API |       - name: Generate API | ||||||
|         run: make gen-client-go |         run: make gen-client-go | ||||||
|       - name: golangci-lint |       - name: golangci-lint | ||||||
|         uses: golangci/golangci-lint-action@v8 |         uses: golangci/golangci-lint-action@v7 | ||||||
|         with: |         with: | ||||||
|           version: latest |           version: latest | ||||||
|           args: --timeout 5000s --verbose |           args: --timeout 5000s --verbose | ||||||
|  | |||||||
							
								
								
									
										62
									
								
								.github/workflows/ci-website.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										62
									
								
								.github/workflows/ci-website.yml
									
									
									
									
										vendored
									
									
								
							| @ -41,60 +41,32 @@ jobs: | |||||||
|       - name: test |       - name: test | ||||||
|         working-directory: website/ |         working-directory: website/ | ||||||
|         run: npm test |         run: npm test | ||||||
|   build-container: |   build: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     permissions: |     name: ${{ matrix.job }} | ||||||
|       # Needed to upload container images to ghcr.io |     strategy: | ||||||
|       packages: write |       fail-fast: false | ||||||
|       # Needed for attestation |       matrix: | ||||||
|       id-token: write |         job: | ||||||
|       attestations: write |           - build | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|  |       - uses: actions/setup-node@v4 | ||||||
|         with: |         with: | ||||||
|           ref: ${{ github.event.pull_request.head.sha }} |           node-version-file: website/package.json | ||||||
|       - name: Set up QEMU |           cache: "npm" | ||||||
|         uses: docker/setup-qemu-action@v3.6.0 |           cache-dependency-path: website/package-lock.json | ||||||
|       - name: Set up Docker Buildx |       - working-directory: website/ | ||||||
|         uses: docker/setup-buildx-action@v3 |         run: npm ci | ||||||
|       - name: prepare variables |       - name: build | ||||||
|         uses: ./.github/actions/docker-push-variables |         working-directory: website/ | ||||||
|         id: ev |         run: npm run ${{ matrix.job }} | ||||||
|         env: |  | ||||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} |  | ||||||
|         with: |  | ||||||
|           image-name: ghcr.io/goauthentik/dev-docs |  | ||||||
|       - name: Login to Container Registry |  | ||||||
|         if: ${{ steps.ev.outputs.shouldPush == 'true' }} |  | ||||||
|         uses: docker/login-action@v3 |  | ||||||
|         with: |  | ||||||
|           registry: ghcr.io |  | ||||||
|           username: ${{ github.repository_owner }} |  | ||||||
|           password: ${{ secrets.GITHUB_TOKEN }} |  | ||||||
|       - name: Build Docker Image |  | ||||||
|         id: push |  | ||||||
|         uses: docker/build-push-action@v6 |  | ||||||
|         with: |  | ||||||
|           tags: ${{ steps.ev.outputs.imageTags }} |  | ||||||
|           file: website/Dockerfile |  | ||||||
|           push: ${{ steps.ev.outputs.shouldPush == 'true' }} |  | ||||||
|           platforms: linux/amd64,linux/arm64 |  | ||||||
|           context: . |  | ||||||
|           cache-from: type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache |  | ||||||
|           cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache,mode=max' || '' }} |  | ||||||
|       - uses: actions/attest-build-provenance@v2 |  | ||||||
|         id: attest |  | ||||||
|         if: ${{ steps.ev.outputs.shouldPush == 'true' }} |  | ||||||
|         with: |  | ||||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} |  | ||||||
|           subject-digest: ${{ steps.push.outputs.digest }} |  | ||||||
|           push-to-registry: true |  | ||||||
|   ci-website-mark: |   ci-website-mark: | ||||||
|     if: always() |     if: always() | ||||||
|     needs: |     needs: | ||||||
|       - lint |       - lint | ||||||
|       - test |       - test | ||||||
|       - build-container |       - build | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - uses: re-actors/alls-green@release/v1 |       - uses: re-actors/alls-green@release/v1 | ||||||
|  | |||||||
| @ -37,7 +37,6 @@ jobs: | |||||||
|           signoff: true |           signoff: true | ||||||
|           # ID from https://api.github.com/users/authentik-automation[bot] |           # ID from https://api.github.com/users/authentik-automation[bot] | ||||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> |           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> | ||||||
|           labels: dependencies |  | ||||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 |       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||||
|         with: |         with: | ||||||
|           token: ${{ steps.generate_token.outputs.token }} |           token: ${{ steps.generate_token.outputs.token }} | ||||||
|  | |||||||
							
								
								
									
										1
									
								
								.github/workflows/image-compress.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.github/workflows/image-compress.yml
									
									
									
									
										vendored
									
									
								
							| @ -53,7 +53,6 @@ jobs: | |||||||
|           body: ${{ steps.compress.outputs.markdown }} |           body: ${{ steps.compress.outputs.markdown }} | ||||||
|           delete-branch: true |           delete-branch: true | ||||||
|           signoff: true |           signoff: true | ||||||
|           labels: dependencies |  | ||||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 |       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||||
|         if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}" |         if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}" | ||||||
|         with: |         with: | ||||||
|  | |||||||
							
								
								
									
										24
									
								
								.github/workflows/packages-npm-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										24
									
								
								.github/workflows/packages-npm-publish.yml
									
									
									
									
										vendored
									
									
								
							| @ -3,11 +3,10 @@ on: | |||||||
|   push: |   push: | ||||||
|     branches: [main] |     branches: [main] | ||||||
|     paths: |     paths: | ||||||
|       - packages/docusaurus-config/** |       - packages/docusaurus-config | ||||||
|       - packages/eslint-config/** |       - packages/eslint-config | ||||||
|       - packages/prettier-config/** |       - packages/prettier-config | ||||||
|       - packages/tsconfig/** |       - packages/tsconfig | ||||||
|       - web/packages/esbuild-plugin-live-reload/** |  | ||||||
|   workflow_dispatch: |   workflow_dispatch: | ||||||
| jobs: | jobs: | ||||||
|   publish: |   publish: | ||||||
| @ -17,28 +16,27 @@ jobs: | |||||||
|       fail-fast: false |       fail-fast: false | ||||||
|       matrix: |       matrix: | ||||||
|         package: |         package: | ||||||
|           - packages/docusaurus-config |           - docusaurus-config | ||||||
|           - packages/eslint-config |           - eslint-config | ||||||
|           - packages/prettier-config |           - prettier-config | ||||||
|           - packages/tsconfig |           - tsconfig | ||||||
|           - web/packages/esbuild-plugin-live-reload |  | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|         with: |         with: | ||||||
|           fetch-depth: 2 |           fetch-depth: 2 | ||||||
|       - uses: actions/setup-node@v4 |       - uses: actions/setup-node@v4 | ||||||
|         with: |         with: | ||||||
|           node-version-file: ${{ matrix.package }}/package.json |           node-version-file: packages/${{ matrix.package }}/package.json | ||||||
|           registry-url: "https://registry.npmjs.org" |           registry-url: "https://registry.npmjs.org" | ||||||
|       - name: Get changed files |       - name: Get changed files | ||||||
|         id: changed-files |         id: changed-files | ||||||
|         uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c |         uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c | ||||||
|         with: |         with: | ||||||
|           files: | |           files: | | ||||||
|             ${{ matrix.package }}/package.json |             packages/${{ matrix.package }}/package.json | ||||||
|       - name: Publish package |       - name: Publish package | ||||||
|         if: steps.changed-files.outputs.any_changed == 'true' |         if: steps.changed-files.outputs.any_changed == 'true' | ||||||
|         working-directory: ${{ matrix.package }} |         working-directory: packages/${{ matrix.package}} | ||||||
|         run: | |         run: | | ||||||
|           npm ci |           npm ci | ||||||
|           npm run build |           npm run build | ||||||
|  | |||||||
							
								
								
									
										45
									
								
								.github/workflows/release-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										45
									
								
								.github/workflows/release-publish.yml
									
									
									
									
										vendored
									
									
								
							| @ -20,49 +20,6 @@ jobs: | |||||||
|       release: true |       release: true | ||||||
|       registry_dockerhub: true |       registry_dockerhub: true | ||||||
|       registry_ghcr: true |       registry_ghcr: true | ||||||
|   build-docs: |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|     permissions: |  | ||||||
|       # Needed to upload container images to ghcr.io |  | ||||||
|       packages: write |  | ||||||
|       # Needed for attestation |  | ||||||
|       id-token: write |  | ||||||
|       attestations: write |  | ||||||
|     steps: |  | ||||||
|       - uses: actions/checkout@v4 |  | ||||||
|       - name: Set up QEMU |  | ||||||
|         uses: docker/setup-qemu-action@v3.6.0 |  | ||||||
|       - name: Set up Docker Buildx |  | ||||||
|         uses: docker/setup-buildx-action@v3 |  | ||||||
|       - name: prepare variables |  | ||||||
|         uses: ./.github/actions/docker-push-variables |  | ||||||
|         id: ev |  | ||||||
|         env: |  | ||||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} |  | ||||||
|         with: |  | ||||||
|           image-name: ghcr.io/goauthentik/docs |  | ||||||
|       - name: Login to GitHub Container Registry |  | ||||||
|         uses: docker/login-action@v3 |  | ||||||
|         with: |  | ||||||
|           registry: ghcr.io |  | ||||||
|           username: ${{ github.repository_owner }} |  | ||||||
|           password: ${{ secrets.GITHUB_TOKEN }} |  | ||||||
|       - name: Build Docker Image |  | ||||||
|         id: push |  | ||||||
|         uses: docker/build-push-action@v6 |  | ||||||
|         with: |  | ||||||
|           tags: ${{ steps.ev.outputs.imageTags }} |  | ||||||
|           file: website/Dockerfile |  | ||||||
|           push: true |  | ||||||
|           platforms: linux/amd64,linux/arm64 |  | ||||||
|           context: . |  | ||||||
|       - uses: actions/attest-build-provenance@v2 |  | ||||||
|         id: attest |  | ||||||
|         if: true |  | ||||||
|         with: |  | ||||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} |  | ||||||
|           subject-digest: ${{ steps.push.outputs.digest }} |  | ||||||
|           push-to-registry: true |  | ||||||
|   build-outpost: |   build-outpost: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     permissions: |     permissions: | ||||||
| @ -236,6 +193,6 @@ jobs: | |||||||
|           SENTRY_ORG: authentik-security-inc |           SENTRY_ORG: authentik-security-inc | ||||||
|           SENTRY_PROJECT: authentik |           SENTRY_PROJECT: authentik | ||||||
|         with: |         with: | ||||||
|           release: authentik@${{ steps.ev.outputs.version }} |           version: authentik@${{ steps.ev.outputs.version }} | ||||||
|           sourcemaps: "./web/dist" |           sourcemaps: "./web/dist" | ||||||
|           url_prefix: "~/static/dist" |           url_prefix: "~/static/dist" | ||||||
|  | |||||||
| @ -52,6 +52,3 @@ jobs: | |||||||
|           body: "core, web: update translations" |           body: "core, web: update translations" | ||||||
|           delete-branch: true |           delete-branch: true | ||||||
|           signoff: true |           signoff: true | ||||||
|           labels: dependencies |  | ||||||
|           # ID from https://api.github.com/users/authentik-automation[bot] |  | ||||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> |  | ||||||
|  | |||||||
							
								
								
									
										15
									
								
								.github/workflows/translation-rename.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										15
									
								
								.github/workflows/translation-rename.yml
									
									
									
									
										vendored
									
									
								
							| @ -15,7 +15,6 @@ jobs: | |||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}} |     if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}} | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |  | ||||||
|       - id: generate_token |       - id: generate_token | ||||||
|         uses: tibdex/github-app-token@v2 |         uses: tibdex/github-app-token@v2 | ||||||
|         with: |         with: | ||||||
| @ -26,13 +25,23 @@ jobs: | |||||||
|         env: |         env: | ||||||
|           GH_TOKEN: ${{ steps.generate_token.outputs.token }} |           GH_TOKEN: ${{ steps.generate_token.outputs.token }} | ||||||
|         run: | |         run: | | ||||||
|           title=$(gh pr view ${{ github.event.pull_request.number }} --json  "title" -q ".title") |           title=$(curl -q -L \ | ||||||
|  |             -H "Accept: application/vnd.github+json" \ | ||||||
|  |             -H "Authorization: Bearer ${GH_TOKEN}" \ | ||||||
|  |             -H "X-GitHub-Api-Version: 2022-11-28" \ | ||||||
|  |             https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} | jq -r .title) | ||||||
|           echo "title=${title}" >> "$GITHUB_OUTPUT" |           echo "title=${title}" >> "$GITHUB_OUTPUT" | ||||||
|       - name: Rename |       - name: Rename | ||||||
|         env: |         env: | ||||||
|           GH_TOKEN: ${{ steps.generate_token.outputs.token }} |           GH_TOKEN: ${{ steps.generate_token.outputs.token }} | ||||||
|         run: | |         run: | | ||||||
|           gh pr edit ${{ github.event.pull_request.number }} -t "translate: ${{ steps.title.outputs.title }}" --add-label dependencies |           curl -L \ | ||||||
|  |             -X PATCH \ | ||||||
|  |             -H "Accept: application/vnd.github+json" \ | ||||||
|  |             -H "Authorization: Bearer ${GH_TOKEN}" \ | ||||||
|  |             -H "X-GitHub-Api-Version: 2022-11-28" \ | ||||||
|  |             https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} \ | ||||||
|  |             -d "{\"title\":\"translate: ${{ steps.title.outputs.title }}\"}" | ||||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 |       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||||
|         with: |         with: | ||||||
|           token: ${{ steps.generate_token.outputs.token }} |           token: ${{ steps.generate_token.outputs.token }} | ||||||
|  | |||||||
							
								
								
									
										6
									
								
								.vscode/settings.json
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										6
									
								
								.vscode/settings.json
									
									
									
									
										vendored
									
									
								
							| @ -16,7 +16,7 @@ | |||||||
|     ], |     ], | ||||||
|     "typescript.preferences.importModuleSpecifier": "non-relative", |     "typescript.preferences.importModuleSpecifier": "non-relative", | ||||||
|     "typescript.preferences.importModuleSpecifierEnding": "index", |     "typescript.preferences.importModuleSpecifierEnding": "index", | ||||||
|     "typescript.tsdk": "./node_modules/typescript/lib", |     "typescript.tsdk": "./web/node_modules/typescript/lib", | ||||||
|     "typescript.enablePromptUseWorkspaceTsdk": true, |     "typescript.enablePromptUseWorkspaceTsdk": true, | ||||||
|     "yaml.schemas": { |     "yaml.schemas": { | ||||||
|         "./blueprints/schema.json": "blueprints/**/*.yaml" |         "./blueprints/schema.json": "blueprints/**/*.yaml" | ||||||
| @ -30,5 +30,7 @@ | |||||||
|         } |         } | ||||||
|     ], |     ], | ||||||
|     "go.testFlags": ["-count=1"], |     "go.testFlags": ["-count=1"], | ||||||
|     "github-actions.workflows.pinned.workflows": [".github/workflows/ci-main.yml"] |     "github-actions.workflows.pinned.workflows": [ | ||||||
|  |         ".github/workflows/ci-main.yml" | ||||||
|  |     ] | ||||||
| } | } | ||||||
|  | |||||||
							
								
								
									
										56
									
								
								Dockerfile
									
									
									
									
									
								
							
							
						
						
									
										56
									
								
								Dockerfile
									
									
									
									
									
								
							| @ -1,7 +1,26 @@ | |||||||
| # syntax=docker/dockerfile:1 | # syntax=docker/dockerfile:1 | ||||||
|  |  | ||||||
| # Stage 1: Build webui | # Stage 1: Build website | ||||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:24-slim AS node-builder | FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS website-builder | ||||||
|  |  | ||||||
|  | ENV NODE_ENV=production | ||||||
|  |  | ||||||
|  | WORKDIR /work/website | ||||||
|  |  | ||||||
|  | RUN --mount=type=bind,target=/work/website/package.json,src=./website/package.json \ | ||||||
|  |     --mount=type=bind,target=/work/website/package-lock.json,src=./website/package-lock.json \ | ||||||
|  |     --mount=type=cache,id=npm-website,sharing=shared,target=/root/.npm \ | ||||||
|  |     npm ci --include=dev | ||||||
|  |  | ||||||
|  | COPY ./website /work/website/ | ||||||
|  | COPY ./blueprints /work/blueprints/ | ||||||
|  | COPY ./schema.yml /work/ | ||||||
|  | COPY ./SECURITY.md /work/ | ||||||
|  |  | ||||||
|  | RUN npm run build-bundled | ||||||
|  |  | ||||||
|  | # Stage 2: Build webui | ||||||
|  | FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS web-builder | ||||||
|  |  | ||||||
| ARG GIT_BUILD_HASH | ARG GIT_BUILD_HASH | ||||||
| ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ||||||
| @ -13,7 +32,7 @@ RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \ | |||||||
|     --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \ |     --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \ | ||||||
|     --mount=type=bind,target=/work/web/packages/sfe/package.json,src=./web/packages/sfe/package.json \ |     --mount=type=bind,target=/work/web/packages/sfe/package.json,src=./web/packages/sfe/package.json \ | ||||||
|     --mount=type=bind,target=/work/web/scripts,src=./web/scripts \ |     --mount=type=bind,target=/work/web/scripts,src=./web/scripts \ | ||||||
|     --mount=type=cache,id=npm-ak,sharing=shared,target=/root/.npm \ |     --mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \ | ||||||
|     npm ci --include=dev |     npm ci --include=dev | ||||||
|  |  | ||||||
| COPY ./package.json /work | COPY ./package.json /work | ||||||
| @ -21,10 +40,9 @@ COPY ./web /work/web/ | |||||||
| COPY ./website /work/website/ | COPY ./website /work/website/ | ||||||
| COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api | COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api | ||||||
|  |  | ||||||
| RUN npm run build && \ | RUN npm run build | ||||||
|     npm run build:sfe |  | ||||||
|  |  | ||||||
| # Stage 2: Build go proxy | # Stage 3: Build go proxy | ||||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.24-bookworm AS go-builder | FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.24-bookworm AS go-builder | ||||||
|  |  | ||||||
| ARG TARGETOS | ARG TARGETOS | ||||||
| @ -49,8 +67,8 @@ RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \ | |||||||
| COPY ./cmd /go/src/goauthentik.io/cmd | COPY ./cmd /go/src/goauthentik.io/cmd | ||||||
| COPY ./authentik/lib /go/src/goauthentik.io/authentik/lib | COPY ./authentik/lib /go/src/goauthentik.io/authentik/lib | ||||||
| COPY ./web/static.go /go/src/goauthentik.io/web/static.go | COPY ./web/static.go /go/src/goauthentik.io/web/static.go | ||||||
| COPY --from=node-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt | COPY --from=web-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt | ||||||
| COPY --from=node-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt | COPY --from=web-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt | ||||||
| COPY ./internal /go/src/goauthentik.io/internal | COPY ./internal /go/src/goauthentik.io/internal | ||||||
| COPY ./go.mod /go/src/goauthentik.io/go.mod | COPY ./go.mod /go/src/goauthentik.io/go.mod | ||||||
| COPY ./go.sum /go/src/goauthentik.io/go.sum | COPY ./go.sum /go/src/goauthentik.io/go.sum | ||||||
| @ -61,23 +79,24 @@ RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \ | |||||||
|     CGO_ENABLED=1 GOFIPS140=latest GOARM="${TARGETVARIANT#v}" \ |     CGO_ENABLED=1 GOFIPS140=latest GOARM="${TARGETVARIANT#v}" \ | ||||||
|     go build -o /go/authentik ./cmd/server |     go build -o /go/authentik ./cmd/server | ||||||
|  |  | ||||||
| # Stage 3: MaxMind GeoIP | # Stage 4: MaxMind GeoIP | ||||||
| FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip | FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip | ||||||
|  |  | ||||||
| ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN" | ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN" | ||||||
| ENV GEOIPUPDATE_VERBOSE="1" | ENV GEOIPUPDATE_VERBOSE="1" | ||||||
| ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID" | ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID" | ||||||
|  | ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY" | ||||||
|  |  | ||||||
| USER root | USER root | ||||||
| RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ | RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ | ||||||
|     --mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \ |     --mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \ | ||||||
|     mkdir -p /usr/share/GeoIP && \ |     mkdir -p /usr/share/GeoIP && \ | ||||||
|     /bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" |     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" | ||||||
|  |  | ||||||
| # Stage 4: Download uv | # Stage 5: Download uv | ||||||
| FROM ghcr.io/astral-sh/uv:0.7.13 AS uv | FROM ghcr.io/astral-sh/uv:0.6.14 AS uv | ||||||
| # Stage 5: Base python image | # Stage 6: Base python image | ||||||
| FROM ghcr.io/goauthentik/fips-python:3.13.4-slim-bookworm-fips AS python-base | FROM ghcr.io/goauthentik/fips-python:3.12.10-slim-bookworm-fips AS python-base | ||||||
|  |  | ||||||
| ENV VENV_PATH="/ak-root/.venv" \ | ENV VENV_PATH="/ak-root/.venv" \ | ||||||
|     PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \ |     PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \ | ||||||
| @ -90,7 +109,7 @@ WORKDIR /ak-root/ | |||||||
|  |  | ||||||
| COPY --from=uv /uv /uvx /bin/ | COPY --from=uv /uv /uvx /bin/ | ||||||
|  |  | ||||||
| # Stage 6: Python dependencies | # Stage 7: Python dependencies | ||||||
| FROM python-base AS python-deps | FROM python-base AS python-deps | ||||||
|  |  | ||||||
| ARG TARGETARCH | ARG TARGETARCH | ||||||
| @ -125,7 +144,7 @@ RUN --mount=type=bind,target=pyproject.toml,src=pyproject.toml \ | |||||||
|     --mount=type=cache,target=/root/.cache/uv \ |     --mount=type=cache,target=/root/.cache/uv \ | ||||||
|     uv sync --frozen --no-install-project --no-dev |     uv sync --frozen --no-install-project --no-dev | ||||||
|  |  | ||||||
| # Stage 7: Run | # Stage 8: Run | ||||||
| FROM python-base AS final-image | FROM python-base AS final-image | ||||||
|  |  | ||||||
| ARG VERSION | ARG VERSION | ||||||
| @ -168,8 +187,9 @@ COPY ./lifecycle/ /lifecycle | |||||||
| COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf | COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf | ||||||
| COPY --from=go-builder /go/authentik /bin/authentik | COPY --from=go-builder /go/authentik /bin/authentik | ||||||
| COPY --from=python-deps /ak-root/.venv /ak-root/.venv | COPY --from=python-deps /ak-root/.venv /ak-root/.venv | ||||||
| COPY --from=node-builder /work/web/dist/ /web/dist/ | COPY --from=web-builder /work/web/dist/ /web/dist/ | ||||||
| COPY --from=node-builder /work/web/authentik/ /web/authentik/ | COPY --from=web-builder /work/web/authentik/ /web/authentik/ | ||||||
|  | COPY --from=website-builder /work/website/build/ /website/help/ | ||||||
| COPY --from=geoip /usr/share/GeoIP /geoip | COPY --from=geoip /usr/share/GeoIP /geoip | ||||||
|  |  | ||||||
| USER 1000 | USER 1000 | ||||||
|  | |||||||
							
								
								
									
										53
									
								
								Makefile
									
									
									
									
									
								
							
							
						
						
									
										53
									
								
								Makefile
									
									
									
									
									
								
							| @ -1,7 +1,6 @@ | |||||||
| .PHONY: gen dev-reset all clean test web website | .PHONY: gen dev-reset all clean test web website | ||||||
|  |  | ||||||
| SHELL := /usr/bin/env bash | .SHELLFLAGS += ${SHELLFLAGS} -e | ||||||
| .SHELLFLAGS += ${SHELLFLAGS} -e -o pipefail |  | ||||||
| PWD = $(shell pwd) | PWD = $(shell pwd) | ||||||
| UID = $(shell id -u) | UID = $(shell id -u) | ||||||
| GID = $(shell id -g) | GID = $(shell id -g) | ||||||
| @ -9,9 +8,9 @@ NPM_VERSION = $(shell python -m scripts.generate_semver) | |||||||
| PY_SOURCES = authentik tests scripts lifecycle .github | PY_SOURCES = authentik tests scripts lifecycle .github | ||||||
| DOCKER_IMAGE ?= "authentik:test" | DOCKER_IMAGE ?= "authentik:test" | ||||||
|  |  | ||||||
| GEN_API_TS = gen-ts-api | GEN_API_TS = "gen-ts-api" | ||||||
| GEN_API_PY = gen-py-api | GEN_API_PY = "gen-py-api" | ||||||
| GEN_API_GO = gen-go-api | GEN_API_GO = "gen-go-api" | ||||||
|  |  | ||||||
| pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null) | pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null) | ||||||
| pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null) | pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null) | ||||||
| @ -94,7 +93,7 @@ gen-build:  ## Extract the schema from the database | |||||||
| 	AUTHENTIK_DEBUG=true \ | 	AUTHENTIK_DEBUG=true \ | ||||||
| 		AUTHENTIK_TENANTS__ENABLED=true \ | 		AUTHENTIK_TENANTS__ENABLED=true \ | ||||||
| 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | ||||||
| 		uv run ak make_blueprint_schema --file blueprints/schema.json | 		uv run ak make_blueprint_schema > blueprints/schema.json | ||||||
| 	AUTHENTIK_DEBUG=true \ | 	AUTHENTIK_DEBUG=true \ | ||||||
| 		AUTHENTIK_TENANTS__ENABLED=true \ | 		AUTHENTIK_TENANTS__ENABLED=true \ | ||||||
| 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | ||||||
| @ -118,19 +117,14 @@ gen-diff:  ## (Release) generate the changelog diff between the current schema a | |||||||
| 	npx prettier --write diff.md | 	npx prettier --write diff.md | ||||||
|  |  | ||||||
| gen-clean-ts:  ## Remove generated API client for Typescript | gen-clean-ts:  ## Remove generated API client for Typescript | ||||||
| 	rm -rf ${PWD}/${GEN_API_TS}/ | 	rm -rf ./${GEN_API_TS}/ | ||||||
| 	rm -rf ${PWD}/web/node_modules/@goauthentik/api/ | 	rm -rf ./web/node_modules/@goauthentik/api/ | ||||||
|  |  | ||||||
| gen-clean-go:  ## Remove generated API client for Go | gen-clean-go:  ## Remove generated API client for Go | ||||||
| 	mkdir -p ${PWD}/${GEN_API_GO} | 	rm -rf ./${GEN_API_GO}/ | ||||||
| ifneq ($(wildcard ${PWD}/${GEN_API_GO}/.*),) |  | ||||||
| 	make -C ${PWD}/${GEN_API_GO} clean |  | ||||||
| else |  | ||||||
| 	rm -rf ${PWD}/${GEN_API_GO} |  | ||||||
| endif |  | ||||||
|  |  | ||||||
| gen-clean-py:  ## Remove generated API client for Python | gen-clean-py:  ## Remove generated API client for Python | ||||||
| 	rm -rf ${PWD}/${GEN_API_PY}/ | 	rm -rf ./${GEN_API_PY}/ | ||||||
|  |  | ||||||
| gen-clean: gen-clean-ts gen-clean-go gen-clean-py  ## Remove generated API clients | gen-clean: gen-clean-ts gen-clean-go gen-clean-py  ## Remove generated API clients | ||||||
|  |  | ||||||
| @ -147,8 +141,8 @@ gen-client-ts: gen-clean-ts  ## Build and install the authentik API for Typescri | |||||||
| 		--git-repo-id authentik \ | 		--git-repo-id authentik \ | ||||||
| 		--git-user-id goauthentik | 		--git-user-id goauthentik | ||||||
| 	mkdir -p web/node_modules/@goauthentik/api | 	mkdir -p web/node_modules/@goauthentik/api | ||||||
| 	cd ${PWD}/${GEN_API_TS} && npm i | 	cd ./${GEN_API_TS} && npm i | ||||||
| 	\cp -rf ${PWD}/${GEN_API_TS}/* web/node_modules/@goauthentik/api | 	\cp -rf ./${GEN_API_TS}/* web/node_modules/@goauthentik/api | ||||||
|  |  | ||||||
| gen-client-py: gen-clean-py ## Build and install the authentik API for Python | gen-client-py: gen-clean-py ## Build and install the authentik API for Python | ||||||
| 	docker run \ | 	docker run \ | ||||||
| @ -162,17 +156,24 @@ gen-client-py: gen-clean-py ## Build and install the authentik API for Python | |||||||
| 		--additional-properties=packageVersion=${NPM_VERSION} \ | 		--additional-properties=packageVersion=${NPM_VERSION} \ | ||||||
| 		--git-repo-id authentik \ | 		--git-repo-id authentik \ | ||||||
| 		--git-user-id goauthentik | 		--git-user-id goauthentik | ||||||
|  | 	pip install ./${GEN_API_PY} | ||||||
|  |  | ||||||
| gen-client-go: gen-clean-go  ## Build and install the authentik API for Golang | gen-client-go: gen-clean-go  ## Build and install the authentik API for Golang | ||||||
| 	mkdir -p ${PWD}/${GEN_API_GO} | 	mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates | ||||||
| ifeq ($(wildcard ${PWD}/${GEN_API_GO}/.*),) | 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./${GEN_API_GO}/config.yaml | ||||||
| 	git clone --depth 1 https://github.com/goauthentik/client-go.git ${PWD}/${GEN_API_GO} | 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./${GEN_API_GO}/templates/README.mustache | ||||||
| else | 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./${GEN_API_GO}/templates/go.mod.mustache | ||||||
| 	cd ${PWD}/${GEN_API_GO} && git pull | 	cp schema.yml ./${GEN_API_GO}/ | ||||||
| endif | 	docker run \ | ||||||
| 	cp ${PWD}/schema.yml ${PWD}/${GEN_API_GO} | 		--rm -v ${PWD}/${GEN_API_GO}:/local \ | ||||||
| 	make -C ${PWD}/${GEN_API_GO} build | 		--user ${UID}:${GID} \ | ||||||
|  | 		docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \ | ||||||
|  | 		-i /local/schema.yml \ | ||||||
|  | 		-g go \ | ||||||
|  | 		-o /local/ \ | ||||||
|  | 		-c /local/config.yaml | ||||||
| 	go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO} | 	go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO} | ||||||
|  | 	rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/ | ||||||
|  |  | ||||||
| gen-dev-config:  ## Generate a local development config file | gen-dev-config:  ## Generate a local development config file | ||||||
| 	uv run scripts/generate_config.py | 	uv run scripts/generate_config.py | ||||||
| @ -243,7 +244,7 @@ docker:  ## Build a docker image of the current source tree | |||||||
| 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} | 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} | ||||||
|  |  | ||||||
| test-docker: | test-docker: | ||||||
| 	BUILD=true ${PWD}/scripts/test_docker.sh | 	BUILD=true ./scripts/test_docker.sh | ||||||
|  |  | ||||||
| ######################### | ######################### | ||||||
| ## CI | ## CI | ||||||
|  | |||||||
| @ -42,4 +42,4 @@ See [SECURITY.md](SECURITY.md) | |||||||
|  |  | ||||||
| ## Adoption and Contributions | ## Adoption and Contributions | ||||||
|  |  | ||||||
| Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [contribution guide](https://docs.goauthentik.io/docs/developer-docs?utm_source=github). | Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md). | ||||||
|  | |||||||
| @ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni | |||||||
|  |  | ||||||
| | Version   | Supported | | | Version   | Supported | | ||||||
| | --------- | --------- | | | --------- | --------- | | ||||||
| | 2025.4.x  | ✅        | | | 2024.12.x | ✅        | | ||||||
| | 2025.6.x  | ✅        | | | 2025.2.x  | ✅        | | ||||||
|  |  | ||||||
| ## Reporting a Vulnerability | ## Reporting a Vulnerability | ||||||
|  |  | ||||||
|  | |||||||
| @ -2,7 +2,7 @@ | |||||||
|  |  | ||||||
| from os import environ | from os import environ | ||||||
|  |  | ||||||
| __version__ = "2025.6.1" | __version__ = "2025.2.4" | ||||||
| ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
							
								
								
									
										79
									
								
								authentik/admin/api/metrics.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										79
									
								
								authentik/admin/api/metrics.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,79 @@ | |||||||
|  | """authentik administration metrics""" | ||||||
|  |  | ||||||
|  | from datetime import timedelta | ||||||
|  |  | ||||||
|  | from django.db.models.functions import ExtractHour | ||||||
|  | from drf_spectacular.utils import extend_schema, extend_schema_field | ||||||
|  | from guardian.shortcuts import get_objects_for_user | ||||||
|  | from rest_framework.fields import IntegerField, SerializerMethodField | ||||||
|  | from rest_framework.permissions import IsAuthenticated | ||||||
|  | from rest_framework.request import Request | ||||||
|  | from rest_framework.response import Response | ||||||
|  | from rest_framework.views import APIView | ||||||
|  |  | ||||||
|  | from authentik.core.api.utils import PassiveSerializer | ||||||
|  | from authentik.events.models import EventAction | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class CoordinateSerializer(PassiveSerializer): | ||||||
|  |     """Coordinates for diagrams""" | ||||||
|  |  | ||||||
|  |     x_cord = IntegerField(read_only=True) | ||||||
|  |     y_cord = IntegerField(read_only=True) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class LoginMetricsSerializer(PassiveSerializer): | ||||||
|  |     """Login Metrics per 1h""" | ||||||
|  |  | ||||||
|  |     logins = SerializerMethodField() | ||||||
|  |     logins_failed = SerializerMethodField() | ||||||
|  |     authorizations = SerializerMethodField() | ||||||
|  |  | ||||||
|  |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|  |     def get_logins(self, _): | ||||||
|  |         """Get successful logins per 8 hours for the last 7 days""" | ||||||
|  |         user = self.context["user"] | ||||||
|  |         return ( | ||||||
|  |             get_objects_for_user(user, "authentik_events.view_event").filter( | ||||||
|  |                 action=EventAction.LOGIN | ||||||
|  |             ) | ||||||
|  |             # 3 data points per day, so 8 hour spans | ||||||
|  |             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|  |     def get_logins_failed(self, _): | ||||||
|  |         """Get failed logins per 8 hours for the last 7 days""" | ||||||
|  |         user = self.context["user"] | ||||||
|  |         return ( | ||||||
|  |             get_objects_for_user(user, "authentik_events.view_event").filter( | ||||||
|  |                 action=EventAction.LOGIN_FAILED | ||||||
|  |             ) | ||||||
|  |             # 3 data points per day, so 8 hour spans | ||||||
|  |             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|  |     def get_authorizations(self, _): | ||||||
|  |         """Get successful authorizations per 8 hours for the last 7 days""" | ||||||
|  |         user = self.context["user"] | ||||||
|  |         return ( | ||||||
|  |             get_objects_for_user(user, "authentik_events.view_event").filter( | ||||||
|  |                 action=EventAction.AUTHORIZE_APPLICATION | ||||||
|  |             ) | ||||||
|  |             # 3 data points per day, so 8 hour spans | ||||||
|  |             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class AdministrationMetricsViewSet(APIView): | ||||||
|  |     """Login Metrics per 1h""" | ||||||
|  |  | ||||||
|  |     permission_classes = [IsAuthenticated] | ||||||
|  |  | ||||||
|  |     @extend_schema(responses={200: LoginMetricsSerializer(many=False)}) | ||||||
|  |     def get(self, request: Request) -> Response: | ||||||
|  |         """Login Metrics per 1h""" | ||||||
|  |         serializer = LoginMetricsSerializer(True) | ||||||
|  |         serializer.context["user"] = request.user | ||||||
|  |         return Response(serializer.data) | ||||||
| @ -1,7 +1,6 @@ | |||||||
| """authentik administration overview""" | """authentik administration overview""" | ||||||
|  |  | ||||||
| from django.core.cache import cache | from django.core.cache import cache | ||||||
| from django_tenants.utils import get_public_schema_name |  | ||||||
| from drf_spectacular.utils import extend_schema | from drf_spectacular.utils import extend_schema | ||||||
| from packaging.version import parse | from packaging.version import parse | ||||||
| from rest_framework.fields import SerializerMethodField | from rest_framework.fields import SerializerMethodField | ||||||
| @ -14,7 +13,6 @@ from authentik import __version__, get_build_hash | |||||||
| from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version | from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version | ||||||
| from authentik.core.api.utils import PassiveSerializer | from authentik.core.api.utils import PassiveSerializer | ||||||
| from authentik.outposts.models import Outpost | from authentik.outposts.models import Outpost | ||||||
| from authentik.tenants.utils import get_current_tenant |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class VersionSerializer(PassiveSerializer): | class VersionSerializer(PassiveSerializer): | ||||||
| @ -37,8 +35,6 @@ class VersionSerializer(PassiveSerializer): | |||||||
|  |  | ||||||
|     def get_version_latest(self, _) -> str: |     def get_version_latest(self, _) -> str: | ||||||
|         """Get latest version from cache""" |         """Get latest version from cache""" | ||||||
|         if get_current_tenant().schema_name == get_public_schema_name(): |  | ||||||
|             return __version__ |  | ||||||
|         version_in_cache = cache.get(VERSION_CACHE_KEY) |         version_in_cache = cache.get(VERSION_CACHE_KEY) | ||||||
|         if not version_in_cache:  # pragma: no cover |         if not version_in_cache:  # pragma: no cover | ||||||
|             update_latest_version.delay() |             update_latest_version.delay() | ||||||
|  | |||||||
| @ -14,19 +14,3 @@ class AuthentikAdminConfig(ManagedAppConfig): | |||||||
|     label = "authentik_admin" |     label = "authentik_admin" | ||||||
|     verbose_name = "authentik Admin" |     verbose_name = "authentik Admin" | ||||||
|     default = True |     default = True | ||||||
|  |  | ||||||
|     @ManagedAppConfig.reconcile_global |  | ||||||
|     def clear_update_notifications(self): |  | ||||||
|         """Clear update notifications on startup if the notification was for the version |  | ||||||
|         we're running now.""" |  | ||||||
|         from packaging.version import parse |  | ||||||
|  |  | ||||||
|         from authentik.admin.tasks import LOCAL_VERSION |  | ||||||
|         from authentik.events.models import EventAction, Notification |  | ||||||
|  |  | ||||||
|         for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE): |  | ||||||
|             if "new_version" not in notification.event.context: |  | ||||||
|                 continue |  | ||||||
|             notification_version = notification.event.context["new_version"] |  | ||||||
|             if LOCAL_VERSION >= parse(notification_version): |  | ||||||
|                 notification.delete() |  | ||||||
|  | |||||||
| @ -1,7 +1,6 @@ | |||||||
| """authentik admin settings""" | """authentik admin settings""" | ||||||
|  |  | ||||||
| from celery.schedules import crontab | from celery.schedules import crontab | ||||||
| from django_tenants.utils import get_public_schema_name |  | ||||||
|  |  | ||||||
| from authentik.lib.utils.time import fqdn_rand | from authentik.lib.utils.time import fqdn_rand | ||||||
|  |  | ||||||
| @ -9,7 +8,6 @@ CELERY_BEAT_SCHEDULE = { | |||||||
|     "admin_latest_version": { |     "admin_latest_version": { | ||||||
|         "task": "authentik.admin.tasks.update_latest_version", |         "task": "authentik.admin.tasks.update_latest_version", | ||||||
|         "schedule": crontab(minute=fqdn_rand("admin_latest_version"), hour="*"), |         "schedule": crontab(minute=fqdn_rand("admin_latest_version"), hour="*"), | ||||||
|         "tenant_schemas": [get_public_schema_name()], |  | ||||||
|         "options": {"queue": "authentik_scheduled"}, |         "options": {"queue": "authentik_scheduled"}, | ||||||
|     } |     } | ||||||
| } | } | ||||||
|  | |||||||
| @ -1,6 +1,7 @@ | |||||||
| """authentik admin tasks""" | """authentik admin tasks""" | ||||||
|  |  | ||||||
| from django.core.cache import cache | from django.core.cache import cache | ||||||
|  | from django.db import DatabaseError, InternalError, ProgrammingError | ||||||
| from django.utils.translation import gettext_lazy as _ | from django.utils.translation import gettext_lazy as _ | ||||||
| from packaging.version import parse | from packaging.version import parse | ||||||
| from requests import RequestException | from requests import RequestException | ||||||
| @ -8,7 +9,7 @@ from structlog.stdlib import get_logger | |||||||
|  |  | ||||||
| from authentik import __version__, get_build_hash | from authentik import __version__, get_build_hash | ||||||
| from authentik.admin.apps import PROM_INFO | from authentik.admin.apps import PROM_INFO | ||||||
| from authentik.events.models import Event, EventAction | from authentik.events.models import Event, EventAction, Notification | ||||||
| from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task | from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task | ||||||
| from authentik.lib.config import CONFIG | from authentik.lib.config import CONFIG | ||||||
| from authentik.lib.utils.http import get_http_session | from authentik.lib.utils.http import get_http_session | ||||||
| @ -32,6 +33,20 @@ def _set_prom_info(): | |||||||
|     ) |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @CELERY_APP.task( | ||||||
|  |     throws=(DatabaseError, ProgrammingError, InternalError), | ||||||
|  | ) | ||||||
|  | def clear_update_notifications(): | ||||||
|  |     """Clear update notifications on startup if the notification was for the version | ||||||
|  |     we're running now.""" | ||||||
|  |     for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE): | ||||||
|  |         if "new_version" not in notification.event.context: | ||||||
|  |             continue | ||||||
|  |         notification_version = notification.event.context["new_version"] | ||||||
|  |         if LOCAL_VERSION >= parse(notification_version): | ||||||
|  |             notification.delete() | ||||||
|  |  | ||||||
|  |  | ||||||
| @CELERY_APP.task(bind=True, base=SystemTask) | @CELERY_APP.task(bind=True, base=SystemTask) | ||||||
| @prefill_task | @prefill_task | ||||||
| def update_latest_version(self: SystemTask): | def update_latest_version(self: SystemTask): | ||||||
|  | |||||||
| @ -36,6 +36,11 @@ class TestAdminAPI(TestCase): | |||||||
|         body = loads(response.content) |         body = loads(response.content) | ||||||
|         self.assertEqual(len(body), 0) |         self.assertEqual(len(body), 0) | ||||||
|  |  | ||||||
|  |     def test_metrics(self): | ||||||
|  |         """Test metrics API""" | ||||||
|  |         response = self.client.get(reverse("authentik_api:admin_metrics")) | ||||||
|  |         self.assertEqual(response.status_code, 200) | ||||||
|  |  | ||||||
|     def test_apps(self): |     def test_apps(self): | ||||||
|         """Test apps API""" |         """Test apps API""" | ||||||
|         response = self.client.get(reverse("authentik_api:apps-list")) |         response = self.client.get(reverse("authentik_api:apps-list")) | ||||||
|  | |||||||
| @ -1,12 +1,12 @@ | |||||||
| """test admin tasks""" | """test admin tasks""" | ||||||
|  |  | ||||||
| from django.apps import apps |  | ||||||
| from django.core.cache import cache | from django.core.cache import cache | ||||||
| from django.test import TestCase | from django.test import TestCase | ||||||
| from requests_mock import Mocker | from requests_mock import Mocker | ||||||
|  |  | ||||||
| from authentik.admin.tasks import ( | from authentik.admin.tasks import ( | ||||||
|     VERSION_CACHE_KEY, |     VERSION_CACHE_KEY, | ||||||
|  |     clear_update_notifications, | ||||||
|     update_latest_version, |     update_latest_version, | ||||||
| ) | ) | ||||||
| from authentik.events.models import Event, EventAction | from authentik.events.models import Event, EventAction | ||||||
| @ -72,13 +72,12 @@ class TestAdminTasks(TestCase): | |||||||
|  |  | ||||||
|     def test_clear_update_notifications(self): |     def test_clear_update_notifications(self): | ||||||
|         """Test clear of previous notification""" |         """Test clear of previous notification""" | ||||||
|         admin_config = apps.get_app_config("authentik_admin") |  | ||||||
|         Event.objects.create( |         Event.objects.create( | ||||||
|             action=EventAction.UPDATE_AVAILABLE, context={"new_version": "99999999.9999999.9999999"} |             action=EventAction.UPDATE_AVAILABLE, context={"new_version": "99999999.9999999.9999999"} | ||||||
|         ) |         ) | ||||||
|         Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={"new_version": "1.1.1"}) |         Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={"new_version": "1.1.1"}) | ||||||
|         Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={}) |         Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={}) | ||||||
|         admin_config.clear_update_notifications() |         clear_update_notifications() | ||||||
|         self.assertFalse( |         self.assertFalse( | ||||||
|             Event.objects.filter( |             Event.objects.filter( | ||||||
|                 action=EventAction.UPDATE_AVAILABLE, context__new_version="1.1" |                 action=EventAction.UPDATE_AVAILABLE, context__new_version="1.1" | ||||||
|  | |||||||
| @ -3,6 +3,7 @@ | |||||||
| from django.urls import path | from django.urls import path | ||||||
|  |  | ||||||
| from authentik.admin.api.meta import AppsViewSet, ModelViewSet | from authentik.admin.api.meta import AppsViewSet, ModelViewSet | ||||||
|  | from authentik.admin.api.metrics import AdministrationMetricsViewSet | ||||||
| from authentik.admin.api.system import SystemView | from authentik.admin.api.system import SystemView | ||||||
| from authentik.admin.api.version import VersionView | from authentik.admin.api.version import VersionView | ||||||
| from authentik.admin.api.version_history import VersionHistoryViewSet | from authentik.admin.api.version_history import VersionHistoryViewSet | ||||||
| @ -11,6 +12,11 @@ from authentik.admin.api.workers import WorkerView | |||||||
| api_urlpatterns = [ | api_urlpatterns = [ | ||||||
|     ("admin/apps", AppsViewSet, "apps"), |     ("admin/apps", AppsViewSet, "apps"), | ||||||
|     ("admin/models", ModelViewSet, "models"), |     ("admin/models", ModelViewSet, "models"), | ||||||
|  |     path( | ||||||
|  |         "admin/metrics/", | ||||||
|  |         AdministrationMetricsViewSet.as_view(), | ||||||
|  |         name="admin_metrics", | ||||||
|  |     ), | ||||||
|     path("admin/version/", VersionView.as_view(), name="admin_version"), |     path("admin/version/", VersionView.as_view(), name="admin_version"), | ||||||
|     ("admin/version/history", VersionHistoryViewSet, "version_history"), |     ("admin/version/history", VersionHistoryViewSet, "version_history"), | ||||||
|     path("admin/workers/", WorkerView.as_view(), name="admin_workers"), |     path("admin/workers/", WorkerView.as_view(), name="admin_workers"), | ||||||
|  | |||||||
| @ -1,13 +1,12 @@ | |||||||
| """authentik API AppConfig""" | """authentik API AppConfig""" | ||||||
|  |  | ||||||
| from authentik.blueprints.apps import ManagedAppConfig | from django.apps import AppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikAPIConfig(ManagedAppConfig): | class AuthentikAPIConfig(AppConfig): | ||||||
|     """authentik API Config""" |     """authentik API Config""" | ||||||
|  |  | ||||||
|     name = "authentik.api" |     name = "authentik.api" | ||||||
|     label = "authentik_api" |     label = "authentik_api" | ||||||
|     mountpoint = "api/" |     mountpoint = "api/" | ||||||
|     verbose_name = "authentik API" |     verbose_name = "authentik API" | ||||||
|     default = True |  | ||||||
|  | |||||||
| @ -1,12 +1,9 @@ | |||||||
| """API Authentication""" | """API Authentication""" | ||||||
|  |  | ||||||
| from hmac import compare_digest | from hmac import compare_digest | ||||||
| from pathlib import Path |  | ||||||
| from tempfile import gettempdir |  | ||||||
| from typing import Any | from typing import Any | ||||||
|  |  | ||||||
| from django.conf import settings | from django.conf import settings | ||||||
| from django.contrib.auth.models import AnonymousUser |  | ||||||
| from drf_spectacular.extensions import OpenApiAuthenticationExtension | from drf_spectacular.extensions import OpenApiAuthenticationExtension | ||||||
| from rest_framework.authentication import BaseAuthentication, get_authorization_header | from rest_framework.authentication import BaseAuthentication, get_authorization_header | ||||||
| from rest_framework.exceptions import AuthenticationFailed | from rest_framework.exceptions import AuthenticationFailed | ||||||
| @ -14,17 +11,11 @@ from rest_framework.request import Request | |||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| from authentik.core.middleware import CTX_AUTH_VIA | from authentik.core.middleware import CTX_AUTH_VIA | ||||||
| from authentik.core.models import Token, TokenIntents, User, UserTypes | from authentik.core.models import Token, TokenIntents, User | ||||||
| from authentik.outposts.models import Outpost | from authentik.outposts.models import Outpost | ||||||
| from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API | from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API | ||||||
|  |  | ||||||
| LOGGER = get_logger() | LOGGER = get_logger() | ||||||
| _tmp = Path(gettempdir()) |  | ||||||
| try: |  | ||||||
|     with open(_tmp / "authentik-core-ipc.key") as _f: |  | ||||||
|         ipc_key = _f.read() |  | ||||||
| except OSError: |  | ||||||
|     ipc_key = None |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def validate_auth(header: bytes) -> str | None: | def validate_auth(header: bytes) -> str | None: | ||||||
| @ -82,11 +73,6 @@ def auth_user_lookup(raw_header: bytes) -> User | None: | |||||||
|     if user: |     if user: | ||||||
|         CTX_AUTH_VIA.set("secret_key") |         CTX_AUTH_VIA.set("secret_key") | ||||||
|         return user |         return user | ||||||
|     # then try to auth via secret key (for embedded outpost/etc) |  | ||||||
|     user = token_ipc(auth_credentials) |  | ||||||
|     if user: |  | ||||||
|         CTX_AUTH_VIA.set("ipc") |  | ||||||
|         return user |  | ||||||
|     raise AuthenticationFailed("Token invalid/expired") |     raise AuthenticationFailed("Token invalid/expired") | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -104,43 +90,6 @@ def token_secret_key(value: str) -> User | None: | |||||||
|     return outpost.user |     return outpost.user | ||||||
|  |  | ||||||
|  |  | ||||||
| class IPCUser(AnonymousUser): |  | ||||||
|     """'Virtual' user for IPC communication between authentik core and the authentik router""" |  | ||||||
|  |  | ||||||
|     username = "authentik:system" |  | ||||||
|     is_active = True |  | ||||||
|     is_superuser = True |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def type(self): |  | ||||||
|         return UserTypes.INTERNAL_SERVICE_ACCOUNT |  | ||||||
|  |  | ||||||
|     def has_perm(self, perm, obj=None): |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
|     def has_perms(self, perm_list, obj=None): |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
|     def has_module_perms(self, module): |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def is_anonymous(self): |  | ||||||
|         return False |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def is_authenticated(self): |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def token_ipc(value: str) -> User | None: |  | ||||||
|     """Check if the token is the secret key |  | ||||||
|     and return the service account for the managed outpost""" |  | ||||||
|     if not ipc_key or not compare_digest(value, ipc_key): |  | ||||||
|         return None |  | ||||||
|     return IPCUser() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TokenAuthentication(BaseAuthentication): | class TokenAuthentication(BaseAuthentication): | ||||||
|     """Token-based authentication using HTTP Bearer authentication""" |     """Token-based authentication using HTTP Bearer authentication""" | ||||||
|  |  | ||||||
|  | |||||||
| @ -54,7 +54,7 @@ def create_component(generator: SchemaGenerator, name, schema, type_=ResolvedCom | |||||||
|     return component |     return component | ||||||
|  |  | ||||||
|  |  | ||||||
| def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs): | def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):  # noqa: W0613 | ||||||
|     """Workaround to set a default response for endpoints. |     """Workaround to set a default response for endpoints. | ||||||
|     Workaround suggested at |     Workaround suggested at | ||||||
|     <https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357> |     <https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357> | ||||||
|  | |||||||
| @ -72,33 +72,20 @@ class Command(BaseCommand): | |||||||
|                     "additionalProperties": True, |                     "additionalProperties": True, | ||||||
|                 }, |                 }, | ||||||
|                 "entries": { |                 "entries": { | ||||||
|                     "anyOf": [ |  | ||||||
|                         { |  | ||||||
|                     "type": "array", |                     "type": "array", | ||||||
|                             "items": {"$ref": "#/$defs/blueprint_entry"}, |                     "items": { | ||||||
|                         }, |                         "oneOf": [], | ||||||
|                         { |  | ||||||
|                             "type": "object", |  | ||||||
|                             "additionalProperties": { |  | ||||||
|                                 "type": "array", |  | ||||||
|                                 "items": {"$ref": "#/$defs/blueprint_entry"}, |  | ||||||
|                     }, |                     }, | ||||||
|                 }, |                 }, | ||||||
|                     ], |  | ||||||
|             }, |             }, | ||||||
|             }, |             "$defs": {}, | ||||||
|             "$defs": {"blueprint_entry": {"oneOf": []}}, |  | ||||||
|         } |         } | ||||||
|  |  | ||||||
|     def add_arguments(self, parser): |  | ||||||
|         parser.add_argument("--file", type=str) |  | ||||||
|  |  | ||||||
|     @no_translations |     @no_translations | ||||||
|     def handle(self, *args, file: str, **options): |     def handle(self, *args, **options): | ||||||
|         """Generate JSON Schema for blueprints""" |         """Generate JSON Schema for blueprints""" | ||||||
|         self.build() |         self.build() | ||||||
|         with open(file, "w") as _schema: |         self.stdout.write(dumps(self.schema, indent=4, default=Command.json_default)) | ||||||
|             _schema.write(dumps(self.schema, indent=4, default=Command.json_default)) |  | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     def json_default(value: Any) -> Any: |     def json_default(value: Any) -> Any: | ||||||
| @ -125,7 +112,7 @@ class Command(BaseCommand): | |||||||
|                 } |                 } | ||||||
|             ) |             ) | ||||||
|             model_path = f"{model._meta.app_label}.{model._meta.model_name}" |             model_path = f"{model._meta.app_label}.{model._meta.model_name}" | ||||||
|             self.schema["$defs"]["blueprint_entry"]["oneOf"].append( |             self.schema["properties"]["entries"]["items"]["oneOf"].append( | ||||||
|                 self.template_entry(model_path, model, serializer) |                 self.template_entry(model_path, model, serializer) | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
| @ -147,7 +134,7 @@ class Command(BaseCommand): | |||||||
|                 "id": {"type": "string"}, |                 "id": {"type": "string"}, | ||||||
|                 "state": { |                 "state": { | ||||||
|                     "type": "string", |                     "type": "string", | ||||||
|                     "enum": sorted([s.value for s in BlueprintEntryDesiredState]), |                     "enum": [s.value for s in BlueprintEntryDesiredState], | ||||||
|                     "default": "present", |                     "default": "present", | ||||||
|                 }, |                 }, | ||||||
|                 "conditions": {"type": "array", "items": {"type": "boolean"}}, |                 "conditions": {"type": "array", "items": {"type": "boolean"}}, | ||||||
| @ -218,7 +205,7 @@ class Command(BaseCommand): | |||||||
|                 "type": "object", |                 "type": "object", | ||||||
|                 "required": ["permission"], |                 "required": ["permission"], | ||||||
|                 "properties": { |                 "properties": { | ||||||
|                     "permission": {"type": "string", "enum": sorted(perms)}, |                     "permission": {"type": "string", "enum": perms}, | ||||||
|                     "user": {"type": "integer"}, |                     "user": {"type": "integer"}, | ||||||
|                     "role": {"type": "string"}, |                     "role": {"type": "string"}, | ||||||
|                 }, |                 }, | ||||||
|  | |||||||
| @ -1,6 +1,5 @@ | |||||||
| version: 1 | version: 1 | ||||||
| entries: | entries: | ||||||
|   foo: |  | ||||||
|     - identifiers: |     - identifiers: | ||||||
|           name: "%(id)s" |           name: "%(id)s" | ||||||
|           slug: "%(id)s" |           slug: "%(id)s" | ||||||
|  | |||||||
| @ -1,14 +0,0 @@ | |||||||
| from django.test import TestCase |  | ||||||
|  |  | ||||||
| from authentik.blueprints.apps import ManagedAppConfig |  | ||||||
| from authentik.enterprise.apps import EnterpriseConfig |  | ||||||
| from authentik.lib.utils.reflection import get_apps |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestManagedAppConfig(TestCase): |  | ||||||
|     def test_apps_use_managed_app_config(self): |  | ||||||
|         for app in get_apps(): |  | ||||||
|             if app.name.startswith("authentik.enterprise"): |  | ||||||
|                 self.assertIn(EnterpriseConfig, app.__class__.__bases__) |  | ||||||
|             else: |  | ||||||
|                 self.assertIn(ManagedAppConfig, app.__class__.__bases__) |  | ||||||
| @ -164,7 +164,9 @@ class BlueprintEntry: | |||||||
|         """Get the blueprint model, with yaml tags resolved if present""" |         """Get the blueprint model, with yaml tags resolved if present""" | ||||||
|         return str(self.tag_resolver(self.model, blueprint)) |         return str(self.tag_resolver(self.model, blueprint)) | ||||||
|  |  | ||||||
|     def get_permissions(self, blueprint: "Blueprint") -> Generator[BlueprintEntryPermission]: |     def get_permissions( | ||||||
|  |         self, blueprint: "Blueprint" | ||||||
|  |     ) -> Generator[BlueprintEntryPermission, None, None]: | ||||||
|         """Get permissions of this entry, with all yaml tags resolved""" |         """Get permissions of this entry, with all yaml tags resolved""" | ||||||
|         for perm in self.permissions: |         for perm in self.permissions: | ||||||
|             yield BlueprintEntryPermission( |             yield BlueprintEntryPermission( | ||||||
| @ -191,18 +193,11 @@ class Blueprint: | |||||||
|     """Dataclass used for a full export""" |     """Dataclass used for a full export""" | ||||||
|  |  | ||||||
|     version: int = field(default=1) |     version: int = field(default=1) | ||||||
|     entries: list[BlueprintEntry] | dict[str, list[BlueprintEntry]] = field(default_factory=list) |     entries: list[BlueprintEntry] = field(default_factory=list) | ||||||
|     context: dict = field(default_factory=dict) |     context: dict = field(default_factory=dict) | ||||||
|  |  | ||||||
|     metadata: BlueprintMetadata | None = field(default=None) |     metadata: BlueprintMetadata | None = field(default=None) | ||||||
|  |  | ||||||
|     def iter_entries(self) -> Iterable[BlueprintEntry]: |  | ||||||
|         if isinstance(self.entries, dict): |  | ||||||
|             for _section, entries in self.entries.items(): |  | ||||||
|                 yield from entries |  | ||||||
|         else: |  | ||||||
|             yield from self.entries |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class YAMLTag: | class YAMLTag: | ||||||
|     """Base class for all YAML Tags""" |     """Base class for all YAML Tags""" | ||||||
| @ -233,7 +228,7 @@ class KeyOf(YAMLTag): | |||||||
|         self.id_from = node.value |         self.id_from = node.value | ||||||
|  |  | ||||||
|     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: |     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: | ||||||
|         for _entry in blueprint.iter_entries(): |         for _entry in blueprint.entries: | ||||||
|             if _entry.id == self.id_from and _entry._state.instance: |             if _entry.id == self.id_from and _entry._state.instance: | ||||||
|                 # Special handling for PolicyBindingModels, as they'll have a different PK |                 # Special handling for PolicyBindingModels, as they'll have a different PK | ||||||
|                 # which is used when creating policy bindings |                 # which is used when creating policy bindings | ||||||
|  | |||||||
| @ -384,7 +384,7 @@ class Importer: | |||||||
|     def _apply_models(self, raise_errors=False) -> bool: |     def _apply_models(self, raise_errors=False) -> bool: | ||||||
|         """Apply (create/update) models yaml""" |         """Apply (create/update) models yaml""" | ||||||
|         self.__pk_map = {} |         self.__pk_map = {} | ||||||
|         for entry in self._import.iter_entries(): |         for entry in self._import.entries: | ||||||
|             model_app_label, model_name = entry.get_model(self._import).split(".") |             model_app_label, model_name = entry.get_model(self._import).split(".") | ||||||
|             try: |             try: | ||||||
|                 model: type[SerializerModel] = registry.get_model(model_app_label, model_name) |                 model: type[SerializerModel] = registry.get_model(model_app_label, model_name) | ||||||
|  | |||||||
| @ -47,7 +47,7 @@ class MetaModelRegistry: | |||||||
|         models = apps.get_models() |         models = apps.get_models() | ||||||
|         for _, value in self.models.items(): |         for _, value in self.models.items(): | ||||||
|             models.append(value) |             models.append(value) | ||||||
|         return sorted(models, key=str) |         return models | ||||||
|  |  | ||||||
|     def get_model(self, app_label: str, model_id: str) -> type[Model]: |     def get_model(self, app_label: str, model_id: str) -> type[Model]: | ||||||
|         """Get model checks if any virtual models are registered, and falls back |         """Get model checks if any virtual models are registered, and falls back | ||||||
|  | |||||||
| @ -59,7 +59,6 @@ class BrandSerializer(ModelSerializer): | |||||||
|             "flow_device_code", |             "flow_device_code", | ||||||
|             "default_application", |             "default_application", | ||||||
|             "web_certificate", |             "web_certificate", | ||||||
|             "client_certificates", |  | ||||||
|             "attributes", |             "attributes", | ||||||
|         ] |         ] | ||||||
|         extra_kwargs = { |         extra_kwargs = { | ||||||
| @ -121,7 +120,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet): | |||||||
|         "domain", |         "domain", | ||||||
|         "branding_title", |         "branding_title", | ||||||
|         "web_certificate__name", |         "web_certificate__name", | ||||||
|         "client_certificates__name", |  | ||||||
|     ] |     ] | ||||||
|     filterset_fields = [ |     filterset_fields = [ | ||||||
|         "brand_uuid", |         "brand_uuid", | ||||||
| @ -138,7 +136,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet): | |||||||
|         "flow_user_settings", |         "flow_user_settings", | ||||||
|         "flow_device_code", |         "flow_device_code", | ||||||
|         "web_certificate", |         "web_certificate", | ||||||
|         "client_certificates", |  | ||||||
|     ] |     ] | ||||||
|     ordering = ["domain"] |     ordering = ["domain"] | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,9 +1,9 @@ | |||||||
| """authentik brands app""" | """authentik brands app""" | ||||||
|  |  | ||||||
| from authentik.blueprints.apps import ManagedAppConfig | from django.apps import AppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikBrandsConfig(ManagedAppConfig): | class AuthentikBrandsConfig(AppConfig): | ||||||
|     """authentik Brand app""" |     """authentik Brand app""" | ||||||
|  |  | ||||||
|     name = "authentik.brands" |     name = "authentik.brands" | ||||||
| @ -12,4 +12,3 @@ class AuthentikBrandsConfig(ManagedAppConfig): | |||||||
|     mountpoints = { |     mountpoints = { | ||||||
|         "authentik.brands.urls_root": "", |         "authentik.brands.urls_root": "", | ||||||
|     } |     } | ||||||
|     default = True |  | ||||||
|  | |||||||
| @ -16,7 +16,7 @@ def migrate_custom_css(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | |||||||
|     if not path.exists(): |     if not path.exists(): | ||||||
|         return |         return | ||||||
|     css = path.read_text() |     css = path.read_text() | ||||||
|     Brand.objects.using(db_alias).all().update(branding_custom_css=css) |     Brand.objects.using(db_alias).update(branding_custom_css=css) | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): | class Migration(migrations.Migration): | ||||||
|  | |||||||
| @ -1,37 +0,0 @@ | |||||||
| # Generated by Django 5.1.9 on 2025-05-19 15:09 |  | ||||||
|  |  | ||||||
| import django.db.models.deletion |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_brands", "0009_brand_branding_default_flow_background"), |  | ||||||
|         ("authentik_crypto", "0004_alter_certificatekeypair_name"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="brand", |  | ||||||
|             name="client_certificates", |  | ||||||
|             field=models.ManyToManyField( |  | ||||||
|                 blank=True, |  | ||||||
|                 default=None, |  | ||||||
|                 help_text="Certificates used for client authentication.", |  | ||||||
|                 to="authentik_crypto.certificatekeypair", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AlterField( |  | ||||||
|             model_name="brand", |  | ||||||
|             name="web_certificate", |  | ||||||
|             field=models.ForeignKey( |  | ||||||
|                 default=None, |  | ||||||
|                 help_text="Web Certificate used by the authentik Core webserver.", |  | ||||||
|                 null=True, |  | ||||||
|                 on_delete=django.db.models.deletion.SET_DEFAULT, |  | ||||||
|                 related_name="+", |  | ||||||
|                 to="authentik_crypto.certificatekeypair", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -73,13 +73,6 @@ class Brand(SerializerModel): | |||||||
|         default=None, |         default=None, | ||||||
|         on_delete=models.SET_DEFAULT, |         on_delete=models.SET_DEFAULT, | ||||||
|         help_text=_("Web Certificate used by the authentik Core webserver."), |         help_text=_("Web Certificate used by the authentik Core webserver."), | ||||||
|         related_name="+", |  | ||||||
|     ) |  | ||||||
|     client_certificates = models.ManyToManyField( |  | ||||||
|         CertificateKeyPair, |  | ||||||
|         default=None, |  | ||||||
|         blank=True, |  | ||||||
|         help_text=_("Certificates used for client authentication."), |  | ||||||
|     ) |     ) | ||||||
|     attributes = models.JSONField(default=dict, blank=True) |     attributes = models.JSONField(default=dict, blank=True) | ||||||
|  |  | ||||||
|  | |||||||
| @ -148,14 +148,3 @@ class TestBrands(APITestCase): | |||||||
|                 "default_locale": "", |                 "default_locale": "", | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     def test_custom_css(self): |  | ||||||
|         """Test custom_css""" |  | ||||||
|         brand = create_test_brand() |  | ||||||
|         brand.branding_custom_css = """* { |  | ||||||
|             font-family: "Foo bar"; |  | ||||||
|         }""" |  | ||||||
|         brand.save() |  | ||||||
|         res = self.client.get(reverse("authentik_core:if-user")) |  | ||||||
|         self.assertEqual(res.status_code, 200) |  | ||||||
|         self.assertIn(brand.branding_custom_css, res.content.decode()) |  | ||||||
|  | |||||||
| @ -5,12 +5,10 @@ from typing import Any | |||||||
| from django.db.models import F, Q | from django.db.models import F, Q | ||||||
| from django.db.models import Value as V | from django.db.models import Value as V | ||||||
| from django.http.request import HttpRequest | from django.http.request import HttpRequest | ||||||
| from django.utils.html import _json_script_escapes | from sentry_sdk import get_current_span | ||||||
| from django.utils.safestring import mark_safe |  | ||||||
|  |  | ||||||
| from authentik import get_full_version | from authentik import get_full_version | ||||||
| from authentik.brands.models import Brand | from authentik.brands.models import Brand | ||||||
| from authentik.lib.sentry import get_http_meta |  | ||||||
| from authentik.tenants.models import Tenant | from authentik.tenants.models import Tenant | ||||||
|  |  | ||||||
| _q_default = Q(default=True) | _q_default = Q(default=True) | ||||||
| @ -34,14 +32,13 @@ def context_processor(request: HttpRequest) -> dict[str, Any]: | |||||||
|     """Context Processor that injects brand object into every template""" |     """Context Processor that injects brand object into every template""" | ||||||
|     brand = getattr(request, "brand", DEFAULT_BRAND) |     brand = getattr(request, "brand", DEFAULT_BRAND) | ||||||
|     tenant = getattr(request, "tenant", Tenant()) |     tenant = getattr(request, "tenant", Tenant()) | ||||||
|     # similarly to `json_script` we escape everything HTML-related, however django |     trace = "" | ||||||
|     # only directly exposes this as a function that also wraps it in a <script> tag |     span = get_current_span() | ||||||
|     # which we dont want for CSS |     if span: | ||||||
|     brand_css = mark_safe(str(brand.branding_custom_css).translate(_json_script_escapes))  # nosec |         trace = span.to_traceparent() | ||||||
|     return { |     return { | ||||||
|         "brand": brand, |         "brand": brand, | ||||||
|         "brand_css": brand_css, |  | ||||||
|         "footer_links": tenant.footer_links, |         "footer_links": tenant.footer_links, | ||||||
|         "html_meta": {**get_http_meta()}, |         "sentry_trace": trace, | ||||||
|         "version": get_full_version(), |         "version": get_full_version(), | ||||||
|     } |     } | ||||||
|  | |||||||
| @ -2,9 +2,11 @@ | |||||||
|  |  | ||||||
| from collections.abc import Iterator | from collections.abc import Iterator | ||||||
| from copy import copy | from copy import copy | ||||||
|  | from datetime import timedelta | ||||||
|  |  | ||||||
| from django.core.cache import cache | from django.core.cache import cache | ||||||
| from django.db.models import QuerySet | from django.db.models import QuerySet | ||||||
|  | from django.db.models.functions import ExtractHour | ||||||
| from django.shortcuts import get_object_or_404 | from django.shortcuts import get_object_or_404 | ||||||
| from drf_spectacular.types import OpenApiTypes | from drf_spectacular.types import OpenApiTypes | ||||||
| from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema | from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema | ||||||
| @ -18,6 +20,7 @@ from rest_framework.response import Response | |||||||
| from rest_framework.viewsets import ModelViewSet | from rest_framework.viewsets import ModelViewSet | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
|  | from authentik.admin.api.metrics import CoordinateSerializer | ||||||
| from authentik.api.pagination import Pagination | from authentik.api.pagination import Pagination | ||||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | ||||||
| from authentik.core.api.providers import ProviderSerializer | from authentik.core.api.providers import ProviderSerializer | ||||||
| @ -25,6 +28,7 @@ from authentik.core.api.used_by import UsedByMixin | |||||||
| from authentik.core.api.utils import ModelSerializer | from authentik.core.api.utils import ModelSerializer | ||||||
| from authentik.core.models import Application, User | from authentik.core.models import Application, User | ||||||
| from authentik.events.logs import LogEventSerializer, capture_logs | from authentik.events.logs import LogEventSerializer, capture_logs | ||||||
|  | from authentik.events.models import EventAction | ||||||
| from authentik.lib.utils.file import ( | from authentik.lib.utils.file import ( | ||||||
|     FilePathSerializer, |     FilePathSerializer, | ||||||
|     FileUploadSerializer, |     FileUploadSerializer, | ||||||
| @ -317,3 +321,18 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet): | |||||||
|         """Set application icon (as URL)""" |         """Set application icon (as URL)""" | ||||||
|         app: Application = self.get_object() |         app: Application = self.get_object() | ||||||
|         return set_file_url(request, app, "meta_icon") |         return set_file_url(request, app, "meta_icon") | ||||||
|  |  | ||||||
|  |     @permission_required("authentik_core.view_application", ["authentik_events.view_event"]) | ||||||
|  |     @extend_schema(responses={200: CoordinateSerializer(many=True)}) | ||||||
|  |     @action(detail=True, pagination_class=None, filter_backends=[]) | ||||||
|  |     def metrics(self, request: Request, slug: str): | ||||||
|  |         """Metrics for application logins""" | ||||||
|  |         app = self.get_object() | ||||||
|  |         return Response( | ||||||
|  |             get_objects_for_user(request.user, "authentik_events.view_event").filter( | ||||||
|  |                 action=EventAction.AUTHORIZE_APPLICATION, | ||||||
|  |                 context__authorized_application__pk=app.pk.hex, | ||||||
|  |             ) | ||||||
|  |             # 3 data points per day, so 8 hour spans | ||||||
|  |             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||||
|  |         ) | ||||||
|  | |||||||
| @ -99,8 +99,9 @@ class GroupSerializer(ModelSerializer): | |||||||
|             if superuser |             if superuser | ||||||
|             else "authentik_core.disable_group_superuser" |             else "authentik_core.disable_group_superuser" | ||||||
|         ) |         ) | ||||||
|         if self.instance or superuser: |         has_perm = user.has_perm(perm) | ||||||
|             has_perm = user.has_perm(perm) or user.has_perm(perm, self.instance) |         if self.instance and not has_perm: | ||||||
|  |             has_perm = user.has_perm(perm, self.instance) | ||||||
|         if not has_perm: |         if not has_perm: | ||||||
|             raise ValidationError( |             raise ValidationError( | ||||||
|                 _( |                 _( | ||||||
|  | |||||||
| @ -6,6 +6,7 @@ from typing import Any | |||||||
|  |  | ||||||
| from django.contrib.auth import update_session_auth_hash | from django.contrib.auth import update_session_auth_hash | ||||||
| from django.contrib.auth.models import Permission | from django.contrib.auth.models import Permission | ||||||
|  | from django.db.models.functions import ExtractHour | ||||||
| from django.db.transaction import atomic | from django.db.transaction import atomic | ||||||
| from django.db.utils import IntegrityError | from django.db.utils import IntegrityError | ||||||
| from django.urls import reverse_lazy | from django.urls import reverse_lazy | ||||||
| @ -51,6 +52,7 @@ from rest_framework.validators import UniqueValidator | |||||||
| from rest_framework.viewsets import ModelViewSet | from rest_framework.viewsets import ModelViewSet | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
|  | from authentik.admin.api.metrics import CoordinateSerializer | ||||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | ||||||
| from authentik.brands.models import Brand | from authentik.brands.models import Brand | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| @ -82,7 +84,6 @@ from authentik.flows.views.executor import QS_KEY_TOKEN | |||||||
| from authentik.lib.avatars import get_avatar | from authentik.lib.avatars import get_avatar | ||||||
| from authentik.rbac.decorators import permission_required | from authentik.rbac.decorators import permission_required | ||||||
| from authentik.rbac.models import get_permission_choices | from authentik.rbac.models import get_permission_choices | ||||||
| from authentik.stages.email.flow import pickle_flow_token_for_email |  | ||||||
| from authentik.stages.email.models import EmailStage | from authentik.stages.email.models import EmailStage | ||||||
| from authentik.stages.email.tasks import send_mails | from authentik.stages.email.tasks import send_mails | ||||||
| from authentik.stages.email.utils import TemplateEmailMessage | from authentik.stages.email.utils import TemplateEmailMessage | ||||||
| @ -315,6 +316,53 @@ class SessionUserSerializer(PassiveSerializer): | |||||||
|     original = UserSelfSerializer(required=False) |     original = UserSelfSerializer(required=False) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class UserMetricsSerializer(PassiveSerializer): | ||||||
|  |     """User Metrics""" | ||||||
|  |  | ||||||
|  |     logins = SerializerMethodField() | ||||||
|  |     logins_failed = SerializerMethodField() | ||||||
|  |     authorizations = SerializerMethodField() | ||||||
|  |  | ||||||
|  |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|  |     def get_logins(self, _): | ||||||
|  |         """Get successful logins per 8 hours for the last 7 days""" | ||||||
|  |         user = self.context["user"] | ||||||
|  |         request = self.context["request"] | ||||||
|  |         return ( | ||||||
|  |             get_objects_for_user(request.user, "authentik_events.view_event").filter( | ||||||
|  |                 action=EventAction.LOGIN, user__pk=user.pk | ||||||
|  |             ) | ||||||
|  |             # 3 data points per day, so 8 hour spans | ||||||
|  |             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|  |     def get_logins_failed(self, _): | ||||||
|  |         """Get failed logins per 8 hours for the last 7 days""" | ||||||
|  |         user = self.context["user"] | ||||||
|  |         request = self.context["request"] | ||||||
|  |         return ( | ||||||
|  |             get_objects_for_user(request.user, "authentik_events.view_event").filter( | ||||||
|  |                 action=EventAction.LOGIN_FAILED, context__username=user.username | ||||||
|  |             ) | ||||||
|  |             # 3 data points per day, so 8 hour spans | ||||||
|  |             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|  |     def get_authorizations(self, _): | ||||||
|  |         """Get failed logins per 8 hours for the last 7 days""" | ||||||
|  |         user = self.context["user"] | ||||||
|  |         request = self.context["request"] | ||||||
|  |         return ( | ||||||
|  |             get_objects_for_user(request.user, "authentik_events.view_event").filter( | ||||||
|  |                 action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk | ||||||
|  |             ) | ||||||
|  |             # 3 data points per day, so 8 hour spans | ||||||
|  |             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |  | ||||||
| class UsersFilter(FilterSet): | class UsersFilter(FilterSet): | ||||||
|     """Filter for users""" |     """Filter for users""" | ||||||
|  |  | ||||||
| @ -403,7 +451,7 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|     def list(self, request, *args, **kwargs): |     def list(self, request, *args, **kwargs): | ||||||
|         return super().list(request, *args, **kwargs) |         return super().list(request, *args, **kwargs) | ||||||
|  |  | ||||||
|     def _create_recovery_link(self, for_email=False) -> tuple[str, Token]: |     def _create_recovery_link(self) -> tuple[str, Token]: | ||||||
|         """Create a recovery link (when the current brand has a recovery flow set), |         """Create a recovery link (when the current brand has a recovery flow set), | ||||||
|         that can either be shown to an admin or sent to the user directly""" |         that can either be shown to an admin or sent to the user directly""" | ||||||
|         brand: Brand = self.request._request.brand |         brand: Brand = self.request._request.brand | ||||||
| @ -425,16 +473,12 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|             raise ValidationError( |             raise ValidationError( | ||||||
|                 {"non_field_errors": "Recovery flow not applicable to user"} |                 {"non_field_errors": "Recovery flow not applicable to user"} | ||||||
|             ) from None |             ) from None | ||||||
|         _plan = FlowToken.pickle(plan) |  | ||||||
|         if for_email: |  | ||||||
|             _plan = pickle_flow_token_for_email(plan) |  | ||||||
|         token, __ = FlowToken.objects.update_or_create( |         token, __ = FlowToken.objects.update_or_create( | ||||||
|             identifier=f"{user.uid}-password-reset", |             identifier=f"{user.uid}-password-reset", | ||||||
|             defaults={ |             defaults={ | ||||||
|                 "user": user, |                 "user": user, | ||||||
|                 "flow": flow, |                 "flow": flow, | ||||||
|                 "_plan": _plan, |                 "_plan": FlowToken.pickle(plan), | ||||||
|                 "revoke_on_execution": not for_email, |  | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|         querystring = urlencode({QS_KEY_TOKEN: token.key}) |         querystring = urlencode({QS_KEY_TOKEN: token.key}) | ||||||
| @ -558,6 +602,17 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|             update_session_auth_hash(self.request, user) |             update_session_auth_hash(self.request, user) | ||||||
|         return Response(status=204) |         return Response(status=204) | ||||||
|  |  | ||||||
|  |     @permission_required("authentik_core.view_user", ["authentik_events.view_event"]) | ||||||
|  |     @extend_schema(responses={200: UserMetricsSerializer(many=False)}) | ||||||
|  |     @action(detail=True, pagination_class=None, filter_backends=[]) | ||||||
|  |     def metrics(self, request: Request, pk: int) -> Response: | ||||||
|  |         """User metrics per 1h""" | ||||||
|  |         user: User = self.get_object() | ||||||
|  |         serializer = UserMetricsSerializer(instance={}) | ||||||
|  |         serializer.context["user"] = user | ||||||
|  |         serializer.context["request"] = request | ||||||
|  |         return Response(serializer.data) | ||||||
|  |  | ||||||
|     @permission_required("authentik_core.reset_user_password") |     @permission_required("authentik_core.reset_user_password") | ||||||
|     @extend_schema( |     @extend_schema( | ||||||
|         responses={ |         responses={ | ||||||
| @ -593,7 +648,7 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|         if for_user.email == "": |         if for_user.email == "": | ||||||
|             LOGGER.debug("User doesn't have an email address") |             LOGGER.debug("User doesn't have an email address") | ||||||
|             raise ValidationError({"non_field_errors": "User does not have an email address set."}) |             raise ValidationError({"non_field_errors": "User does not have an email address set."}) | ||||||
|         link, token = self._create_recovery_link(for_email=True) |         link, token = self._create_recovery_link() | ||||||
|         # Lookup the email stage to assure the current user can access it |         # Lookup the email stage to assure the current user can access it | ||||||
|         stages = get_objects_for_user( |         stages = get_objects_for_user( | ||||||
|             request.user, "authentik_stages_email.view_emailstage" |             request.user, "authentik_stages_email.view_emailstage" | ||||||
|  | |||||||
| @ -2,7 +2,6 @@ | |||||||
|  |  | ||||||
| from django.apps import apps | from django.apps import apps | ||||||
| from django.contrib.auth.management import create_permissions | from django.contrib.auth.management import create_permissions | ||||||
| from django.core.management import call_command |  | ||||||
| from django.core.management.base import BaseCommand, no_translations | from django.core.management.base import BaseCommand, no_translations | ||||||
| from guardian.management import create_anonymous_user | from guardian.management import create_anonymous_user | ||||||
|  |  | ||||||
| @ -17,10 +16,6 @@ class Command(BaseCommand): | |||||||
|         """Check permissions for all apps""" |         """Check permissions for all apps""" | ||||||
|         for tenant in Tenant.objects.filter(ready=True): |         for tenant in Tenant.objects.filter(ready=True): | ||||||
|             with tenant: |             with tenant: | ||||||
|                 # See https://code.djangoproject.com/ticket/28417 |  | ||||||
|                 # Remove potential lingering old permissions |  | ||||||
|                 call_command("remove_stale_contenttypes", "--no-input") |  | ||||||
|  |  | ||||||
|                 for app in apps.get_app_configs(): |                 for app in apps.get_app_configs(): | ||||||
|                     self.stdout.write(f"Checking app {app.name} ({app.label})\n") |                     self.stdout.write(f"Checking app {app.name} ({app.label})\n") | ||||||
|                     create_permissions(app, verbosity=0) |                     create_permissions(app, verbosity=0) | ||||||
|  | |||||||
| @ -31,10 +31,7 @@ class PickleSerializer: | |||||||
|  |  | ||||||
|     def loads(self, data): |     def loads(self, data): | ||||||
|         """Unpickle data to be loaded from redis""" |         """Unpickle data to be loaded from redis""" | ||||||
|         try: |  | ||||||
|         return pickle.loads(data)  # nosec |         return pickle.loads(data)  # nosec | ||||||
|         except Exception: |  | ||||||
|             return {} |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def _migrate_session( | def _migrate_session( | ||||||
| @ -79,7 +76,6 @@ def _migrate_session( | |||||||
|         AuthenticatedSession.objects.using(db_alias).create( |         AuthenticatedSession.objects.using(db_alias).create( | ||||||
|             session=session, |             session=session, | ||||||
|             user=old_auth_session.user, |             user=old_auth_session.user, | ||||||
|             uuid=old_auth_session.uuid, |  | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,103 +0,0 @@ | |||||||
| # Generated by Django 5.1.9 on 2025-05-14 11:15 |  | ||||||
|  |  | ||||||
| from django.apps.registry import Apps, apps as global_apps |  | ||||||
| from django.db import migrations |  | ||||||
| from django.contrib.contenttypes.management import create_contenttypes |  | ||||||
| from django.contrib.auth.management import create_permissions |  | ||||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def migrate_authenticated_session_permissions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): |  | ||||||
|     """Migrate permissions from OldAuthenticatedSession to AuthenticatedSession""" |  | ||||||
|     db_alias = schema_editor.connection.alias |  | ||||||
|  |  | ||||||
|     # `apps` here is just an instance of `django.db.migrations.state.AppConfigStub`, we need the |  | ||||||
|     # real config for creating permissions and content types |  | ||||||
|     authentik_core_config = global_apps.get_app_config("authentik_core") |  | ||||||
|     # These are only ran by django after all migrations, but we need them right now. |  | ||||||
|     # `global_apps` is needed, |  | ||||||
|     create_permissions(authentik_core_config, using=db_alias, verbosity=1) |  | ||||||
|     create_contenttypes(authentik_core_config, using=db_alias, verbosity=1) |  | ||||||
|  |  | ||||||
|     # But from now on, this is just a regular migration, so use `apps` |  | ||||||
|     Permission = apps.get_model("auth", "Permission") |  | ||||||
|     ContentType = apps.get_model("contenttypes", "ContentType") |  | ||||||
|  |  | ||||||
|     try: |  | ||||||
|         old_ct = ContentType.objects.using(db_alias).get( |  | ||||||
|             app_label="authentik_core", model="oldauthenticatedsession" |  | ||||||
|         ) |  | ||||||
|         new_ct = ContentType.objects.using(db_alias).get( |  | ||||||
|             app_label="authentik_core", model="authenticatedsession" |  | ||||||
|         ) |  | ||||||
|     except ContentType.DoesNotExist: |  | ||||||
|         # This should exist at this point, but if not, let's cut our losses |  | ||||||
|         return |  | ||||||
|  |  | ||||||
|     # Get all permissions for the old content type |  | ||||||
|     old_perms = Permission.objects.using(db_alias).filter(content_type=old_ct) |  | ||||||
|  |  | ||||||
|     # Create equivalent permissions for the new content type |  | ||||||
|     for old_perm in old_perms: |  | ||||||
|         new_perm = ( |  | ||||||
|             Permission.objects.using(db_alias) |  | ||||||
|             .filter( |  | ||||||
|                 content_type=new_ct, |  | ||||||
|                 codename=old_perm.codename, |  | ||||||
|             ) |  | ||||||
|             .first() |  | ||||||
|         ) |  | ||||||
|         if not new_perm: |  | ||||||
|             # This should exist at this point, but if not, let's cut our losses |  | ||||||
|             continue |  | ||||||
|  |  | ||||||
|         # Global user permissions |  | ||||||
|         User = apps.get_model("authentik_core", "User") |  | ||||||
|         User.user_permissions.through.objects.using(db_alias).filter( |  | ||||||
|             permission=old_perm |  | ||||||
|         ).all().update(permission=new_perm) |  | ||||||
|  |  | ||||||
|         # Global role permissions |  | ||||||
|         DjangoGroup = apps.get_model("auth", "Group") |  | ||||||
|         DjangoGroup.permissions.through.objects.using(db_alias).filter( |  | ||||||
|             permission=old_perm |  | ||||||
|         ).all().update(permission=new_perm) |  | ||||||
|  |  | ||||||
|         # Object user permissions |  | ||||||
|         UserObjectPermission = apps.get_model("guardian", "UserObjectPermission") |  | ||||||
|         UserObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update( |  | ||||||
|             permission=new_perm, content_type=new_ct |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         # Object role permissions |  | ||||||
|         GroupObjectPermission = apps.get_model("guardian", "GroupObjectPermission") |  | ||||||
|         GroupObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update( |  | ||||||
|             permission=new_perm, content_type=new_ct |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def remove_old_authenticated_session_content_type( |  | ||||||
|     apps: Apps, schema_editor: BaseDatabaseSchemaEditor |  | ||||||
| ): |  | ||||||
|     db_alias = schema_editor.connection.alias |  | ||||||
|     ContentType = apps.get_model("contenttypes", "ContentType") |  | ||||||
|  |  | ||||||
|     ContentType.objects.using(db_alias).filter(model="oldauthenticatedsession").delete() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_core", "0047_delete_oldauthenticatedsession"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.RunPython( |  | ||||||
|             code=migrate_authenticated_session_permissions, |  | ||||||
|             reverse_code=migrations.RunPython.noop, |  | ||||||
|         ), |  | ||||||
|         migrations.RunPython( |  | ||||||
|             code=remove_old_authenticated_session_content_type, |  | ||||||
|             reverse_code=migrations.RunPython.noop, |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -16,14 +16,12 @@ | |||||||
|         {% block head_before %} |         {% block head_before %} | ||||||
|         {% endblock %} |         {% endblock %} | ||||||
|         <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}"> |         <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}"> | ||||||
|         <style>{{ brand_css }}</style> |         <style>{{ brand.branding_custom_css }}</style> | ||||||
|         <script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script> |         <script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script> | ||||||
|         <script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script> |         <script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script> | ||||||
|         {% block head %} |         {% block head %} | ||||||
|         {% endblock %} |         {% endblock %} | ||||||
|         {% for key, value in html_meta.items %} |         <meta name="sentry-trace" content="{{ sentry_trace }}" /> | ||||||
|         <meta name="{{key}}" content="{{ value }}" /> |  | ||||||
|         {% endfor %} |  | ||||||
|     </head> |     </head> | ||||||
|     <body> |     <body> | ||||||
|         {% block body %} |         {% block body %} | ||||||
|  | |||||||
| @ -10,7 +10,7 @@ | |||||||
| {% endblock %} | {% endblock %} | ||||||
|  |  | ||||||
| {% block body %} | {% block body %} | ||||||
| <ak-message-container alignment="bottom"></ak-message-container> | <ak-message-container></ak-message-container> | ||||||
| <ak-interface-admin> | <ak-interface-admin> | ||||||
|     <ak-loading></ak-loading> |     <ak-loading></ak-loading> | ||||||
| </ak-interface-admin> | </ak-interface-admin> | ||||||
|  | |||||||
| @ -124,16 +124,6 @@ class TestGroupsAPI(APITestCase): | |||||||
|             {"is_superuser": ["User does not have permission to set superuser status to True."]}, |             {"is_superuser": ["User does not have permission to set superuser status to True."]}, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     def test_superuser_no_perm_no_superuser(self): |  | ||||||
|         """Test creating a group without permission and without superuser flag""" |  | ||||||
|         assign_perm("authentik_core.add_group", self.login_user) |  | ||||||
|         self.client.force_login(self.login_user) |  | ||||||
|         res = self.client.post( |  | ||||||
|             reverse("authentik_api:group-list"), |  | ||||||
|             data={"name": generate_id(), "is_superuser": False}, |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(res.status_code, 201) |  | ||||||
|  |  | ||||||
|     def test_superuser_update_no_perm(self): |     def test_superuser_update_no_perm(self): | ||||||
|         """Test updating a superuser group without permission""" |         """Test updating a superuser group without permission""" | ||||||
|         group = Group.objects.create(name=generate_id(), is_superuser=True) |         group = Group.objects.create(name=generate_id(), is_superuser=True) | ||||||
|  | |||||||
| @ -13,10 +13,7 @@ from authentik.core.models import ( | |||||||
|     TokenIntents, |     TokenIntents, | ||||||
|     User, |     User, | ||||||
| ) | ) | ||||||
| from authentik.core.tasks import ( | from authentik.core.tasks import clean_expired_models, clean_temporary_users | ||||||
|     clean_expired_models, |  | ||||||
|     clean_temporary_users, |  | ||||||
| ) |  | ||||||
| from authentik.core.tests.utils import create_test_admin_user | from authentik.core.tests.utils import create_test_admin_user | ||||||
| from authentik.lib.generators import generate_id | from authentik.lib.generators import generate_id | ||||||
|  |  | ||||||
|  | |||||||
| @ -81,6 +81,22 @@ class TestUsersAPI(APITestCase): | |||||||
|         response = self.client.get(reverse("authentik_api:user-list"), {"include_groups": "true"}) |         response = self.client.get(reverse("authentik_api:user-list"), {"include_groups": "true"}) | ||||||
|         self.assertEqual(response.status_code, 200) |         self.assertEqual(response.status_code, 200) | ||||||
|  |  | ||||||
|  |     def test_metrics(self): | ||||||
|  |         """Test user's metrics""" | ||||||
|  |         self.client.force_login(self.admin) | ||||||
|  |         response = self.client.get( | ||||||
|  |             reverse("authentik_api:user-metrics", kwargs={"pk": self.user.pk}) | ||||||
|  |         ) | ||||||
|  |         self.assertEqual(response.status_code, 200) | ||||||
|  |  | ||||||
|  |     def test_metrics_denied(self): | ||||||
|  |         """Test user's metrics (non-superuser)""" | ||||||
|  |         self.client.force_login(self.user) | ||||||
|  |         response = self.client.get( | ||||||
|  |             reverse("authentik_api:user-metrics", kwargs={"pk": self.user.pk}) | ||||||
|  |         ) | ||||||
|  |         self.assertEqual(response.status_code, 403) | ||||||
|  |  | ||||||
|     def test_recovery_no_flow(self): |     def test_recovery_no_flow(self): | ||||||
|         """Test user recovery link (no recovery flow set)""" |         """Test user recovery link (no recovery flow set)""" | ||||||
|         self.client.force_login(self.admin) |         self.client.force_login(self.admin) | ||||||
|  | |||||||
| @ -30,7 +30,6 @@ from structlog.stdlib import get_logger | |||||||
|  |  | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| from authentik.core.api.utils import ModelSerializer, PassiveSerializer | from authentik.core.api.utils import ModelSerializer, PassiveSerializer | ||||||
| from authentik.core.models import UserTypes |  | ||||||
| from authentik.crypto.apps import MANAGED_KEY | from authentik.crypto.apps import MANAGED_KEY | ||||||
| from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg | from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg | ||||||
| from authentik.crypto.models import CertificateKeyPair | from authentik.crypto.models import CertificateKeyPair | ||||||
| @ -273,7 +272,6 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet): | |||||||
|     def view_certificate(self, request: Request, pk: str) -> Response: |     def view_certificate(self, request: Request, pk: str) -> Response: | ||||||
|         """Return certificate-key pairs certificate and log access""" |         """Return certificate-key pairs certificate and log access""" | ||||||
|         certificate: CertificateKeyPair = self.get_object() |         certificate: CertificateKeyPair = self.get_object() | ||||||
|         if request.user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT: |  | ||||||
|         Event.new(  # noqa # nosec |         Event.new(  # noqa # nosec | ||||||
|             EventAction.SECRET_VIEW, |             EventAction.SECRET_VIEW, | ||||||
|             secret=certificate, |             secret=certificate, | ||||||
| @ -304,7 +302,6 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet): | |||||||
|     def view_private_key(self, request: Request, pk: str) -> Response: |     def view_private_key(self, request: Request, pk: str) -> Response: | ||||||
|         """Return certificate-key pairs private key and log access""" |         """Return certificate-key pairs private key and log access""" | ||||||
|         certificate: CertificateKeyPair = self.get_object() |         certificate: CertificateKeyPair = self.get_object() | ||||||
|         if request.user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT: |  | ||||||
|         Event.new(  # noqa # nosec |         Event.new(  # noqa # nosec | ||||||
|             EventAction.SECRET_VIEW, |             EventAction.SECRET_VIEW, | ||||||
|             secret=certificate, |             secret=certificate, | ||||||
|  | |||||||
| @ -132,14 +132,13 @@ class LicenseKey: | |||||||
|         """Get a summarized version of all (not expired) licenses""" |         """Get a summarized version of all (not expired) licenses""" | ||||||
|         total = LicenseKey(get_license_aud(), 0, "Summarized license", 0, 0) |         total = LicenseKey(get_license_aud(), 0, "Summarized license", 0, 0) | ||||||
|         for lic in License.objects.all(): |         for lic in License.objects.all(): | ||||||
|             if lic.is_valid: |  | ||||||
|             total.internal_users += lic.internal_users |             total.internal_users += lic.internal_users | ||||||
|             total.external_users += lic.external_users |             total.external_users += lic.external_users | ||||||
|                 total.license_flags.extend(lic.status.license_flags) |  | ||||||
|             exp_ts = int(mktime(lic.expiry.timetuple())) |             exp_ts = int(mktime(lic.expiry.timetuple())) | ||||||
|             if total.exp == 0: |             if total.exp == 0: | ||||||
|                 total.exp = exp_ts |                 total.exp = exp_ts | ||||||
|             total.exp = max(total.exp, exp_ts) |             total.exp = max(total.exp, exp_ts) | ||||||
|  |             total.license_flags.extend(lic.status.license_flags) | ||||||
|         return total |         return total | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|  | |||||||
| @ -39,10 +39,6 @@ class License(SerializerModel): | |||||||
|     internal_users = models.BigIntegerField() |     internal_users = models.BigIntegerField() | ||||||
|     external_users = models.BigIntegerField() |     external_users = models.BigIntegerField() | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def is_valid(self) -> bool: |  | ||||||
|         return self.expiry >= now() |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def serializer(self) -> type[BaseSerializer]: |     def serializer(self) -> type[BaseSerializer]: | ||||||
|         from authentik.enterprise.api import LicenseSerializer |         from authentik.enterprise.api import LicenseSerializer | ||||||
|  | |||||||
| @ -1,27 +0,0 @@ | |||||||
| from rest_framework.viewsets import ModelViewSet |  | ||||||
|  |  | ||||||
| from authentik.core.api.used_by import UsedByMixin |  | ||||||
| from authentik.enterprise.api import EnterpriseRequiredMixin |  | ||||||
| from authentik.enterprise.policies.unique_password.models import UniquePasswordPolicy |  | ||||||
| from authentik.policies.api.policies import PolicySerializer |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class UniquePasswordPolicySerializer(EnterpriseRequiredMixin, PolicySerializer): |  | ||||||
|     """Password Uniqueness Policy Serializer""" |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         model = UniquePasswordPolicy |  | ||||||
|         fields = PolicySerializer.Meta.fields + [ |  | ||||||
|             "password_field", |  | ||||||
|             "num_historical_passwords", |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class UniquePasswordPolicyViewSet(UsedByMixin, ModelViewSet): |  | ||||||
|     """Password Uniqueness Policy Viewset""" |  | ||||||
|  |  | ||||||
|     queryset = UniquePasswordPolicy.objects.all() |  | ||||||
|     serializer_class = UniquePasswordPolicySerializer |  | ||||||
|     filterset_fields = "__all__" |  | ||||||
|     ordering = ["name"] |  | ||||||
|     search_fields = ["name"] |  | ||||||
| @ -1,10 +0,0 @@ | |||||||
| """authentik Unique Password policy app config""" |  | ||||||
|  |  | ||||||
| from authentik.enterprise.apps import EnterpriseConfig |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikEnterprisePoliciesUniquePasswordConfig(EnterpriseConfig): |  | ||||||
|     name = "authentik.enterprise.policies.unique_password" |  | ||||||
|     label = "authentik_policies_unique_password" |  | ||||||
|     verbose_name = "authentik Enterprise.Policies.Unique Password" |  | ||||||
|     default = True |  | ||||||
| @ -1,81 +0,0 @@ | |||||||
| # Generated by Django 5.0.13 on 2025-03-26 23:02 |  | ||||||
|  |  | ||||||
| import django.db.models.deletion |  | ||||||
| from django.conf import settings |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     initial = True |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_policies", "0011_policybinding_failure_result_and_more"), |  | ||||||
|         migrations.swappable_dependency(settings.AUTH_USER_MODEL), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.CreateModel( |  | ||||||
|             name="UniquePasswordPolicy", |  | ||||||
|             fields=[ |  | ||||||
|                 ( |  | ||||||
|                     "policy_ptr", |  | ||||||
|                     models.OneToOneField( |  | ||||||
|                         auto_created=True, |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, |  | ||||||
|                         parent_link=True, |  | ||||||
|                         primary_key=True, |  | ||||||
|                         serialize=False, |  | ||||||
|                         to="authentik_policies.policy", |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ( |  | ||||||
|                     "password_field", |  | ||||||
|                     models.TextField( |  | ||||||
|                         default="password", |  | ||||||
|                         help_text="Field key to check, field keys defined in Prompt stages are available.", |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ( |  | ||||||
|                     "num_historical_passwords", |  | ||||||
|                     models.PositiveIntegerField( |  | ||||||
|                         default=1, help_text="Number of passwords to check against." |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|             ], |  | ||||||
|             options={ |  | ||||||
|                 "verbose_name": "Password Uniqueness Policy", |  | ||||||
|                 "verbose_name_plural": "Password Uniqueness Policies", |  | ||||||
|                 "indexes": [ |  | ||||||
|                     models.Index(fields=["policy_ptr_id"], name="authentik_p_policy__f559aa_idx") |  | ||||||
|                 ], |  | ||||||
|             }, |  | ||||||
|             bases=("authentik_policies.policy",), |  | ||||||
|         ), |  | ||||||
|         migrations.CreateModel( |  | ||||||
|             name="UserPasswordHistory", |  | ||||||
|             fields=[ |  | ||||||
|                 ( |  | ||||||
|                     "id", |  | ||||||
|                     models.AutoField( |  | ||||||
|                         auto_created=True, primary_key=True, serialize=False, verbose_name="ID" |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ("old_password", models.CharField(max_length=128)), |  | ||||||
|                 ("created_at", models.DateTimeField(auto_now_add=True)), |  | ||||||
|                 ("hibp_prefix_sha1", models.CharField(max_length=5)), |  | ||||||
|                 ("hibp_pw_hash", models.TextField()), |  | ||||||
|                 ( |  | ||||||
|                     "user", |  | ||||||
|                     models.ForeignKey( |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, |  | ||||||
|                         related_name="old_passwords", |  | ||||||
|                         to=settings.AUTH_USER_MODEL, |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|             ], |  | ||||||
|             options={ |  | ||||||
|                 "verbose_name": "User Password History", |  | ||||||
|             }, |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -1,151 +0,0 @@ | |||||||
| from hashlib import sha1 |  | ||||||
|  |  | ||||||
| from django.contrib.auth.hashers import identify_hasher, make_password |  | ||||||
| from django.db import models |  | ||||||
| from django.utils.translation import gettext as _ |  | ||||||
| from rest_framework.serializers import BaseSerializer |  | ||||||
| from structlog.stdlib import get_logger |  | ||||||
|  |  | ||||||
| from authentik.core.models import User |  | ||||||
| from authentik.policies.models import Policy |  | ||||||
| from authentik.policies.types import PolicyRequest, PolicyResult |  | ||||||
| from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT |  | ||||||
|  |  | ||||||
| LOGGER = get_logger() |  | ||||||
|  |  | ||||||
|  |  | ||||||
class UniquePasswordPolicy(Policy):
    """This policy prevents users from reusing old passwords."""

    password_field = models.TextField(
        default="password",
        help_text=_("Field key to check, field keys defined in Prompt stages are available."),
    )

    # Limit on the number of previous passwords the policy evaluates
    # Also controls number of old passwords the system stores.
    num_historical_passwords = models.PositiveIntegerField(
        default=1,
        help_text=_("Number of passwords to check against."),
    )

    @property
    def serializer(self) -> type[BaseSerializer]:
        from authentik.enterprise.policies.unique_password.api import UniquePasswordPolicySerializer

        return UniquePasswordPolicySerializer

    @property
    def component(self) -> str:
        return "ak-policy-password-uniqueness-form"

    def passes(self, request: PolicyRequest) -> PolicyResult:
        """Reject the request when the submitted password matches any of the
        user's most recent stored password hashes."""
        from authentik.enterprise.policies.unique_password.models import UserPasswordHistory

        # Prompt-stage data takes precedence; fall back to the plain context key
        prompt_data = request.context.get(PLAN_CONTEXT_PROMPT, {})
        candidate = prompt_data.get(self.password_field, request.context.get(self.password_field))
        if not candidate:
            LOGGER.warning(
                "Password field not found in request when checking UniquePasswordPolicy",
                field=self.password_field,
                fields=request.context.keys(),
            )
            return PolicyResult(False, _("Password not set in context"))
        candidate = str(candidate)

        # A limit of 0 means the policy is not configured to check anything
        if not self.num_historical_passwords:
            return PolicyResult(True)

        recent_entries = UserPasswordHistory.objects.filter(user=request.user).order_by(
            "-created_at"
        )[: self.num_historical_passwords]

        for entry in recent_entries:
            if not entry.old_password:
                continue
            if self._passwords_match(new_password=candidate, old_password=entry.old_password):
                # Return on first match. Authentik does not consider timing attacks
                # on old passwords to be an attack surface.
                return PolicyResult(
                    False,
                    _("This password has been used previously. Please choose a different one."),
                )

        return PolicyResult(True)

    def _passwords_match(self, *, new_password: str, old_password: str) -> bool:
        """Compare a plaintext candidate against a stored hash, honoring the
        hash algorithm encoded in the stored value."""
        try:
            hasher = identify_hasher(old_password)
        except ValueError:
            LOGGER.warning(
                "Skipping password; could not load hash algorithm",
            )
            return False

        return hasher.verify(new_password, old_password)

    @classmethod
    def is_in_use(cls):
        """Check if any UniquePasswordPolicy is in use, either through policy bindings
        or direct attachment to a PromptStage.

        Returns:
            bool: True if any policy is in use, False otherwise
        """
        from authentik.policies.models import PolicyBinding

        # Bound to anything via a PolicyBinding?
        if PolicyBinding.in_use.for_policy(cls).exists():
            return True

        # Attached directly to a PromptStage as a validation policy?
        return cls.objects.filter(promptstage__isnull=False).exists()

    class Meta(Policy.PolicyMeta):
        verbose_name = _("Password Uniqueness Policy")
        verbose_name_plural = _("Password Uniqueness Policies")
|  |  | ||||||
|  |  | ||||||
class UserPasswordHistory(models.Model):
    """Single historical password entry for a user; read by UniquePasswordPolicy
    to detect password reuse."""

    # Rows are deleted together with the owning user (CASCADE)
    user = models.ForeignKey(User, on_delete=models.CASCADE, related_name="old_passwords")
    # Mimics column type of AbstractBaseUser.password
    old_password = models.CharField(max_length=128)
    created_at = models.DateTimeField(auto_now_add=True)

    # First five hex characters of the unsalted SHA1 digest (HIBP range-query prefix)
    hibp_prefix_sha1 = models.CharField(max_length=5)
    # The full unsalted SHA1, re-hashed with Django's password hasher so the raw
    # SHA1 is never stored directly (see create_for_user)
    hibp_pw_hash = models.TextField()

    class Meta:
        verbose_name = _("User Password History")

    def __str__(self) -> str:
        # created_at is unset on unsaved instances (auto_now_add fills it on save)
        timestamp = f"{self.created_at:%Y/%m/%d %X}" if self.created_at else "N/A"
        return f"Previous Password (user: {self.user_id}, recorded: {timestamp})"

    @classmethod
    def create_for_user(cls, user: User, password: str):
        """Persist a history entry for `user` along with HIBP lookup material.

        NOTE(review): callers in this branch pass both already-hashed values
        (e.g. user.password) and plaintext strings as `password`, so the SHA1
        below is not always a digest of the cleartext — confirm the intended
        Have I Been Pwned semantics.
        """
        # To check users' passwords against Have I been Pwned, we need the first 5 chars
        # of the password hashed with SHA1 without a salt...
        pw_hash_sha1 = sha1(password.encode("utf-8")).hexdigest()  # nosec
        # ...however that'll give us a list of hashes from HIBP, and to compare that we still
        # need a full unsalted SHA1 of the password. We don't want to save that directly in
        # the database, so we hash that SHA1 again with a modern hashing alg,
        # and then when we check users' passwords against HIBP we can use `check_password`
        # which will take care of this.
        hibp_hash_hash = make_password(pw_hash_sha1)
        return cls.objects.create(
            user=user,
            old_password=password,
            hibp_prefix_sha1=pw_hash_sha1[:5],
            hibp_pw_hash=hibp_hash_hash,
        )
| @ -1,20 +0,0 @@ | |||||||
"""Unique Password Policy settings"""

from celery.schedules import crontab

from authentik.lib.utils.time import fqdn_rand

# Beat schedule contributed by this app; fqdn_rand staggers the minute
# per-instance so multiple deployments don't all fire at once.
CELERY_BEAT_SCHEDULE = {
    # Trim per-user history down to the largest configured policy limit (every 12h)
    "policies_unique_password_trim_history": {
        "task": "authentik.enterprise.policies.unique_password.tasks.trim_password_histories",
        "schedule": crontab(minute=fqdn_rand("policies_unique_password_trim"), hour="*/12"),
        "options": {"queue": "authentik_scheduled"},
    },
    # Purge the whole history table when no policy exists anymore (every 24h)
    "policies_unique_password_check_purge": {
        "task": (
            "authentik.enterprise.policies.unique_password.tasks.check_and_purge_password_history"
        ),
        "schedule": crontab(minute=fqdn_rand("policies_unique_password_purge"), hour="*/24"),
        "options": {"queue": "authentik_scheduled"},
    },
}
| @ -1,23 +0,0 @@ | |||||||
| """authentik policy signals""" |  | ||||||
|  |  | ||||||
| from django.dispatch import receiver |  | ||||||
|  |  | ||||||
| from authentik.core.models import User |  | ||||||
| from authentik.core.signals import password_changed |  | ||||||
| from authentik.enterprise.policies.unique_password.models import ( |  | ||||||
|     UniquePasswordPolicy, |  | ||||||
|     UserPasswordHistory, |  | ||||||
| ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
@receiver(password_changed)
def copy_password_to_password_history(sender, user: User, *args, **kwargs):
    """Preserve the user's old password if UniquePasswordPolicy is enabled anywhere"""
    # Nothing to record unless at least one policy is bound or stage-attached
    if not UniquePasswordPolicy.is_in_use():
        return
    # NOTE: Because we run this in a signal after saving the user,
    # we are not atomically guaranteed to save password history.
    UserPasswordHistory.create_for_user(user, user.password)
| @ -1,66 +0,0 @@ | |||||||
| from django.db.models.aggregates import Count |  | ||||||
| from structlog import get_logger |  | ||||||
|  |  | ||||||
| from authentik.enterprise.policies.unique_password.models import ( |  | ||||||
|     UniquePasswordPolicy, |  | ||||||
|     UserPasswordHistory, |  | ||||||
| ) |  | ||||||
| from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task |  | ||||||
| from authentik.root.celery import CELERY_APP |  | ||||||
|  |  | ||||||
| LOGGER = get_logger() |  | ||||||
|  |  | ||||||
|  |  | ||||||
@CELERY_APP.task(bind=True, base=SystemTask)
@prefill_task
def check_and_purge_password_history(self: SystemTask):
    """Check if any UniquePasswordPolicy exists, and if not, purge the password history table.
    This is run on a schedule instead of being triggered by policy binding deletion.
    """
    # Any policy at all (bound or not) keeps the stored history alive
    if UniquePasswordPolicy.objects.exists():
        self.set_status(
            TaskStatus.SUCCESSFUL, "Not purging password histories, a unique password policy exists"
        )
        return

    UserPasswordHistory.objects.all().delete()
    LOGGER.debug("Purged UserPasswordHistory table as no policies are in use")
    self.set_status(TaskStatus.SUCCESSFUL, "Successfully purged UserPasswordHistory")
|  |  | ||||||
|  |  | ||||||
@CELERY_APP.task(bind=True, base=SystemTask)
def trim_password_histories(self: SystemTask):
    """Removes rows from UserPasswordHistory older than
    the `n` most recent entries.

    The `n` is defined by the largest configured value for all bound
    UniquePasswordPolicy policies.
    """

    # No policy, we'll let the cleanup above do its thing
    if not UniquePasswordPolicy.objects.exists():
        return

    # Keep as many rows per user as the most generous policy requires
    num_rows_to_preserve = max(
        (policy.num_historical_passwords for policy in UniquePasswordPolicy.objects.all()),
        default=0,
    )

    all_pks_to_keep = []

    # Get all users who have password history entries
    users_with_history = (
        UserPasswordHistory.objects.values("user")
        .annotate(count=Count("user"))
        .filter(count__gt=0)
        .values_list("user", flat=True)
    )
    # For each user, collect the pks of their newest `num_rows_to_preserve` entries
    for user_pk in users_with_history:
        entries = UserPasswordHistory.objects.filter(user__pk=user_pk)
        pks_to_keep = entries.order_by("-created_at")[:num_rows_to_preserve].values_list(
            "pk", flat=True
        )
        all_pks_to_keep.extend(pks_to_keep)

    # Everything not collected above is stale and gets removed
    num_deleted, _ = UserPasswordHistory.objects.exclude(pk__in=all_pks_to_keep).delete()
    LOGGER.debug("Deleted stale password history records", count=num_deleted)
    # Bug fix: status message previously read "Delete {n} ..." instead of "Deleted {n} ..."
    self.set_status(TaskStatus.SUCCESSFUL, f"Deleted {num_deleted} stale password history records")
| @ -1,108 +0,0 @@ | |||||||
| """Unique Password Policy flow tests""" |  | ||||||
|  |  | ||||||
| from django.contrib.auth.hashers import make_password |  | ||||||
| from django.urls.base import reverse |  | ||||||
|  |  | ||||||
| from authentik.core.tests.utils import create_test_flow, create_test_user |  | ||||||
| from authentik.enterprise.policies.unique_password.models import ( |  | ||||||
|     UniquePasswordPolicy, |  | ||||||
|     UserPasswordHistory, |  | ||||||
| ) |  | ||||||
| from authentik.flows.models import FlowDesignation, FlowStageBinding |  | ||||||
| from authentik.flows.tests import FlowTestCase |  | ||||||
| from authentik.lib.generators import generate_id |  | ||||||
| from authentik.stages.prompt.models import FieldTypes, Prompt, PromptStage |  | ||||||
|  |  | ||||||
|  |  | ||||||
class TestUniquePasswordPolicyFlow(FlowTestCase):
    """Test Unique Password Policy in a flow"""

    # Plaintext password seeded into the user's history and re-submitted later
    REUSED_PASSWORD = "hunter1"  # nosec B105

    def setUp(self) -> None:
        """Build an authentication flow with a prompt stage whose password field
        is validated by a UniquePasswordPolicy (history depth 1)."""
        self.user = create_test_user()
        self.flow = create_test_flow(FlowDesignation.AUTHENTICATION)

        password_prompt = Prompt.objects.create(
            name=generate_id(),
            field_key="password",
            label="PASSWORD_LABEL",
            type=FieldTypes.PASSWORD,
            required=True,
            placeholder="PASSWORD_PLACEHOLDER",
        )

        self.policy = UniquePasswordPolicy.objects.create(
            name="password_must_unique",
            password_field=password_prompt.field_key,
            num_historical_passwords=1,
        )
        stage = PromptStage.objects.create(name="prompt-stage")
        # Attach the policy as a validation policy so it runs on prompt submission
        stage.validation_policies.set([self.policy])
        stage.fields.set(
            [
                password_prompt,
            ]
        )
        FlowStageBinding.objects.create(target=self.flow, stage=stage, order=2)

        # Seed the user's password history with the (hashed) reused password
        UserPasswordHistory.create_for_user(self.user, make_password(self.REUSED_PASSWORD))

    def test_prompt_data(self):
        """Test policy attached to a prompt stage"""
        # Test the policy directly
        from authentik.policies.types import PolicyRequest
        from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT

        # Create a policy request with the reused password
        request = PolicyRequest(user=self.user)
        request.context[PLAN_CONTEXT_PROMPT] = {"password": self.REUSED_PASSWORD}

        # Test the policy directly
        result = self.policy.passes(request)

        # Verify that the policy fails (returns False) with the expected error message
        self.assertFalse(result.passing, "Policy should fail for reused password")
        self.assertEqual(
            result.messages[0],
            "This password has been used previously. Please choose a different one.",
            "Incorrect error message",
        )

        # API-based testing approach: submit the same password through the flow
        # executor and expect the prompt stage to be re-rendered with errors

        self.client.force_login(self.user)

        # Send a POST request to the flow executor with the reused password
        response = self.client.post(
            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
            {"password": self.REUSED_PASSWORD},
        )
        self.assertStageResponse(
            response,
            self.flow,
            component="ak-stage-prompt",
            fields=[
                {
                    "choices": None,
                    "field_key": "password",
                    "label": "PASSWORD_LABEL",
                    "order": 0,
                    "placeholder": "PASSWORD_PLACEHOLDER",
                    "initial_value": "",
                    "required": True,
                    "type": "password",
                    "sub_text": "",
                }
            ],
            response_errors={
                "non_field_errors": [
                    {
                        "code": "invalid",
                        "string": "This password has been used previously. "
                        "Please choose a different one.",
                    }
                ]
            },
        )
| @ -1,77 +0,0 @@ | |||||||
| """Unique Password Policy tests""" |  | ||||||
|  |  | ||||||
| from django.contrib.auth.hashers import make_password |  | ||||||
| from django.test import TestCase |  | ||||||
| from guardian.shortcuts import get_anonymous_user |  | ||||||
|  |  | ||||||
| from authentik.core.models import User |  | ||||||
| from authentik.enterprise.policies.unique_password.models import ( |  | ||||||
|     UniquePasswordPolicy, |  | ||||||
|     UserPasswordHistory, |  | ||||||
| ) |  | ||||||
| from authentik.policies.types import PolicyRequest, PolicyResult |  | ||||||
| from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT |  | ||||||
|  |  | ||||||
|  |  | ||||||
class TestUniquePasswordPolicy(TestCase):
    """Test Password Uniqueness Policy"""

    def setUp(self) -> None:
        # Policy that only inspects the single most recent password
        self.policy = UniquePasswordPolicy.objects.create(
            name="test_unique_password", num_historical_passwords=1
        )
        self.user = User.objects.create(username="test-user")

    def test_invalid(self):
        """Test without password present in request"""
        policy_request = PolicyRequest(get_anonymous_user())
        outcome: PolicyResult = self.policy.passes(policy_request)
        self.assertFalse(outcome.passing)
        self.assertEqual(outcome.messages[0], "Password not set in context")

    def test_passes_no_previous_passwords(self):
        """A user without recorded history may pick any password"""
        policy_request = PolicyRequest(get_anonymous_user())
        policy_request.context = {PLAN_CONTEXT_PROMPT: {"password": "hunter2"}}
        outcome: PolicyResult = self.policy.passes(policy_request)
        self.assertTrue(outcome.passing)

    def test_passes_passwords_are_different(self):
        """A genuinely new password is accepted"""
        # Seed database with an old password
        UserPasswordHistory.create_for_user(self.user, make_password("hunter1"))

        policy_request = PolicyRequest(self.user)
        policy_request.context = {PLAN_CONTEXT_PROMPT: {"password": "hunter2"}}
        outcome: PolicyResult = self.policy.passes(policy_request)
        self.assertTrue(outcome.passing)

    def test_passes_multiple_old_passwords(self):
        """Only the newest entry is checked when the history limit is 1"""
        # Seed with multiple old passwords
        UserPasswordHistory.objects.bulk_create(
            [
                UserPasswordHistory(user=self.user, old_password=make_password("hunter1")),
                UserPasswordHistory(user=self.user, old_password=make_password("hunter2")),
            ]
        )
        policy_request = PolicyRequest(self.user)
        policy_request.context = {PLAN_CONTEXT_PROMPT: {"password": "hunter3"}}
        outcome: PolicyResult = self.policy.passes(policy_request)
        self.assertTrue(outcome.passing)

    def test_fails_password_matches_old_password(self):
        """Re-submitting a recorded password is rejected"""
        UserPasswordHistory.create_for_user(self.user, make_password("hunter1"))

        policy_request = PolicyRequest(self.user)
        policy_request.context = {PLAN_CONTEXT_PROMPT: {"password": "hunter1"}}
        outcome: PolicyResult = self.policy.passes(policy_request)
        self.assertFalse(outcome.passing)

    def test_fails_if_identical_password_with_different_hash_algos(self):
        """Reuse is detected regardless of the hash algorithm used historically"""
        UserPasswordHistory.create_for_user(
            self.user, make_password("hunter2", "somesalt", "scrypt")
        )
        policy_request = PolicyRequest(self.user)
        policy_request.context = {PLAN_CONTEXT_PROMPT: {"password": "hunter2"}}
        outcome: PolicyResult = self.policy.passes(policy_request)
        self.assertFalse(outcome.passing)
| @ -1,90 +0,0 @@ | |||||||
| from django.urls import reverse |  | ||||||
|  |  | ||||||
| from authentik.core.models import Group, Source, User |  | ||||||
| from authentik.core.tests.utils import create_test_flow, create_test_user |  | ||||||
| from authentik.enterprise.policies.unique_password.models import ( |  | ||||||
|     UniquePasswordPolicy, |  | ||||||
|     UserPasswordHistory, |  | ||||||
| ) |  | ||||||
| from authentik.flows.markers import StageMarker |  | ||||||
| from authentik.flows.models import FlowStageBinding |  | ||||||
| from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlan |  | ||||||
| from authentik.flows.tests import FlowTestCase |  | ||||||
| from authentik.flows.views.executor import SESSION_KEY_PLAN |  | ||||||
| from authentik.lib.generators import generate_key |  | ||||||
| from authentik.policies.models import PolicyBinding, PolicyBindingModel |  | ||||||
| from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT |  | ||||||
| from authentik.stages.user_write.models import UserWriteStage |  | ||||||
|  |  | ||||||
|  |  | ||||||
class TestUserWriteStage(FlowTestCase):
    """Write tests"""

    def setUp(self):
        super().setUp()
        self.flow = create_test_flow()
        self.group = Group.objects.create(name="test-group")
        self.other_group = Group.objects.create(name="other-group")
        self.stage: UserWriteStage = UserWriteStage.objects.create(
            name="write", create_users_as_inactive=True, create_users_group=self.group
        )
        self.binding = FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=2)
        self.source = Source.objects.create(name="fake_source")

    def test_save_password_history_if_policy_binding_enforced(self):
        """Test user's new password is recorded when ANY enabled UniquePasswordPolicy exists"""
        unique_password_policy = UniquePasswordPolicy.objects.create(num_historical_passwords=5)
        pbm = PolicyBindingModel.objects.create()
        PolicyBinding.objects.create(
            target=pbm, policy=unique_password_policy, order=0, enabled=True
        )

        test_user = create_test_user()
        # Store original password for verification
        original_password = test_user.password

        # We're changing our own password
        self.client.force_login(test_user)

        new_password = generate_key()
        plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
        plan.context[PLAN_CONTEXT_PENDING_USER] = test_user
        plan.context[PLAN_CONTEXT_PROMPT] = {
            "username": test_user.username,
            "password": new_password,
        }
        session = self.client.session
        session[SESSION_KEY_PLAN] = plan
        session.save()
        # Password history should be recorded
        # NOTE(review): this asserts an entry exists BEFORE the flow runs —
        # presumably created by the password_changed signal during user creation;
        # confirm that is the intended precondition.
        user_password_history_qs = UserPasswordHistory.objects.filter(user=test_user)
        self.assertTrue(user_password_history_qs.exists(), "Password history should be recorded")
        self.assertEqual(len(user_password_history_qs), 1, "expected 1 recorded password")

        # Create a password history entry manually to simulate the signal behavior
        # This is what would happen if the signal worked correctly
        UserPasswordHistory.objects.create(user=test_user, old_password=original_password)
        user_password_history_qs = UserPasswordHistory.objects.filter(user=test_user)
        self.assertTrue(user_password_history_qs.exists(), "Password history should be recorded")
        self.assertEqual(len(user_password_history_qs), 2, "expected 2 recorded password")

        # Execute the flow by sending a POST request to the flow executor endpoint
        response = self.client.post(
            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug})
        )

        # Verify that the request was successful
        self.assertEqual(response.status_code, 200)
        user_qs = User.objects.filter(username=plan.context[PLAN_CONTEXT_PROMPT]["username"])
        self.assertTrue(user_qs.exists())

        # Verify the password history entry exists
        user_password_history_qs = UserPasswordHistory.objects.filter(user=test_user)
        self.assertTrue(user_password_history_qs.exists(), "Password history should be recorded")

        # Third entry is expected from the user_write stage persisting the new password
        self.assertEqual(len(user_password_history_qs), 3, "expected 3 recorded password")
        # Verify that one of the entries contains the original password
        self.assertTrue(
            any(entry.old_password == original_password for entry in user_password_history_qs),
            "original password should be in password history table",
        )
| @ -1,178 +0,0 @@ | |||||||
| from datetime import datetime, timedelta |  | ||||||
|  |  | ||||||
| from django.test import TestCase |  | ||||||
|  |  | ||||||
| from authentik.core.tests.utils import create_test_user |  | ||||||
| from authentik.enterprise.policies.unique_password.models import ( |  | ||||||
|     UniquePasswordPolicy, |  | ||||||
|     UserPasswordHistory, |  | ||||||
| ) |  | ||||||
| from authentik.enterprise.policies.unique_password.tasks import ( |  | ||||||
|     check_and_purge_password_history, |  | ||||||
|     trim_password_histories, |  | ||||||
| ) |  | ||||||
| from authentik.policies.models import PolicyBinding, PolicyBindingModel |  | ||||||
|  |  | ||||||
|  |  | ||||||
class TestUniquePasswordPolicyModel(TestCase):
    """Test the UniquePasswordPolicy model methods"""

    def test_is_in_use_with_binding(self):
        """is_in_use returns True as soon as any binding targets the policy"""
        target = PolicyBindingModel.objects.create()
        policy = UniquePasswordPolicy.objects.create(num_historical_passwords=5)
        PolicyBinding.objects.create(target=target, policy=policy, order=0, enabled=True)

        self.assertTrue(UniquePasswordPolicy.is_in_use())

    def test_is_in_use_with_promptstage(self):
        """is_in_use returns True when the policy validates a PromptStage"""
        from authentik.stages.prompt.models import PromptStage

        policy = UniquePasswordPolicy.objects.create(num_historical_passwords=5)
        stage = PromptStage.objects.create(
            name="Test Prompt Stage",
        )
        # validation_policies is many-to-many, so assign via set()
        stage.validation_policies.set([policy])

        self.assertTrue(UniquePasswordPolicy.is_in_use())
|  |  | ||||||
class TestTrimAllPasswordHistories(TestCase):
    """Test the task that trims password history for all users"""

    # NOTE(review): this class defines only setUp and no test_* methods — either
    # tests were removed or the fixture is incomplete; confirm before merging.
    def setUp(self):
        self.user1 = create_test_user("test-user1")
        self.user2 = create_test_user("test-user2")
        self.pbm = PolicyBindingModel.objects.create()
        # Create a policy with a limit of 1 password
        self.policy = UniquePasswordPolicy.objects.create(num_historical_passwords=1)
        PolicyBinding.objects.create(
            target=self.pbm,
            policy=self.policy,
            enabled=True,
            order=0,
        )
|  |  | ||||||
|  |  | ||||||
class TestCheckAndPurgePasswordHistory(TestCase):
    """Test the scheduled task that checks if any policy is in use and purges if not"""

    def setUp(self):
        self.user = create_test_user("test-user")
        self.pbm = PolicyBindingModel.objects.create()

    def test_purge_when_no_policy_in_use(self):
        """Without any policy configured, the task wipes the history table"""
        UserPasswordHistory.create_for_user(self.user, "hunter2")
        # Precondition: at least one history row exists
        self.assertTrue(UserPasswordHistory.objects.exists())

        # Run the task - should purge since no policy is in use
        check_and_purge_password_history()

        self.assertFalse(UserPasswordHistory.objects.exists())

    def test_no_purge_when_policy_in_use(self):
        """With a bound policy, existing history entries are retained"""
        bound_policy = UniquePasswordPolicy.objects.create(num_historical_passwords=5)
        PolicyBinding.objects.create(
            target=self.pbm,
            policy=bound_policy,
            enabled=True,
            order=0,
        )

        UserPasswordHistory.create_for_user(self.user, "hunter2")
        # Precondition: at least one history row exists
        self.assertTrue(UserPasswordHistory.objects.exists())

        # Run the task - should NOT purge since a policy is in use
        check_and_purge_password_history()

        self.assertTrue(UserPasswordHistory.objects.exists())
|  |  | ||||||
|  |  | ||||||
| class TestTrimPasswordHistory(TestCase): |  | ||||||
|     """Test password history cleanup task""" |  | ||||||
|  |  | ||||||
|     def setUp(self): |  | ||||||
|         self.user = create_test_user("test-user") |  | ||||||
|         self.pbm = PolicyBindingModel.objects.create() |  | ||||||
|  |  | ||||||
|     def test_trim_password_history_ok(self): |  | ||||||
|         """Test passwords over the define limit are deleted""" |  | ||||||
|         _now = datetime.now() |  | ||||||
|         UserPasswordHistory.objects.bulk_create( |  | ||||||
|             [ |  | ||||||
|                 UserPasswordHistory( |  | ||||||
|                     user=self.user, |  | ||||||
|                     old_password="hunter1",  # nosec B106 |  | ||||||
|                     created_at=_now - timedelta(days=3), |  | ||||||
|                 ), |  | ||||||
|                 UserPasswordHistory( |  | ||||||
|                     user=self.user, |  | ||||||
|                     old_password="hunter2",  # nosec B106 |  | ||||||
|                     created_at=_now - timedelta(days=2), |  | ||||||
|                 ), |  | ||||||
|                 UserPasswordHistory( |  | ||||||
|                     user=self.user, |  | ||||||
|                     old_password="hunter3",  # nosec B106 |  | ||||||
|                     created_at=_now, |  | ||||||
|                 ), |  | ||||||
|             ] |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         policy = UniquePasswordPolicy.objects.create(num_historical_passwords=1) |  | ||||||
|         PolicyBinding.objects.create( |  | ||||||
|             target=self.pbm, |  | ||||||
|             policy=policy, |  | ||||||
|             enabled=True, |  | ||||||
|             order=0, |  | ||||||
|         ) |  | ||||||
|         trim_password_histories.delay() |  | ||||||
|         user_pwd_history_qs = UserPasswordHistory.objects.filter(user=self.user) |  | ||||||
|         self.assertEqual(len(user_pwd_history_qs), 1) |  | ||||||
|  |  | ||||||
|     def test_trim_password_history_policy_diabled_no_op(self): |  | ||||||
|         """Test no passwords removed if policy binding is disabled""" |  | ||||||
|  |  | ||||||
|         # Insert a record to ensure it's not deleted after executing task |  | ||||||
|         UserPasswordHistory.create_for_user(self.user, "hunter2") |  | ||||||
|  |  | ||||||
|         policy = UniquePasswordPolicy.objects.create(num_historical_passwords=1) |  | ||||||
|         PolicyBinding.objects.create( |  | ||||||
|             target=self.pbm, |  | ||||||
|             policy=policy, |  | ||||||
|             enabled=False, |  | ||||||
|             order=0, |  | ||||||
|         ) |  | ||||||
|         trim_password_histories.delay() |  | ||||||
|         self.assertTrue(UserPasswordHistory.objects.filter(user=self.user).exists()) |  | ||||||
|  |  | ||||||
|     def test_trim_password_history_fewer_records_than_maximum_is_no_op(self): |  | ||||||
|         """Test no passwords deleted if fewer passwords exist than limit""" |  | ||||||
|  |  | ||||||
|         UserPasswordHistory.create_for_user(self.user, "hunter2") |  | ||||||
|  |  | ||||||
|         policy = UniquePasswordPolicy.objects.create(num_historical_passwords=2) |  | ||||||
|         PolicyBinding.objects.create( |  | ||||||
|             target=self.pbm, |  | ||||||
|             policy=policy, |  | ||||||
|             enabled=True, |  | ||||||
|             order=0, |  | ||||||
|         ) |  | ||||||
|         trim_password_histories.delay() |  | ||||||
|         self.assertTrue(UserPasswordHistory.objects.filter(user=self.user).exists()) |  | ||||||
| @ -1,7 +0,0 @@ | |||||||
| """API URLs""" |  | ||||||
|  |  | ||||||
| from authentik.enterprise.policies.unique_password.api import UniquePasswordPolicyViewSet |  | ||||||
|  |  | ||||||
| api_urlpatterns = [ |  | ||||||
|     ("policies/unique_password", UniquePasswordPolicyViewSet), |  | ||||||
| ] |  | ||||||
| @ -25,7 +25,7 @@ class GoogleWorkspaceGroupClient( | |||||||
|     """Google client for groups""" |     """Google client for groups""" | ||||||
|  |  | ||||||
|     connection_type = GoogleWorkspaceProviderGroup |     connection_type = GoogleWorkspaceProviderGroup | ||||||
|     connection_attr = "googleworkspaceprovidergroup_set" |     connection_type_query = "group" | ||||||
|     can_discover = True |     can_discover = True | ||||||
|  |  | ||||||
|     def __init__(self, provider: GoogleWorkspaceProvider) -> None: |     def __init__(self, provider: GoogleWorkspaceProvider) -> None: | ||||||
|  | |||||||
| @ -20,7 +20,7 @@ class GoogleWorkspaceUserClient(GoogleWorkspaceSyncClient[User, GoogleWorkspaceP | |||||||
|     """Sync authentik users into google workspace""" |     """Sync authentik users into google workspace""" | ||||||
|  |  | ||||||
|     connection_type = GoogleWorkspaceProviderUser |     connection_type = GoogleWorkspaceProviderUser | ||||||
|     connection_attr = "googleworkspaceprovideruser_set" |     connection_type_query = "user" | ||||||
|     can_discover = True |     can_discover = True | ||||||
|  |  | ||||||
|     def __init__(self, provider: GoogleWorkspaceProvider) -> None: |     def __init__(self, provider: GoogleWorkspaceProvider) -> None: | ||||||
|  | |||||||
| @ -132,11 +132,7 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider): | |||||||
|         if type == User: |         if type == User: | ||||||
|             # Get queryset of all users with consistent ordering |             # Get queryset of all users with consistent ordering | ||||||
|             # according to the provider's settings |             # according to the provider's settings | ||||||
|             base = ( |             base = User.objects.all().exclude_anonymous() | ||||||
|                 User.objects.prefetch_related("googleworkspaceprovideruser_set") |  | ||||||
|                 .all() |  | ||||||
|                 .exclude_anonymous() |  | ||||||
|             ) |  | ||||||
|             if self.exclude_users_service_account: |             if self.exclude_users_service_account: | ||||||
|                 base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude( |                 base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude( | ||||||
|                     type=UserTypes.INTERNAL_SERVICE_ACCOUNT |                     type=UserTypes.INTERNAL_SERVICE_ACCOUNT | ||||||
| @ -146,11 +142,7 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider): | |||||||
|             return base.order_by("pk") |             return base.order_by("pk") | ||||||
|         if type == Group: |         if type == Group: | ||||||
|             # Get queryset of all groups with consistent ordering |             # Get queryset of all groups with consistent ordering | ||||||
|             return ( |             return Group.objects.all().order_by("pk") | ||||||
|                 Group.objects.prefetch_related("googleworkspaceprovidergroup_set") |  | ||||||
|                 .all() |  | ||||||
|                 .order_by("pk") |  | ||||||
|             ) |  | ||||||
|         raise ValueError(f"Invalid type {type}") |         raise ValueError(f"Invalid type {type}") | ||||||
|  |  | ||||||
|     def google_credentials(self): |     def google_credentials(self): | ||||||
|  | |||||||
| @ -29,7 +29,7 @@ class MicrosoftEntraGroupClient( | |||||||
|     """Microsoft client for groups""" |     """Microsoft client for groups""" | ||||||
|  |  | ||||||
|     connection_type = MicrosoftEntraProviderGroup |     connection_type = MicrosoftEntraProviderGroup | ||||||
|     connection_attr = "microsoftentraprovidergroup_set" |     connection_type_query = "group" | ||||||
|     can_discover = True |     can_discover = True | ||||||
|  |  | ||||||
|     def __init__(self, provider: MicrosoftEntraProvider) -> None: |     def __init__(self, provider: MicrosoftEntraProvider) -> None: | ||||||
|  | |||||||
| @ -24,7 +24,7 @@ class MicrosoftEntraUserClient(MicrosoftEntraSyncClient[User, MicrosoftEntraProv | |||||||
|     """Sync authentik users into microsoft entra""" |     """Sync authentik users into microsoft entra""" | ||||||
|  |  | ||||||
|     connection_type = MicrosoftEntraProviderUser |     connection_type = MicrosoftEntraProviderUser | ||||||
|     connection_attr = "microsoftentraprovideruser_set" |     connection_type_query = "user" | ||||||
|     can_discover = True |     can_discover = True | ||||||
|  |  | ||||||
|     def __init__(self, provider: MicrosoftEntraProvider) -> None: |     def __init__(self, provider: MicrosoftEntraProvider) -> None: | ||||||
|  | |||||||
| @ -121,11 +121,7 @@ class MicrosoftEntraProvider(OutgoingSyncProvider, BackchannelProvider): | |||||||
|         if type == User: |         if type == User: | ||||||
|             # Get queryset of all users with consistent ordering |             # Get queryset of all users with consistent ordering | ||||||
|             # according to the provider's settings |             # according to the provider's settings | ||||||
|             base = ( |             base = User.objects.all().exclude_anonymous() | ||||||
|                 User.objects.prefetch_related("microsoftentraprovideruser_set") |  | ||||||
|                 .all() |  | ||||||
|                 .exclude_anonymous() |  | ||||||
|             ) |  | ||||||
|             if self.exclude_users_service_account: |             if self.exclude_users_service_account: | ||||||
|                 base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude( |                 base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude( | ||||||
|                     type=UserTypes.INTERNAL_SERVICE_ACCOUNT |                     type=UserTypes.INTERNAL_SERVICE_ACCOUNT | ||||||
| @ -135,11 +131,7 @@ class MicrosoftEntraProvider(OutgoingSyncProvider, BackchannelProvider): | |||||||
|             return base.order_by("pk") |             return base.order_by("pk") | ||||||
|         if type == Group: |         if type == Group: | ||||||
|             # Get queryset of all groups with consistent ordering |             # Get queryset of all groups with consistent ordering | ||||||
|             return ( |             return Group.objects.all().order_by("pk") | ||||||
|                 Group.objects.prefetch_related("microsoftentraprovidergroup_set") |  | ||||||
|                 .all() |  | ||||||
|                 .order_by("pk") |  | ||||||
|             ) |  | ||||||
|         raise ValueError(f"Invalid type {type}") |         raise ValueError(f"Invalid type {type}") | ||||||
|  |  | ||||||
|     def microsoft_credentials(self): |     def microsoft_credentials(self): | ||||||
|  | |||||||
| @ -14,12 +14,10 @@ CELERY_BEAT_SCHEDULE = { | |||||||
|  |  | ||||||
| TENANT_APPS = [ | TENANT_APPS = [ | ||||||
|     "authentik.enterprise.audit", |     "authentik.enterprise.audit", | ||||||
|     "authentik.enterprise.policies.unique_password", |  | ||||||
|     "authentik.enterprise.providers.google_workspace", |     "authentik.enterprise.providers.google_workspace", | ||||||
|     "authentik.enterprise.providers.microsoft_entra", |     "authentik.enterprise.providers.microsoft_entra", | ||||||
|     "authentik.enterprise.providers.ssf", |     "authentik.enterprise.providers.ssf", | ||||||
|     "authentik.enterprise.stages.authenticator_endpoint_gdtc", |     "authentik.enterprise.stages.authenticator_endpoint_gdtc", | ||||||
|     "authentik.enterprise.stages.mtls", |  | ||||||
|     "authentik.enterprise.stages.source", |     "authentik.enterprise.stages.source", | ||||||
| ] | ] | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,31 +0,0 @@ | |||||||
| """Mutual TLS Stage API Views""" |  | ||||||
|  |  | ||||||
| from rest_framework.viewsets import ModelViewSet |  | ||||||
|  |  | ||||||
| from authentik.core.api.used_by import UsedByMixin |  | ||||||
| from authentik.enterprise.api import EnterpriseRequiredMixin |  | ||||||
| from authentik.enterprise.stages.mtls.models import MutualTLSStage |  | ||||||
| from authentik.flows.api.stages import StageSerializer |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class MutualTLSStageSerializer(EnterpriseRequiredMixin, StageSerializer): |  | ||||||
|     """MutualTLSStage Serializer""" |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         model = MutualTLSStage |  | ||||||
|         fields = StageSerializer.Meta.fields + [ |  | ||||||
|             "mode", |  | ||||||
|             "certificate_authorities", |  | ||||||
|             "cert_attribute", |  | ||||||
|             "user_attribute", |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class MutualTLSStageViewSet(UsedByMixin, ModelViewSet): |  | ||||||
|     """MutualTLSStage Viewset""" |  | ||||||
|  |  | ||||||
|     queryset = MutualTLSStage.objects.all() |  | ||||||
|     serializer_class = MutualTLSStageSerializer |  | ||||||
|     filterset_fields = "__all__" |  | ||||||
|     ordering = ["name"] |  | ||||||
|     search_fields = ["name"] |  | ||||||
| @ -1,12 +0,0 @@ | |||||||
| """authentik stage app config""" |  | ||||||
|  |  | ||||||
| from authentik.enterprise.apps import EnterpriseConfig |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikEnterpriseStageMTLSConfig(EnterpriseConfig): |  | ||||||
|     """authentik MTLS stage config""" |  | ||||||
|  |  | ||||||
|     name = "authentik.enterprise.stages.mtls" |  | ||||||
|     label = "authentik_stages_mtls" |  | ||||||
|     verbose_name = "authentik Enterprise.Stages.MTLS" |  | ||||||
|     default = True |  | ||||||
| @ -1,68 +0,0 @@ | |||||||
| # Generated by Django 5.1.9 on 2025-05-19 18:29 |  | ||||||
|  |  | ||||||
| import django.db.models.deletion |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     initial = True |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_crypto", "0004_alter_certificatekeypair_name"), |  | ||||||
|         ("authentik_flows", "0027_auto_20231028_1424"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.CreateModel( |  | ||||||
|             name="MutualTLSStage", |  | ||||||
|             fields=[ |  | ||||||
|                 ( |  | ||||||
|                     "stage_ptr", |  | ||||||
|                     models.OneToOneField( |  | ||||||
|                         auto_created=True, |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, |  | ||||||
|                         parent_link=True, |  | ||||||
|                         primary_key=True, |  | ||||||
|                         serialize=False, |  | ||||||
|                         to="authentik_flows.stage", |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ( |  | ||||||
|                     "mode", |  | ||||||
|                     models.TextField(choices=[("optional", "Optional"), ("required", "Required")]), |  | ||||||
|                 ), |  | ||||||
|                 ( |  | ||||||
|                     "cert_attribute", |  | ||||||
|                     models.TextField( |  | ||||||
|                         choices=[ |  | ||||||
|                             ("subject", "Subject"), |  | ||||||
|                             ("common_name", "Common Name"), |  | ||||||
|                             ("email", "Email"), |  | ||||||
|                         ] |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ( |  | ||||||
|                     "user_attribute", |  | ||||||
|                     models.TextField(choices=[("username", "Username"), ("email", "Email")]), |  | ||||||
|                 ), |  | ||||||
|                 ( |  | ||||||
|                     "certificate_authorities", |  | ||||||
|                     models.ManyToManyField( |  | ||||||
|                         blank=True, |  | ||||||
|                         default=None, |  | ||||||
|                         help_text="Configure certificate authorities to validate the certificate against. This option has a higher priority than the `client_certificate` option on `Brand`.", |  | ||||||
|                         to="authentik_crypto.certificatekeypair", |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|             ], |  | ||||||
|             options={ |  | ||||||
|                 "verbose_name": "Mutual TLS Stage", |  | ||||||
|                 "verbose_name_plural": "Mutual TLS Stages", |  | ||||||
|                 "permissions": [ |  | ||||||
|                     ("pass_outpost_certificate", "Permissions to pass Certificates for outposts.") |  | ||||||
|                 ], |  | ||||||
|             }, |  | ||||||
|             bases=("authentik_flows.stage",), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -1,71 +0,0 @@ | |||||||
| from django.db import models |  | ||||||
| from django.utils.translation import gettext_lazy as _ |  | ||||||
| from rest_framework.serializers import Serializer |  | ||||||
|  |  | ||||||
| from authentik.crypto.models import CertificateKeyPair |  | ||||||
| from authentik.flows.models import Stage |  | ||||||
| from authentik.flows.stage import StageView |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TLSMode(models.TextChoices): |  | ||||||
|     """Modes the TLS Stage can operate in""" |  | ||||||
|  |  | ||||||
|     OPTIONAL = "optional" |  | ||||||
|     REQUIRED = "required" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class CertAttributes(models.TextChoices): |  | ||||||
|     """Certificate attribute used for user matching""" |  | ||||||
|  |  | ||||||
|     SUBJECT = "subject" |  | ||||||
|     COMMON_NAME = "common_name" |  | ||||||
|     EMAIL = "email" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class UserAttributes(models.TextChoices): |  | ||||||
|     """User attribute for user matching""" |  | ||||||
|  |  | ||||||
|     USERNAME = "username" |  | ||||||
|     EMAIL = "email" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class MutualTLSStage(Stage): |  | ||||||
|     """Authenticate/enroll users using a client-certificate.""" |  | ||||||
|  |  | ||||||
|     mode = models.TextField(choices=TLSMode.choices) |  | ||||||
|  |  | ||||||
|     certificate_authorities = models.ManyToManyField( |  | ||||||
|         CertificateKeyPair, |  | ||||||
|         default=None, |  | ||||||
|         blank=True, |  | ||||||
|         help_text=_( |  | ||||||
|             "Configure certificate authorities to validate the certificate against. " |  | ||||||
|             "This option has a higher priority than the `client_certificate` option on `Brand`." |  | ||||||
|         ), |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     cert_attribute = models.TextField(choices=CertAttributes.choices) |  | ||||||
|     user_attribute = models.TextField(choices=UserAttributes.choices) |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def view(self) -> type[StageView]: |  | ||||||
|         from authentik.enterprise.stages.mtls.stage import MTLSStageView |  | ||||||
|  |  | ||||||
|         return MTLSStageView |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def serializer(self) -> type[Serializer]: |  | ||||||
|         from authentik.enterprise.stages.mtls.api import MutualTLSStageSerializer |  | ||||||
|  |  | ||||||
|         return MutualTLSStageSerializer |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def component(self) -> str: |  | ||||||
|         return "ak-stage-mtls-form" |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         verbose_name = _("Mutual TLS Stage") |  | ||||||
|         verbose_name_plural = _("Mutual TLS Stages") |  | ||||||
|         permissions = [ |  | ||||||
|             ("pass_outpost_certificate", _("Permissions to pass Certificates for outposts.")), |  | ||||||
|         ] |  | ||||||
| @ -1,230 +0,0 @@ | |||||||
| from binascii import hexlify |  | ||||||
| from urllib.parse import unquote_plus |  | ||||||
|  |  | ||||||
| from cryptography.exceptions import InvalidSignature |  | ||||||
| from cryptography.hazmat.primitives import hashes |  | ||||||
| from cryptography.x509 import ( |  | ||||||
|     Certificate, |  | ||||||
|     NameOID, |  | ||||||
|     ObjectIdentifier, |  | ||||||
|     UnsupportedGeneralNameType, |  | ||||||
|     load_pem_x509_certificate, |  | ||||||
| ) |  | ||||||
| from cryptography.x509.verification import PolicyBuilder, Store, VerificationError |  | ||||||
| from django.utils.translation import gettext_lazy as _ |  | ||||||
|  |  | ||||||
| from authentik.brands.models import Brand |  | ||||||
| from authentik.core.models import User |  | ||||||
| from authentik.crypto.models import CertificateKeyPair |  | ||||||
| from authentik.enterprise.stages.mtls.models import ( |  | ||||||
|     CertAttributes, |  | ||||||
|     MutualTLSStage, |  | ||||||
|     TLSMode, |  | ||||||
|     UserAttributes, |  | ||||||
| ) |  | ||||||
| from authentik.flows.challenge import AccessDeniedChallenge |  | ||||||
| from authentik.flows.models import FlowDesignation |  | ||||||
| from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER |  | ||||||
| from authentik.flows.stage import ChallengeStageView |  | ||||||
| from authentik.root.middleware import ClientIPMiddleware |  | ||||||
| from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS |  | ||||||
| from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT |  | ||||||
|  |  | ||||||
| # All of these headers must only be accepted from "trusted" reverse proxies |  | ||||||
| # See internal/web/proxy.go:39 |  | ||||||
| HEADER_PROXY_FORWARDED = "X-Forwarded-Client-Cert" |  | ||||||
| HEADER_NGINX_FORWARDED = "SSL-Client-Cert" |  | ||||||
| HEADER_TRAEFIK_FORWARDED = "X-Forwarded-TLS-Client-Cert" |  | ||||||
| HEADER_OUTPOST_FORWARDED = "X-Authentik-Outpost-Certificate" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| PLAN_CONTEXT_CERTIFICATE = "certificate" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class MTLSStageView(ChallengeStageView): |  | ||||||
|  |  | ||||||
|     def __parse_single_cert(self, raw: str | None) -> list[Certificate]: |  | ||||||
|         """Helper to parse a single certificate""" |  | ||||||
|         if not raw: |  | ||||||
|             return [] |  | ||||||
|         try: |  | ||||||
|             cert = load_pem_x509_certificate(unquote_plus(raw).encode()) |  | ||||||
|             return [cert] |  | ||||||
|         except ValueError as exc: |  | ||||||
|             self.logger.info("Failed to parse certificate", exc=exc) |  | ||||||
|             return [] |  | ||||||
|  |  | ||||||
|     def _parse_cert_xfcc(self) -> list[Certificate]: |  | ||||||
|         """Parse certificates in the format given to us in |  | ||||||
|         the format of the authentik router/envoy""" |  | ||||||
|         xfcc_raw = self.request.headers.get(HEADER_PROXY_FORWARDED) |  | ||||||
|         if not xfcc_raw: |  | ||||||
|             return [] |  | ||||||
|         certs = [] |  | ||||||
|         for r_cert in xfcc_raw.split(","): |  | ||||||
|             el = r_cert.split(";") |  | ||||||
|             raw_cert = {k.split("=")[0]: k.split("=")[1] for k in el} |  | ||||||
|             if "Cert" not in raw_cert: |  | ||||||
|                 continue |  | ||||||
|             certs.extend(self.__parse_single_cert(raw_cert["Cert"])) |  | ||||||
|         return certs |  | ||||||
|  |  | ||||||
|     def _parse_cert_nginx(self) -> list[Certificate]: |  | ||||||
|         """Parse certificates in the format nginx-ingress gives to us""" |  | ||||||
|         sslcc_raw = self.request.headers.get(HEADER_NGINX_FORWARDED) |  | ||||||
|         return self.__parse_single_cert(sslcc_raw) |  | ||||||
|  |  | ||||||
|     def _parse_cert_traefik(self) -> list[Certificate]: |  | ||||||
|         """Parse certificates in the format traefik gives to us""" |  | ||||||
|         ftcc_raw = self.request.headers.get(HEADER_TRAEFIK_FORWARDED) |  | ||||||
|         return self.__parse_single_cert(ftcc_raw) |  | ||||||
|  |  | ||||||
|     def _parse_cert_outpost(self) -> list[Certificate]: |  | ||||||
|         """Parse certificates in the format outposts give to us. Also authenticates |  | ||||||
|         the outpost to ensure it has the permission to do so""" |  | ||||||
|         user = ClientIPMiddleware.get_outpost_user(self.request) |  | ||||||
|         if not user: |  | ||||||
|             return [] |  | ||||||
|         if not user.has_perm( |  | ||||||
|             "pass_outpost_certificate", self.executor.current_stage |  | ||||||
|         ) and not user.has_perm("authentik_stages_mtls.pass_outpost_certificate"): |  | ||||||
|             return [] |  | ||||||
|         outpost_raw = self.request.headers.get(HEADER_OUTPOST_FORWARDED) |  | ||||||
|         return self.__parse_single_cert(outpost_raw) |  | ||||||
|  |  | ||||||
|     def get_authorities(self) -> list[CertificateKeyPair] | None: |  | ||||||
|         # We can't access `certificate_authorities` on `self.executor.current_stage`, as that would |  | ||||||
|         # load the certificate into the directly referenced foreign key, which we have to pickle |  | ||||||
|         # as part of the flow plan, and cryptography certs can't be pickled |  | ||||||
|         stage: MutualTLSStage = ( |  | ||||||
|             MutualTLSStage.objects.filter(pk=self.executor.current_stage.pk) |  | ||||||
|             .prefetch_related("certificate_authorities") |  | ||||||
|             .first() |  | ||||||
|         ) |  | ||||||
|         if stage.certificate_authorities.exists(): |  | ||||||
|             return stage.certificate_authorities.order_by("name") |  | ||||||
|         brand: Brand = self.request.brand |  | ||||||
|         if brand.client_certificates.exists(): |  | ||||||
|             return brand.client_certificates.order_by("name") |  | ||||||
|         return None |  | ||||||
|  |  | ||||||
|     def validate_cert(self, authorities: list[CertificateKeyPair], certs: list[Certificate]): |  | ||||||
|         authorities_cert = [x.certificate for x in authorities] |  | ||||||
|         for _cert in certs: |  | ||||||
|             try: |  | ||||||
|                 PolicyBuilder().store(Store(authorities_cert)).build_client_verifier().verify( |  | ||||||
|                     _cert, [] |  | ||||||
|                 ) |  | ||||||
|                 return _cert |  | ||||||
|             except ( |  | ||||||
|                 InvalidSignature, |  | ||||||
|                 TypeError, |  | ||||||
|                 ValueError, |  | ||||||
|                 VerificationError, |  | ||||||
|                 UnsupportedGeneralNameType, |  | ||||||
|             ) as exc: |  | ||||||
|                 self.logger.warning("Discarding invalid certificate", cert=_cert, exc=exc) |  | ||||||
|                 continue |  | ||||||
|         return None |  | ||||||
|  |  | ||||||
|     def check_if_user(self, cert: Certificate): |  | ||||||
|         stage: MutualTLSStage = self.executor.current_stage |  | ||||||
|         cert_attr = None |  | ||||||
|         user_attr = None |  | ||||||
|         match stage.cert_attribute: |  | ||||||
|             case CertAttributes.SUBJECT: |  | ||||||
|                 cert_attr = cert.subject.rfc4514_string() |  | ||||||
|             case CertAttributes.COMMON_NAME: |  | ||||||
|                 cert_attr = self.get_cert_attribute(cert, NameOID.COMMON_NAME) |  | ||||||
|             case CertAttributes.EMAIL: |  | ||||||
|                 cert_attr = self.get_cert_attribute(cert, NameOID.EMAIL_ADDRESS) |  | ||||||
|         match stage.user_attribute: |  | ||||||
|             case UserAttributes.USERNAME: |  | ||||||
|                 user_attr = "username" |  | ||||||
|             case UserAttributes.EMAIL: |  | ||||||
|                 user_attr = "email" |  | ||||||
|         if not user_attr or not cert_attr: |  | ||||||
|             return None |  | ||||||
|         return User.objects.filter(**{user_attr: cert_attr}).first() |  | ||||||
|  |  | ||||||
|     def _cert_to_dict(self, cert: Certificate) -> dict: |  | ||||||
|         """Represent a certificate in a dictionary, as certificate objects cannot be pickled""" |  | ||||||
|         return { |  | ||||||
|             "serial_number": str(cert.serial_number), |  | ||||||
|             "subject": cert.subject.rfc4514_string(), |  | ||||||
|             "issuer": cert.issuer.rfc4514_string(), |  | ||||||
|             "fingerprint_sha256": hexlify(cert.fingerprint(hashes.SHA256()), ":").decode("utf-8"), |  | ||||||
|             "fingerprint_sha1": hexlify(cert.fingerprint(hashes.SHA1()), ":").decode(  # nosec |  | ||||||
|                 "utf-8" |  | ||||||
|             ), |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|     def auth_user(self, user: User, cert: Certificate): |  | ||||||
|         self.executor.plan.context[PLAN_CONTEXT_PENDING_USER] = user |  | ||||||
|         self.executor.plan.context.setdefault(PLAN_CONTEXT_METHOD, "mtls") |  | ||||||
|         self.executor.plan.context.setdefault(PLAN_CONTEXT_METHOD_ARGS, {}) |  | ||||||
|         self.executor.plan.context[PLAN_CONTEXT_METHOD_ARGS].update( |  | ||||||
|             {"certificate": self._cert_to_dict(cert)} |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def enroll_prepare_user(self, cert: Certificate): |  | ||||||
|         self.executor.plan.context.setdefault(PLAN_CONTEXT_PROMPT, {}) |  | ||||||
|         self.executor.plan.context[PLAN_CONTEXT_PROMPT].update( |  | ||||||
|             { |  | ||||||
|                 "email": self.get_cert_attribute(cert, NameOID.EMAIL_ADDRESS), |  | ||||||
|                 "name": self.get_cert_attribute(cert, NameOID.COMMON_NAME), |  | ||||||
|             } |  | ||||||
|         ) |  | ||||||
|         self.executor.plan.context[PLAN_CONTEXT_CERTIFICATE] = self._cert_to_dict(cert) |  | ||||||
|  |  | ||||||
|     def get_cert_attribute(self, cert: Certificate, oid: ObjectIdentifier) -> str | None: |  | ||||||
|         attr = cert.subject.get_attributes_for_oid(oid) |  | ||||||
|         if len(attr) < 1: |  | ||||||
|             return None |  | ||||||
|         return str(attr[0].value) |  | ||||||
|  |  | ||||||
|     def dispatch(self, request, *args, **kwargs): |  | ||||||
|         stage: MutualTLSStage = self.executor.current_stage |  | ||||||
|         certs = [ |  | ||||||
|             *self._parse_cert_xfcc(), |  | ||||||
|             *self._parse_cert_nginx(), |  | ||||||
|             *self._parse_cert_traefik(), |  | ||||||
|             *self._parse_cert_outpost(), |  | ||||||
|         ] |  | ||||||
|         authorities = self.get_authorities() |  | ||||||
|         if not authorities: |  | ||||||
|             self.logger.warning("No Certificate authority found") |  | ||||||
|             if stage.mode == TLSMode.OPTIONAL: |  | ||||||
|                 return self.executor.stage_ok() |  | ||||||
|             if stage.mode == TLSMode.REQUIRED: |  | ||||||
|                 return super().dispatch(request, *args, **kwargs) |  | ||||||
|         cert = self.validate_cert(authorities, certs) |  | ||||||
|         if not cert and stage.mode == TLSMode.REQUIRED: |  | ||||||
|             self.logger.warning("Client certificate required but no certificates given") |  | ||||||
|             return super().dispatch( |  | ||||||
|                 request, |  | ||||||
|                 *args, |  | ||||||
|                 error_message=_("Certificate required but no certificate was given."), |  | ||||||
|                 **kwargs, |  | ||||||
|             ) |  | ||||||
|         if not cert and stage.mode == TLSMode.OPTIONAL: |  | ||||||
|             self.logger.info("No certificate given, continuing") |  | ||||||
|             return self.executor.stage_ok() |  | ||||||
|         existing_user = self.check_if_user(cert) |  | ||||||
|         if self.executor.flow.designation == FlowDesignation.ENROLLMENT: |  | ||||||
|             self.enroll_prepare_user(cert) |  | ||||||
|         elif existing_user: |  | ||||||
|             self.auth_user(existing_user, cert) |  | ||||||
|         else: |  | ||||||
|             return super().dispatch( |  | ||||||
|                 request, *args, error_message=_("No user found for certificate."), **kwargs |  | ||||||
|             ) |  | ||||||
|         return self.executor.stage_ok() |  | ||||||
|  |  | ||||||
|     def get_challenge(self, *args, error_message: str | None = None, **kwargs): |  | ||||||
|         return AccessDeniedChallenge( |  | ||||||
|             data={ |  | ||||||
|                 "component": "ak-stage-access-denied", |  | ||||||
|                 "error_message": str(error_message or "Unknown error"), |  | ||||||
|             } |  | ||||||
|         ) |  | ||||||
| @ -1,31 +0,0 @@ | |||||||
| -----BEGIN CERTIFICATE----- |  | ||||||
| MIIFXDCCA0SgAwIBAgIUBmV7zREyC1SPr72/75/L9zpwV18wDQYJKoZIhvcNAQEL |  | ||||||
| BQAwRjEaMBgGA1UEAwwRYXV0aGVudGlrIFRlc3QgQ0ExEjAQBgNVBAoMCWF1dGhl |  | ||||||
| bnRpazEUMBIGA1UECwwLU2VsZi1zaWduZWQwHhcNMjUwNDI3MTgzMDUwWhcNMzUw |  | ||||||
| MzA3MTgzMDUwWjBGMRowGAYDVQQDDBFhdXRoZW50aWsgVGVzdCBDQTESMBAGA1UE |  | ||||||
| CgwJYXV0aGVudGlrMRQwEgYDVQQLDAtTZWxmLXNpZ25lZDCCAiIwDQYJKoZIhvcN |  | ||||||
| AQEBBQADggIPADCCAgoCggIBAMc0NxZj7j1mPu0aRToo8oMPdC3T99xgxnqdr18x |  | ||||||
| LV4pWyi/YLghgZHqNQY2xNP6JIlSeUZD6KFUYT2sPL4Av/zSg5zO8bl+/lf7ckje |  | ||||||
| O1/Bt5A8xtL0CpmpMDGiI6ibdDElaywM6AohisbxrV29pygSKGq2wugF/urqGtE+ |  | ||||||
| 5z4y5Kt6qMdKkd0iXT+WagbQTIUlykFKgB0+qqTLzDl01lVDa/DoLl8Hqp45mVx2 |  | ||||||
| pqrGsSa3TCErLIv9hUlZklF7A8UV4ZB4JL20UKcP8dKzQClviNie17tpsUpOuy3A |  | ||||||
| SQ6+guWTHTLJNCSdLn1xIqc5q+f5wd2dIDf8zXCTHj+Xp0bJE3Vgaq5R31K9+b+1 |  | ||||||
| 2dDWz1KcNJaLEnw2+b0O8M64wTMLxhqOv7QfLUr6Pmg1ZymghjLcZ6bnU9e31Vza |  | ||||||
| hlPKhxjqYQUC4Kq+oaYF6qdUeJy+dsYf0iDv5tTC+eReZDWIjxTPrNpwA773ZwT7 |  | ||||||
| WVmL7ULGpuP2g9rNvFBcZiN+i6d7CUoN+jd/iRdo79lrI0dfXiyy4bYgW/2HeZfF |  | ||||||
| HaOsc1xsoqnJdWbWkX/ooyaCjAfm07kS3HiOzz4q3QW4wgGrwV8lEraLPxYYeOQu |  | ||||||
| YcGMOM8NfnVkjc8gmyXUxedCje5Vz/Tu5fKrQEInnCmXxVsWbwr/LzEjMKAM/ivY |  | ||||||
| 0TXxAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0G |  | ||||||
| A1UdDgQWBBTa+Ns6QzqlNvnTGszkouQQtZnVJDANBgkqhkiG9w0BAQsFAAOCAgEA |  | ||||||
| NpJEDMXjuEIzSzafkxSshvjnt5sMYmzmvjNoRlkxgN2YcWvPoxbalGAYzcpyggT2 |  | ||||||
| 6xZY8R4tvB1oNTCArqwf860kkofUoJCr88D/pU3Cv4JhjCWs4pmXTsvSqlBSlJbo |  | ||||||
| +jPBZwbn6it/6jcit6Be3rW2PtHe8tASd9Lf8/2r1ZvupXwPzcR84R4Z10ve2lqV |  | ||||||
| xxcWlMmBh51CaYI0b1/WTe9Ua+wgkCVkxbf9zNcDQXjxw2ICWK+nR/4ld4nmqVm2 |  | ||||||
| C7nhvXwU8FAHl7ZgR2Z3PLrwPuhd+kd6NXQqNkS9A+n+1vSRLbRjmV8pwIPpdPEq |  | ||||||
| nslUAGJJBHDUBArxC3gOJSB+WtmaCfzDu2gepMf9Ng1H2ZhwSF/FH3v3fsJqZkzz |  | ||||||
| NBstT9KuNGQRYiCmAPJaoVAc9BoLa+BFML1govtWtpdmbFk8PZEcuUsP7iAZqFF1 |  | ||||||
| uuldPyZ8huGpQSR6Oq2bILRHowfGY0npTZAyxg0Vs8UMy1HTwNOp9OuRtArMZmsJ |  | ||||||
| jFIx1QzRf9S1i6bYpOzOudoXj4ARkS1KmVExGjJFcIT0xlFSSERie2fEKSeEYOyG |  | ||||||
| G+PA2qRt/F51FGOMm1ZscjPXqk2kt3C4BFbz6Vvxsq7D3lmhvFLn4jVA8+OidsM0 |  | ||||||
| YUrVMtWET/RkjEIbADbgRXxNUNo+jtQZDU9C1IiAdfk= |  | ||||||
| -----END CERTIFICATE----- |  | ||||||
| @ -1,31 +0,0 @@ | |||||||
| -----BEGIN CERTIFICATE----- |  | ||||||
| MIIFWTCCA0GgAwIBAgIUDEnKCSmIXG/akySGes7bhOGrN/8wDQYJKoZIhvcNAQEL |  | ||||||
| BQAwRjEaMBgGA1UEAwwRYXV0aGVudGlrIFRlc3QgQ0ExEjAQBgNVBAoMCWF1dGhl |  | ||||||
| bnRpazEUMBIGA1UECwwLU2VsZi1zaWduZWQwHhcNMjUwNTE5MTIzODQ2WhcNMjYw |  | ||||||
| NTE1MTIzODQ2WjARMQ8wDQYDVQQDDAZjbGllbnQwggIiMA0GCSqGSIb3DQEBAQUA |  | ||||||
| A4ICDwAwggIKAoICAQCkPkS1V6l0gj0ulxMznkxkgrw4p9Tjd8teSsGZt02A2Eo6 |  | ||||||
| 7D8FbJ7pp3d5fYW/TWuEKVBLWTID6rijW5EGcdgTM5Jxf/QR+aZTEK6umQxUd4yO |  | ||||||
| mOtp+xVS3KlcsSej2dFpeE5h5VkZizHpvh5xkoAP8W5VtQLOVF0hIeumHnJmaeLj |  | ||||||
| +mhK9PBFpO7k9SFrYYhd/uLrYbIdANihbIO2Q74rNEJHewhFNM7oNSjjEWzRd/7S |  | ||||||
| qNdQij9JGrVG7u8YJJscEQHqyHMYFVCEMjxmsge5BO6Vx5OWmUE3wXPzb5TbyTS4 |  | ||||||
| +yg88g9rYTUXrzz+poCyKpaur45qBsdw35lJ8nq69VJj2xJLGQDwoTgGSXRuPciC |  | ||||||
| 3OilQI+Ma+j8qQGJxJ8WJxISlf1cuhp+V4ZUd1lawlM5hAXyXmHRlH4pun4y+g7O |  | ||||||
| O34+fE3pK25JjVCicMT/rC2A/sb95j/fHTzzJpbB70U0I50maTcIsOkyw6aiF//E |  | ||||||
| 0ShTDz14x22SCMolUc6hxTDZvBB6yrcJHd7d9CCnFH2Sgo13QrtNJ/atXgm13HGh |  | ||||||
| wBzRwK38XUGl/J4pJaxAupTVCPriStUM3m0EYHNelRRUE91pbyeGT0rvOuv00uLw |  | ||||||
| Rj7K7hJZR8avTKWmKrVBVpq+gSojGW1DwBS0NiDNkZs0d/IjB1wkzczEgdZjXwID |  | ||||||
| AQABo3QwcjAfBgNVHSMEGDAWgBTa+Ns6QzqlNvnTGszkouQQtZnVJDAdBgNVHSUE |  | ||||||
| FjAUBggrBgEFBQcDAgYIKwYBBQUHAwEwEQYDVR0RBAowCIIGY2xpZW50MB0GA1Ud |  | ||||||
| DgQWBBT1xg5sXkypRBwvCxBuyfoanaiZ5jANBgkqhkiG9w0BAQsFAAOCAgEAvUAz |  | ||||||
| YwIjxY/0KHZDU8owdILVqKChzfLcy9OHNPyEI3TSOI8X6gNtBO+HE6r8aWGcC9vw |  | ||||||
| zzeIsNQ3UEjvRWi2r+vUVbiPTbFdZboNDSZv6ZmGHxwd85VsjXRGoXV6koCT/9zi |  | ||||||
| 9/lCM1DwqwYSwBphMJdRVFRUMluSYk1oHflGeA18xgGuts4eFivJwhabGm1AdVVQ |  | ||||||
| /CYvqCuTxd/DCzWZBdyxYpDru64i/kyeJCt1pThKEFDWmpumFdBI4CxJ0OhxVSGp |  | ||||||
| dOXzK+Y6ULepxCvi6/OpSog52jQ6PnNd1ghiYtq7yO1T4GQz65M1vtHHVvQ3gfBE |  | ||||||
| AuKYQp6io7ypitRx+LpjsBQenyP4FFGfrq7pm90nLluOBOArfSdF0N+CP2wo/YFV |  | ||||||
| 9BGf89OtvRi3BXCm2NXkE/Sc4We26tY8x7xNLOmNs8YOT0O3r/EQ690W9GIwRMx0 |  | ||||||
| m0r/RXWn5V3o4Jib9r8eH9NzaDstD8g9dECcGfM4fHoM/DAGFaRrNcjMsS1APP3L |  | ||||||
| jp7+BfBSXtrz9V6rVJ3CBLXlLK0AuSm7bqd1MJsGA9uMLpsVZIUA+KawcmPGdPU+ |  | ||||||
| NxdpBCtzyurQSUyaTLtVqSeP35gMAwaNzUDph8Uh+vHz+kRwgXS19OQvTaud5LJu |  | ||||||
| nQe4JNS+u5e2VDEBWUxt8NTpu6eShDN0iIEHtxA= |  | ||||||
| -----END CERTIFICATE----- |  | ||||||
| @ -1,228 +0,0 @@ | |||||||
| from unittest.mock import MagicMock, patch |  | ||||||
| from urllib.parse import quote_plus |  | ||||||
|  |  | ||||||
| from django.urls import reverse |  | ||||||
| from guardian.shortcuts import assign_perm |  | ||||||
|  |  | ||||||
| from authentik.core.models import User |  | ||||||
| from authentik.core.tests.utils import ( |  | ||||||
|     create_test_brand, |  | ||||||
|     create_test_cert, |  | ||||||
|     create_test_flow, |  | ||||||
|     create_test_user, |  | ||||||
| ) |  | ||||||
| from authentik.crypto.models import CertificateKeyPair |  | ||||||
| from authentik.enterprise.stages.mtls.models import ( |  | ||||||
|     CertAttributes, |  | ||||||
|     MutualTLSStage, |  | ||||||
|     TLSMode, |  | ||||||
|     UserAttributes, |  | ||||||
| ) |  | ||||||
| from authentik.enterprise.stages.mtls.stage import PLAN_CONTEXT_CERTIFICATE |  | ||||||
| from authentik.flows.models import FlowDesignation, FlowStageBinding |  | ||||||
| from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER |  | ||||||
| from authentik.flows.tests import FlowTestCase |  | ||||||
| from authentik.lib.generators import generate_id |  | ||||||
| from authentik.lib.tests.utils import load_fixture |  | ||||||
| from authentik.outposts.models import Outpost, OutpostType |  | ||||||
| from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class MTLSStageTests(FlowTestCase): |  | ||||||
|  |  | ||||||
|     def setUp(self): |  | ||||||
|         super().setUp() |  | ||||||
|         self.flow = create_test_flow(FlowDesignation.AUTHENTICATION) |  | ||||||
|         self.ca = CertificateKeyPair.objects.create( |  | ||||||
|             name=generate_id(), |  | ||||||
|             certificate_data=load_fixture("fixtures/ca.pem"), |  | ||||||
|         ) |  | ||||||
|         self.stage = MutualTLSStage.objects.create( |  | ||||||
|             name=generate_id(), |  | ||||||
|             mode=TLSMode.REQUIRED, |  | ||||||
|             cert_attribute=CertAttributes.COMMON_NAME, |  | ||||||
|             user_attribute=UserAttributes.USERNAME, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         self.stage.certificate_authorities.add(self.ca) |  | ||||||
|         self.binding = FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=0) |  | ||||||
|         self.client_cert = load_fixture("fixtures/cert_client.pem") |  | ||||||
|         # User matching the certificate |  | ||||||
|         User.objects.filter(username="client").delete() |  | ||||||
|         self.cert_user = create_test_user(username="client") |  | ||||||
|  |  | ||||||
|     def test_parse_xfcc(self): |  | ||||||
|         """Test authentik Proxy/Envoy's XFCC format""" |  | ||||||
|         with self.assertFlowFinishes() as plan: |  | ||||||
|             res = self.client.get( |  | ||||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), |  | ||||||
|                 headers={"X-Forwarded-Client-Cert": f"Cert={quote_plus(self.client_cert)}"}, |  | ||||||
|             ) |  | ||||||
|             self.assertEqual(res.status_code, 200) |  | ||||||
|             self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) |  | ||||||
|         self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user) |  | ||||||
|  |  | ||||||
|     def test_parse_nginx(self): |  | ||||||
|         """Test nginx's format""" |  | ||||||
|         with self.assertFlowFinishes() as plan: |  | ||||||
|             res = self.client.get( |  | ||||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), |  | ||||||
|                 headers={"SSL-Client-Cert": quote_plus(self.client_cert)}, |  | ||||||
|             ) |  | ||||||
|             self.assertEqual(res.status_code, 200) |  | ||||||
|             self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) |  | ||||||
|         self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user) |  | ||||||
|  |  | ||||||
|     def test_parse_traefik(self): |  | ||||||
|         """Test traefik's format""" |  | ||||||
|         with self.assertFlowFinishes() as plan: |  | ||||||
|             res = self.client.get( |  | ||||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), |  | ||||||
|                 headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)}, |  | ||||||
|             ) |  | ||||||
|             self.assertEqual(res.status_code, 200) |  | ||||||
|             self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) |  | ||||||
|         self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user) |  | ||||||
|  |  | ||||||
|     def test_parse_outpost_object(self): |  | ||||||
|         """Test outposts's format""" |  | ||||||
|         outpost = Outpost.objects.create(name=generate_id(), type=OutpostType.PROXY) |  | ||||||
|         assign_perm("pass_outpost_certificate", outpost.user, self.stage) |  | ||||||
|         with patch( |  | ||||||
|             "authentik.root.middleware.ClientIPMiddleware.get_outpost_user", |  | ||||||
|             MagicMock(return_value=outpost.user), |  | ||||||
|         ): |  | ||||||
|             with self.assertFlowFinishes() as plan: |  | ||||||
|                 res = self.client.get( |  | ||||||
|                     reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), |  | ||||||
|                     headers={"X-Authentik-Outpost-Certificate": quote_plus(self.client_cert)}, |  | ||||||
|                 ) |  | ||||||
|                 self.assertEqual(res.status_code, 200) |  | ||||||
|                 self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) |  | ||||||
|             self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user) |  | ||||||
|  |  | ||||||
|     def test_parse_outpost_global(self): |  | ||||||
|         """Test outposts's format""" |  | ||||||
|         outpost = Outpost.objects.create(name=generate_id(), type=OutpostType.PROXY) |  | ||||||
|         assign_perm("authentik_stages_mtls.pass_outpost_certificate", outpost.user) |  | ||||||
|         with patch( |  | ||||||
|             "authentik.root.middleware.ClientIPMiddleware.get_outpost_user", |  | ||||||
|             MagicMock(return_value=outpost.user), |  | ||||||
|         ): |  | ||||||
|             with self.assertFlowFinishes() as plan: |  | ||||||
|                 res = self.client.get( |  | ||||||
|                     reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), |  | ||||||
|                     headers={"X-Authentik-Outpost-Certificate": quote_plus(self.client_cert)}, |  | ||||||
|                 ) |  | ||||||
|                 self.assertEqual(res.status_code, 200) |  | ||||||
|                 self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) |  | ||||||
|             self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user) |  | ||||||
|  |  | ||||||
|     def test_parse_outpost_no_perm(self): |  | ||||||
|         """Test outposts's format""" |  | ||||||
|         outpost = Outpost.objects.create(name=generate_id(), type=OutpostType.PROXY) |  | ||||||
|         with patch( |  | ||||||
|             "authentik.root.middleware.ClientIPMiddleware.get_outpost_user", |  | ||||||
|             MagicMock(return_value=outpost.user), |  | ||||||
|         ): |  | ||||||
|             res = self.client.get( |  | ||||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), |  | ||||||
|                 headers={"X-Authentik-Outpost-Certificate": quote_plus(self.client_cert)}, |  | ||||||
|             ) |  | ||||||
|             self.assertEqual(res.status_code, 200) |  | ||||||
|             self.assertStageResponse(res, self.flow, component="ak-stage-access-denied") |  | ||||||
|  |  | ||||||
|     def test_invalid_cert(self): |  | ||||||
|         """Test invalid certificate""" |  | ||||||
|         cert = create_test_cert() |  | ||||||
|         with self.assertFlowFinishes() as plan: |  | ||||||
|             res = self.client.get( |  | ||||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), |  | ||||||
|                 headers={"X-Forwarded-TLS-Client-Cert": quote_plus(cert.certificate_data)}, |  | ||||||
|             ) |  | ||||||
|             self.assertEqual(res.status_code, 200) |  | ||||||
|             self.assertStageResponse(res, self.flow, component="ak-stage-access-denied") |  | ||||||
|         self.assertNotIn(PLAN_CONTEXT_PENDING_USER, plan().context) |  | ||||||
|  |  | ||||||
|     def test_auth_no_user(self): |  | ||||||
|         """Test auth with no user""" |  | ||||||
|         User.objects.filter(username="client").delete() |  | ||||||
|         res = self.client.get( |  | ||||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), |  | ||||||
|             headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)}, |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(res.status_code, 200) |  | ||||||
|         self.assertStageResponse(res, self.flow, component="ak-stage-access-denied") |  | ||||||
|  |  | ||||||
|     def test_brand_ca(self): |  | ||||||
|         """Test using a CA from the brand""" |  | ||||||
|         self.stage.certificate_authorities.clear() |  | ||||||
|  |  | ||||||
|         brand = create_test_brand() |  | ||||||
|         brand.client_certificates.add(self.ca) |  | ||||||
|         with self.assertFlowFinishes() as plan: |  | ||||||
|             res = self.client.get( |  | ||||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), |  | ||||||
|                 headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)}, |  | ||||||
|             ) |  | ||||||
|             self.assertEqual(res.status_code, 200) |  | ||||||
|             self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) |  | ||||||
|         self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user) |  | ||||||
|  |  | ||||||
|     def test_no_ca_optional(self): |  | ||||||
|         """Test using no CA Set""" |  | ||||||
|         self.stage.mode = TLSMode.OPTIONAL |  | ||||||
|         self.stage.certificate_authorities.clear() |  | ||||||
|         self.stage.save() |  | ||||||
|         res = self.client.get( |  | ||||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), |  | ||||||
|             headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)}, |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(res.status_code, 200) |  | ||||||
|         self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) |  | ||||||
|  |  | ||||||
|     def test_no_ca_required(self): |  | ||||||
|         """Test using no CA Set""" |  | ||||||
|         self.stage.certificate_authorities.clear() |  | ||||||
|         self.stage.save() |  | ||||||
|         res = self.client.get( |  | ||||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), |  | ||||||
|             headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)}, |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(res.status_code, 200) |  | ||||||
|         self.assertStageResponse(res, self.flow, component="ak-stage-access-denied") |  | ||||||
|  |  | ||||||
|     def test_no_cert_optional(self): |  | ||||||
|         """Test using no cert Set""" |  | ||||||
|         self.stage.mode = TLSMode.OPTIONAL |  | ||||||
|         self.stage.save() |  | ||||||
|         res = self.client.get( |  | ||||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(res.status_code, 200) |  | ||||||
|         self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) |  | ||||||
|  |  | ||||||
|     def test_enroll(self): |  | ||||||
|         """Test Enrollment flow""" |  | ||||||
|         self.flow.designation = FlowDesignation.ENROLLMENT |  | ||||||
|         self.flow.save() |  | ||||||
|         with self.assertFlowFinishes() as plan: |  | ||||||
|             res = self.client.get( |  | ||||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), |  | ||||||
|                 headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)}, |  | ||||||
|             ) |  | ||||||
|             self.assertEqual(res.status_code, 200) |  | ||||||
|             self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) |  | ||||||
|         self.assertEqual(plan().context[PLAN_CONTEXT_PROMPT], {"email": None, "name": "client"}) |  | ||||||
|         self.assertEqual( |  | ||||||
|             plan().context[PLAN_CONTEXT_CERTIFICATE], |  | ||||||
|             { |  | ||||||
|                 "fingerprint_sha1": "52:39:ca:1e:3a:1f:78:3a:9f:26:3b:c2:84:99:48:68:99:99:81:8a", |  | ||||||
|                 "fingerprint_sha256": ( |  | ||||||
|                     "c1:07:8b:7c:e9:02:57:87:1e:92:e5:81:83:21:bc:92:c7:47:65:e3:97:fb:05:97:6f:36:9e:b5:31:77:98:b7" |  | ||||||
|                 ), |  | ||||||
|                 "issuer": "OU=Self-signed,O=authentik,CN=authentik Test CA", |  | ||||||
|                 "serial_number": "70153443448884702681996102271549704759327537151", |  | ||||||
|                 "subject": "CN=client", |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
| @ -1,5 +0,0 @@ | |||||||
| """API URLs""" |  | ||||||
|  |  | ||||||
| from authentik.enterprise.stages.mtls.api import MutualTLSStageViewSet |  | ||||||
|  |  | ||||||
| api_urlpatterns = [("stages/mtls", MutualTLSStageViewSet)] |  | ||||||
| @ -8,7 +8,6 @@ from django.test import TestCase | |||||||
| from django.utils.timezone import now | from django.utils.timezone import now | ||||||
| from rest_framework.exceptions import ValidationError | from rest_framework.exceptions import ValidationError | ||||||
|  |  | ||||||
| from authentik.core.models import User |  | ||||||
| from authentik.enterprise.license import LicenseKey | from authentik.enterprise.license import LicenseKey | ||||||
| from authentik.enterprise.models import ( | from authentik.enterprise.models import ( | ||||||
|     THRESHOLD_READ_ONLY_WEEKS, |     THRESHOLD_READ_ONLY_WEEKS, | ||||||
| @ -72,9 +71,9 @@ class TestEnterpriseLicense(TestCase): | |||||||
|     ) |     ) | ||||||
|     def test_valid_multiple(self): |     def test_valid_multiple(self): | ||||||
|         """Check license verification""" |         """Check license verification""" | ||||||
|         lic = License.objects.create(key=generate_id(), expiry=expiry_valid) |         lic = License.objects.create(key=generate_id()) | ||||||
|         self.assertTrue(lic.status.status().is_valid) |         self.assertTrue(lic.status.status().is_valid) | ||||||
|         lic2 = License.objects.create(key=generate_id(), expiry=expiry_valid) |         lic2 = License.objects.create(key=generate_id()) | ||||||
|         self.assertTrue(lic2.status.status().is_valid) |         self.assertTrue(lic2.status.status().is_valid) | ||||||
|         total = LicenseKey.get_total() |         total = LicenseKey.get_total() | ||||||
|         self.assertEqual(total.internal_users, 200) |         self.assertEqual(total.internal_users, 200) | ||||||
| @ -233,9 +232,7 @@ class TestEnterpriseLicense(TestCase): | |||||||
|     ) |     ) | ||||||
|     def test_expiry_expired(self): |     def test_expiry_expired(self): | ||||||
|         """Check license verification""" |         """Check license verification""" | ||||||
|         User.objects.all().delete() |         License.objects.create(key=generate_id()) | ||||||
|         License.objects.all().delete() |  | ||||||
|         License.objects.create(key=generate_id(), expiry=expiry_expired) |  | ||||||
|         self.assertEqual(LicenseKey.get_total().summary().status, LicenseUsageStatus.EXPIRED) |         self.assertEqual(LicenseKey.get_total().summary().status, LicenseUsageStatus.EXPIRED) | ||||||
|  |  | ||||||
|     @patch( |     @patch( | ||||||
|  | |||||||
| @ -1,36 +1,28 @@ | |||||||
| """Events API Views""" | """Events API Views""" | ||||||
|  |  | ||||||
| from datetime import timedelta | from datetime import timedelta | ||||||
|  | from json import loads | ||||||
|  |  | ||||||
| import django_filters | import django_filters | ||||||
| from django.db.models import Count, ExpressionWrapper, F, QuerySet | from django.db.models.aggregates import Count | ||||||
| from django.db.models import DateTimeField as DjangoDateTimeField |  | ||||||
| from django.db.models.fields.json import KeyTextTransform, KeyTransform | from django.db.models.fields.json import KeyTextTransform, KeyTransform | ||||||
| from django.db.models.functions import TruncHour | from django.db.models.functions import ExtractDay, ExtractHour | ||||||
| from django.db.models.query_utils import Q | from django.db.models.query_utils import Q | ||||||
| from django.utils.timezone import now |  | ||||||
| from drf_spectacular.types import OpenApiTypes | from drf_spectacular.types import OpenApiTypes | ||||||
| from drf_spectacular.utils import OpenApiParameter, extend_schema | from drf_spectacular.utils import OpenApiParameter, extend_schema | ||||||
| from guardian.shortcuts import get_objects_for_user | from guardian.shortcuts import get_objects_for_user | ||||||
| from rest_framework.decorators import action | from rest_framework.decorators import action | ||||||
| from rest_framework.fields import ChoiceField, DateTimeField, DictField, IntegerField | from rest_framework.fields import DictField, IntegerField | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| from rest_framework.response import Response | from rest_framework.response import Response | ||||||
| from rest_framework.viewsets import ModelViewSet | from rest_framework.viewsets import ModelViewSet | ||||||
|  |  | ||||||
|  | from authentik.admin.api.metrics import CoordinateSerializer | ||||||
| from authentik.core.api.object_types import TypeCreateSerializer | from authentik.core.api.object_types import TypeCreateSerializer | ||||||
| from authentik.core.api.utils import ModelSerializer, PassiveSerializer | from authentik.core.api.utils import ModelSerializer, PassiveSerializer | ||||||
| from authentik.events.models import Event, EventAction | from authentik.events.models import Event, EventAction | ||||||
|  |  | ||||||
|  |  | ||||||
| class EventVolumeSerializer(PassiveSerializer): |  | ||||||
|     """Count of events of action created on day""" |  | ||||||
|  |  | ||||||
|     action = ChoiceField(choices=EventAction.choices) |  | ||||||
|     time = DateTimeField() |  | ||||||
|     count = IntegerField() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class EventSerializer(ModelSerializer): | class EventSerializer(ModelSerializer): | ||||||
|     """Event Serializer""" |     """Event Serializer""" | ||||||
|  |  | ||||||
| @ -61,7 +53,7 @@ class EventsFilter(django_filters.FilterSet): | |||||||
|     """Filter for events""" |     """Filter for events""" | ||||||
|  |  | ||||||
|     username = django_filters.CharFilter( |     username = django_filters.CharFilter( | ||||||
|         field_name="user", label="Username", method="filter_username" |         field_name="user", lookup_expr="username", label="Username" | ||||||
|     ) |     ) | ||||||
|     context_model_pk = django_filters.CharFilter( |     context_model_pk = django_filters.CharFilter( | ||||||
|         field_name="context", |         field_name="context", | ||||||
| @ -86,19 +78,12 @@ class EventsFilter(django_filters.FilterSet): | |||||||
|         field_name="action", |         field_name="action", | ||||||
|         lookup_expr="icontains", |         lookup_expr="icontains", | ||||||
|     ) |     ) | ||||||
|     actions = django_filters.MultipleChoiceFilter( |  | ||||||
|         field_name="action", |  | ||||||
|         choices=EventAction.choices, |  | ||||||
|     ) |  | ||||||
|     brand_name = django_filters.CharFilter( |     brand_name = django_filters.CharFilter( | ||||||
|         field_name="brand", |         field_name="brand", | ||||||
|         lookup_expr="name", |         lookup_expr="name", | ||||||
|         label="Brand name", |         label="Brand name", | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|     def filter_username(self, queryset, name, value): |  | ||||||
|         return queryset.filter(Q(user__username=value) | Q(context__username=value)) |  | ||||||
|  |  | ||||||
|     def filter_context_model_pk(self, queryset, name, value): |     def filter_context_model_pk(self, queryset, name, value): | ||||||
|         """Because we store the PK as UUID.hex, |         """Because we store the PK as UUID.hex, | ||||||
|         we need to remove the dashes that a client may send. We can't use a |         we need to remove the dashes that a client may send. We can't use a | ||||||
| @ -171,37 +156,45 @@ class EventViewSet(ModelViewSet): | |||||||
|         return Response(EventTopPerUserSerializer(instance=events, many=True).data) |         return Response(EventTopPerUserSerializer(instance=events, many=True).data) | ||||||
|  |  | ||||||
|     @extend_schema( |     @extend_schema( | ||||||
|         responses={200: EventVolumeSerializer(many=True)}, |         responses={200: CoordinateSerializer(many=True)}, | ||||||
|         parameters=[ |  | ||||||
|             OpenApiParameter( |  | ||||||
|                 "history_days", |  | ||||||
|                 type=OpenApiTypes.NUMBER, |  | ||||||
|                 location=OpenApiParameter.QUERY, |  | ||||||
|                 required=False, |  | ||||||
|                 default=7, |  | ||||||
|             ), |  | ||||||
|         ], |  | ||||||
|     ) |     ) | ||||||
|     @action(detail=False, methods=["GET"], pagination_class=None) |     @action(detail=False, methods=["GET"], pagination_class=None) | ||||||
|     def volume(self, request: Request) -> Response: |     def volume(self, request: Request) -> Response: | ||||||
|         """Get event volume for specified filters and timeframe""" |         """Get event volume for specified filters and timeframe""" | ||||||
|         queryset: QuerySet[Event] = self.filter_queryset(self.get_queryset()) |         queryset = self.filter_queryset(self.get_queryset()) | ||||||
|         delta = timedelta(days=7) |         return Response(queryset.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)) | ||||||
|         time_delta = request.query_params.get("history_days", 7) |  | ||||||
|         if time_delta: |     @extend_schema( | ||||||
|             delta = timedelta(days=min(int(time_delta), 60)) |         responses={200: CoordinateSerializer(many=True)}, | ||||||
|  |         filters=[], | ||||||
|  |         parameters=[ | ||||||
|  |             OpenApiParameter( | ||||||
|  |                 "action", | ||||||
|  |                 type=OpenApiTypes.STR, | ||||||
|  |                 location=OpenApiParameter.QUERY, | ||||||
|  |                 required=False, | ||||||
|  |             ), | ||||||
|  |             OpenApiParameter( | ||||||
|  |                 "query", | ||||||
|  |                 type=OpenApiTypes.STR, | ||||||
|  |                 location=OpenApiParameter.QUERY, | ||||||
|  |                 required=False, | ||||||
|  |             ), | ||||||
|  |         ], | ||||||
|  |     ) | ||||||
|  |     @action(detail=False, methods=["GET"], pagination_class=None) | ||||||
|  |     def per_month(self, request: Request): | ||||||
|  |         """Get the count of events per month""" | ||||||
|  |         filtered_action = request.query_params.get("action", EventAction.LOGIN) | ||||||
|  |         try: | ||||||
|  |             query = loads(request.query_params.get("query", "{}")) | ||||||
|  |         except ValueError: | ||||||
|  |             return Response(status=400) | ||||||
|         return Response( |         return Response( | ||||||
|             queryset.filter(created__gte=now() - delta) |             get_objects_for_user(request.user, "authentik_events.view_event") | ||||||
|             .annotate(hour=TruncHour("created")) |             .filter(action=filtered_action) | ||||||
|             .annotate( |             .filter(**query) | ||||||
|                 time=ExpressionWrapper( |             .get_events_per(timedelta(weeks=4), ExtractDay, 30) | ||||||
|                     F("hour") - (F("hour__hour") % 6) * timedelta(hours=1), |  | ||||||
|                     output_field=DjangoDateTimeField(), |  | ||||||
|                 ) |  | ||||||
|             ) |  | ||||||
|             .values("time", "action") |  | ||||||
|             .annotate(count=Count("pk")) |  | ||||||
|             .order_by("time", "action") |  | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     @extend_schema(responses={200: TypeCreateSerializer(many=True)}) |     @extend_schema(responses={200: TypeCreateSerializer(many=True)}) | ||||||
|  | |||||||
| @ -57,7 +57,7 @@ class LogEventSerializer(PassiveSerializer): | |||||||
|  |  | ||||||
|  |  | ||||||
| @contextmanager | @contextmanager | ||||||
| def capture_logs(log_default_output=True) -> Generator[list[LogEvent]]: | def capture_logs(log_default_output=True) -> Generator[list[LogEvent], None, None]: | ||||||
|     """Capture log entries created""" |     """Capture log entries created""" | ||||||
|     logs = [] |     logs = [] | ||||||
|     cap = LogCapture() |     cap = LogCapture() | ||||||
|  | |||||||
| @ -1,5 +1,7 @@ | |||||||
| """authentik events models""" | """authentik events models""" | ||||||
|  |  | ||||||
|  | import time | ||||||
|  | from collections import Counter | ||||||
| from datetime import timedelta | from datetime import timedelta | ||||||
| from difflib import get_close_matches | from difflib import get_close_matches | ||||||
| from functools import lru_cache | from functools import lru_cache | ||||||
| @ -9,6 +11,11 @@ from uuid import uuid4 | |||||||
|  |  | ||||||
| from django.apps import apps | from django.apps import apps | ||||||
| from django.db import connection, models | from django.db import connection, models | ||||||
|  | from django.db.models import Count, ExpressionWrapper, F | ||||||
|  | from django.db.models.fields import DurationField | ||||||
|  | from django.db.models.functions import Extract | ||||||
|  | from django.db.models.manager import Manager | ||||||
|  | from django.db.models.query import QuerySet | ||||||
| from django.http import HttpRequest | from django.http import HttpRequest | ||||||
| from django.http.request import QueryDict | from django.http.request import QueryDict | ||||||
| from django.utils.timezone import now | from django.utils.timezone import now | ||||||
| @ -117,6 +124,60 @@ class EventAction(models.TextChoices): | |||||||
|     CUSTOM_PREFIX = "custom_" |     CUSTOM_PREFIX = "custom_" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class EventQuerySet(QuerySet): | ||||||
|  |     """Custom events query set with helper functions""" | ||||||
|  |  | ||||||
|  |     def get_events_per( | ||||||
|  |         self, | ||||||
|  |         time_since: timedelta, | ||||||
|  |         extract: Extract, | ||||||
|  |         data_points: int, | ||||||
|  |     ) -> list[dict[str, int]]: | ||||||
|  |         """Get event count by hour in the last day, fill with zeros""" | ||||||
|  |         _now = now() | ||||||
|  |         max_since = timedelta(days=60) | ||||||
|  |         # Allow maximum of 60 days to limit load | ||||||
|  |         if time_since.total_seconds() > max_since.total_seconds(): | ||||||
|  |             time_since = max_since | ||||||
|  |         date_from = _now - time_since | ||||||
|  |         result = ( | ||||||
|  |             self.filter(created__gte=date_from) | ||||||
|  |             .annotate(age=ExpressionWrapper(_now - F("created"), output_field=DurationField())) | ||||||
|  |             .annotate(age_interval=extract("age")) | ||||||
|  |             .values("age_interval") | ||||||
|  |             .annotate(count=Count("pk")) | ||||||
|  |             .order_by("age_interval") | ||||||
|  |         ) | ||||||
|  |         data = Counter({int(d["age_interval"]): d["count"] for d in result}) | ||||||
|  |         results = [] | ||||||
|  |         interval_delta = time_since / data_points | ||||||
|  |         for interval in range(1, -data_points, -1): | ||||||
|  |             results.append( | ||||||
|  |                 { | ||||||
|  |                     "x_cord": time.mktime((_now + (interval_delta * interval)).timetuple()) * 1000, | ||||||
|  |                     "y_cord": data[interval * -1], | ||||||
|  |                 } | ||||||
|  |             ) | ||||||
|  |         return results | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class EventManager(Manager): | ||||||
|  |     """Custom helper methods for Events""" | ||||||
|  |  | ||||||
|  |     def get_queryset(self) -> QuerySet: | ||||||
|  |         """use custom queryset""" | ||||||
|  |         return EventQuerySet(self.model, using=self._db) | ||||||
|  |  | ||||||
|  |     def get_events_per( | ||||||
|  |         self, | ||||||
|  |         time_since: timedelta, | ||||||
|  |         extract: Extract, | ||||||
|  |         data_points: int, | ||||||
|  |     ) -> list[dict[str, int]]: | ||||||
|  |         """Wrap method from queryset""" | ||||||
|  |         return self.get_queryset().get_events_per(time_since, extract, data_points) | ||||||
|  |  | ||||||
|  |  | ||||||
| class Event(SerializerModel, ExpiringModel): | class Event(SerializerModel, ExpiringModel): | ||||||
|     """An individual Audit/Metrics/Notification/Error Event""" |     """An individual Audit/Metrics/Notification/Error Event""" | ||||||
|  |  | ||||||
| @ -132,6 +193,8 @@ class Event(SerializerModel, ExpiringModel): | |||||||
|     # Shadow the expires attribute from ExpiringModel to override the default duration |     # Shadow the expires attribute from ExpiringModel to override the default duration | ||||||
|     expires = models.DateTimeField(default=default_event_duration) |     expires = models.DateTimeField(default=default_event_duration) | ||||||
|  |  | ||||||
|  |     objects = EventManager() | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     def _get_app_from_request(request: HttpRequest) -> str: |     def _get_app_from_request(request: HttpRequest) -> str: | ||||||
|         if not isinstance(request, HttpRequest): |         if not isinstance(request, HttpRequest): | ||||||
|  | |||||||
| @ -1,18 +0,0 @@ | |||||||
| # Generated by Django 5.1.9 on 2025-05-27 12:52 |  | ||||||
|  |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_flows", "0027_auto_20231028_1424"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="flowtoken", |  | ||||||
|             name="revoke_on_execution", |  | ||||||
|             field=models.BooleanField(default=True), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -303,10 +303,9 @@ class FlowToken(Token): | |||||||
|  |  | ||||||
|     flow = models.ForeignKey(Flow, on_delete=models.CASCADE) |     flow = models.ForeignKey(Flow, on_delete=models.CASCADE) | ||||||
|     _plan = models.TextField() |     _plan = models.TextField() | ||||||
|     revoke_on_execution = models.BooleanField(default=True) |  | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     def pickle(plan: "FlowPlan") -> str: |     def pickle(plan) -> str: | ||||||
|         """Pickle into string""" |         """Pickle into string""" | ||||||
|         data = dumps(plan) |         data = dumps(plan) | ||||||
|         return b64encode(data).decode() |         return b64encode(data).decode() | ||||||
|  | |||||||
| @ -99,10 +99,9 @@ class ChallengeStageView(StageView): | |||||||
|             self.logger.debug("Got StageInvalidException", exc=exc) |             self.logger.debug("Got StageInvalidException", exc=exc) | ||||||
|             return self.executor.stage_invalid() |             return self.executor.stage_invalid() | ||||||
|         if not challenge.is_valid(): |         if not challenge.is_valid(): | ||||||
|             self.logger.error( |             self.logger.warning( | ||||||
|                 "f(ch): Invalid challenge", |                 "f(ch): Invalid challenge", | ||||||
|                 errors=challenge.errors, |                 errors=challenge.errors, | ||||||
|                 challenge=challenge.data, |  | ||||||
|             ) |             ) | ||||||
|         return HttpChallengeResponse(challenge) |         return HttpChallengeResponse(challenge) | ||||||
|  |  | ||||||
|  | |||||||
| @ -15,7 +15,6 @@ | |||||||
|         {% endblock %} |         {% endblock %} | ||||||
|         <link rel="stylesheet" type="text/css" href="{% static 'dist/sfe/bootstrap.min.css' %}"> |         <link rel="stylesheet" type="text/css" href="{% static 'dist/sfe/bootstrap.min.css' %}"> | ||||||
|         <meta name="sentry-trace" content="{{ sentry_trace }}" /> |         <meta name="sentry-trace" content="{{ sentry_trace }}" /> | ||||||
|         <link rel="prefetch" href="{{ flow_background_url }}" /> |  | ||||||
|         {% include "base/header_js.html" %} |         {% include "base/header_js.html" %} | ||||||
|         <style> |         <style> | ||||||
|           html, |           html, | ||||||
| @ -23,7 +22,7 @@ | |||||||
|             height: 100%; |             height: 100%; | ||||||
|           } |           } | ||||||
|           body { |           body { | ||||||
|             background-image: url("{{ flow_background_url }}"); |             background-image: url("{{ flow.background_url }}"); | ||||||
|             background-repeat: no-repeat; |             background-repeat: no-repeat; | ||||||
|             background-size: cover; |             background-size: cover; | ||||||
|           } |           } | ||||||
|  | |||||||
| @ -5,9 +5,9 @@ | |||||||
|  |  | ||||||
| {% block head_before %} | {% block head_before %} | ||||||
| {{ block.super }} | {{ block.super }} | ||||||
| <link rel="prefetch" href="{{ flow_background_url }}" /> | <link rel="prefetch" href="{{ flow.background_url }}" /> | ||||||
| {% if flow.compatibility_mode and not inspector %} | {% if flow.compatibility_mode and not inspector %} | ||||||
| <script>ShadyDOM = { force: true };</script> | <script>ShadyDOM = { force: !navigator.webdriver };</script> | ||||||
| {% endif %} | {% endif %} | ||||||
| {% include "base/header_js.html" %} | {% include "base/header_js.html" %} | ||||||
| <script> | <script> | ||||||
| @ -21,7 +21,7 @@ window.authentik.flow = { | |||||||
| <script src="{% versioned_script 'dist/flow/FlowInterface-%v.js' %}" type="module"></script> | <script src="{% versioned_script 'dist/flow/FlowInterface-%v.js' %}" type="module"></script> | ||||||
| <style> | <style> | ||||||
| :root { | :root { | ||||||
|     --ak-flow-background: url("{{ flow_background_url }}"); |     --ak-flow-background: url("{{ flow.background_url }}"); | ||||||
| } | } | ||||||
| </style> | </style> | ||||||
| {% endblock %} | {% endblock %} | ||||||
|  | |||||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user
	