Compare commits

docusaurus...workspace- (10 commits)
| Author | SHA1 | Date |
|---|---|---|
| | ab315504a4 | |
| | 89a24dc508 | |
| | 1fe72ee377 | |
| | 9f596079d9 | |
| | 6c444dffc6 | |
| | 3bcbb2c0f9 | |
| | 4e284818cf | |
| | ced3f16310 | |
| | b65aabafdc | |
| | b07439dbe7 | |
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2025.6.3
+current_version = 2025.4.1
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
@@ -21,8 +21,6 @@ optional_value = final
 
 [bumpversion:file:package.json]
 
-[bumpversion:file:package-lock.json]
-
 [bumpversion:file:docker-compose.yml]
 
 [bumpversion:file:schema.yml]
@@ -33,4 +31,6 @@ optional_value = final
 
 [bumpversion:file:internal/constants/constants.go]
 
+[bumpversion:file:web/src/common/constants.ts]
+
 [bumpversion:file:lifecycle/aws/template.yaml]
@@ -5,10 +5,8 @@ dist/**
 build/**
 build_docs/**
 *Dockerfile
-**/*Dockerfile
 blueprints/local
 .git
 !gen-ts-api/node_modules
 !gen-ts-api/dist/**
 !gen-go-api/
-.venv
@@ -7,9 +7,6 @@ charset = utf-8
 trim_trailing_whitespace = true
 insert_final_newline = true
 
-[*.toml]
-indent_size = 2
-
 [*.html]
 indent_size = 2
 
.github/actions/setup/action.yml (8 changes)

@@ -28,15 +28,15 @@ runs:
     - name: Setup node
       uses: actions/setup-node@v4
       with:
-        node-version-file: web/package.json
+        node-version-file: package.json
         cache: "npm"
-        cache-dependency-path: web/package-lock.json
+        cache-dependency-path: package-lock.json
     - name: Setup go
       uses: actions/setup-go@v5
       with:
         go-version-file: "go.mod"
     - name: Setup docker cache
-      uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7
+      uses: ScribeMD/docker-cache@0.5.0
       with:
         key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }}
     - name: Setup dependencies
@@ -44,7 +44,7 @@ runs:
       run: |
         export PSQL_TAG=${{ inputs.postgresql_version }}
        docker compose -f .github/actions/setup/docker-compose.yml up -d
-        cd web && npm ci
+        npm ci
     - name: Generate config
       shell: uv run python {0}
       run: |
.github/dependabot.yml (11 changes)

@@ -78,13 +78,13 @@ updates:
         patterns:
           - "@goauthentik/*"
   - package-ecosystem: npm
-    directory: "/docs"
+    directory: "/website"
     schedule:
       interval: daily
       time: "04:00"
     open-pull-requests-limit: 10
     commit-message:
-      prefix: "docs:"
+      prefix: "website:"
     labels:
       - dependencies
     groups:
@@ -100,13 +100,6 @@ updates:
       goauthentik:
         patterns:
           - "@goauthentik/*"
-      eslint:
-        patterns:
-          - "@eslint/*"
-          - "@typescript-eslint/*"
-          - "eslint-*"
-          - "eslint"
-          - "typescript-eslint"
   - package-ecosystem: npm
     directory: "/lifecycle/aws"
     schedule:
.github/pull_request_template.md (2 changes)

@@ -31,4 +31,4 @@ If changes to the frontend have been made
 If applicable
 
 -   [ ] The documentation has been updated
--   [ ] The documentation has been formatted (`make docs`)
+-   [ ] The documentation has been formatted (`make website`)
@@ -38,8 +38,6 @@ jobs:
       # Needed for attestation
       id-token: write
       attestations: write
-      # Needed for checkout
-      contents: read
     steps:
       - uses: actions/checkout@v4
       - uses: docker/setup-qemu-action@v3.6.0
.github/workflows/api-ts-publish.yml (15 changes)

@@ -20,8 +20,11 @@ jobs:
           token: ${{ steps.generate_token.outputs.token }}
       - uses: actions/setup-node@v4
         with:
-          node-version-file: web/package.json
+          node-version-file: package.json
           registry-url: "https://registry.npmjs.org"
+      - name: Prepare Dependencies
+        run: |
+          npm ci
       - name: Generate API Client
         run: make gen-client-ts
       - name: Publish package
@@ -32,15 +35,13 @@ jobs:
         env:
           NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }}
       - name: Upgrade /web
-        working-directory: web
         run: |
-          export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
-          npm i @goauthentik/api@$VERSION
+          export VERSION=`node -e 'console.log(require("./gen-ts-api/package.json").version)'`
+          npm i @goauthentik/api@$VERSION -w @goauthentik/web
       - name: Upgrade /web/packages/sfe
-        working-directory: web/packages/sfe
         run: |
-          export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
-          npm i @goauthentik/api@$VERSION
+          export VERSION=`node -e 'console.log(require("./gen-ts-api/package.json").version)'`
+          npm i @goauthentik/api@$VERSION -w @goauthentik/web-sfe
       - uses: peter-evans/create-pull-request@v7
         id: cpr
         with:
.github/workflows/ci-api-docs.yml (83 changes, file removed)

@@ -1,83 +0,0 @@
-name: authentik-ci-api-docs
-
-on:
-  push:
-    branches:
-      - main
-      - next
-      - version-*
-  pull_request:
-    branches:
-      - main
-      - version-*
-
-jobs:
-  lint:
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        command:
-          - prettier-check
-    steps:
-      - uses: actions/checkout@v4
-      - name: Install Dependencies
-        working-directory: docs/
-        run: npm ci
-      - name: Lint
-        working-directory: docs/
-        run: npm run ${{ matrix.command }}
-  build:
-    runs-on: ubuntu-latest
-
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
-        with:
-          node-version-file: docs/package.json
-          cache: "npm"
-          cache-dependency-path: docs/package-lock.json
-      - working-directory: docs/
-        name: Install Dependencies
-        run: npm ci
-      - name: Build API Docs via Docusaurus
-        working-directory: docs
-        run: npm run build -w api
-      - uses: actions/upload-artifact@v4
-        with:
-          name: api-docs
-          path: docs/api/build
-  deploy:
-    runs-on: ubuntu-latest
-    needs:
-      - lint
-      - build
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/download-artifact@v4
-        with:
-          name: api-docs
-          path: docs/api/build
-      - uses: actions/setup-node@v4
-        with:
-          node-version-file: docs/package.json
-          cache: "npm"
-          cache-dependency-path: docs/package-lock.json
-      - working-directory: docs/
-        name: Install Dependencies
-        run: npm ci
-      - name: Deploy Netlify (Production)
-        if: github.event_name == 'push' && github.ref == 'refs/heads/main'
-        env:
-          NETLIFY_SITE_ID: authentik-api-docs.netlify.app
-          NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
-        working-directory: docs/api
-        run: npx netlify deploy --no-build --prod
-
-      - name: Deploy Netlify (Preview)
-        if: github.event_name == 'pull_request' || github.ref != 'refs/heads/main'
-        env:
-          NETLIFY_SITE_ID: authentik-api-docs.netlify.app
-          NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
-        working-directory: docs/api
-        run: npx netlify deploy --no-build --alias=deploy-preview-${{ github.event.number }}
.github/workflows/ci-docs.yml (123 changes, file removed)

@@ -1,123 +0,0 @@
-name: authentik-ci-docs
-
-on:
-  push:
-    branches:
-      - main
-      - next
-      - version-*
-  pull_request:
-    branches:
-      - main
-      - version-*
-
-jobs:
-  lint:
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        command:
-          - prettier-check
-    steps:
-      - uses: actions/checkout@v4
-      - name: Install dependencies
-        working-directory: docs/
-        run: npm ci
-      - name: Lint
-        working-directory: docs/
-        run: npm run ${{ matrix.command }}
-  build-topics:
-    runs-on: ubuntu-latest
-
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
-        with:
-          node-version-file: docs/package.json
-          cache: "npm"
-          cache-dependency-path: docs/package-lock.json
-      - working-directory: docs/
-        name: Install Dependencies
-        run: npm ci
-      - name: Build Documentation via Docusaurus
-        working-directory: docs/
-        run: npm run build
-  build-integrations:
-    runs-on: ubuntu-latest
-
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
-        with:
-          node-version-file: docs/package.json
-          cache: "npm"
-          cache-dependency-path: docs/package-lock.json
-      - working-directory: docs/
-        name: Install Dependencies
-        run: npm ci
-      - name: Build Integrations via Docusaurus
-        working-directory: docs/
-        run: npm run build -w integrations
-  build-container:
-    if: ${{ github.repository != 'goauthentik/authentik-internal' }}
-    runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload container images to ghcr.io
-      packages: write
-      # Needed for attestation
-      id-token: write
-      attestations: write
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          ref: ${{ github.event.pull_request.head.sha }}
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.6.0
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-      - name: prepare variables
-        uses: ./.github/actions/docker-push-variables
-        id: ev
-        env:
-          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
-        with:
-          image-name: ghcr.io/goauthentik/dev-docs
-      - name: Login to Container Registry
-        if: ${{ steps.ev.outputs.shouldPush == 'true' }}
-        uses: docker/login-action@v3
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: Build Docker Image
-        id: push
-        uses: docker/build-push-action@v6
-        with:
-          tags: ${{ steps.ev.outputs.imageTags }}
-          file: docs/Dockerfile
-          push: ${{ steps.ev.outputs.shouldPush == 'true' }}
-          platforms: linux/amd64,linux/arm64
-          context: .
-          cache-from: type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache
-          cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache,mode=max' || '' }}
-      - uses: actions/attest-build-provenance@v2
-        id: attest
-        if: ${{ steps.ev.outputs.shouldPush == 'true' }}
-        with:
-          subject-name: ${{ steps.ev.outputs.attestImageNames }}
-          subject-digest: ${{ steps.push.outputs.digest }}
-          push-to-registry: true
-  ci-docs-mark:
-    if: always()
-    needs:
-      - lint
-      - build-topics
-      - build-integrations
-      - build-container
-    runs-on: ubuntu-latest
-    steps:
-      - uses: re-actors/alls-green@release/v1
-        with:
-          jobs: ${{ toJSON(needs) }}
-          allowed-skips: ${{ github.repository == 'goauthentik/authentik-internal' && 'build-container' || '[]' }}
.github/workflows/ci-main-daily.yml (3 changes)

@@ -9,15 +9,14 @@ on:
 
 jobs:
   test-container:
-    if: ${{ github.repository != 'goauthentik/authentik-internal' }}
     runs-on: ubuntu-latest
     strategy:
       fail-fast: false
       matrix:
         version:
           - docs
-          - version-2025-4
           - version-2025-2
+          - version-2024-12
     steps:
       - uses: actions/checkout@v4
       - run: |
.github/workflows/ci-main.yml (21 changes)

@@ -62,7 +62,6 @@ jobs:
         psql:
           - 15-alpine
           - 16-alpine
-          - 17-alpine
         run_id: [1, 2, 3, 4, 5]
     steps:
       - uses: actions/checkout@v4
@@ -117,7 +116,6 @@ jobs:
         psql:
           - 15-alpine
           - 16-alpine
-          - 17-alpine
         run_id: [1, 2, 3, 4, 5]
     steps:
       - uses: actions/checkout@v4
@@ -195,23 +193,22 @@ jobs:
       - uses: actions/checkout@v4
       - name: Setup authentik env
         uses: ./.github/actions/setup
-      - name: Setup e2e env (chrome, etc)
+      - name: Setup E2E environment (Chrome, etc)
         run: |
           docker compose -f tests/e2e/docker-compose.yml up -d --quiet-pull
       - id: cache-web
         uses: actions/cache@v4
         with:
           path: web/dist
-          key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
+          key: ${{ runner.os }}-web-${{ hashFiles('package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
-      - name: prepare web ui
+      - name: Prepare Web UI
         if: steps.cache-web.outputs.cache-hit != 'true'
-        working-directory: web
         run: |
           npm ci
-          make -C .. gen-client-ts
-          npm run build
-          npm run build:sfe
+          make gen-client-ts
+          npm run build -w @goauthentik/web
+          npm run build -w @goauthentik/web-sfe
-      - name: run e2e
+      - name: Run E2E
         run: |
           uv run coverage run manage.py test ${{ matrix.job.glob }}
           uv run coverage xml
@@ -247,13 +244,11 @@ jobs:
       # Needed for attestation
       id-token: write
       attestations: write
-      # Needed for checkout
-      contents: read
     needs: ci-core-mark
     uses: ./.github/workflows/_reusable-docker-build.yaml
     secrets: inherit
     with:
-      image_name: ${{ github.repository == 'goauthentik/authentik-internal' && 'ghcr.io/goauthentik/internal-server' || 'ghcr.io/goauthentik/dev-server' }}
+      image_name: ghcr.io/goauthentik/dev-server
       release: false
   pr-comment:
     needs:
.github/workflows/ci-outpost.yml (24 changes)

@@ -24,9 +24,9 @@ jobs:
         run: |
           # Create folder structure for go embeds
           mkdir -p web/dist
-          mkdir -p docs/help
-          touch web/dist/test docs/help/test
+          mkdir -p website/help
+          touch web/dist/test website/help/test
-      - name: Generate API
+      - name: Generate Golang API Client
         run: make gen-client-go
       - name: golangci-lint
         uses: golangci/golangci-lint-action@v8
@@ -43,7 +43,7 @@ jobs:
           go-version-file: "go.mod"
       - name: Setup authentik env
         uses: ./.github/actions/setup
-      - name: Generate API
+      - name: Generate Golang API Client
         run: make gen-client-go
       - name: Go unittests
         run: |
@@ -59,7 +59,6 @@ jobs:
         with:
           jobs: ${{ toJSON(needs) }}
   build-container:
-    if: ${{ github.repository != 'goauthentik/authentik-internal' }}
     timeout-minutes: 120
     needs:
       - ci-outpost-mark
@@ -100,7 +99,7 @@ jobs:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
-      - name: Generate API
+      - name: Generate Golang API Client
         run: make gen-client-go
       - name: Build Docker Image
         id: push
@@ -146,16 +145,17 @@ jobs:
           go-version-file: "go.mod"
       - uses: actions/setup-node@v4
         with:
-          node-version-file: web/package.json
+          node-version-file: package.json
           cache: "npm"
-          cache-dependency-path: web/package-lock.json
+          cache-dependency-path: package-lock.json
-      - name: Generate API
+      - name: Generate Golang API Client
         run: make gen-client-go
-      - name: Build web
-        working-directory: web/
+      - name: Prepare Dependencies
         run: |
           npm ci
-          npm run build-proxy
+      - name: Run ESBuild
+        run: |
+          npm run build-proxy -w @goauthentik/web
       - name: Build outpost
         run: |
           set -x
.github/workflows/ci-web.yml (43 changes)

@@ -19,47 +19,45 @@ jobs:
       matrix:
         command:
           - lint
-          - lint:lockfile
-          - tsc
           - prettier-check
         project:
           - web
         include:
-          - command: tsc
-            project: web
           - command: lit-analyse
             project: web
     steps:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v4
         with:
-          node-version-file: ${{ matrix.project }}/package.json
+          node-version-file: package.json
           cache: "npm"
-          cache-dependency-path: ${{ matrix.project }}/package-lock.json
+          cache-dependency-path: package-lock.json
-      - working-directory: ${{ matrix.project }}/
+      - name: Prepare Dependencies
         run: |
           npm ci
-      - name: Generate API
+      - name: Generate TypeScript API
         run: make gen-client-ts
-      - name: Lint
-        working-directory: ${{ matrix.project }}/
-        run: npm run ${{ matrix.command }}
+      - name: Lint Project
+        run: |
+          npm run build-locales -w @goauthentik/web
+          npm run lint:types
+      - name: Lint Web
+        run: npm run ${{ matrix.command }} -w @goauthentik/web
   build:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v4
         with:
-          node-version-file: web/package.json
+          node-version-file: package.json
           cache: "npm"
-          cache-dependency-path: web/package-lock.json
+          cache-dependency-path: package-lock.json
-      - working-directory: web/
+      - name: Prepare Dependencies
         run: npm ci
-      - name: Generate API
+      - name: Generate TypeScript API
         run: make gen-client-ts
       - name: build
-        working-directory: web/
-        run: npm run build
+        run: npm run build -w @goauthentik/web
   ci-web-mark:
     if: always()
     needs:
@@ -78,13 +76,12 @@ jobs:
       - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
         with:
-          node-version-file: web/package.json
+          node-version-file: package.json
           cache: "npm"
-          cache-dependency-path: web/package-lock.json
+          cache-dependency-path: package-lock.json
-      - working-directory: web/
+      - name: Prepare Dependencies
         run: npm ci
-      - name: Generate API
+      - name: Generate TypeScript API
         run: make gen-client-ts
       - name: test
-        working-directory: web/
-        run: npm run test || exit 0
+        run: npm run test -w @goauthentik/web || exit 0
.github/workflows/ci-website.yml (65 changes, new file)

@@ -0,0 +1,65 @@
+name: authentik-ci-website
+
+on:
+  push:
+    branches:
+      - main
+      - next
+      - version-*
+  pull_request:
+    branches:
+      - main
+      - version-*
+
+jobs:
+  lint:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-node@v4
+        with:
+          node-version-file: package.json
+          cache: "npm"
+          cache-dependency-path: package-lock.json
+      - name: Prepare Dependencies
+        run: npm ci
+      - name: Lint
+        run: npm run prettier-check -w @goauthentik/docs
+  test:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-node@v4
+        with:
+          node-version-file: package.json
+          cache: "npm"
+          cache-dependency-path: package-lock.json
+      - name: Prepare Dependencies
+        run: npm ci
+      - name: test
+        run: npm test -w @goauthentik/docs
+  build:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - uses: actions/setup-node@v4
+        with:
+          node-version-file: package.json
+          cache: "npm"
+          cache-dependency-path: package-lock.json
+      - name: Prepare Dependencies
+        run: npm ci
+      - name: Run Docusaurus
+        run: npm run build -w @goauthentik/docs
+  ci-website-mark:
+    if: always()
+    needs:
+      - lint
+      - test
+      - build
+    runs-on: ubuntu-latest
+    steps:
+      - uses: re-actors/alls-green@release/v1
+        with:
+          jobs: ${{ toJSON(needs) }}
.github/workflows/codeql-analysis.yml (2 changes)

@@ -2,7 +2,7 @@ name: "CodeQL"
 
 on:
   push:
-    branches: [main, next, version*]
+    branches: [main, "*", next, version*]
   pull_request:
     branches: [main]
   schedule:
.github/workflows/packages-npm-publish.yml (4 changes)

@@ -7,7 +7,7 @@ on:
       - packages/eslint-config/**
       - packages/prettier-config/**
       - packages/tsconfig/**
-      - web/packages/esbuild-plugin-live-reload/**
+      - packages/web/esbuild-plugin-live-reload/**
   workflow_dispatch:
 jobs:
   publish:
@@ -21,7 +21,7 @@ jobs:
           - packages/eslint-config
           - packages/prettier-config
           - packages/tsconfig
-          - web/packages/esbuild-plugin-live-reload
+          - packages/web/esbuild-plugin-live-reload
     steps:
       - uses: actions/checkout@v4
         with:
.github/workflows/release-publish.yml (57 changes)

@@ -20,49 +20,6 @@ jobs:
       release: true
       registry_dockerhub: true
       registry_ghcr: true
-  build-docs:
-    runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload container images to ghcr.io
-      packages: write
-      # Needed for attestation
-      id-token: write
-      attestations: write
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.6.0
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-      - name: prepare variables
-        uses: ./.github/actions/docker-push-variables
-        id: ev
-        env:
-          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
-        with:
-          image-name: ghcr.io/goauthentik/docs
-      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: Build Docker Image
-        id: push
-        uses: docker/build-push-action@v6
-        with:
-          tags: ${{ steps.ev.outputs.imageTags }}
-          file: docs/Dockerfile
-          push: true
-          platforms: linux/amd64,linux/arm64
-          context: .
-      - uses: actions/attest-build-provenance@v2
-        id: attest
-        if: true
-        with:
-          subject-name: ${{ steps.ev.outputs.attestImageNames }}
-          subject-digest: ${{ steps.push.outputs.digest }}
-          push-to-registry: true
   build-outpost:
     runs-on: ubuntu-latest
     permissions:
@@ -149,14 +106,14 @@ jobs:
           go-version-file: "go.mod"
       - uses: actions/setup-node@v4
         with:
-          node-version-file: web/package.json
+          node-version-file: package.json
           cache: "npm"
-          cache-dependency-path: web/package-lock.json
+          cache-dependency-path: package-lock.json
-      - name: Build web
-        working-directory: web/
+      - name: Prepare Dependencies
+        run: npm ci
+      - name: Run ESBuild (Proxy)
         run: |
-          npm ci
-          npm run build-proxy
+          npm run build-proxy -w @goauthentik/web
       - name: Build outpost
         run: |
           set -x
@@ -236,6 +193,6 @@ jobs:
           SENTRY_ORG: authentik-security-inc
           SENTRY_PROJECT: authentik
         with:
-          release: authentik@${{ steps.ev.outputs.version }}
+          version: authentik@${{ steps.ev.outputs.version }}
           sourcemaps: "./web/dist"
           url_prefix: "~/static/dist"
.github/workflows/repo-mirror-cleanup.yml (21 changes, file removed)

@@ -1,21 +0,0 @@
-name: "authentik-repo-mirror-cleanup"
-
-on:
-  workflow_dispatch:
-
-jobs:
-  to_internal:
-    if: ${{ github.repository != 'goauthentik/authentik-internal' }}
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-      - if: ${{ env.MIRROR_KEY != '' }}
-        uses: BeryJu/repository-mirroring-action@5cf300935bc2e068f73ea69bcc411a8a997208eb
-        with:
-          target_repo_url: git@github.com:goauthentik/authentik-internal.git
-          ssh_private_key: ${{ secrets.GH_MIRROR_KEY }}
-          args: --tags --force --prune
-        env:
-          MIRROR_KEY: ${{ secrets.GH_MIRROR_KEY }}
.github/workflows/repo-mirror.yml (9 changes)

@@ -11,10 +11,11 @@ jobs:
         with:
           fetch-depth: 0
       - if: ${{ env.MIRROR_KEY != '' }}
-        uses: BeryJu/repository-mirroring-action@5cf300935bc2e068f73ea69bcc411a8a997208eb
+        uses: pixta-dev/repository-mirroring-action@v1
         with:
-          target_repo_url: git@github.com:goauthentik/authentik-internal.git
-          ssh_private_key: ${{ secrets.GH_MIRROR_KEY }}
-          args: --tags --force
+          target_repo_url:
+            git@github.com:goauthentik/authentik-internal.git
+          ssh_private_key:
+            ${{ secrets.GH_MIRROR_KEY }}
         env:
           MIRROR_KEY: ${{ secrets.GH_MIRROR_KEY }}
@@ -16,7 +16,6 @@ env:
 
 jobs:
   compile:
-    if: ${{ github.repository != 'goauthentik/authentik-internal' }}
     runs-on: ubuntu-latest
     steps:
       - id: generate_token
@@ -33,15 +32,25 @@ jobs:
         if: ${{ github.event_name == 'pull_request' }}
       - name: Setup authentik env
         uses: ./.github/actions/setup
-      - name: Generate API
+      - uses: actions/setup-node@v4
+        with:
+          node-version-file: package.json
+          cache: "npm"
+          cache-dependency-path: package-lock.json
+      - name: Prepare Dependencies
+        run: npm ci
+      - name: Generate TypeScript API
         run: make gen-client-ts
-      - name: run extract
+      - name: Run extract
         run: |
           uv run make i18n-extract
-      - name: run compile
+      - name: Run UV compile
         run: |
           uv run ak compilemessages
-          make web-check-compile
+      - name: Lint Project
+        run: |
+          npm run build-locales -w @goauthentik/web
+          npm run lint:types
       - name: Create Pull Request
         if: ${{ github.event_name != 'pull_request' }}
         uses: peter-evans/create-pull-request@v7
@@ -10,7 +10,7 @@ coverage
 dist
 out
 .docusaurus
-docs/api/reference
+website/docs/developer-docs/api/**/*
 
 ## Environment
 *.env
@@ -36,12 +36,19 @@ coverage
 *.mdx
 *.md
 
-## Import order matters
-poly.ts
-src/locale-codes.ts
-src/locales/
-
 # Storybook
 storybook-static/
 .storybook/css-import-maps*
 
+# JSON Schemas
+schemas/**/*.json
+blueprints/**/*.json
+authentik/**/*.json
+lifecycle/**/*.json
+
+# Locales
+web/src/locale-codes.ts
+web/src/locales/
+
+# Wireit's cache
+.wireit
.vscode/extensions.json (2 changes)

@@ -17,6 +17,6 @@
         "ms-python.vscode-pylance",
         "redhat.vscode-yaml",
         "Tobermory.es6-string-html",
-        "unifiedjs.vscode-mdx",
+        "unifiedjs.vscode-mdx"
     ]
 }
.vscode/settings.json (4 changes)

@@ -6,15 +6,13 @@
         "!Context scalar",
         "!Enumerate sequence",
         "!Env scalar",
-        "!Env sequence",
         "!Find sequence",
         "!Format sequence",
         "!If sequence",
         "!Index scalar",
         "!KeyOf scalar",
         "!Value scalar",
-        "!AtIndex scalar",
-        "!ParseJSON scalar"
+        "!AtIndex scalar"
     ],
     "typescript.preferences.importModuleSpecifier": "non-relative",
     "typescript.preferences.importModuleSpecifierEnding": "index",
.vscode/tasks.json (8 changes)

@@ -43,15 +43,15 @@
             "group": "build"
         },
         {
-            "label": "authentik/docs: make",
+            "label": "authentik/website: make",
             "command": "make",
-            "args": ["docs"],
+            "args": ["website"],
             "group": "build"
         },
         {
-            "label": "authentik/docs: watch",
+            "label": "authentik/website: watch",
             "command": "make",
-            "args": ["docs-watch"],
+            "args": ["website-watch"],
             "group": "build",
             "presentation": {
                 "panel": "dedicated",
@@ -32,8 +32,8 @@ tests/wdio/                     @goauthentik/frontend
 locale/                         @goauthentik/backend @goauthentik/frontend
 web/xliff/                      @goauthentik/backend @goauthentik/frontend
 # Docs & Website
-docs/                           @goauthentik/docs
+website/                        @goauthentik/docs
 CODE_OF_CONDUCT.md              @goauthentik/docs
 # Security
 SECURITY.md                     @goauthentik/security @goauthentik/docs
-docs/security/                  @goauthentik/security @goauthentik/docs
+website/docs/security/          @goauthentik/security @goauthentik/docs
Dockerfile (58 changes)

@@ -1,28 +1,39 @@
 # syntax=docker/dockerfile:1
 
-# Stage 1: Build webui
-FROM --platform=${BUILDPLATFORM} docker.io/library/node:24-slim AS node-builder
+# Stage 1: Build Node packages
+FROM --platform=${BUILDPLATFORM} docker.io/library/node:24-slim AS node-packages
 
 ARG GIT_BUILD_HASH
 ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
 
+WORKDIR /work
+
+COPY ./SECURITY.md /work
+COPY ./schema.yml /work
+COPY ./docker-compose.yml /work
+COPY ./blueprints /work/blueprints/
+COPY ./package.json /work
+COPY ./package-lock.json /work
+COPY ./tsconfig.json /work
+COPY ./packages/ /work/packages/
+COPY ./web /work/web/
+COPY ./website /work/website/
+COPY ./gen-ts-api /work/gen-ts-api/
+
+RUN --mount=type=cache,id=npm-node,sharing=shared,target=/root/.npm \
+    npm ci
+
+RUN cd ./gen-ts-api && npm link
+
+RUN npm link @goauthentik/api -w @goauthentik/web
+
 ENV NODE_ENV=production
 
-WORKDIR /work/web
-
-RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \
-    --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \
-    --mount=type=bind,target=/work/web/packages/sfe/package.json,src=./web/packages/sfe/package.json \
-    --mount=type=bind,target=/work/web/scripts,src=./web/scripts \
-    --mount=type=cache,id=npm-ak,sharing=shared,target=/root/.npm \
-    npm ci --include=dev
-
-COPY ./package.json /work
-COPY ./web /work/web/
-COPY ./docs /work/docs/
-COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api
-
-RUN npm run build && \
-    npm run build:sfe
+RUN npm run build -w @goauthentik/web
+RUN npm run build -w @goauthentik/web-sfe
+
+RUN npm run build:api -w @goauthentik/docs
+RUN npm run build:docusaurus -w @goauthentik/docs
 
 # Stage 2: Build go proxy
 FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.24-bookworm AS go-builder
@@ -49,8 +60,8 @@ RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \
 COPY ./cmd /go/src/goauthentik.io/cmd
 COPY ./authentik/lib /go/src/goauthentik.io/authentik/lib
 COPY ./web/static.go /go/src/goauthentik.io/web/static.go
-COPY --from=node-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt
-COPY --from=node-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt
+COPY --from=node-packages /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt
+COPY --from=node-packages /work/web/security.txt /go/src/goauthentik.io/web/security.txt
 COPY ./internal /go/src/goauthentik.io/internal
 COPY ./go.mod /go/src/goauthentik.io/go.mod
 COPY ./go.sum /go/src/goauthentik.io/go.sum
@@ -75,9 +86,9 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
     /bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
 
 # Stage 4: Download uv
-FROM ghcr.io/astral-sh/uv:0.7.17 AS uv
+FROM ghcr.io/astral-sh/uv:0.7.6 AS uv
 # Stage 5: Base python image
-FROM ghcr.io/goauthentik/fips-python:3.13.5-slim-bookworm-fips AS python-base
+FROM ghcr.io/goauthentik/fips-python:3.13.3-slim-bookworm-fips AS python-base
 
 ENV VENV_PATH="/ak-root/.venv" \
     PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \
@@ -168,8 +179,9 @@ COPY ./lifecycle/ /lifecycle
 COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf
 COPY --from=go-builder /go/authentik /bin/authentik
 COPY --from=python-deps /ak-root/.venv /ak-root/.venv
-COPY --from=node-builder /work/web/dist/ /web/dist/
-COPY --from=node-builder /work/web/authentik/ /web/authentik/
+COPY --from=node-packages /work/web/dist/ /web/dist/
+COPY --from=node-packages /work/web/authentik/ /web/authentik/
|  | COPY --from=node-packages /work/website/build/ /website/help/ | ||||||
| COPY --from=geoip /usr/share/GeoIP /geoip | COPY --from=geoip /usr/share/GeoIP /geoip | ||||||
|  |  | ||||||
| USER 1000 | USER 1000 | ||||||
|  | |||||||
							
								
								
									
2  LICENSE
							| @ -1,7 +1,7 @@ | |||||||
| Copyright (c) 2023 Jens Langhammer | Copyright (c) 2023 Jens Langhammer | ||||||
|  |  | ||||||
| Portions of this software are licensed as follows: | Portions of this software are licensed as follows: | ||||||
| * All content residing under the "docs/" directory of this repository is licensed under "Creative Commons: CC BY-SA 4.0 license". | * All content residing under the "website/" directory of this repository is licensed under "Creative Commons: CC BY-SA 4.0 license". | ||||||
| * All content that resides under the "authentik/enterprise/" directory of this repository, if that directory exists, is licensed under the license defined in "authentik/enterprise/LICENSE". | * All content that resides under the "authentik/enterprise/" directory of this repository, if that directory exists, is licensed under the license defined in "authentik/enterprise/LICENSE". | ||||||
| * All client-side JavaScript (when served directly or after being compiled, arranged, augmented, or combined), is licensed under the "MIT Expat" license. | * All client-side JavaScript (when served directly or after being compiled, arranged, augmented, or combined), is licensed under the "MIT Expat" license. | ||||||
| * All third party components incorporated into the authentik are licensed under the original license provided by the owner of the applicable component. | * All third party components incorporated into the authentik are licensed under the original license provided by the owner of the applicable component. | ||||||
|  | |||||||
							
								
								
									
64  Makefile
							| @ -1,6 +1,6 @@ | |||||||
| .PHONY: gen dev-reset all clean test web docs | .PHONY: gen dev-reset all clean test web website | ||||||
|  |  | ||||||
| SHELL := /usr/bin/env bash | SHELL := /bin/bash | ||||||
| .SHELLFLAGS += ${SHELLFLAGS} -e -o pipefail | .SHELLFLAGS += ${SHELLFLAGS} -e -o pipefail | ||||||
| PWD = $(shell pwd) | PWD = $(shell pwd) | ||||||
| UID = $(shell id -u) | UID = $(shell id -u) | ||||||
| @ -70,10 +70,10 @@ core-i18n-extract: | |||||||
| 		--ignore internal \ | 		--ignore internal \ | ||||||
| 		--ignore ${GEN_API_TS} \ | 		--ignore ${GEN_API_TS} \ | ||||||
| 		--ignore ${GEN_API_GO} \ | 		--ignore ${GEN_API_GO} \ | ||||||
| 		--ignore docs \ | 		--ignore website \ | ||||||
| 		-l en | 		-l en | ||||||
|  |  | ||||||
| install: web-install docs-install core-install  ## Install all required dependencies for `web`, `docs` and `core` | install: npm-install core-install  ## Install all required dependencies for `web`, `website` and `core` | ||||||
|  |  | ||||||
| dev-drop-db: | dev-drop-db: | ||||||
| 	dropdb -U ${pg_user} -h ${pg_host} ${pg_name} | 	dropdb -U ${pg_user} -h ${pg_host} ${pg_name} | ||||||
| @ -86,10 +86,6 @@ dev-create-db: | |||||||
|  |  | ||||||
| dev-reset: dev-drop-db dev-create-db migrate  ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state. | dev-reset: dev-drop-db dev-create-db migrate  ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state. | ||||||
|  |  | ||||||
| update-test-mmdb:  ## Update test GeoIP and ASN Databases |  | ||||||
| 	curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-ASN-Test.mmdb -o ${PWD}/tests/GeoLite2-ASN-Test.mmdb |  | ||||||
| 	curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-City-Test.mmdb -o ${PWD}/tests/GeoLite2-City-Test.mmdb |  | ||||||
|  |  | ||||||
| ######################### | ######################### | ||||||
| ## API Schema | ## API Schema | ||||||
| ######################### | ######################### | ||||||
| @ -98,7 +94,7 @@ gen-build:  ## Extract the schema from the database | |||||||
| 	AUTHENTIK_DEBUG=true \ | 	AUTHENTIK_DEBUG=true \ | ||||||
| 		AUTHENTIK_TENANTS__ENABLED=true \ | 		AUTHENTIK_TENANTS__ENABLED=true \ | ||||||
| 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | ||||||
| 		uv run ak make_blueprint_schema --file blueprints/schema.json | 		uv run ak make_blueprint_schema > blueprints/schema.json | ||||||
| 	AUTHENTIK_DEBUG=true \ | 	AUTHENTIK_DEBUG=true \ | ||||||
| 		AUTHENTIK_TENANTS__ENABLED=true \ | 		AUTHENTIK_TENANTS__ENABLED=true \ | ||||||
| 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | ||||||
| @ -150,9 +146,8 @@ gen-client-ts: gen-clean-ts  ## Build and install the authentik API for Typescri | |||||||
| 		--additional-properties=npmVersion=${NPM_VERSION} \ | 		--additional-properties=npmVersion=${NPM_VERSION} \ | ||||||
| 		--git-repo-id authentik \ | 		--git-repo-id authentik \ | ||||||
| 		--git-user-id goauthentik | 		--git-user-id goauthentik | ||||||
|  | 	cd ./${GEN_API_TS} && npm link | ||||||
| 	cd ${PWD}/${GEN_API_TS} && npm link | 	npm link @goauthentik/api -w @goauthentik/web | ||||||
| 	cd ${PWD}/web && npm link @goauthentik/api |  | ||||||
|  |  | ||||||
| gen-client-py: gen-clean-py ## Build and install the authentik API for Python | gen-client-py: gen-clean-py ## Build and install the authentik API for Python | ||||||
| 	docker run \ | 	docker run \ | ||||||
| @ -187,56 +182,49 @@ gen: gen-build gen-client-ts | |||||||
| ## Web | ## Web | ||||||
| ######################### | ######################### | ||||||
|  |  | ||||||
| web-build: web-install  ## Build the Authentik UI | web-build: npm-install  ## Build the Authentik UI | ||||||
| 	cd web && npm run build | 	npm run build -w @goauthentik/web | ||||||
|  |  | ||||||
| web: web-lint-fix web-lint web-check-compile  ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it | web: web-lint-fix web-lint web-check-compile  ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it | ||||||
|  |  | ||||||
| web-install:  ## Install the necessary libraries to build the Authentik UI | npm-install:  ## Install the necessary libraries to build the Authentik UI | ||||||
| 	cd web && npm ci | 	npm ci | ||||||
|  |  | ||||||
| web-test: ## Run tests for the Authentik UI | web-test: ## Run tests for the Authentik UI | ||||||
| 	cd web && npm run test | 	npm run test -w @goauthentik/web | ||||||
|  |  | ||||||
| web-watch:  ## Build and watch the Authentik UI for changes, updating automatically | web-watch:  ## Build and watch the Authentik UI for changes, updating automatically | ||||||
| 	rm -rf web/dist/ | 	npm run watch -w @goauthentik/web | ||||||
| 	mkdir web/dist/ |  | ||||||
| 	touch web/dist/.gitkeep |  | ||||||
| 	cd web && npm run watch |  | ||||||
|  |  | ||||||
| web-storybook-watch:  ## Build and run the storybook documentation server | web-storybook-watch:  ## Build and run the storybook documentation server | ||||||
| 	cd web && npm run storybook | 	npm run storybook -w @goauthentik/web | ||||||
|  |  | ||||||
| web-lint-fix: | web-lint-fix: | ||||||
| 	cd web && npm run prettier | 	npm run prettier -w @goauthentik/web | ||||||
|  |  | ||||||
| web-lint: | web-lint: | ||||||
| 	cd web && npm run lint | 	npm run lint -w @goauthentik/web | ||||||
| 	cd web && npm run lit-analyse |  | ||||||
|  |  | ||||||
| web-check-compile: | web-check-compile: | ||||||
| 	cd web && npm run tsc | 	npm run lint:types | ||||||
|  |  | ||||||
| web-i18n-extract: | web-i18n-extract: | ||||||
| 	cd web && npm run extract-locales | 	npm run extract-locales -w @goauthentik/web | ||||||
|  |  | ||||||
| ######################### | ######################### | ||||||
| ## Docs | ## Website | ||||||
| ######################### | ######################### | ||||||
|  |  | ||||||
| docs: docs-lint-fix docs-build  ## Automatically fix formatting issues in the Authentik docs source code, lint the code, and compile it | website: website-lint-fix website-build  ## Automatically fix formatting issues in the Authentik website/docs source code, lint the code, and compile it | ||||||
|  |  | ||||||
| docs-install: | website-lint-fix: lint-codespell | ||||||
| 	npm ci --prefix docs | 	npm run prettier --prefix website | ||||||
|  |  | ||||||
| docs-lint-fix: lint-codespell | website-build: | ||||||
| 	npm run prettier --prefix docs | 	npm run build --prefix website | ||||||
|  |  | ||||||
| docs-build: | website-watch:  ## Build and watch the documentation website, updating automatically | ||||||
| 	npm run build --prefix docs | 	npm run watch --prefix website | ||||||
|  |  | ||||||
| docs-watch:  ## Build and watch the documentation website, updating automatically |  | ||||||
| 	npm run watch --prefix docs |  | ||||||
|  |  | ||||||
| ######################### | ######################### | ||||||
| ## Docker | ## Docker | ||||||
|  | |||||||
| @ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni | |||||||
|  |  | ||||||
| | Version   | Supported | | | Version   | Supported | | ||||||
| | --------- | --------- | | | --------- | --------- | | ||||||
|  | | 2025.2.x  | ✅        | | ||||||
| | 2025.4.x  | ✅        | | | 2025.4.x  | ✅        | | ||||||
| | 2025.6.x  | ✅        | |  | ||||||
|  |  | ||||||
| ## Reporting a Vulnerability | ## Reporting a Vulnerability | ||||||
|  |  | ||||||
|  | |||||||
| @ -2,7 +2,7 @@ | |||||||
|  |  | ||||||
| from os import environ | from os import environ | ||||||
|  |  | ||||||
| __version__ = "2025.6.3" | __version__ = "2025.4.1" | ||||||
| ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
							
								
								
									
79  authentik/admin/api/metrics.py  Normal file
							| @ -0,0 +1,79 @@ | |||||||
|  | """authentik administration metrics""" | ||||||
|  |  | ||||||
|  | from datetime import timedelta | ||||||
|  |  | ||||||
|  | from django.db.models.functions import ExtractHour | ||||||
|  | from drf_spectacular.utils import extend_schema, extend_schema_field | ||||||
|  | from guardian.shortcuts import get_objects_for_user | ||||||
|  | from rest_framework.fields import IntegerField, SerializerMethodField | ||||||
|  | from rest_framework.permissions import IsAuthenticated | ||||||
|  | from rest_framework.request import Request | ||||||
|  | from rest_framework.response import Response | ||||||
|  | from rest_framework.views import APIView | ||||||
|  |  | ||||||
|  | from authentik.core.api.utils import PassiveSerializer | ||||||
|  | from authentik.events.models import EventAction | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class CoordinateSerializer(PassiveSerializer): | ||||||
|  |     """Coordinates for diagrams""" | ||||||
|  |  | ||||||
|  |     x_cord = IntegerField(read_only=True) | ||||||
|  |     y_cord = IntegerField(read_only=True) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class LoginMetricsSerializer(PassiveSerializer): | ||||||
|  |     """Login Metrics per 1h""" | ||||||
|  |  | ||||||
|  |     logins = SerializerMethodField() | ||||||
|  |     logins_failed = SerializerMethodField() | ||||||
|  |     authorizations = SerializerMethodField() | ||||||
|  |  | ||||||
|  |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|  |     def get_logins(self, _): | ||||||
|  |         """Get successful logins per 8 hours for the last 7 days""" | ||||||
|  |         user = self.context["user"] | ||||||
|  |         return ( | ||||||
|  |             get_objects_for_user(user, "authentik_events.view_event").filter( | ||||||
|  |                 action=EventAction.LOGIN | ||||||
|  |             ) | ||||||
|  |             # 3 data points per day, so 8 hour spans | ||||||
|  |             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|  |     def get_logins_failed(self, _): | ||||||
|  |         """Get failed logins per 8 hours for the last 7 days""" | ||||||
|  |         user = self.context["user"] | ||||||
|  |         return ( | ||||||
|  |             get_objects_for_user(user, "authentik_events.view_event").filter( | ||||||
|  |                 action=EventAction.LOGIN_FAILED | ||||||
|  |             ) | ||||||
|  |             # 3 data points per day, so 8 hour spans | ||||||
|  |             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|  |     def get_authorizations(self, _): | ||||||
|  |         """Get successful authorizations per 8 hours for the last 7 days""" | ||||||
|  |         user = self.context["user"] | ||||||
|  |         return ( | ||||||
|  |             get_objects_for_user(user, "authentik_events.view_event").filter( | ||||||
|  |                 action=EventAction.AUTHORIZE_APPLICATION | ||||||
|  |             ) | ||||||
|  |             # 3 data points per day, so 8 hour spans | ||||||
|  |             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class AdministrationMetricsViewSet(APIView): | ||||||
|  |     """Login Metrics per 1h""" | ||||||
|  |  | ||||||
|  |     permission_classes = [IsAuthenticated] | ||||||
|  |  | ||||||
|  |     @extend_schema(responses={200: LoginMetricsSerializer(many=False)}) | ||||||
|  |     def get(self, request: Request) -> Response: | ||||||
|  |         """Login Metrics per 1h""" | ||||||
|  |         serializer = LoginMetricsSerializer(True) | ||||||
|  |         serializer.context["user"] = request.user | ||||||
|  |         return Response(serializer.data) | ||||||
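The new `AdministrationMetricsViewSet` above returns three series of `CoordinateSerializer` points (logins, failed logins, authorizations), each covering the last 7 days in 8-hour buckets. A minimal sketch of consuming it from a script, assuming the view is mounted at `/api/v3/admin/metrics/` and that a bearer token is available; the host, token, and exact path are assumptions, not part of this diff:

```python
# Hedged sketch: query the admin metrics endpoint added above.
import requests

AUTHENTIK_URL = "https://authentik.example.com"  # hypothetical host
TOKEN = "your-api-token"  # hypothetical API token

resp = requests.get(
    f"{AUTHENTIK_URL}/api/v3/admin/metrics/",  # path assumed from the urls.py hunk below
    headers={"Authorization": f"Bearer {TOKEN}"},
    timeout=10,
)
resp.raise_for_status()
metrics = resp.json()

# Each key holds 21 coordinates (7 days x 3 points per day), e.g.
# {"logins": [{"x_cord": ..., "y_cord": ...}, ...], "logins_failed": [...], "authorizations": [...]}
for point in metrics["logins"]:
    print(point["x_cord"], point["y_cord"])
```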
| @ -1,7 +1,6 @@ | |||||||
| """authentik administration overview""" | """authentik administration overview""" | ||||||
|  |  | ||||||
| from django.core.cache import cache | from django.core.cache import cache | ||||||
| from django_tenants.utils import get_public_schema_name |  | ||||||
| from drf_spectacular.utils import extend_schema | from drf_spectacular.utils import extend_schema | ||||||
| from packaging.version import parse | from packaging.version import parse | ||||||
| from rest_framework.fields import SerializerMethodField | from rest_framework.fields import SerializerMethodField | ||||||
| @ -14,7 +13,6 @@ from authentik import __version__, get_build_hash | |||||||
| from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version | from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version | ||||||
| from authentik.core.api.utils import PassiveSerializer | from authentik.core.api.utils import PassiveSerializer | ||||||
| from authentik.outposts.models import Outpost | from authentik.outposts.models import Outpost | ||||||
| from authentik.tenants.utils import get_current_tenant |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class VersionSerializer(PassiveSerializer): | class VersionSerializer(PassiveSerializer): | ||||||
| @ -37,8 +35,6 @@ class VersionSerializer(PassiveSerializer): | |||||||
|  |  | ||||||
|     def get_version_latest(self, _) -> str: |     def get_version_latest(self, _) -> str: | ||||||
|         """Get latest version from cache""" |         """Get latest version from cache""" | ||||||
|         if get_current_tenant().schema_name == get_public_schema_name(): |  | ||||||
|             return __version__ |  | ||||||
|         version_in_cache = cache.get(VERSION_CACHE_KEY) |         version_in_cache = cache.get(VERSION_CACHE_KEY) | ||||||
|         if not version_in_cache:  # pragma: no cover |         if not version_in_cache:  # pragma: no cover | ||||||
|             update_latest_version.delay() |             update_latest_version.delay() | ||||||
|  | |||||||
| @ -14,19 +14,3 @@ class AuthentikAdminConfig(ManagedAppConfig): | |||||||
|     label = "authentik_admin" |     label = "authentik_admin" | ||||||
|     verbose_name = "authentik Admin" |     verbose_name = "authentik Admin" | ||||||
|     default = True |     default = True | ||||||
|  |  | ||||||
|     @ManagedAppConfig.reconcile_global |  | ||||||
|     def clear_update_notifications(self): |  | ||||||
|         """Clear update notifications on startup if the notification was for the version |  | ||||||
|         we're running now.""" |  | ||||||
|         from packaging.version import parse |  | ||||||
|  |  | ||||||
|         from authentik.admin.tasks import LOCAL_VERSION |  | ||||||
|         from authentik.events.models import EventAction, Notification |  | ||||||
|  |  | ||||||
|         for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE): |  | ||||||
|             if "new_version" not in notification.event.context: |  | ||||||
|                 continue |  | ||||||
|             notification_version = notification.event.context["new_version"] |  | ||||||
|             if LOCAL_VERSION >= parse(notification_version): |  | ||||||
|                 notification.delete() |  | ||||||
|  | |||||||
| @ -1,7 +1,6 @@ | |||||||
| """authentik admin settings""" | """authentik admin settings""" | ||||||
|  |  | ||||||
| from celery.schedules import crontab | from celery.schedules import crontab | ||||||
| from django_tenants.utils import get_public_schema_name |  | ||||||
|  |  | ||||||
| from authentik.lib.utils.time import fqdn_rand | from authentik.lib.utils.time import fqdn_rand | ||||||
|  |  | ||||||
| @ -9,7 +8,6 @@ CELERY_BEAT_SCHEDULE = { | |||||||
|     "admin_latest_version": { |     "admin_latest_version": { | ||||||
|         "task": "authentik.admin.tasks.update_latest_version", |         "task": "authentik.admin.tasks.update_latest_version", | ||||||
|         "schedule": crontab(minute=fqdn_rand("admin_latest_version"), hour="*"), |         "schedule": crontab(minute=fqdn_rand("admin_latest_version"), hour="*"), | ||||||
|         "tenant_schemas": [get_public_schema_name()], |  | ||||||
|         "options": {"queue": "authentik_scheduled"}, |         "options": {"queue": "authentik_scheduled"}, | ||||||
|     } |     } | ||||||
| } | } | ||||||
|  | |||||||
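The beat schedule above uses `fqdn_rand` to pick a deterministic, per-host minute, so many instances do not all poll for the latest version at the same moment. A rough illustration of that idea (a hand-rolled stand-in, not authentik's actual `authentik.lib.utils.time.fqdn_rand`):

```python
# Illustrative only: deterministic per-host jitter for a cron minute.
import socket
from hashlib import sha256


def fqdn_rand_sketch(key: str, upper: int = 60) -> int:
    """Hash the host FQDN plus a task key into a stable value in [0, upper)."""
    seed = f"{socket.getfqdn()}:{key}".encode()
    return int(sha256(seed).hexdigest(), 16) % upper


print(fqdn_rand_sketch("admin_latest_version"))  # e.g. 37, stable for this host
```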
| @ -1,6 +1,7 @@ | |||||||
| """authentik admin tasks""" | """authentik admin tasks""" | ||||||
|  |  | ||||||
| from django.core.cache import cache | from django.core.cache import cache | ||||||
|  | from django.db import DatabaseError, InternalError, ProgrammingError | ||||||
| from django.utils.translation import gettext_lazy as _ | from django.utils.translation import gettext_lazy as _ | ||||||
| from packaging.version import parse | from packaging.version import parse | ||||||
| from requests import RequestException | from requests import RequestException | ||||||
| @ -8,7 +9,7 @@ from structlog.stdlib import get_logger | |||||||
|  |  | ||||||
| from authentik import __version__, get_build_hash | from authentik import __version__, get_build_hash | ||||||
| from authentik.admin.apps import PROM_INFO | from authentik.admin.apps import PROM_INFO | ||||||
| from authentik.events.models import Event, EventAction | from authentik.events.models import Event, EventAction, Notification | ||||||
| from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task | from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task | ||||||
| from authentik.lib.config import CONFIG | from authentik.lib.config import CONFIG | ||||||
| from authentik.lib.utils.http import get_http_session | from authentik.lib.utils.http import get_http_session | ||||||
| @ -32,6 +33,20 @@ def _set_prom_info(): | |||||||
|     ) |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @CELERY_APP.task( | ||||||
|  |     throws=(DatabaseError, ProgrammingError, InternalError), | ||||||
|  | ) | ||||||
|  | def clear_update_notifications(): | ||||||
|  |     """Clear update notifications on startup if the notification was for the version | ||||||
|  |     we're running now.""" | ||||||
|  |     for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE): | ||||||
|  |         if "new_version" not in notification.event.context: | ||||||
|  |             continue | ||||||
|  |         notification_version = notification.event.context["new_version"] | ||||||
|  |         if LOCAL_VERSION >= parse(notification_version): | ||||||
|  |             notification.delete() | ||||||
|  |  | ||||||
|  |  | ||||||
| @CELERY_APP.task(bind=True, base=SystemTask) | @CELERY_APP.task(bind=True, base=SystemTask) | ||||||
| @prefill_task | @prefill_task | ||||||
| def update_latest_version(self: SystemTask): | def update_latest_version(self: SystemTask): | ||||||
|  | |||||||
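With the cleanup logic defined as a Celery task on one side of this diff (rather than a `ManagedAppConfig` reconcile hook on the other), it can be dispatched like any other task. A minimal sketch, assuming a worker and broker are configured as in a normal authentik deployment:

```python
# Hedged sketch: dispatching the task shown above from a Django shell.
from authentik.admin.tasks import clear_update_notifications

clear_update_notifications.delay()  # run asynchronously on a Celery worker

# For debugging, a Celery task can also be executed inline in the current process:
clear_update_notifications()
```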
| @ -36,6 +36,11 @@ class TestAdminAPI(TestCase): | |||||||
|         body = loads(response.content) |         body = loads(response.content) | ||||||
|         self.assertEqual(len(body), 0) |         self.assertEqual(len(body), 0) | ||||||
|  |  | ||||||
|  |     def test_metrics(self): | ||||||
|  |         """Test metrics API""" | ||||||
|  |         response = self.client.get(reverse("authentik_api:admin_metrics")) | ||||||
|  |         self.assertEqual(response.status_code, 200) | ||||||
|  |  | ||||||
|     def test_apps(self): |     def test_apps(self): | ||||||
|         """Test apps API""" |         """Test apps API""" | ||||||
|         response = self.client.get(reverse("authentik_api:apps-list")) |         response = self.client.get(reverse("authentik_api:apps-list")) | ||||||
|  | |||||||
| @ -1,12 +1,12 @@ | |||||||
| """test admin tasks""" | """test admin tasks""" | ||||||
|  |  | ||||||
| from django.apps import apps |  | ||||||
| from django.core.cache import cache | from django.core.cache import cache | ||||||
| from django.test import TestCase | from django.test import TestCase | ||||||
| from requests_mock import Mocker | from requests_mock import Mocker | ||||||
|  |  | ||||||
| from authentik.admin.tasks import ( | from authentik.admin.tasks import ( | ||||||
|     VERSION_CACHE_KEY, |     VERSION_CACHE_KEY, | ||||||
|  |     clear_update_notifications, | ||||||
|     update_latest_version, |     update_latest_version, | ||||||
| ) | ) | ||||||
| from authentik.events.models import Event, EventAction | from authentik.events.models import Event, EventAction | ||||||
| @ -72,13 +72,12 @@ class TestAdminTasks(TestCase): | |||||||
|  |  | ||||||
|     def test_clear_update_notifications(self): |     def test_clear_update_notifications(self): | ||||||
|         """Test clear of previous notification""" |         """Test clear of previous notification""" | ||||||
|         admin_config = apps.get_app_config("authentik_admin") |  | ||||||
|         Event.objects.create( |         Event.objects.create( | ||||||
|             action=EventAction.UPDATE_AVAILABLE, context={"new_version": "99999999.9999999.9999999"} |             action=EventAction.UPDATE_AVAILABLE, context={"new_version": "99999999.9999999.9999999"} | ||||||
|         ) |         ) | ||||||
|         Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={"new_version": "1.1.1"}) |         Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={"new_version": "1.1.1"}) | ||||||
|         Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={}) |         Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={}) | ||||||
|         admin_config.clear_update_notifications() |         clear_update_notifications() | ||||||
|         self.assertFalse( |         self.assertFalse( | ||||||
|             Event.objects.filter( |             Event.objects.filter( | ||||||
|                 action=EventAction.UPDATE_AVAILABLE, context__new_version="1.1" |                 action=EventAction.UPDATE_AVAILABLE, context__new_version="1.1" | ||||||
|  | |||||||
| @ -3,6 +3,7 @@ | |||||||
| from django.urls import path | from django.urls import path | ||||||
|  |  | ||||||
| from authentik.admin.api.meta import AppsViewSet, ModelViewSet | from authentik.admin.api.meta import AppsViewSet, ModelViewSet | ||||||
|  | from authentik.admin.api.metrics import AdministrationMetricsViewSet | ||||||
| from authentik.admin.api.system import SystemView | from authentik.admin.api.system import SystemView | ||||||
| from authentik.admin.api.version import VersionView | from authentik.admin.api.version import VersionView | ||||||
| from authentik.admin.api.version_history import VersionHistoryViewSet | from authentik.admin.api.version_history import VersionHistoryViewSet | ||||||
| @ -11,6 +12,11 @@ from authentik.admin.api.workers import WorkerView | |||||||
| api_urlpatterns = [ | api_urlpatterns = [ | ||||||
|     ("admin/apps", AppsViewSet, "apps"), |     ("admin/apps", AppsViewSet, "apps"), | ||||||
|     ("admin/models", ModelViewSet, "models"), |     ("admin/models", ModelViewSet, "models"), | ||||||
|  |     path( | ||||||
|  |         "admin/metrics/", | ||||||
|  |         AdministrationMetricsViewSet.as_view(), | ||||||
|  |         name="admin_metrics", | ||||||
|  |     ), | ||||||
|     path("admin/version/", VersionView.as_view(), name="admin_version"), |     path("admin/version/", VersionView.as_view(), name="admin_version"), | ||||||
|     ("admin/version/history", VersionHistoryViewSet, "version_history"), |     ("admin/version/history", VersionHistoryViewSet, "version_history"), | ||||||
|     path("admin/workers/", WorkerView.as_view(), name="admin_workers"), |     path("admin/workers/", WorkerView.as_view(), name="admin_workers"), | ||||||
|  | |||||||
| @ -1,13 +1,12 @@ | |||||||
| """authentik API AppConfig""" | """authentik API AppConfig""" | ||||||
|  |  | ||||||
| from authentik.blueprints.apps import ManagedAppConfig | from django.apps import AppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikAPIConfig(ManagedAppConfig): | class AuthentikAPIConfig(AppConfig): | ||||||
|     """authentik API Config""" |     """authentik API Config""" | ||||||
|  |  | ||||||
|     name = "authentik.api" |     name = "authentik.api" | ||||||
|     label = "authentik_api" |     label = "authentik_api" | ||||||
|     mountpoint = "api/" |     mountpoint = "api/" | ||||||
|     verbose_name = "authentik API" |     verbose_name = "authentik API" | ||||||
|     default = True |  | ||||||
|  | |||||||
| @ -72,33 +72,20 @@ class Command(BaseCommand): | |||||||
|                     "additionalProperties": True, |                     "additionalProperties": True, | ||||||
|                 }, |                 }, | ||||||
|                 "entries": { |                 "entries": { | ||||||
|                     "anyOf": [ |                     "type": "array", | ||||||
|                         { |                     "items": { | ||||||
|                             "type": "array", |                         "oneOf": [], | ||||||
|                             "items": {"$ref": "#/$defs/blueprint_entry"}, |                     }, | ||||||
|                         }, |  | ||||||
|                         { |  | ||||||
|                             "type": "object", |  | ||||||
|                             "additionalProperties": { |  | ||||||
|                                 "type": "array", |  | ||||||
|                                 "items": {"$ref": "#/$defs/blueprint_entry"}, |  | ||||||
|                             }, |  | ||||||
|                         }, |  | ||||||
|                     ], |  | ||||||
|                 }, |                 }, | ||||||
|             }, |             }, | ||||||
|             "$defs": {"blueprint_entry": {"oneOf": []}}, |             "$defs": {}, | ||||||
|         } |         } | ||||||
|  |  | ||||||
|     def add_arguments(self, parser): |  | ||||||
|         parser.add_argument("--file", type=str) |  | ||||||
|  |  | ||||||
|     @no_translations |     @no_translations | ||||||
|     def handle(self, *args, file: str, **options): |     def handle(self, *args, **options): | ||||||
|         """Generate JSON Schema for blueprints""" |         """Generate JSON Schema for blueprints""" | ||||||
|         self.build() |         self.build() | ||||||
|         with open(file, "w") as _schema: |         self.stdout.write(dumps(self.schema, indent=4, default=Command.json_default)) | ||||||
|             _schema.write(dumps(self.schema, indent=4, default=Command.json_default)) |  | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     def json_default(value: Any) -> Any: |     def json_default(value: Any) -> Any: | ||||||
| @ -125,7 +112,7 @@ class Command(BaseCommand): | |||||||
|                 } |                 } | ||||||
|             ) |             ) | ||||||
|             model_path = f"{model._meta.app_label}.{model._meta.model_name}" |             model_path = f"{model._meta.app_label}.{model._meta.model_name}" | ||||||
|             self.schema["$defs"]["blueprint_entry"]["oneOf"].append( |             self.schema["properties"]["entries"]["items"]["oneOf"].append( | ||||||
|                 self.template_entry(model_path, model, serializer) |                 self.template_entry(model_path, model, serializer) | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
| @ -147,7 +134,7 @@ class Command(BaseCommand): | |||||||
|                 "id": {"type": "string"}, |                 "id": {"type": "string"}, | ||||||
|                 "state": { |                 "state": { | ||||||
|                     "type": "string", |                     "type": "string", | ||||||
|                     "enum": sorted([s.value for s in BlueprintEntryDesiredState]), |                     "enum": [s.value for s in BlueprintEntryDesiredState], | ||||||
|                     "default": "present", |                     "default": "present", | ||||||
|                 }, |                 }, | ||||||
|                 "conditions": {"type": "array", "items": {"type": "boolean"}}, |                 "conditions": {"type": "array", "items": {"type": "boolean"}}, | ||||||
| @ -218,7 +205,7 @@ class Command(BaseCommand): | |||||||
|                 "type": "object", |                 "type": "object", | ||||||
|                 "required": ["permission"], |                 "required": ["permission"], | ||||||
|                 "properties": { |                 "properties": { | ||||||
|                     "permission": {"type": "string", "enum": sorted(perms)}, |                     "permission": {"type": "string", "enum": perms}, | ||||||
|                     "user": {"type": "integer"}, |                     "user": {"type": "integer"}, | ||||||
|                     "role": {"type": "string"}, |                     "role": {"type": "string"}, | ||||||
|                 }, |                 }, | ||||||
|  | |||||||
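One branch of this diff writes the blueprint schema to a `--file` argument, while the other prints it to stdout (the Makefile hunk above redirects it with `uv run ak make_blueprint_schema > blueprints/schema.json`). A small sketch of capturing the stdout variant from Python instead of shell redirection; it assumes Django settings are already loaded (e.g. inside a management context), and the output file name is only illustrative:

```python
# Hedged sketch: capture the stdout-based make_blueprint_schema output.
from io import StringIO

from django.core.management import call_command

out = StringIO()
call_command("make_blueprint_schema", stdout=out)  # command writes via self.stdout

with open("blueprints/schema.json", "w") as schema_file:
    schema_file.write(out.getvalue())
```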
| @ -1,11 +1,10 @@ | |||||||
| version: 1 | version: 1 | ||||||
| entries: | entries: | ||||||
|   foo: |     - identifiers: | ||||||
|       - identifiers: |           name: "%(id)s" | ||||||
|             name: "%(id)s" |           slug: "%(id)s" | ||||||
|             slug: "%(id)s" |       model: authentik_flows.flow | ||||||
|         model: authentik_flows.flow |       state: present | ||||||
|         state: present |       attrs: | ||||||
|         attrs: |           designation: stage_configuration | ||||||
|             designation: stage_configuration |           title: foo | ||||||
|             title: foo |  | ||||||
|  | |||||||
| @ -37,7 +37,6 @@ entries: | |||||||
|     - attrs: |     - attrs: | ||||||
|           attributes: |           attributes: | ||||||
|               env_null: !Env [bar-baz, null] |               env_null: !Env [bar-baz, null] | ||||||
|               json_parse: !ParseJSON '{"foo": "bar"}' |  | ||||||
|               policy_pk1: |               policy_pk1: | ||||||
|                   !Format [ |                   !Format [ | ||||||
|                       "%s-%s", |                       "%s-%s", | ||||||
|  | |||||||
| @ -1,14 +0,0 @@ | |||||||
| from django.test import TestCase |  | ||||||
|  |  | ||||||
| from authentik.blueprints.apps import ManagedAppConfig |  | ||||||
| from authentik.enterprise.apps import EnterpriseConfig |  | ||||||
| from authentik.lib.utils.reflection import get_apps |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestManagedAppConfig(TestCase): |  | ||||||
|     def test_apps_use_managed_app_config(self): |  | ||||||
|         for app in get_apps(): |  | ||||||
|             if app.name.startswith("authentik.enterprise"): |  | ||||||
|                 self.assertIn(EnterpriseConfig, app.__class__.__bases__) |  | ||||||
|             else: |  | ||||||
|                 self.assertIn(ManagedAppConfig, app.__class__.__bases__) |  | ||||||
| @ -35,6 +35,6 @@ def blueprint_tester(file_name: Path) -> Callable: | |||||||
|  |  | ||||||
|  |  | ||||||
| for blueprint_file in Path("blueprints/").glob("**/*.yaml"): | for blueprint_file in Path("blueprints/").glob("**/*.yaml"): | ||||||
|     if "local" in str(blueprint_file) or "testing" in str(blueprint_file): |     if "local" in str(blueprint_file): | ||||||
|         continue |         continue | ||||||
|     setattr(TestPackaged, f"test_blueprint_{blueprint_file}", blueprint_tester(blueprint_file)) |     setattr(TestPackaged, f"test_blueprint_{blueprint_file}", blueprint_tester(blueprint_file)) | ||||||
|  | |||||||
| @ -5,6 +5,7 @@ from collections.abc import Callable | |||||||
| from django.apps import apps | from django.apps import apps | ||||||
| from django.test import TestCase | from django.test import TestCase | ||||||
|  |  | ||||||
|  | from authentik.blueprints.v1.importer import is_model_allowed | ||||||
| from authentik.lib.models import SerializerModel | from authentik.lib.models import SerializerModel | ||||||
| from authentik.providers.oauth2.models import RefreshToken | from authentik.providers.oauth2.models import RefreshToken | ||||||
|  |  | ||||||
| @ -21,13 +22,10 @@ def serializer_tester_factory(test_model: type[SerializerModel]) -> Callable: | |||||||
|             return |             return | ||||||
|         model_class = test_model() |         model_class = test_model() | ||||||
|         self.assertTrue(isinstance(model_class, SerializerModel)) |         self.assertTrue(isinstance(model_class, SerializerModel)) | ||||||
|         # Models that have subclasses don't have to have a serializer |  | ||||||
|         if len(test_model.__subclasses__()) > 0: |  | ||||||
|             return |  | ||||||
|         self.assertIsNotNone(model_class.serializer) |         self.assertIsNotNone(model_class.serializer) | ||||||
|         if model_class.serializer.Meta().model == RefreshToken: |         if model_class.serializer.Meta().model == RefreshToken: | ||||||
|             return |             return | ||||||
|         self.assertTrue(issubclass(test_model, model_class.serializer.Meta().model)) |         self.assertEqual(model_class.serializer.Meta().model, test_model) | ||||||
|  |  | ||||||
|     return tester |     return tester | ||||||
|  |  | ||||||
| @ -36,6 +34,6 @@ for app in apps.get_app_configs(): | |||||||
|     if not app.label.startswith("authentik"): |     if not app.label.startswith("authentik"): | ||||||
|         continue |         continue | ||||||
|     for model in app.get_models(): |     for model in app.get_models(): | ||||||
|         if not issubclass(model, SerializerModel): |         if not is_model_allowed(model): | ||||||
|             continue |             continue | ||||||
|         setattr(TestModels, f"test_{app.label}_{model.__name__}", serializer_tester_factory(model)) |         setattr(TestModels, f"test_{app.label}_{model.__name__}", serializer_tester_factory(model)) | ||||||
|  | |||||||
| @ -215,7 +215,6 @@ class TestBlueprintsV1(TransactionTestCase): | |||||||
|                     }, |                     }, | ||||||
|                     "nested_context": "context-nested-value", |                     "nested_context": "context-nested-value", | ||||||
|                     "env_null": None, |                     "env_null": None, | ||||||
|                     "json_parse": {"foo": "bar"}, |  | ||||||
|                     "at_index_sequence": "foo", |                     "at_index_sequence": "foo", | ||||||
|                     "at_index_sequence_default": "non existent", |                     "at_index_sequence_default": "non existent", | ||||||
|                     "at_index_mapping": 2, |                     "at_index_mapping": 2, | ||||||
|  | |||||||
| @ -6,7 +6,6 @@ from copy import copy | |||||||
| from dataclasses import asdict, dataclass, field, is_dataclass | from dataclasses import asdict, dataclass, field, is_dataclass | ||||||
| from enum import Enum | from enum import Enum | ||||||
| from functools import reduce | from functools import reduce | ||||||
| from json import JSONDecodeError, loads |  | ||||||
| from operator import ixor | from operator import ixor | ||||||
| from os import getenv | from os import getenv | ||||||
| from typing import Any, Literal, Union | from typing import Any, Literal, Union | ||||||
| @ -192,18 +191,11 @@ class Blueprint: | |||||||
|     """Dataclass used for a full export""" |     """Dataclass used for a full export""" | ||||||
|  |  | ||||||
|     version: int = field(default=1) |     version: int = field(default=1) | ||||||
|     entries: list[BlueprintEntry] | dict[str, list[BlueprintEntry]] = field(default_factory=list) |     entries: list[BlueprintEntry] = field(default_factory=list) | ||||||
|     context: dict = field(default_factory=dict) |     context: dict = field(default_factory=dict) | ||||||
|  |  | ||||||
|     metadata: BlueprintMetadata | None = field(default=None) |     metadata: BlueprintMetadata | None = field(default=None) | ||||||
|  |  | ||||||
|     def iter_entries(self) -> Iterable[BlueprintEntry]: |  | ||||||
|         if isinstance(self.entries, dict): |  | ||||||
|             for _section, entries in self.entries.items(): |  | ||||||
|                 yield from entries |  | ||||||
|         else: |  | ||||||
|             yield from self.entries |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class YAMLTag: | class YAMLTag: | ||||||
|     """Base class for all YAML Tags""" |     """Base class for all YAML Tags""" | ||||||
| @ -234,7 +226,7 @@ class KeyOf(YAMLTag): | |||||||
|         self.id_from = node.value |         self.id_from = node.value | ||||||
|  |  | ||||||
|     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: |     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: | ||||||
|         for _entry in blueprint.iter_entries(): |         for _entry in blueprint.entries: | ||||||
|             if _entry.id == self.id_from and _entry._state.instance: |             if _entry.id == self.id_from and _entry._state.instance: | ||||||
|                 # Special handling for PolicyBindingModels, as they'll have a different PK |                 # Special handling for PolicyBindingModels, as they'll have a different PK | ||||||
|                 # which is used when creating policy bindings |                 # which is used when creating policy bindings | ||||||
| @ -292,22 +284,6 @@ class Context(YAMLTag): | |||||||
|         return value |         return value | ||||||
|  |  | ||||||
|  |  | ||||||
| class ParseJSON(YAMLTag): |  | ||||||
|     """Parse JSON from context/env/etc value""" |  | ||||||
|  |  | ||||||
|     raw: str |  | ||||||
|  |  | ||||||
|     def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None: |  | ||||||
|         super().__init__() |  | ||||||
|         self.raw = node.value |  | ||||||
|  |  | ||||||
|     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: |  | ||||||
|         try: |  | ||||||
|             return loads(self.raw) |  | ||||||
|         except JSONDecodeError as exc: |  | ||||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Format(YAMLTag): | class Format(YAMLTag): | ||||||
|     """Format a string""" |     """Format a string""" | ||||||
|  |  | ||||||
| @ -683,7 +659,6 @@ class BlueprintLoader(SafeLoader): | |||||||
|         self.add_constructor("!Value", Value) |         self.add_constructor("!Value", Value) | ||||||
|         self.add_constructor("!Index", Index) |         self.add_constructor("!Index", Index) | ||||||
|         self.add_constructor("!AtIndex", AtIndex) |         self.add_constructor("!AtIndex", AtIndex) | ||||||
|         self.add_constructor("!ParseJSON", ParseJSON) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class EntryInvalidError(SentryIgnoredException): | class EntryInvalidError(SentryIgnoredException): | ||||||
|  | |||||||
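The `ParseJSON` tag that differs between the two branches shows the general `YAMLTag` pattern: capture the scalar in `__init__`, produce the final value in `resolve()`, and register the tag in `BlueprintLoader`. A sketch of a hypothetical `!Upper` tag following the same shape; the tag itself is not part of authentik, and the module path in the import is an assumption:

```python
# Illustrative only: a hypothetical !Upper blueprint tag, mirroring ParseJSON/Format.
from yaml import ScalarNode

from authentik.blueprints.v1.common import (  # module path assumed
    Blueprint,
    BlueprintEntry,
    BlueprintLoader,
    YAMLTag,
)


class Upper(YAMLTag):
    """Upper-case a scalar value from a blueprint"""

    raw: str

    def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
        super().__init__()
        self.raw = node.value  # capture the raw scalar at load time

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> str:
        # No entry/blueprint context needed; just transform the captured scalar
        return str(self.raw).upper()


# Registration would mirror the add_constructor() calls shown above, e.g.
# inside BlueprintLoader.__init__: self.add_constructor("!Upper", Upper)
```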
| @ -384,7 +384,7 @@ class Importer: | |||||||
|     def _apply_models(self, raise_errors=False) -> bool: |     def _apply_models(self, raise_errors=False) -> bool: | ||||||
|         """Apply (create/update) models yaml""" |         """Apply (create/update) models yaml""" | ||||||
|         self.__pk_map = {} |         self.__pk_map = {} | ||||||
|         for entry in self._import.iter_entries(): |         for entry in self._import.entries: | ||||||
|             model_app_label, model_name = entry.get_model(self._import).split(".") |             model_app_label, model_name = entry.get_model(self._import).split(".") | ||||||
|             try: |             try: | ||||||
|                 model: type[SerializerModel] = registry.get_model(model_app_label, model_name) |                 model: type[SerializerModel] = registry.get_model(model_app_label, model_name) | ||||||
|  | |||||||
| @ -47,7 +47,7 @@ class MetaModelRegistry: | |||||||
|         models = apps.get_models() |         models = apps.get_models() | ||||||
|         for _, value in self.models.items(): |         for _, value in self.models.items(): | ||||||
|             models.append(value) |             models.append(value) | ||||||
|         return sorted(models, key=str) |         return models | ||||||
|  |  | ||||||
|     def get_model(self, app_label: str, model_id: str) -> type[Model]: |     def get_model(self, app_label: str, model_id: str) -> type[Model]: | ||||||
|         """Get model checks if any virtual models are registered, and falls back |         """Get model checks if any virtual models are registered, and falls back | ||||||
|  | |||||||
| @ -1,9 +1,9 @@ | |||||||
| """authentik brands app""" | """authentik brands app""" | ||||||
|  |  | ||||||
| from authentik.blueprints.apps import ManagedAppConfig | from django.apps import AppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikBrandsConfig(ManagedAppConfig): | class AuthentikBrandsConfig(AppConfig): | ||||||
|     """authentik Brand app""" |     """authentik Brand app""" | ||||||
|  |  | ||||||
|     name = "authentik.brands" |     name = "authentik.brands" | ||||||
| @ -12,4 +12,3 @@ class AuthentikBrandsConfig(ManagedAppConfig): | |||||||
|     mountpoints = { |     mountpoints = { | ||||||
|         "authentik.brands.urls_root": "", |         "authentik.brands.urls_root": "", | ||||||
|     } |     } | ||||||
|     default = True |  | ||||||
|  | |||||||
| @ -148,14 +148,3 @@ class TestBrands(APITestCase): | |||||||
|                 "default_locale": "", |                 "default_locale": "", | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     def test_custom_css(self): |  | ||||||
|         """Test custom_css""" |  | ||||||
|         brand = create_test_brand() |  | ||||||
|         brand.branding_custom_css = """* { |  | ||||||
|             font-family: "Foo bar"; |  | ||||||
|         }""" |  | ||||||
|         brand.save() |  | ||||||
|         res = self.client.get(reverse("authentik_core:if-user")) |  | ||||||
|         self.assertEqual(res.status_code, 200) |  | ||||||
|         self.assertIn(brand.branding_custom_css, res.content.decode()) |  | ||||||
|  | |||||||
| @ -5,8 +5,6 @@ from typing import Any | |||||||
| from django.db.models import F, Q | from django.db.models import F, Q | ||||||
| from django.db.models import Value as V | from django.db.models import Value as V | ||||||
| from django.http.request import HttpRequest | from django.http.request import HttpRequest | ||||||
| from django.utils.html import _json_script_escapes |  | ||||||
| from django.utils.safestring import mark_safe |  | ||||||
|  |  | ||||||
| from authentik import get_full_version | from authentik import get_full_version | ||||||
| from authentik.brands.models import Brand | from authentik.brands.models import Brand | ||||||
| @ -34,13 +32,8 @@ def context_processor(request: HttpRequest) -> dict[str, Any]: | |||||||
|     """Context Processor that injects brand object into every template""" |     """Context Processor that injects brand object into every template""" | ||||||
|     brand = getattr(request, "brand", DEFAULT_BRAND) |     brand = getattr(request, "brand", DEFAULT_BRAND) | ||||||
|     tenant = getattr(request, "tenant", Tenant()) |     tenant = getattr(request, "tenant", Tenant()) | ||||||
|     # similarly to `json_script` we escape everything HTML-related, however django |  | ||||||
|     # only directly exposes this as a function that also wraps it in a <script> tag |  | ||||||
|     # which we dont want for CSS |  | ||||||
|     brand_css = mark_safe(str(brand.branding_custom_css).translate(_json_script_escapes))  # nosec |  | ||||||
|     return { |     return { | ||||||
|         "brand": brand, |         "brand": brand, | ||||||
|         "brand_css": brand_css, |  | ||||||
|         "footer_links": tenant.footer_links, |         "footer_links": tenant.footer_links, | ||||||
|         "html_meta": {**get_http_meta()}, |         "html_meta": {**get_http_meta()}, | ||||||
|         "version": get_full_version(), |         "version": get_full_version(), | ||||||
|  | |||||||
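The escaping branch above reuses Django's private `_json_script_escapes` translation table (the same escapes `json_script` applies) so that brand CSS cannot break out of the surrounding tag, without wrapping the result in a `<script>` element. A short sketch of what that translation does to a CSS string; it relies on the same private table the diff itself uses:

```python
# Hedged sketch of the escaping applied to branding_custom_css above.
from django.utils.html import _json_script_escapes  # private table, as in the diff
from django.utils.safestring import mark_safe

css = '* { font-family: "Foo bar"; } </style><script>alert(1)</script>'
print(mark_safe(css.translate(_json_script_escapes)))
# The <, > and & characters come out as \u003C, \u003E and \u0026,
# so the embedded CSS cannot terminate its containing tag.
```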
| @ -2,9 +2,11 @@ | |||||||
|  |  | ||||||
| from collections.abc import Iterator | from collections.abc import Iterator | ||||||
| from copy import copy | from copy import copy | ||||||
|  | from datetime import timedelta | ||||||
|  |  | ||||||
| from django.core.cache import cache | from django.core.cache import cache | ||||||
| from django.db.models import QuerySet | from django.db.models import QuerySet | ||||||
|  | from django.db.models.functions import ExtractHour | ||||||
| from django.shortcuts import get_object_or_404 | from django.shortcuts import get_object_or_404 | ||||||
| from drf_spectacular.types import OpenApiTypes | from drf_spectacular.types import OpenApiTypes | ||||||
| from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema | from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema | ||||||
| @ -18,6 +20,7 @@ from rest_framework.response import Response | |||||||
| from rest_framework.viewsets import ModelViewSet | from rest_framework.viewsets import ModelViewSet | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
|  | from authentik.admin.api.metrics import CoordinateSerializer | ||||||
| from authentik.api.pagination import Pagination | from authentik.api.pagination import Pagination | ||||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | ||||||
| from authentik.core.api.providers import ProviderSerializer | from authentik.core.api.providers import ProviderSerializer | ||||||
| @ -25,6 +28,7 @@ from authentik.core.api.used_by import UsedByMixin | |||||||
| from authentik.core.api.utils import ModelSerializer | from authentik.core.api.utils import ModelSerializer | ||||||
| from authentik.core.models import Application, User | from authentik.core.models import Application, User | ||||||
| from authentik.events.logs import LogEventSerializer, capture_logs | from authentik.events.logs import LogEventSerializer, capture_logs | ||||||
|  | from authentik.events.models import EventAction | ||||||
| from authentik.lib.utils.file import ( | from authentik.lib.utils.file import ( | ||||||
|     FilePathSerializer, |     FilePathSerializer, | ||||||
|     FileUploadSerializer, |     FileUploadSerializer, | ||||||
| @ -317,3 +321,18 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet): | |||||||
|         """Set application icon (as URL)""" |         """Set application icon (as URL)""" | ||||||
|         app: Application = self.get_object() |         app: Application = self.get_object() | ||||||
|         return set_file_url(request, app, "meta_icon") |         return set_file_url(request, app, "meta_icon") | ||||||
|  |  | ||||||
|  |     @permission_required("authentik_core.view_application", ["authentik_events.view_event"]) | ||||||
|  |     @extend_schema(responses={200: CoordinateSerializer(many=True)}) | ||||||
|  |     @action(detail=True, pagination_class=None, filter_backends=[]) | ||||||
|  |     def metrics(self, request: Request, slug: str): | ||||||
|  |         """Metrics for application logins""" | ||||||
|  |         app = self.get_object() | ||||||
|  |         return Response( | ||||||
|  |             get_objects_for_user(request.user, "authentik_events.view_event").filter( | ||||||
|  |                 action=EventAction.AUTHORIZE_APPLICATION, | ||||||
|  |                 context__authorized_application__pk=app.pk.hex, | ||||||
|  |             ) | ||||||
|  |             # 3 data points per day, so 8 hour spans | ||||||
|  |             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||||
|  |         ) | ||||||
|  | |||||||
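The per-application `metrics` action mirrors the admin metrics view, scoped to `AUTHORIZE_APPLICATION` events for a single application. A rough sketch of exercising it in a test; the route name (`authentik_api:application-metrics`, inferred from DRF's `detail=True` action naming) and the `create_test_admin_user` helper are assumptions, not shown in this diff:

```python
# Hedged test sketch for the application metrics action above.
from django.urls import reverse
from rest_framework.test import APITestCase

from authentik.core.models import Application
from authentik.core.tests.utils import create_test_admin_user  # helper assumed


class TestApplicationMetrics(APITestCase):
    def test_metrics(self):
        """Application metrics endpoint returns coordinate data"""
        user = create_test_admin_user()
        self.client.force_login(user)
        app = Application.objects.create(name="test", slug="test")
        response = self.client.get(
            reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
        )
        self.assertEqual(response.status_code, 200)
```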
| @ -1,6 +1,8 @@ | |||||||
| """Authenticator Devices API Views""" | """Authenticator Devices API Views""" | ||||||
|  |  | ||||||
| from drf_spectacular.utils import extend_schema | from django.utils.translation import gettext_lazy as _ | ||||||
|  | from drf_spectacular.types import OpenApiTypes | ||||||
|  | from drf_spectacular.utils import OpenApiParameter, extend_schema | ||||||
| from guardian.shortcuts import get_objects_for_user | from guardian.shortcuts import get_objects_for_user | ||||||
| from rest_framework.fields import ( | from rest_framework.fields import ( | ||||||
|     BooleanField, |     BooleanField, | ||||||
| @ -13,7 +15,6 @@ from rest_framework.request import Request | |||||||
| from rest_framework.response import Response | from rest_framework.response import Response | ||||||
| from rest_framework.viewsets import ViewSet | from rest_framework.viewsets import ViewSet | ||||||
|  |  | ||||||
| from authentik.core.api.users import ParamUserSerializer |  | ||||||
| from authentik.core.api.utils import MetaNameSerializer | from authentik.core.api.utils import MetaNameSerializer | ||||||
| from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice | from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice | ||||||
| from authentik.stages.authenticator import device_classes, devices_for_user | from authentik.stages.authenticator import device_classes, devices_for_user | ||||||
| @ -22,7 +23,7 @@ from authentik.stages.authenticator_webauthn.models import WebAuthnDevice | |||||||
|  |  | ||||||
|  |  | ||||||
| class DeviceSerializer(MetaNameSerializer): | class DeviceSerializer(MetaNameSerializer): | ||||||
|     """Serializer for authenticator devices""" |     """Serializer for Duo authenticator devices""" | ||||||
|  |  | ||||||
|     pk = CharField() |     pk = CharField() | ||||||
|     name = CharField() |     name = CharField() | ||||||
| @ -32,27 +33,22 @@ class DeviceSerializer(MetaNameSerializer): | |||||||
|     last_updated = DateTimeField(read_only=True) |     last_updated = DateTimeField(read_only=True) | ||||||
|     last_used = DateTimeField(read_only=True, allow_null=True) |     last_used = DateTimeField(read_only=True, allow_null=True) | ||||||
|     extra_description = SerializerMethodField() |     extra_description = SerializerMethodField() | ||||||
|     external_id = SerializerMethodField() |  | ||||||
|  |  | ||||||
|     def get_type(self, instance: Device) -> str: |     def get_type(self, instance: Device) -> str: | ||||||
|         """Get type of device""" |         """Get type of device""" | ||||||
|         return instance._meta.label |         return instance._meta.label | ||||||
|  |  | ||||||
|     def get_extra_description(self, instance: Device) -> str | None: |     def get_extra_description(self, instance: Device) -> str: | ||||||
|         """Get extra description""" |         """Get extra description""" | ||||||
|         if isinstance(instance, WebAuthnDevice): |         if isinstance(instance, WebAuthnDevice): | ||||||
|             return instance.device_type.description if instance.device_type else None |             return ( | ||||||
|  |                 instance.device_type.description | ||||||
|  |                 if instance.device_type | ||||||
|  |                 else _("Extra description not available") | ||||||
|  |             ) | ||||||
|         if isinstance(instance, EndpointDevice): |         if isinstance(instance, EndpointDevice): | ||||||
|             return instance.data.get("deviceSignals", {}).get("deviceModel") |             return instance.data.get("deviceSignals", {}).get("deviceModel") | ||||||
|         return None |         return "" | ||||||
|  |  | ||||||
|     def get_external_id(self, instance: Device) -> str | None: |  | ||||||
|         """Get external Device ID""" |  | ||||||
|         if isinstance(instance, WebAuthnDevice): |  | ||||||
|             return instance.device_type.aaguid if instance.device_type else None |  | ||||||
|         if isinstance(instance, EndpointDevice): |  | ||||||
|             return instance.data.get("deviceSignals", {}).get("deviceModel") |  | ||||||
|         return None |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class DeviceViewSet(ViewSet): | class DeviceViewSet(ViewSet): | ||||||
| @ -61,6 +57,7 @@ class DeviceViewSet(ViewSet): | |||||||
|     serializer_class = DeviceSerializer |     serializer_class = DeviceSerializer | ||||||
|     permission_classes = [IsAuthenticated] |     permission_classes = [IsAuthenticated] | ||||||
|  |  | ||||||
|  |     @extend_schema(responses={200: DeviceSerializer(many=True)}) | ||||||
|     def list(self, request: Request) -> Response: |     def list(self, request: Request) -> Response: | ||||||
|         """Get all devices for current user""" |         """Get all devices for current user""" | ||||||
|         devices = devices_for_user(request.user) |         devices = devices_for_user(request.user) | ||||||
| @ -82,11 +79,18 @@ class AdminDeviceViewSet(ViewSet): | |||||||
|             yield from device_set |             yield from device_set | ||||||
|  |  | ||||||
|     @extend_schema( |     @extend_schema( | ||||||
|         parameters=[ParamUserSerializer], |         parameters=[ | ||||||
|  |             OpenApiParameter( | ||||||
|  |                 name="user", | ||||||
|  |                 location=OpenApiParameter.QUERY, | ||||||
|  |                 type=OpenApiTypes.INT, | ||||||
|  |             ) | ||||||
|  |         ], | ||||||
|         responses={200: DeviceSerializer(many=True)}, |         responses={200: DeviceSerializer(many=True)}, | ||||||
|     ) |     ) | ||||||
|     def list(self, request: Request) -> Response: |     def list(self, request: Request) -> Response: | ||||||
|         """Get all devices for current user""" |         """Get all devices for current user""" | ||||||
|         args = ParamUserSerializer(data=request.query_params) |         kwargs = {} | ||||||
|         args.is_valid(raise_exception=True) |         if "user" in request.query_params: | ||||||
|         return Response(DeviceSerializer(self.get_devices(**args.validated_data), many=True).data) |             kwargs = {"user": request.query_params["user"]} | ||||||
|  |         return Response(DeviceSerializer(self.get_devices(**kwargs), many=True).data) | ||||||
|  | |||||||
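In the AdminDeviceViewSet hunk, the left column validates the optional user query parameter through ParamUserSerializer (which also checks that the primary key refers to an existing, non-anonymous user), while the right column reads request.query_params directly and passes the raw value through. A minimal plain-Python sketch of the right-hand behaviour, to make the difference concrete (illustrative only; note that no validation or type coercion happens here):

    def device_filter_kwargs(query_params: dict) -> dict:
        # Mirrors the right-hand hunk: only filter when ?user=... was supplied.
        if "user" in query_params:
            return {"user": query_params["user"]}
        return {}

    print(device_filter_kwargs({"user": "42"}))   # {'user': '42'} -- raw string, unvalidated
    print(device_filter_kwargs({}))               # {}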
| @ -6,6 +6,7 @@ from typing import Any | |||||||
|  |  | ||||||
| from django.contrib.auth import update_session_auth_hash | from django.contrib.auth import update_session_auth_hash | ||||||
| from django.contrib.auth.models import Permission | from django.contrib.auth.models import Permission | ||||||
|  | from django.db.models.functions import ExtractHour | ||||||
| from django.db.transaction import atomic | from django.db.transaction import atomic | ||||||
| from django.db.utils import IntegrityError | from django.db.utils import IntegrityError | ||||||
| from django.urls import reverse_lazy | from django.urls import reverse_lazy | ||||||
| @ -51,6 +52,7 @@ from rest_framework.validators import UniqueValidator | |||||||
| from rest_framework.viewsets import ModelViewSet | from rest_framework.viewsets import ModelViewSet | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
|  | from authentik.admin.api.metrics import CoordinateSerializer | ||||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | ||||||
| from authentik.brands.models import Brand | from authentik.brands.models import Brand | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| @ -82,7 +84,6 @@ from authentik.flows.views.executor import QS_KEY_TOKEN | |||||||
| from authentik.lib.avatars import get_avatar | from authentik.lib.avatars import get_avatar | ||||||
| from authentik.rbac.decorators import permission_required | from authentik.rbac.decorators import permission_required | ||||||
| from authentik.rbac.models import get_permission_choices | from authentik.rbac.models import get_permission_choices | ||||||
| from authentik.stages.email.flow import pickle_flow_token_for_email |  | ||||||
| from authentik.stages.email.models import EmailStage | from authentik.stages.email.models import EmailStage | ||||||
| from authentik.stages.email.tasks import send_mails | from authentik.stages.email.tasks import send_mails | ||||||
| from authentik.stages.email.utils import TemplateEmailMessage | from authentik.stages.email.utils import TemplateEmailMessage | ||||||
| @ -90,12 +91,6 @@ from authentik.stages.email.utils import TemplateEmailMessage | |||||||
| LOGGER = get_logger() | LOGGER = get_logger() | ||||||
|  |  | ||||||
|  |  | ||||||
| class ParamUserSerializer(PassiveSerializer): |  | ||||||
|     """Partial serializer for query parameters to select a user""" |  | ||||||
|  |  | ||||||
|     user = PrimaryKeyRelatedField(queryset=User.objects.all().exclude_anonymous(), required=False) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class UserGroupSerializer(ModelSerializer): | class UserGroupSerializer(ModelSerializer): | ||||||
|     """Simplified Group Serializer for user's groups""" |     """Simplified Group Serializer for user's groups""" | ||||||
|  |  | ||||||
| @ -321,6 +316,53 @@ class SessionUserSerializer(PassiveSerializer): | |||||||
|     original = UserSelfSerializer(required=False) |     original = UserSelfSerializer(required=False) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class UserMetricsSerializer(PassiveSerializer): | ||||||
|  |     """User Metrics""" | ||||||
|  |  | ||||||
|  |     logins = SerializerMethodField() | ||||||
|  |     logins_failed = SerializerMethodField() | ||||||
|  |     authorizations = SerializerMethodField() | ||||||
|  |  | ||||||
|  |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|  |     def get_logins(self, _): | ||||||
|  |         """Get successful logins per 8 hours for the last 7 days""" | ||||||
|  |         user = self.context["user"] | ||||||
|  |         request = self.context["request"] | ||||||
|  |         return ( | ||||||
|  |             get_objects_for_user(request.user, "authentik_events.view_event").filter( | ||||||
|  |                 action=EventAction.LOGIN, user__pk=user.pk | ||||||
|  |             ) | ||||||
|  |             # 3 data points per day, so 8 hour spans | ||||||
|  |             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|  |     def get_logins_failed(self, _): | ||||||
|  |         """Get failed logins per 8 hours for the last 7 days""" | ||||||
|  |         user = self.context["user"] | ||||||
|  |         request = self.context["request"] | ||||||
|  |         return ( | ||||||
|  |             get_objects_for_user(request.user, "authentik_events.view_event").filter( | ||||||
|  |                 action=EventAction.LOGIN_FAILED, context__username=user.username | ||||||
|  |             ) | ||||||
|  |             # 3 data points per day, so 8 hour spans | ||||||
|  |             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|  |     def get_authorizations(self, _): | ||||||
|  |         """Get application authorizations per 8 hours for the last 7 days""" | ||||||
|  |         user = self.context["user"] | ||||||
|  |         request = self.context["request"] | ||||||
|  |         return ( | ||||||
|  |             get_objects_for_user(request.user, "authentik_events.view_event").filter( | ||||||
|  |                 action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk | ||||||
|  |             ) | ||||||
|  |             # 3 data points per day, so 8 hour spans | ||||||
|  |             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |  | ||||||
| class UsersFilter(FilterSet): | class UsersFilter(FilterSet): | ||||||
|     """Filter for users""" |     """Filter for users""" | ||||||
|  |  | ||||||
| @ -392,23 +434,8 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|     queryset = User.objects.none() |     queryset = User.objects.none() | ||||||
|     ordering = ["username"] |     ordering = ["username"] | ||||||
|     serializer_class = UserSerializer |     serializer_class = UserSerializer | ||||||
|     filterset_class = UsersFilter |  | ||||||
|     search_fields = ["username", "name", "is_active", "email", "uuid", "attributes"] |     search_fields = ["username", "name", "is_active", "email", "uuid", "attributes"] | ||||||
|  |     filterset_class = UsersFilter | ||||||
|     def get_ql_fields(self): |  | ||||||
|         from djangoql.schema import BoolField, StrField |  | ||||||
|  |  | ||||||
|         from authentik.enterprise.search.fields import ChoiceSearchField, JSONSearchField |  | ||||||
|  |  | ||||||
|         return [ |  | ||||||
|             StrField(User, "username"), |  | ||||||
|             StrField(User, "name"), |  | ||||||
|             StrField(User, "email"), |  | ||||||
|             StrField(User, "path"), |  | ||||||
|             BoolField(User, "is_active", nullable=True), |  | ||||||
|             ChoiceSearchField(User, "type"), |  | ||||||
|             JSONSearchField(User, "attributes", suggest_nested=False), |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|     def get_queryset(self): |     def get_queryset(self): | ||||||
|         base_qs = User.objects.all().exclude_anonymous() |         base_qs = User.objects.all().exclude_anonymous() | ||||||
| @ -424,7 +451,7 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|     def list(self, request, *args, **kwargs): |     def list(self, request, *args, **kwargs): | ||||||
|         return super().list(request, *args, **kwargs) |         return super().list(request, *args, **kwargs) | ||||||
|  |  | ||||||
|     def _create_recovery_link(self, for_email=False) -> tuple[str, Token]: |     def _create_recovery_link(self) -> tuple[str, Token]: | ||||||
|         """Create a recovery link (when the current brand has a recovery flow set), |         """Create a recovery link (when the current brand has a recovery flow set), | ||||||
|         that can either be shown to an admin or sent to the user directly""" |         that can either be shown to an admin or sent to the user directly""" | ||||||
|         brand: Brand = self.request._request.brand |         brand: Brand = self.request._request.brand | ||||||
| @ -446,16 +473,12 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|             raise ValidationError( |             raise ValidationError( | ||||||
|                 {"non_field_errors": "Recovery flow not applicable to user"} |                 {"non_field_errors": "Recovery flow not applicable to user"} | ||||||
|             ) from None |             ) from None | ||||||
|         _plan = FlowToken.pickle(plan) |  | ||||||
|         if for_email: |  | ||||||
|             _plan = pickle_flow_token_for_email(plan) |  | ||||||
|         token, __ = FlowToken.objects.update_or_create( |         token, __ = FlowToken.objects.update_or_create( | ||||||
|             identifier=f"{user.uid}-password-reset", |             identifier=f"{user.uid}-password-reset", | ||||||
|             defaults={ |             defaults={ | ||||||
|                 "user": user, |                 "user": user, | ||||||
|                 "flow": flow, |                 "flow": flow, | ||||||
|                 "_plan": _plan, |                 "_plan": FlowToken.pickle(plan), | ||||||
|                 "revoke_on_execution": not for_email, |  | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|         querystring = urlencode({QS_KEY_TOKEN: token.key}) |         querystring = urlencode({QS_KEY_TOKEN: token.key}) | ||||||
| @ -579,6 +602,17 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|             update_session_auth_hash(self.request, user) |             update_session_auth_hash(self.request, user) | ||||||
|         return Response(status=204) |         return Response(status=204) | ||||||
|  |  | ||||||
|  |     @permission_required("authentik_core.view_user", ["authentik_events.view_event"]) | ||||||
|  |     @extend_schema(responses={200: UserMetricsSerializer(many=False)}) | ||||||
|  |     @action(detail=True, pagination_class=None, filter_backends=[]) | ||||||
|  |     def metrics(self, request: Request, pk: int) -> Response: | ||||||
|  |         """User metrics per 8 hours""" | ||||||
|  |         user: User = self.get_object() | ||||||
|  |         serializer = UserMetricsSerializer(instance={}) | ||||||
|  |         serializer.context["user"] = user | ||||||
|  |         serializer.context["request"] = request | ||||||
|  |         return Response(serializer.data) | ||||||
|  |  | ||||||
|     @permission_required("authentik_core.reset_user_password") |     @permission_required("authentik_core.reset_user_password") | ||||||
|     @extend_schema( |     @extend_schema( | ||||||
|         responses={ |         responses={ | ||||||
| @ -614,7 +648,7 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|         if for_user.email == "": |         if for_user.email == "": | ||||||
|             LOGGER.debug("User doesn't have an email address") |             LOGGER.debug("User doesn't have an email address") | ||||||
|             raise ValidationError({"non_field_errors": "User does not have an email address set."}) |             raise ValidationError({"non_field_errors": "User does not have an email address set."}) | ||||||
|         link, token = self._create_recovery_link(for_email=True) |         link, token = self._create_recovery_link() | ||||||
|         # Lookup the email stage to assure the current user can access it |         # Lookup the email stage to assure the current user can access it | ||||||
|         stages = get_objects_for_user( |         stages = get_objects_for_user( | ||||||
|             request.user, "authentik_stages_email.view_emailstage" |             request.user, "authentik_stages_email.view_emailstage" | ||||||
|  | |||||||
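_create_recovery_link ends by appending the flow token to the recovery flow URL via urlencode({QS_KEY_TOKEN: token.key}). A minimal sketch of that final step using only the standard library; the constant's value, the token key, and the URL below are illustrative stand-ins, not authentik's actual values:

    from urllib.parse import urlencode

    QS_KEY_TOKEN = "flow_token"                  # constant name is from the diff; this value is an assumption
    token_key = "3k9d0s-example-token"           # stands in for Token.key

    querystring = urlencode({QS_KEY_TOKEN: token_key})
    link = f"/if/flow/recovery/?{querystring}"   # hypothetical flow URL
    print(link)                                  # /if/flow/recovery/?flow_token=3k9d0s-example-token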
| @ -2,7 +2,6 @@ | |||||||
|  |  | ||||||
| from typing import Any | from typing import Any | ||||||
|  |  | ||||||
| from django.db import models |  | ||||||
| from django.db.models import Model | from django.db.models import Model | ||||||
| from drf_spectacular.extensions import OpenApiSerializerFieldExtension | from drf_spectacular.extensions import OpenApiSerializerFieldExtension | ||||||
| from drf_spectacular.plumbing import build_basic_type | from drf_spectacular.plumbing import build_basic_type | ||||||
| @ -31,27 +30,7 @@ def is_dict(value: Any): | |||||||
|     raise ValidationError("Value must be a dictionary, and not have any duplicate keys.") |     raise ValidationError("Value must be a dictionary, and not have any duplicate keys.") | ||||||
|  |  | ||||||
|  |  | ||||||
| class JSONDictField(JSONField): |  | ||||||
|     """JSON Field which only allows dictionaries""" |  | ||||||
|  |  | ||||||
|     default_validators = [is_dict] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class JSONExtension(OpenApiSerializerFieldExtension): |  | ||||||
|     """Generate API Schema for JSON fields as""" |  | ||||||
|  |  | ||||||
|     target_class = "authentik.core.api.utils.JSONDictField" |  | ||||||
|  |  | ||||||
|     def map_serializer_field(self, auto_schema, direction): |  | ||||||
|         return build_basic_type(OpenApiTypes.OBJECT) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ModelSerializer(BaseModelSerializer): | class ModelSerializer(BaseModelSerializer): | ||||||
|  |  | ||||||
|     # By default, JSON fields we have are used to store dictionaries |  | ||||||
|     serializer_field_mapping = BaseModelSerializer.serializer_field_mapping.copy() |  | ||||||
|     serializer_field_mapping[models.JSONField] = JSONDictField |  | ||||||
|  |  | ||||||
|     def create(self, validated_data): |     def create(self, validated_data): | ||||||
|         instance = super().create(validated_data) |         instance = super().create(validated_data) | ||||||
|  |  | ||||||
| @ -92,6 +71,21 @@ class ModelSerializer(BaseModelSerializer): | |||||||
|         return instance |         return instance | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class JSONDictField(JSONField): | ||||||
|  |     """JSON Field which only allows dictionaries""" | ||||||
|  |  | ||||||
|  |     default_validators = [is_dict] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class JSONExtension(OpenApiSerializerFieldExtension): | ||||||
|  |     """Generate API Schema for JSON fields as""" | ||||||
|  |  | ||||||
|  |     target_class = "authentik.core.api.utils.JSONDictField" | ||||||
|  |  | ||||||
|  |     def map_serializer_field(self, auto_schema, direction): | ||||||
|  |         return build_basic_type(OpenApiTypes.OBJECT) | ||||||
|  |  | ||||||
|  |  | ||||||
| class PassiveSerializer(Serializer): | class PassiveSerializer(Serializer): | ||||||
|     """Base serializer class which doesn't implement create/update methods""" |     """Base serializer class which doesn't implement create/update methods""" | ||||||
|  |  | ||||||
|  | |||||||
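Both columns keep JSONDictField, a JSONField whose only extra behaviour is the is_dict validator shown further up; the left column additionally maps every model-level models.JSONField onto it through serializer_field_mapping, so dictionaries are enforced by default. A plain-Python analogue of the validator's contract (not the DRF implementation itself):

    def is_dict_sketch(value):
        """Accept dictionaries only, mirroring the is_dict validator's error message."""
        if isinstance(value, dict):
            return value
        raise ValueError("Value must be a dictionary, and not have any duplicate keys.")

    print(is_dict_sketch({"attributes": {"foo": "bar"}}))   # returned unchanged
    try:
        is_dict_sketch(["not", "a", "dict"])
    except ValueError as exc:
        print(exc)                                          # the validation message above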
| @ -13,6 +13,7 @@ class Command(TenantCommand): | |||||||
|         parser.add_argument("usernames", nargs="*", type=str) |         parser.add_argument("usernames", nargs="*", type=str) | ||||||
|  |  | ||||||
|     def handle_per_tenant(self, **options): |     def handle_per_tenant(self, **options): | ||||||
|  |         print(options) | ||||||
|         new_type = UserTypes(options["type"]) |         new_type = UserTypes(options["type"]) | ||||||
|         qs = ( |         qs = ( | ||||||
|             User.objects.exclude_anonymous() |             User.objects.exclude_anonymous() | ||||||
|  | |||||||
| @ -79,7 +79,6 @@ def _migrate_session( | |||||||
|         AuthenticatedSession.objects.using(db_alias).create( |         AuthenticatedSession.objects.using(db_alias).create( | ||||||
|             session=session, |             session=session, | ||||||
|             user=old_auth_session.user, |             user=old_auth_session.user, | ||||||
|             uuid=old_auth_session.uuid, |  | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,81 +1,10 @@ | |||||||
| # Generated by Django 5.1.9 on 2025-05-14 11:15 | # Generated by Django 5.1.9 on 2025-05-14 11:15 | ||||||
|  |  | ||||||
| from django.apps.registry import Apps, apps as global_apps | from django.apps.registry import Apps | ||||||
| from django.db import migrations | from django.db import migrations | ||||||
| from django.contrib.contenttypes.management import create_contenttypes |  | ||||||
| from django.contrib.auth.management import create_permissions |  | ||||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||||
|  |  | ||||||
|  |  | ||||||
| def migrate_authenticated_session_permissions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): |  | ||||||
|     """Migrate permissions from OldAuthenticatedSession to AuthenticatedSession""" |  | ||||||
|     db_alias = schema_editor.connection.alias |  | ||||||
|  |  | ||||||
|     # `apps` here is just an instance of `django.db.migrations.state.AppConfigStub`, we need the |  | ||||||
|     # real config for creating permissions and content types |  | ||||||
|     authentik_core_config = global_apps.get_app_config("authentik_core") |  | ||||||
|     # These are normally only run by Django after all migrations, but we need them right now. |  | ||||||
|     # `global_apps` is needed, as `apps` here only contains stub app configs. |  | ||||||
|     create_permissions(authentik_core_config, using=db_alias, verbosity=1) |  | ||||||
|     create_contenttypes(authentik_core_config, using=db_alias, verbosity=1) |  | ||||||
|  |  | ||||||
|     # But from now on, this is just a regular migration, so use `apps` |  | ||||||
|     Permission = apps.get_model("auth", "Permission") |  | ||||||
|     ContentType = apps.get_model("contenttypes", "ContentType") |  | ||||||
|  |  | ||||||
|     try: |  | ||||||
|         old_ct = ContentType.objects.using(db_alias).get( |  | ||||||
|             app_label="authentik_core", model="oldauthenticatedsession" |  | ||||||
|         ) |  | ||||||
|         new_ct = ContentType.objects.using(db_alias).get( |  | ||||||
|             app_label="authentik_core", model="authenticatedsession" |  | ||||||
|         ) |  | ||||||
|     except ContentType.DoesNotExist: |  | ||||||
|         # This should exist at this point, but if not, let's cut our losses |  | ||||||
|         return |  | ||||||
|  |  | ||||||
|     # Get all permissions for the old content type |  | ||||||
|     old_perms = Permission.objects.using(db_alias).filter(content_type=old_ct) |  | ||||||
|  |  | ||||||
|     # Create equivalent permissions for the new content type |  | ||||||
|     for old_perm in old_perms: |  | ||||||
|         new_perm = ( |  | ||||||
|             Permission.objects.using(db_alias) |  | ||||||
|             .filter( |  | ||||||
|                 content_type=new_ct, |  | ||||||
|                 codename=old_perm.codename, |  | ||||||
|             ) |  | ||||||
|             .first() |  | ||||||
|         ) |  | ||||||
|         if not new_perm: |  | ||||||
|             # This should exist at this point, but if not, let's cut our losses |  | ||||||
|             continue |  | ||||||
|  |  | ||||||
|         # Global user permissions |  | ||||||
|         User = apps.get_model("authentik_core", "User") |  | ||||||
|         User.user_permissions.through.objects.using(db_alias).filter( |  | ||||||
|             permission=old_perm |  | ||||||
|         ).all().update(permission=new_perm) |  | ||||||
|  |  | ||||||
|         # Global role permissions |  | ||||||
|         DjangoGroup = apps.get_model("auth", "Group") |  | ||||||
|         DjangoGroup.permissions.through.objects.using(db_alias).filter( |  | ||||||
|             permission=old_perm |  | ||||||
|         ).all().update(permission=new_perm) |  | ||||||
|  |  | ||||||
|         # Object user permissions |  | ||||||
|         UserObjectPermission = apps.get_model("guardian", "UserObjectPermission") |  | ||||||
|         UserObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update( |  | ||||||
|             permission=new_perm, content_type=new_ct |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         # Object role permissions |  | ||||||
|         GroupObjectPermission = apps.get_model("guardian", "GroupObjectPermission") |  | ||||||
|         GroupObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update( |  | ||||||
|             permission=new_perm, content_type=new_ct |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def remove_old_authenticated_session_content_type( | def remove_old_authenticated_session_content_type( | ||||||
|     apps: Apps, schema_editor: BaseDatabaseSchemaEditor |     apps: Apps, schema_editor: BaseDatabaseSchemaEditor | ||||||
| ): | ): | ||||||
| @ -92,12 +21,7 @@ class Migration(migrations.Migration): | |||||||
|     ] |     ] | ||||||
|  |  | ||||||
|     operations = [ |     operations = [ | ||||||
|         migrations.RunPython( |  | ||||||
|             code=migrate_authenticated_session_permissions, |  | ||||||
|             reverse_code=migrations.RunPython.noop, |  | ||||||
|         ), |  | ||||||
|         migrations.RunPython( |         migrations.RunPython( | ||||||
|             code=remove_old_authenticated_session_content_type, |             code=remove_old_authenticated_session_content_type, | ||||||
|             reverse_code=migrations.RunPython.noop, |  | ||||||
|         ), |         ), | ||||||
|     ] |     ] | ||||||
|  | |||||||
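The left column also wires the permission-copying function into the migration and keeps reverse_code=migrations.RunPython.noop on the remaining operation; without a reverse_code, Django treats a RunPython operation as irreversible. A minimal skeleton of the reversible form (a generic Django migration module, not this migration):

    from django.db import migrations

    def forwards(apps, schema_editor):
        # data-migration body would go here
        pass

    class Migration(migrations.Migration):
        dependencies = []
        operations = [
            # reverse_code=noop lets "migrate backwards" pass through this operation
            # instead of raising IrreversibleError
            migrations.RunPython(code=forwards, reverse_code=migrations.RunPython.noop),
        ]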
| @ -18,7 +18,7 @@ from django.http import HttpRequest | |||||||
| from django.utils.functional import SimpleLazyObject, cached_property | from django.utils.functional import SimpleLazyObject, cached_property | ||||||
| from django.utils.timezone import now | from django.utils.timezone import now | ||||||
| from django.utils.translation import gettext_lazy as _ | from django.utils.translation import gettext_lazy as _ | ||||||
| from django_cte import CTE, with_cte | from django_cte import CTEQuerySet, With | ||||||
| from guardian.conf import settings | from guardian.conf import settings | ||||||
| from guardian.mixins import GuardianUserMixin | from guardian.mixins import GuardianUserMixin | ||||||
| from model_utils.managers import InheritanceManager | from model_utils.managers import InheritanceManager | ||||||
| @ -136,7 +136,7 @@ class AttributesMixin(models.Model): | |||||||
|         return instance, False |         return instance, False | ||||||
|  |  | ||||||
|  |  | ||||||
| class GroupQuerySet(QuerySet): | class GroupQuerySet(CTEQuerySet): | ||||||
|     def with_children_recursive(self): |     def with_children_recursive(self): | ||||||
|         """Recursively get all groups that have the current queryset as parents |         """Recursively get all groups that have the current queryset as parents | ||||||
|         or are indirectly related.""" |         or are indirectly related.""" | ||||||
| @ -165,9 +165,9 @@ class GroupQuerySet(QuerySet): | |||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         # Build the recursive query, see above |         # Build the recursive query, see above | ||||||
|         cte = CTE.recursive(make_cte) |         cte = With.recursive(make_cte) | ||||||
|         # Return the result, as a usable queryset for Group. |         # Return the result, as a usable queryset for Group. | ||||||
|         return with_cte(cte, select=cte.join(Group, group_uuid=cte.col.group_uuid)) |         return cte.join(Group, group_uuid=cte.col.group_uuid).with_cte(cte) | ||||||
|  |  | ||||||
|  |  | ||||||
| class Group(SerializerModel, AttributesMixin): | class Group(SerializerModel, AttributesMixin): | ||||||
| @ -1082,12 +1082,6 @@ class AuthenticatedSession(SerializerModel): | |||||||
|  |  | ||||||
|     user = models.ForeignKey(User, on_delete=models.CASCADE) |     user = models.ForeignKey(User, on_delete=models.CASCADE) | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def serializer(self) -> type[Serializer]: |  | ||||||
|         from authentik.core.api.authenticated_sessions import AuthenticatedSessionSerializer |  | ||||||
|  |  | ||||||
|         return AuthenticatedSessionSerializer |  | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|         verbose_name = _("Authenticated Session") |         verbose_name = _("Authenticated Session") | ||||||
|         verbose_name_plural = _("Authenticated Sessions") |         verbose_name_plural = _("Authenticated Sessions") | ||||||
|  | |||||||
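The GroupQuerySet hunk swaps between two django-cte APIs for the same recursive query: CTE.recursive(...) with the module-level with_cte(...) on one side, With.recursive(...) with the queryset-level .with_cte(cte) on the other (which is also why the queryset base class changes to CTEQuerySet). What the query computes is unchanged; here is a plain-Python walk of the same idea, starting from some groups and collecting all direct and indirect children (illustrative data, not the ORM code):

    def with_children_recursive(roots, children_by_parent):
        # Breadth-first expansion: keep adding children until nothing new turns up.
        seen = set(roots)
        frontier = list(roots)
        while frontier:
            group = frontier.pop()
            for child in children_by_parent.get(group, ()):
                if child not in seen:
                    seen.add(child)
                    frontier.append(child)
        return seen

    tree = {"admins": ["ops"], "ops": ["on-call"], "sales": []}
    print(with_children_recursive({"admins"}, tree))   # contains 'admins', 'ops' and 'on-call'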
| @ -16,7 +16,7 @@ | |||||||
|         {% block head_before %} |         {% block head_before %} | ||||||
|         {% endblock %} |         {% endblock %} | ||||||
|         <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}"> |         <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}"> | ||||||
|         <style>{{ brand_css }}</style> |         <style>{{ brand.branding_custom_css }}</style> | ||||||
|         <script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script> |         <script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script> | ||||||
|         <script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script> |         <script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script> | ||||||
|         {% block head %} |         {% block head %} | ||||||
|  | |||||||
| @ -10,7 +10,7 @@ | |||||||
| {% endblock %} | {% endblock %} | ||||||
|  |  | ||||||
| {% block body %} | {% block body %} | ||||||
| <ak-message-container alignment="bottom"></ak-message-container> | <ak-message-container></ak-message-container> | ||||||
| <ak-interface-admin> | <ak-interface-admin> | ||||||
|     <ak-loading></ak-loading> |     <ak-loading></ak-loading> | ||||||
| </ak-interface-admin> | </ak-interface-admin> | ||||||
|  | |||||||
| @ -114,7 +114,6 @@ class TestApplicationsAPI(APITestCase): | |||||||
|         self.assertJSONEqual( |         self.assertJSONEqual( | ||||||
|             response.content.decode(), |             response.content.decode(), | ||||||
|             { |             { | ||||||
|                 "autocomplete": {}, |  | ||||||
|                 "pagination": { |                 "pagination": { | ||||||
|                     "next": 0, |                     "next": 0, | ||||||
|                     "previous": 0, |                     "previous": 0, | ||||||
| @ -168,7 +167,6 @@ class TestApplicationsAPI(APITestCase): | |||||||
|         self.assertJSONEqual( |         self.assertJSONEqual( | ||||||
|             response.content.decode(), |             response.content.decode(), | ||||||
|             { |             { | ||||||
|                 "autocomplete": {}, |  | ||||||
|                 "pagination": { |                 "pagination": { | ||||||
|                     "next": 0, |                     "next": 0, | ||||||
|                     "previous": 0, |                     "previous": 0, | ||||||
|  | |||||||
| @ -81,6 +81,22 @@ class TestUsersAPI(APITestCase): | |||||||
|         response = self.client.get(reverse("authentik_api:user-list"), {"include_groups": "true"}) |         response = self.client.get(reverse("authentik_api:user-list"), {"include_groups": "true"}) | ||||||
|         self.assertEqual(response.status_code, 200) |         self.assertEqual(response.status_code, 200) | ||||||
|  |  | ||||||
|  |     def test_metrics(self): | ||||||
|  |         """Test user's metrics""" | ||||||
|  |         self.client.force_login(self.admin) | ||||||
|  |         response = self.client.get( | ||||||
|  |             reverse("authentik_api:user-metrics", kwargs={"pk": self.user.pk}) | ||||||
|  |         ) | ||||||
|  |         self.assertEqual(response.status_code, 200) | ||||||
|  |  | ||||||
|  |     def test_metrics_denied(self): | ||||||
|  |         """Test user's metrics (non-superuser)""" | ||||||
|  |         self.client.force_login(self.user) | ||||||
|  |         response = self.client.get( | ||||||
|  |             reverse("authentik_api:user-metrics", kwargs={"pk": self.user.pk}) | ||||||
|  |         ) | ||||||
|  |         self.assertEqual(response.status_code, 403) | ||||||
|  |  | ||||||
|     def test_recovery_no_flow(self): |     def test_recovery_no_flow(self): | ||||||
|         """Test user recovery link (no recovery flow set)""" |         """Test user recovery link (no recovery flow set)""" | ||||||
|         self.client.force_login(self.admin) |         self.client.force_login(self.admin) | ||||||
|  | |||||||
| @ -119,17 +119,17 @@ class TestTrimPasswordHistory(TestCase): | |||||||
|             [ |             [ | ||||||
|                 UserPasswordHistory( |                 UserPasswordHistory( | ||||||
|                     user=self.user, |                     user=self.user, | ||||||
|                     old_password="hunter1",  # nosec |                     old_password="hunter1",  # nosec B106 | ||||||
|                     created_at=_now - timedelta(days=3), |                     created_at=_now - timedelta(days=3), | ||||||
|                 ), |                 ), | ||||||
|                 UserPasswordHistory( |                 UserPasswordHistory( | ||||||
|                     user=self.user, |                     user=self.user, | ||||||
|                     old_password="hunter2",  # nosec |                     old_password="hunter2",  # nosec B106 | ||||||
|                     created_at=_now - timedelta(days=2), |                     created_at=_now - timedelta(days=2), | ||||||
|                 ), |                 ), | ||||||
|                 UserPasswordHistory( |                 UserPasswordHistory( | ||||||
|                     user=self.user, |                     user=self.user, | ||||||
|                     old_password="hunter3",  # nosec |                     old_password="hunter3",  # nosec B106 | ||||||
|                     created_at=_now, |                     created_at=_now, | ||||||
|                 ), |                 ), | ||||||
|             ] |             ] | ||||||
|  | |||||||
| @ -1,8 +1,10 @@ | |||||||
| from hashlib import sha256 | from hashlib import sha256 | ||||||
|  |  | ||||||
|  | from django.contrib.auth.signals import user_logged_out | ||||||
| from django.db.models import Model | from django.db.models import Model | ||||||
| from django.db.models.signals import post_delete, post_save, pre_delete | from django.db.models.signals import post_delete, post_save, pre_delete | ||||||
| from django.dispatch import receiver | from django.dispatch import receiver | ||||||
|  | from django.http.request import HttpRequest | ||||||
| from guardian.shortcuts import assign_perm | from guardian.shortcuts import assign_perm | ||||||
|  |  | ||||||
| from authentik.core.models import ( | from authentik.core.models import ( | ||||||
| @ -60,6 +62,31 @@ def ssf_providers_post_save(sender: type[Model], instance: SSFProvider, created: | |||||||
|             instance.save() |             instance.save() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @receiver(user_logged_out) | ||||||
|  | def ssf_user_logged_out_session_revoked(sender, request: HttpRequest, user: User, **_): | ||||||
|  |     """Session revoked trigger (user logged out)""" | ||||||
|  |     if not request.session or not request.session.session_key or not user: | ||||||
|  |         return | ||||||
|  |     send_ssf_event( | ||||||
|  |         EventTypes.CAEP_SESSION_REVOKED, | ||||||
|  |         { | ||||||
|  |             "initiating_entity": "user", | ||||||
|  |         }, | ||||||
|  |         sub_id={ | ||||||
|  |             "format": "complex", | ||||||
|  |             "session": { | ||||||
|  |                 "format": "opaque", | ||||||
|  |                 "id": sha256(request.session.session_key.encode("ascii")).hexdigest(), | ||||||
|  |             }, | ||||||
|  |             "user": { | ||||||
|  |                 "format": "email", | ||||||
|  |                 "email": user.email, | ||||||
|  |             }, | ||||||
|  |         }, | ||||||
|  |         request=request, | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
| @receiver(pre_delete, sender=AuthenticatedSession) | @receiver(pre_delete, sender=AuthenticatedSession) | ||||||
| def ssf_user_session_delete_session_revoked(sender, instance: AuthenticatedSession, **_): | def ssf_user_session_delete_session_revoked(sender, instance: AuthenticatedSession, **_): | ||||||
|     """Session revoked trigger (users' session has been deleted) |     """Session revoked trigger (users' session has been deleted) | ||||||
|  | |||||||
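The new user_logged_out receiver never puts the raw Django session key into the CAEP session-revoked event; the subject carries only its SHA-256 digest as an opaque identifier, alongside the user's email. The hashing step in isolation (stand-in value, same call as in the hunk):

    from hashlib import sha256

    session_key = "gx1v3k0example"                               # stands in for request.session.session_key
    opaque_id = sha256(session_key.encode("ascii")).hexdigest()
    print(opaque_id)                                             # 64 hex chars; stable but not reversible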
| @ -1,12 +0,0 @@ | |||||||
| """Enterprise app config""" |  | ||||||
|  |  | ||||||
| from authentik.enterprise.apps import EnterpriseConfig |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikEnterpriseSearchConfig(EnterpriseConfig): |  | ||||||
|     """Enterprise app config""" |  | ||||||
|  |  | ||||||
|     name = "authentik.enterprise.search" |  | ||||||
|     label = "authentik_search" |  | ||||||
|     verbose_name = "authentik Enterprise.Search" |  | ||||||
|     default = True |  | ||||||
| @ -1,128 +0,0 @@ | |||||||
| """DjangoQL search""" |  | ||||||
|  |  | ||||||
| from collections import OrderedDict, defaultdict |  | ||||||
| from collections.abc import Generator |  | ||||||
|  |  | ||||||
| from django.db import connection |  | ||||||
| from django.db.models import Model, Q |  | ||||||
| from djangoql.compat import text_type |  | ||||||
| from djangoql.schema import StrField |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class JSONSearchField(StrField): |  | ||||||
|     """JSON field for DjangoQL""" |  | ||||||
|  |  | ||||||
|     model: Model |  | ||||||
|  |  | ||||||
|     def __init__(self, model=None, name=None, nullable=None, suggest_nested=True): |  | ||||||
|         # Set this in the constructor to not clobber the type variable |  | ||||||
|         self.type = "relation" |  | ||||||
|         self.suggest_nested = suggest_nested |  | ||||||
|         super().__init__(model, name, nullable) |  | ||||||
|  |  | ||||||
|     def get_lookup(self, path, operator, value): |  | ||||||
|         search = "__".join(path) |  | ||||||
|         op, invert = self.get_operator(operator) |  | ||||||
|         q = Q(**{f"{search}{op}": self.get_lookup_value(value)}) |  | ||||||
|         return ~q if invert else q |  | ||||||
|  |  | ||||||
|     def json_field_keys(self) -> Generator[tuple[str]]: |  | ||||||
|         with connection.cursor() as cursor: |  | ||||||
|             cursor.execute( |  | ||||||
|                 f""" |  | ||||||
|                 WITH RECURSIVE "{self.name}_keys" AS ( |  | ||||||
|                     SELECT |  | ||||||
|                         ARRAY[jsonb_object_keys("{self.name}")] AS key_path_array, |  | ||||||
|                         "{self.name}" -> jsonb_object_keys("{self.name}") AS value |  | ||||||
|                     FROM {self.model._meta.db_table} |  | ||||||
|                     WHERE "{self.name}" IS NOT NULL |  | ||||||
|                         AND jsonb_typeof("{self.name}") = 'object' |  | ||||||
|  |  | ||||||
|                     UNION ALL |  | ||||||
|  |  | ||||||
|                     SELECT |  | ||||||
|                         ck.key_path_array || jsonb_object_keys(ck.value), |  | ||||||
|                         ck.value -> jsonb_object_keys(ck.value) AS value |  | ||||||
|                     FROM "{self.name}_keys" ck |  | ||||||
|                     WHERE jsonb_typeof(ck.value) = 'object' |  | ||||||
|                 ), |  | ||||||
|  |  | ||||||
|                 unique_paths AS ( |  | ||||||
|                     SELECT DISTINCT key_path_array |  | ||||||
|                     FROM "{self.name}_keys" |  | ||||||
|                 ) |  | ||||||
|  |  | ||||||
|                 SELECT key_path_array FROM unique_paths; |  | ||||||
|             """  # nosec |  | ||||||
|             ) |  | ||||||
|             return (x[0] for x in cursor.fetchall()) |  | ||||||
|  |  | ||||||
|     def get_nested_options(self) -> OrderedDict: |  | ||||||
|         """Get keys of all nested objects to show autocomplete""" |  | ||||||
|         if not self.suggest_nested: |  | ||||||
|             return OrderedDict() |  | ||||||
|         base_model_name = f"{self.model._meta.app_label}.{self.model._meta.model_name}_{self.name}" |  | ||||||
|  |  | ||||||
|         def recursive_function(parts: list[str], parent_parts: list[str] | None = None): |  | ||||||
|             if not parent_parts: |  | ||||||
|                 parent_parts = [] |  | ||||||
|             path = parts.pop(0) |  | ||||||
|             parent_parts.append(path) |  | ||||||
|             relation_key = "_".join(parent_parts) |  | ||||||
|             if len(parts) > 1: |  | ||||||
|                 out_dict = { |  | ||||||
|                     relation_key: { |  | ||||||
|                         parts[0]: { |  | ||||||
|                             "type": "relation", |  | ||||||
|                             "relation": f"{relation_key}_{parts[0]}", |  | ||||||
|                         } |  | ||||||
|                     } |  | ||||||
|                 } |  | ||||||
|                 child_paths = recursive_function(parts.copy(), parent_parts.copy()) |  | ||||||
|                 child_paths.update(out_dict) |  | ||||||
|                 return child_paths |  | ||||||
|             else: |  | ||||||
|                 return {relation_key: {parts[0]: {}}} |  | ||||||
|  |  | ||||||
|         relation_structure = defaultdict(dict) |  | ||||||
|  |  | ||||||
|         for relations in self.json_field_keys(): |  | ||||||
|             result = recursive_function([base_model_name] + relations) |  | ||||||
|             for relation_key, value in result.items(): |  | ||||||
|                 for sub_relation_key, sub_value in value.items(): |  | ||||||
|                     if not relation_structure[relation_key].get(sub_relation_key, None): |  | ||||||
|                         relation_structure[relation_key][sub_relation_key] = sub_value |  | ||||||
|                     else: |  | ||||||
|                         relation_structure[relation_key][sub_relation_key].update(sub_value) |  | ||||||
|  |  | ||||||
|         final_dict = defaultdict(dict) |  | ||||||
|  |  | ||||||
|         for key, value in relation_structure.items(): |  | ||||||
|             for sub_key, sub_value in value.items(): |  | ||||||
|                 if not sub_value: |  | ||||||
|                     final_dict[key][sub_key] = { |  | ||||||
|                         "type": "str", |  | ||||||
|                         "nullable": True, |  | ||||||
|                     } |  | ||||||
|                 else: |  | ||||||
|                     final_dict[key][sub_key] = sub_value |  | ||||||
|         return OrderedDict(final_dict) |  | ||||||
|  |  | ||||||
|     def relation(self) -> str: |  | ||||||
|         return f"{self.model._meta.app_label}.{self.model._meta.model_name}_{self.name}" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ChoiceSearchField(StrField): |  | ||||||
|     def __init__(self, model=None, name=None, nullable=None): |  | ||||||
|         super().__init__(model, name, nullable, suggest_options=True) |  | ||||||
|  |  | ||||||
|     def get_options(self, search): |  | ||||||
|         result = [] |  | ||||||
|         choices = self._field_choices() |  | ||||||
|         if choices: |  | ||||||
|             search = search.lower() |  | ||||||
|             for c in choices: |  | ||||||
|                 choice = text_type(c[0]) |  | ||||||
|                 if search in choice.lower(): |  | ||||||
|                     result.append(choice) |  | ||||||
|         return result |  | ||||||
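The removed json_field_keys query walks a JSONB column recursively and returns every key path through nested objects, which get_nested_options then turns into autocomplete suggestions. The same walk over an in-memory dictionary, as a plain-Python sketch of what the SQL produces:

    def json_key_paths(value, prefix=()):
        """Yield every key path through nested dictionaries, like the recursive SQL above."""
        for key, child in value.items():
            path = prefix + (key,)
            yield path
            if isinstance(child, dict):
                yield from json_key_paths(child, path)

    print(list(json_key_paths({"foo": {"bar": "baz"}, "flat": 1})))
    # [('foo',), ('foo', 'bar'), ('flat',)]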
| @ -1,53 +0,0 @@ | |||||||
| from rest_framework.response import Response |  | ||||||
|  |  | ||||||
| from authentik.api.pagination import Pagination |  | ||||||
| from authentik.enterprise.search.ql import AUTOCOMPLETE_COMPONENT_NAME, QLSearch |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class AutocompletePagination(Pagination): |  | ||||||
|  |  | ||||||
|     def paginate_queryset(self, queryset, request, view=None): |  | ||||||
|         self.view = view |  | ||||||
|         return super().paginate_queryset(queryset, request, view) |  | ||||||
|  |  | ||||||
|     def get_autocomplete(self): |  | ||||||
|         schema = QLSearch().get_schema(self.request, self.view) |  | ||||||
|         introspections = {} |  | ||||||
|         if hasattr(self.view, "get_ql_fields"): |  | ||||||
|             from authentik.enterprise.search.schema import AKQLSchemaSerializer |  | ||||||
|  |  | ||||||
|             introspections = AKQLSchemaSerializer().serialize( |  | ||||||
|                 schema(self.page.paginator.object_list.model) |  | ||||||
|             ) |  | ||||||
|         return introspections |  | ||||||
|  |  | ||||||
|     def get_paginated_response(self, data): |  | ||||||
|         previous_page_number = 0 |  | ||||||
|         if self.page.has_previous(): |  | ||||||
|             previous_page_number = self.page.previous_page_number() |  | ||||||
|         next_page_number = 0 |  | ||||||
|         if self.page.has_next(): |  | ||||||
|             next_page_number = self.page.next_page_number() |  | ||||||
|         return Response( |  | ||||||
|             { |  | ||||||
|                 "pagination": { |  | ||||||
|                     "next": next_page_number, |  | ||||||
|                     "previous": previous_page_number, |  | ||||||
|                     "count": self.page.paginator.count, |  | ||||||
|                     "current": self.page.number, |  | ||||||
|                     "total_pages": self.page.paginator.num_pages, |  | ||||||
|                     "start_index": self.page.start_index(), |  | ||||||
|                     "end_index": self.page.end_index(), |  | ||||||
|                 }, |  | ||||||
|                 "results": data, |  | ||||||
|                 "autocomplete": self.get_autocomplete(), |  | ||||||
|             } |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def get_paginated_response_schema(self, schema): |  | ||||||
|         final_schema = super().get_paginated_response_schema(schema) |  | ||||||
|         final_schema["properties"]["autocomplete"] = { |  | ||||||
|             "$ref": f"#/components/schemas/{AUTOCOMPLETE_COMPONENT_NAME}" |  | ||||||
|         } |  | ||||||
|         final_schema["required"].append("autocomplete") |  | ||||||
|         return final_schema |  | ||||||
| @ -1,80 +0,0 @@ | |||||||
| """DjangoQL search""" |  | ||||||
|  |  | ||||||
| from django.apps import apps |  | ||||||
| from django.db.models import QuerySet |  | ||||||
| from djangoql.ast import Name |  | ||||||
| from djangoql.exceptions import DjangoQLError |  | ||||||
| from djangoql.queryset import apply_search |  | ||||||
| from djangoql.schema import DjangoQLSchema |  | ||||||
| from rest_framework.filters import BaseFilterBackend, SearchFilter |  | ||||||
| from rest_framework.request import Request |  | ||||||
| from structlog.stdlib import get_logger |  | ||||||
|  |  | ||||||
| from authentik.enterprise.search.fields import JSONSearchField |  | ||||||
|  |  | ||||||
| LOGGER = get_logger() |  | ||||||
| AUTOCOMPLETE_COMPONENT_NAME = "Autocomplete" |  | ||||||
| AUTOCOMPLETE_SCHEMA = { |  | ||||||
|     "type": "object", |  | ||||||
|     "additionalProperties": {}, |  | ||||||
| } |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class BaseSchema(DjangoQLSchema): |  | ||||||
|     """Base Schema which deals with JSON Fields""" |  | ||||||
|  |  | ||||||
|     def resolve_name(self, name: Name): |  | ||||||
|         model = self.model_label(self.current_model) |  | ||||||
|         root_field = name.parts[0] |  | ||||||
|         field = self.models[model].get(root_field) |  | ||||||
|         # If the query goes into a JSON field, return the root |  | ||||||
|         # field as the JSON field will do the rest |  | ||||||
|         if isinstance(field, JSONSearchField): |  | ||||||
|             # This is a workaround; build_filter will remove the right-most |  | ||||||
|             # entry in the path as that is intended to be the same as the field |  | ||||||
|             # however for JSON that is not the case |  | ||||||
|             if name.parts[-1] != root_field: |  | ||||||
|                 name.parts.append(root_field) |  | ||||||
|             return field |  | ||||||
|         return super().resolve_name(name) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class QLSearch(BaseFilterBackend): |  | ||||||
|     """rest_framework search filter which uses DjangoQL""" |  | ||||||
|  |  | ||||||
|     def __init__(self): |  | ||||||
|         super().__init__() |  | ||||||
|         self._fallback = SearchFilter() |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def enabled(self): |  | ||||||
|         return apps.get_app_config("authentik_enterprise").enabled() |  | ||||||
|  |  | ||||||
|     def get_search_terms(self, request: Request) -> str: |  | ||||||
|         """Search terms are set by a ?search=... query parameter, |  | ||||||
|         and may be comma and/or whitespace delimited.""" |  | ||||||
|         params = request.query_params.get("search", "") |  | ||||||
|         params = params.replace("\x00", "")  # strip null characters |  | ||||||
|         return params |  | ||||||
|  |  | ||||||
|     def get_schema(self, request: Request, view) -> BaseSchema: |  | ||||||
|         ql_fields = [] |  | ||||||
|         if hasattr(view, "get_ql_fields"): |  | ||||||
|             ql_fields = view.get_ql_fields() |  | ||||||
|  |  | ||||||
|         class InlineSchema(BaseSchema): |  | ||||||
|             def get_fields(self, model): |  | ||||||
|                 return ql_fields or [] |  | ||||||
|  |  | ||||||
|         return InlineSchema |  | ||||||
|  |  | ||||||
|     def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet: |  | ||||||
|         search_query = self.get_search_terms(request) |  | ||||||
|         schema = self.get_schema(request, view) |  | ||||||
|         if len(search_query) == 0 or not self.enabled: |  | ||||||
|             return self._fallback.filter_queryset(request, queryset, view) |  | ||||||
|         try: |  | ||||||
|             return apply_search(queryset, search_query, schema=schema) |  | ||||||
|         except DjangoQLError as exc: |  | ||||||
|             LOGGER.debug("Failed to parse search expression", exc=exc) |  | ||||||
|             return self._fallback.filter_queryset(request, queryset, view) |  | ||||||
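QLSearch.filter_queryset only applies the DjangoQL parser when a search term is present and the enterprise app is enabled, and it falls back to the plain SearchFilter whenever parsing raises DjangoQLError. The control flow, reduced to a standalone sketch (the callables and ValueError stand in for the real filter backends and exception type):

    def filter_with_fallback(queryset, query, apply_ql, fallback):
        # An empty query and a parse failure both route to the fallback filter.
        if not query:
            return fallback(queryset, query)
        try:
            return apply_ql(queryset, query)
        except ValueError:
            return fallback(queryset, query)

    users = ["alice", "bob"]
    print(filter_with_fallback(
        users,
        'username = "alice"',
        apply_ql=lambda qs, q: [u for u in qs if f'"{u}"' in q],
        fallback=lambda qs, q: qs,
    ))   # ['alice']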
| @ -1,29 +0,0 @@ | |||||||
| from djangoql.serializers import DjangoQLSchemaSerializer |  | ||||||
| from drf_spectacular.generators import SchemaGenerator |  | ||||||
|  |  | ||||||
| from authentik.api.schema import create_component |  | ||||||
| from authentik.enterprise.search.fields import JSONSearchField |  | ||||||
| from authentik.enterprise.search.ql import AUTOCOMPLETE_COMPONENT_NAME, AUTOCOMPLETE_SCHEMA |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class AKQLSchemaSerializer(DjangoQLSchemaSerializer): |  | ||||||
|     def serialize(self, schema): |  | ||||||
|         serialization = super().serialize(schema) |  | ||||||
|         for _, fields in schema.models.items(): |  | ||||||
|             for _, field in fields.items(): |  | ||||||
|                 if not isinstance(field, JSONSearchField): |  | ||||||
|                     continue |  | ||||||
|                 serialization["models"].update(field.get_nested_options()) |  | ||||||
|         return serialization |  | ||||||
|  |  | ||||||
|     def serialize_field(self, field): |  | ||||||
|         result = super().serialize_field(field) |  | ||||||
|         if isinstance(field, JSONSearchField): |  | ||||||
|             result["relation"] = field.relation() |  | ||||||
|         return result |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def postprocess_schema_search_autocomplete(result, generator: SchemaGenerator, **kwargs): |  | ||||||
|     create_component(generator, AUTOCOMPLETE_COMPONENT_NAME, AUTOCOMPLETE_SCHEMA) |  | ||||||
|  |  | ||||||
|     return result |  | ||||||
| @ -1,17 +0,0 @@ | |||||||
| SPECTACULAR_SETTINGS = { |  | ||||||
|     "POSTPROCESSING_HOOKS": [ |  | ||||||
|         "authentik.api.schema.postprocess_schema_responses", |  | ||||||
|         "authentik.enterprise.search.schema.postprocess_schema_search_autocomplete", |  | ||||||
|         "drf_spectacular.hooks.postprocess_schema_enums", |  | ||||||
|     ], |  | ||||||
| } |  | ||||||
|  |  | ||||||
| REST_FRAMEWORK = { |  | ||||||
|     "DEFAULT_PAGINATION_CLASS": "authentik.enterprise.search.pagination.AutocompletePagination", |  | ||||||
|     "DEFAULT_FILTER_BACKENDS": [ |  | ||||||
|         "authentik.enterprise.search.ql.QLSearch", |  | ||||||
|         "authentik.rbac.filters.ObjectFilter", |  | ||||||
|         "django_filters.rest_framework.DjangoFilterBackend", |  | ||||||
|         "rest_framework.filters.OrderingFilter", |  | ||||||
|     ], |  | ||||||
| } |  | ||||||
| @ -1,78 +0,0 @@ | |||||||
| from json import loads |  | ||||||
| from unittest.mock import PropertyMock, patch |  | ||||||
| from urllib.parse import urlencode |  | ||||||
|  |  | ||||||
| from django.urls import reverse |  | ||||||
| from rest_framework.test import APITestCase |  | ||||||
|  |  | ||||||
| from authentik.core.tests.utils import create_test_admin_user |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @patch( |  | ||||||
|     "authentik.enterprise.audit.middleware.EnterpriseAuditMiddleware.enabled", |  | ||||||
|     PropertyMock(return_value=True), |  | ||||||
| ) |  | ||||||
| class QLTest(APITestCase): |  | ||||||
|  |  | ||||||
|     def setUp(self): |  | ||||||
|         self.user = create_test_admin_user() |  | ||||||
|         # ensure we have more than 1 user |  | ||||||
|         create_test_admin_user() |  | ||||||
|  |  | ||||||
|     def test_search(self): |  | ||||||
|         """Test simple search query""" |  | ||||||
|         self.client.force_login(self.user) |  | ||||||
|         query = f'username = "{self.user.username}"' |  | ||||||
|         res = self.client.get( |  | ||||||
|             reverse( |  | ||||||
|                 "authentik_api:user-list", |  | ||||||
|             ) |  | ||||||
|             + f"?{urlencode({"search": query})}" |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(res.status_code, 200) |  | ||||||
|         content = loads(res.content) |  | ||||||
|         self.assertEqual(content["pagination"]["count"], 1) |  | ||||||
|         self.assertEqual(content["results"][0]["username"], self.user.username) |  | ||||||
|  |  | ||||||
|     def test_no_search(self): |  | ||||||
|         """Ensure works with no search query""" |  | ||||||
|         self.client.force_login(self.user) |  | ||||||
|         res = self.client.get( |  | ||||||
|             reverse( |  | ||||||
|                 "authentik_api:user-list", |  | ||||||
|             ) |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(res.status_code, 200) |  | ||||||
|         content = loads(res.content) |  | ||||||
|         self.assertNotEqual(content["pagination"]["count"], 1) |  | ||||||
|  |  | ||||||
|     def test_search_no_ql(self): |  | ||||||
|         """Test simple search query (no QL)""" |  | ||||||
|         self.client.force_login(self.user) |  | ||||||
|         res = self.client.get( |  | ||||||
|             reverse( |  | ||||||
|                 "authentik_api:user-list", |  | ||||||
|             ) |  | ||||||
|             + f"?{urlencode({"search": self.user.username})}" |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(res.status_code, 200) |  | ||||||
|         content = loads(res.content) |  | ||||||
|         self.assertEqual(content["pagination"]["count"], 1) |  | ||||||
|         self.assertEqual(content["results"][0]["username"], self.user.username) |  | ||||||
|  |  | ||||||
|     def test_search_json(self): |  | ||||||
|         """Test search query with a JSON attribute""" |  | ||||||
|         self.user.attributes = {"foo": {"bar": "baz"}} |  | ||||||
|         self.user.save() |  | ||||||
|         self.client.force_login(self.user) |  | ||||||
|         query = 'attributes.foo.bar = "baz"' |  | ||||||
|         res = self.client.get( |  | ||||||
|             reverse( |  | ||||||
|                 "authentik_api:user-list", |  | ||||||
|             ) |  | ||||||
|             + f"?{urlencode({"search": query})}" |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(res.status_code, 200) |  | ||||||
|         content = loads(res.content) |  | ||||||
|         self.assertEqual(content["pagination"]["count"], 1) |  | ||||||
|         self.assertEqual(content["results"][0]["username"], self.user.username) |  | ||||||
| @ -18,7 +18,6 @@ TENANT_APPS = [ | |||||||
|     "authentik.enterprise.providers.google_workspace", |     "authentik.enterprise.providers.google_workspace", | ||||||
|     "authentik.enterprise.providers.microsoft_entra", |     "authentik.enterprise.providers.microsoft_entra", | ||||||
|     "authentik.enterprise.providers.ssf", |     "authentik.enterprise.providers.ssf", | ||||||
|     "authentik.enterprise.search", |  | ||||||
|     "authentik.enterprise.stages.authenticator_endpoint_gdtc", |     "authentik.enterprise.stages.authenticator_endpoint_gdtc", | ||||||
|     "authentik.enterprise.stages.mtls", |     "authentik.enterprise.stages.mtls", | ||||||
|     "authentik.enterprise.stages.source", |     "authentik.enterprise.stages.source", | ||||||
|  | |||||||
| @ -97,7 +97,6 @@ class SourceStageFinal(StageView): | |||||||
|         token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN) |         token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN) | ||||||
|         self.logger.info("Replacing source flow with overridden flow", flow=token.flow.slug) |         self.logger.info("Replacing source flow with overridden flow", flow=token.flow.slug) | ||||||
|         plan = token.plan |         plan = token.plan | ||||||
|         plan.context.update(self.executor.plan.context) |  | ||||||
|         plan.context[PLAN_CONTEXT_IS_RESTORED] = token |         plan.context[PLAN_CONTEXT_IS_RESTORED] = token | ||||||
|         response = plan.to_redirect(self.request, token.flow) |         response = plan.to_redirect(self.request, token.flow) | ||||||
|         token.delete() |         token.delete() | ||||||
|  | |||||||
| @ -90,17 +90,14 @@ class TestSourceStage(FlowTestCase): | |||||||
|         plan: FlowPlan = session[SESSION_KEY_PLAN] |         plan: FlowPlan = session[SESSION_KEY_PLAN] | ||||||
|         plan.insert_stage(in_memory_stage(SourceStageFinal), index=0) |         plan.insert_stage(in_memory_stage(SourceStageFinal), index=0) | ||||||
|         plan.context[PLAN_CONTEXT_IS_RESTORED] = flow_token |         plan.context[PLAN_CONTEXT_IS_RESTORED] = flow_token | ||||||
|         plan.context["foo"] = "bar" |  | ||||||
|         session[SESSION_KEY_PLAN] = plan |         session[SESSION_KEY_PLAN] = plan | ||||||
|         session.save() |         session.save() | ||||||
|  |  | ||||||
|         # Pretend we've just returned from the source |         # Pretend we've just returned from the source | ||||||
|         with self.assertFlowFinishes() as ff: |         response = self.client.get( | ||||||
|             response = self.client.get( |             reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}), follow=True | ||||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}), follow=True |         ) | ||||||
|             ) |         self.assertEqual(response.status_code, 200) | ||||||
|             self.assertEqual(response.status_code, 200) |         self.assertStageRedirects( | ||||||
|             self.assertStageRedirects( |             response, reverse("authentik_core:if-flow", kwargs={"flow_slug": flow.slug}) | ||||||
|                 response, reverse("authentik_core:if-flow", kwargs={"flow_slug": flow.slug}) |         ) | ||||||
|             ) |  | ||||||
|         self.assertEqual(ff().context["foo"], "bar") |  | ||||||
|  | |||||||
| @ -1,36 +1,28 @@ | |||||||
| """Events API Views""" | """Events API Views""" | ||||||
|  |  | ||||||
| from datetime import timedelta | from datetime import timedelta | ||||||
|  | from json import loads | ||||||
|  |  | ||||||
| import django_filters | import django_filters | ||||||
| from django.db.models import Count, ExpressionWrapper, F, QuerySet | from django.db.models.aggregates import Count | ||||||
| from django.db.models import DateTimeField as DjangoDateTimeField |  | ||||||
| from django.db.models.fields.json import KeyTextTransform, KeyTransform | from django.db.models.fields.json import KeyTextTransform, KeyTransform | ||||||
| from django.db.models.functions import TruncHour | from django.db.models.functions import ExtractDay, ExtractHour | ||||||
| from django.db.models.query_utils import Q | from django.db.models.query_utils import Q | ||||||
| from django.utils.timezone import now |  | ||||||
| from drf_spectacular.types import OpenApiTypes | from drf_spectacular.types import OpenApiTypes | ||||||
| from drf_spectacular.utils import OpenApiParameter, extend_schema | from drf_spectacular.utils import OpenApiParameter, extend_schema | ||||||
| from guardian.shortcuts import get_objects_for_user | from guardian.shortcuts import get_objects_for_user | ||||||
| from rest_framework.decorators import action | from rest_framework.decorators import action | ||||||
| from rest_framework.fields import ChoiceField, DateTimeField, DictField, IntegerField | from rest_framework.fields import DictField, IntegerField | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| from rest_framework.response import Response | from rest_framework.response import Response | ||||||
| from rest_framework.viewsets import ModelViewSet | from rest_framework.viewsets import ModelViewSet | ||||||
|  |  | ||||||
|  | from authentik.admin.api.metrics import CoordinateSerializer | ||||||
| from authentik.core.api.object_types import TypeCreateSerializer | from authentik.core.api.object_types import TypeCreateSerializer | ||||||
| from authentik.core.api.utils import ModelSerializer, PassiveSerializer | from authentik.core.api.utils import ModelSerializer, PassiveSerializer | ||||||
| from authentik.events.models import Event, EventAction | from authentik.events.models import Event, EventAction | ||||||
|  |  | ||||||
|  |  | ||||||
| class EventVolumeSerializer(PassiveSerializer): |  | ||||||
|     """Count of events of a given action created per time bucket""" |  | ||||||
|  |  | ||||||
|     action = ChoiceField(choices=EventAction.choices) |  | ||||||
|     time = DateTimeField() |  | ||||||
|     count = IntegerField() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class EventSerializer(ModelSerializer): | class EventSerializer(ModelSerializer): | ||||||
|     """Event Serializer""" |     """Event Serializer""" | ||||||
|  |  | ||||||
| @ -61,7 +53,7 @@ class EventsFilter(django_filters.FilterSet): | |||||||
|     """Filter for events""" |     """Filter for events""" | ||||||
|  |  | ||||||
|     username = django_filters.CharFilter( |     username = django_filters.CharFilter( | ||||||
|         field_name="user", label="Username", method="filter_username" |         field_name="user", lookup_expr="username", label="Username" | ||||||
|     ) |     ) | ||||||
|     context_model_pk = django_filters.CharFilter( |     context_model_pk = django_filters.CharFilter( | ||||||
|         field_name="context", |         field_name="context", | ||||||
| @ -86,19 +78,12 @@ class EventsFilter(django_filters.FilterSet): | |||||||
|         field_name="action", |         field_name="action", | ||||||
|         lookup_expr="icontains", |         lookup_expr="icontains", | ||||||
|     ) |     ) | ||||||
|     actions = django_filters.MultipleChoiceFilter( |  | ||||||
|         field_name="action", |  | ||||||
|         choices=EventAction.choices, |  | ||||||
|     ) |  | ||||||
|     brand_name = django_filters.CharFilter( |     brand_name = django_filters.CharFilter( | ||||||
|         field_name="brand", |         field_name="brand", | ||||||
|         lookup_expr="name", |         lookup_expr="name", | ||||||
|         label="Brand name", |         label="Brand name", | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|     def filter_username(self, queryset, name, value): |  | ||||||
|         return queryset.filter(Q(user__username=value) | Q(context__username=value)) |  | ||||||
|  |  | ||||||
|     def filter_context_model_pk(self, queryset, name, value): |     def filter_context_model_pk(self, queryset, name, value): | ||||||
|         """Because we store the PK as UUID.hex, |         """Because we store the PK as UUID.hex, | ||||||
|         we need to remove the dashes that a client may send. We can't use a |         we need to remove the dashes that a client may send. We can't use a | ||||||
| @ -132,22 +117,6 @@ class EventViewSet(ModelViewSet): | |||||||
|     ] |     ] | ||||||
|     filterset_class = EventsFilter |     filterset_class = EventsFilter | ||||||
|  |  | ||||||
|     def get_ql_fields(self): |  | ||||||
|         from djangoql.schema import DateTimeField, StrField |  | ||||||
|  |  | ||||||
|         from authentik.enterprise.search.fields import ChoiceSearchField, JSONSearchField |  | ||||||
|  |  | ||||||
|         return [ |  | ||||||
|             ChoiceSearchField(Event, "action"), |  | ||||||
|             StrField(Event, "event_uuid"), |  | ||||||
|             StrField(Event, "app", suggest_options=True), |  | ||||||
|             StrField(Event, "client_ip"), |  | ||||||
|             JSONSearchField(Event, "user", suggest_nested=False), |  | ||||||
|             JSONSearchField(Event, "brand", suggest_nested=False), |  | ||||||
|             JSONSearchField(Event, "context", suggest_nested=False), |  | ||||||
|             DateTimeField(Event, "created", suggest_options=True), |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|     @extend_schema( |     @extend_schema( | ||||||
|         methods=["GET"], |         methods=["GET"], | ||||||
|         responses={200: EventTopPerUserSerializer(many=True)}, |         responses={200: EventTopPerUserSerializer(many=True)}, | ||||||
| @ -187,37 +156,45 @@ class EventViewSet(ModelViewSet): | |||||||
|         return Response(EventTopPerUserSerializer(instance=events, many=True).data) |         return Response(EventTopPerUserSerializer(instance=events, many=True).data) | ||||||
|  |  | ||||||
|     @extend_schema( |     @extend_schema( | ||||||
|         responses={200: EventVolumeSerializer(many=True)}, |         responses={200: CoordinateSerializer(many=True)}, | ||||||
|         parameters=[ |  | ||||||
|             OpenApiParameter( |  | ||||||
|                 "history_days", |  | ||||||
|                 type=OpenApiTypes.NUMBER, |  | ||||||
|                 location=OpenApiParameter.QUERY, |  | ||||||
|                 required=False, |  | ||||||
|                 default=7, |  | ||||||
|             ), |  | ||||||
|         ], |  | ||||||
|     ) |     ) | ||||||
|     @action(detail=False, methods=["GET"], pagination_class=None) |     @action(detail=False, methods=["GET"], pagination_class=None) | ||||||
|     def volume(self, request: Request) -> Response: |     def volume(self, request: Request) -> Response: | ||||||
|         """Get event volume for specified filters and timeframe""" |         """Get event volume for specified filters and timeframe""" | ||||||
|         queryset: QuerySet[Event] = self.filter_queryset(self.get_queryset()) |         queryset = self.filter_queryset(self.get_queryset()) | ||||||
|         delta = timedelta(days=7) |         return Response(queryset.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)) | ||||||
|         time_delta = request.query_params.get("history_days", 7) |  | ||||||
|         if time_delta: |     @extend_schema( | ||||||
|             delta = timedelta(days=min(int(time_delta), 60)) |         responses={200: CoordinateSerializer(many=True)}, | ||||||
|  |         filters=[], | ||||||
|  |         parameters=[ | ||||||
|  |             OpenApiParameter( | ||||||
|  |                 "action", | ||||||
|  |                 type=OpenApiTypes.STR, | ||||||
|  |                 location=OpenApiParameter.QUERY, | ||||||
|  |                 required=False, | ||||||
|  |             ), | ||||||
|  |             OpenApiParameter( | ||||||
|  |                 "query", | ||||||
|  |                 type=OpenApiTypes.STR, | ||||||
|  |                 location=OpenApiParameter.QUERY, | ||||||
|  |                 required=False, | ||||||
|  |             ), | ||||||
|  |         ], | ||||||
|  |     ) | ||||||
|  |     @action(detail=False, methods=["GET"], pagination_class=None) | ||||||
|  |     def per_month(self, request: Request): | ||||||
|  |         """Get the count of events per day over the last month""" | ||||||
|  |         filtered_action = request.query_params.get("action", EventAction.LOGIN) | ||||||
|  |         try: | ||||||
|  |             query = loads(request.query_params.get("query", "{}")) | ||||||
|  |         except ValueError: | ||||||
|  |             return Response(status=400) | ||||||
|         return Response( |         return Response( | ||||||
|             queryset.filter(created__gte=now() - delta) |             get_objects_for_user(request.user, "authentik_events.view_event") | ||||||
|             .annotate(hour=TruncHour("created")) |             .filter(action=filtered_action) | ||||||
|             .annotate( |             .filter(**query) | ||||||
|                 time=ExpressionWrapper( |             .get_events_per(timedelta(weeks=4), ExtractDay, 30) | ||||||
|                     F("hour") - (F("hour__hour") % 6) * timedelta(hours=1), |  | ||||||
|                     output_field=DjangoDateTimeField(), |  | ||||||
|                 ) |  | ||||||
|             ) |  | ||||||
|             .values("time", "action") |  | ||||||
|             .annotate(count=Count("pk")) |  | ||||||
|             .order_by("time", "action") |  | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     @extend_schema(responses={200: TypeCreateSerializer(many=True)}) |     @extend_schema(responses={200: TypeCreateSerializer(many=True)}) | ||||||
|  | |||||||
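The left column's `volume` implementation buckets events by truncating `created` to the hour and then subtracting `hour % 6` hours, which yields 6-hour windows. A plain-Python sketch of that rounding, outside the ORM, to make the arithmetic concrete (the helper name is illustrative only):

    from datetime import datetime, timedelta

    def bucket_6h(ts: datetime) -> datetime:
        """Round a timestamp down to the start of its 6-hour window,
        mirroring TruncHour("created") minus (hour % 6) hours."""
        hourly = ts.replace(minute=0, second=0, microsecond=0)
        return hourly - timedelta(hours=hourly.hour % 6)

    assert bucket_6h(datetime(2025, 6, 16, 14, 37)) == datetime(2025, 6, 16, 12, 0)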
| @ -11,7 +11,7 @@ from authentik.events.models import NotificationRule | |||||||
| class NotificationRuleSerializer(ModelSerializer): | class NotificationRuleSerializer(ModelSerializer): | ||||||
|     """NotificationRule Serializer""" |     """NotificationRule Serializer""" | ||||||
|  |  | ||||||
|     destination_group_obj = GroupSerializer(read_only=True, source="destination_group") |     group_obj = GroupSerializer(read_only=True, source="group") | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|         model = NotificationRule |         model = NotificationRule | ||||||
| @ -20,9 +20,8 @@ class NotificationRuleSerializer(ModelSerializer): | |||||||
|             "name", |             "name", | ||||||
|             "transports", |             "transports", | ||||||
|             "severity", |             "severity", | ||||||
|             "destination_group", |             "group", | ||||||
|             "destination_group_obj", |             "group_obj", | ||||||
|             "destination_event_user", |  | ||||||
|         ] |         ] | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -31,6 +30,6 @@ class NotificationRuleViewSet(UsedByMixin, ModelViewSet): | |||||||
|  |  | ||||||
|     queryset = NotificationRule.objects.all() |     queryset = NotificationRule.objects.all() | ||||||
|     serializer_class = NotificationRuleSerializer |     serializer_class = NotificationRuleSerializer | ||||||
|     filterset_fields = ["name", "severity", "destination_group__name"] |     filterset_fields = ["name", "severity", "group__name"] | ||||||
|     ordering = ["name"] |     ordering = ["name"] | ||||||
|     search_fields = ["name", "destination_group__name"] |     search_fields = ["name", "group__name"] | ||||||
|  | |||||||
| @ -15,13 +15,13 @@ class MMDBContextProcessor(EventContextProcessor): | |||||||
|         self.reader: Reader | None = None |         self.reader: Reader | None = None | ||||||
|         self._last_mtime: float = 0.0 |         self._last_mtime: float = 0.0 | ||||||
|         self.logger = get_logger() |         self.logger = get_logger() | ||||||
|         self.load() |         self.open() | ||||||
|  |  | ||||||
|     def path(self) -> str | None: |     def path(self) -> str | None: | ||||||
|         """Get the path to the MMDB file to load""" |         """Get the path to the MMDB file to load""" | ||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
|  |  | ||||||
|     def load(self): |     def open(self): | ||||||
|         """Get GeoIP Reader, if configured, otherwise none""" |         """Get GeoIP Reader, if configured, otherwise none""" | ||||||
|         path = self.path() |         path = self.path() | ||||||
|         if path == "" or not path: |         if path == "" or not path: | ||||||
| @ -44,7 +44,7 @@ class MMDBContextProcessor(EventContextProcessor): | |||||||
|             diff = self._last_mtime < mtime |             diff = self._last_mtime < mtime | ||||||
|             if diff > 0: |             if diff > 0: | ||||||
|                 self.logger.info("Found new MMDB Database, reopening", diff=diff, path=path) |                 self.logger.info("Found new MMDB Database, reopening", diff=diff, path=path) | ||||||
|                 self.load() |                 self.open() | ||||||
|         except OSError as exc: |         except OSError as exc: | ||||||
|             self.logger.warning("Failed to check MMDB age", exc=exc) |             self.logger.warning("Failed to check MMDB age", exc=exc) | ||||||
|  |  | ||||||
|  | |||||||
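Both sides of the hunk above follow the same pattern: remember the file's mtime at load time, and reopen when a newer mtime is observed; only the method name differs (`load` on the left, `open` on the right). A self-contained sketch of that pattern with a generic file instead of an MMDB reader (all names here are illustrative, not authentik APIs):

    from pathlib import Path

    class HotReloadingFile:
        """Reload a file's contents whenever its mtime advances."""

        def __init__(self, path: str):
            self.path = Path(path)
            self._last_mtime = 0.0
            self._data = b""
            self.load()

        def load(self):
            self._last_mtime = self.path.stat().st_mtime
            self._data = self.path.read_bytes()

        def check_expired(self):
            try:
                if self.path.stat().st_mtime > self._last_mtime:
                    self.load()
            except OSError:
                pass  # keep serving the last successfully loaded data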
| @ -19,7 +19,7 @@ from authentik.blueprints.v1.importer import excluded_models | |||||||
| from authentik.core.models import Group, User | from authentik.core.models import Group, User | ||||||
| from authentik.events.models import Event, EventAction, Notification | from authentik.events.models import Event, EventAction, Notification | ||||||
| from authentik.events.utils import model_to_dict | from authentik.events.utils import model_to_dict | ||||||
| from authentik.lib.sentry import should_ignore_exception | from authentik.lib.sentry import before_send | ||||||
| from authentik.lib.utils.errors import exception_to_string | from authentik.lib.utils.errors import exception_to_string | ||||||
| from authentik.stages.authenticator_static.models import StaticToken | from authentik.stages.authenticator_static.models import StaticToken | ||||||
|  |  | ||||||
| @ -173,7 +173,7 @@ class AuditMiddleware: | |||||||
|                 message=exception_to_string(exception), |                 message=exception_to_string(exception), | ||||||
|             ) |             ) | ||||||
|             thread.run() |             thread.run() | ||||||
|         elif not should_ignore_exception(exception): |         elif before_send({}, {"exc_info": (None, exception, None)}) is not None: | ||||||
|             thread = EventNewThread( |             thread = EventNewThread( | ||||||
|                 EventAction.SYSTEM_EXCEPTION, |                 EventAction.SYSTEM_EXCEPTION, | ||||||
|                 request, |                 request, | ||||||
|  | |||||||
| @ -1,26 +0,0 @@ | |||||||
| # Generated by Django 5.1.11 on 2025-06-16 23:21 |  | ||||||
|  |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_events", "0009_remove_notificationtransport_webhook_mapping_and_more"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.RenameField( |  | ||||||
|             model_name="notificationrule", |  | ||||||
|             old_name="group", |  | ||||||
|             new_name="destination_group", |  | ||||||
|         ), |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="notificationrule", |  | ||||||
|             name="destination_event_user", |  | ||||||
|             field=models.BooleanField( |  | ||||||
|                 default=False, |  | ||||||
|                 help_text="When enabled, the notification will be sent to the user that triggered the event. When destination_group is configured, the notification is sent to both.", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -1,16 +1,21 @@ | |||||||
| """authentik events models""" | """authentik events models""" | ||||||
|  |  | ||||||
| from collections.abc import Generator | import time | ||||||
|  | from collections import Counter | ||||||
| from datetime import timedelta | from datetime import timedelta | ||||||
| from difflib import get_close_matches | from difflib import get_close_matches | ||||||
| from functools import lru_cache | from functools import lru_cache | ||||||
| from inspect import currentframe | from inspect import currentframe | ||||||
| from smtplib import SMTPException | from smtplib import SMTPException | ||||||
| from typing import Any |  | ||||||
| from uuid import uuid4 | from uuid import uuid4 | ||||||
|  |  | ||||||
| from django.apps import apps | from django.apps import apps | ||||||
| from django.db import connection, models | from django.db import connection, models | ||||||
|  | from django.db.models import Count, ExpressionWrapper, F | ||||||
|  | from django.db.models.fields import DurationField | ||||||
|  | from django.db.models.functions import Extract | ||||||
|  | from django.db.models.manager import Manager | ||||||
|  | from django.db.models.query import QuerySet | ||||||
| from django.http import HttpRequest | from django.http import HttpRequest | ||||||
| from django.http.request import QueryDict | from django.http.request import QueryDict | ||||||
| from django.utils.timezone import now | from django.utils.timezone import now | ||||||
| @ -119,6 +124,60 @@ class EventAction(models.TextChoices): | |||||||
|     CUSTOM_PREFIX = "custom_" |     CUSTOM_PREFIX = "custom_" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class EventQuerySet(QuerySet): | ||||||
|  |     """Custom events query set with helper functions""" | ||||||
|  |  | ||||||
|  |     def get_events_per( | ||||||
|  |         self, | ||||||
|  |         time_since: timedelta, | ||||||
|  |         extract: Extract, | ||||||
|  |         data_points: int, | ||||||
|  |     ) -> list[dict[str, int]]: | ||||||
|  |         """Get event counts per interval since time_since, zero-filled for missing buckets""" | ||||||
|  |         _now = now() | ||||||
|  |         max_since = timedelta(days=60) | ||||||
|  |         # Allow maximum of 60 days to limit load | ||||||
|  |         if time_since.total_seconds() > max_since.total_seconds(): | ||||||
|  |             time_since = max_since | ||||||
|  |         date_from = _now - time_since | ||||||
|  |         result = ( | ||||||
|  |             self.filter(created__gte=date_from) | ||||||
|  |             .annotate(age=ExpressionWrapper(_now - F("created"), output_field=DurationField())) | ||||||
|  |             .annotate(age_interval=extract("age")) | ||||||
|  |             .values("age_interval") | ||||||
|  |             .annotate(count=Count("pk")) | ||||||
|  |             .order_by("age_interval") | ||||||
|  |         ) | ||||||
|  |         data = Counter({int(d["age_interval"]): d["count"] for d in result}) | ||||||
|  |         results = [] | ||||||
|  |         interval_delta = time_since / data_points | ||||||
|  |         for interval in range(1, -data_points, -1): | ||||||
|  |             results.append( | ||||||
|  |                 { | ||||||
|  |                     "x_cord": time.mktime((_now + (interval_delta * interval)).timetuple()) * 1000, | ||||||
|  |                     "y_cord": data[interval * -1], | ||||||
|  |                 } | ||||||
|  |             ) | ||||||
|  |         return results | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class EventManager(Manager): | ||||||
|  |     """Custom helper methods for Events""" | ||||||
|  |  | ||||||
|  |     def get_queryset(self) -> QuerySet: | ||||||
|  |         """use custom queryset""" | ||||||
|  |         return EventQuerySet(self.model, using=self._db) | ||||||
|  |  | ||||||
|  |     def get_events_per( | ||||||
|  |         self, | ||||||
|  |         time_since: timedelta, | ||||||
|  |         extract: Extract, | ||||||
|  |         data_points: int, | ||||||
|  |     ) -> list[dict[str, int]]: | ||||||
|  |         """Wrap method from queryset""" | ||||||
|  |         return self.get_queryset().get_events_per(time_since, extract, data_points) | ||||||
|  |  | ||||||
|  |  | ||||||
| class Event(SerializerModel, ExpiringModel): | class Event(SerializerModel, ExpiringModel): | ||||||
|     """An individual Audit/Metrics/Notification/Error Event""" |     """An individual Audit/Metrics/Notification/Error Event""" | ||||||
|  |  | ||||||
| @ -134,6 +193,8 @@ class Event(SerializerModel, ExpiringModel): | |||||||
|     # Shadow the expires attribute from ExpiringModel to override the default duration |     # Shadow the expires attribute from ExpiringModel to override the default duration | ||||||
|     expires = models.DateTimeField(default=default_event_duration) |     expires = models.DateTimeField(default=default_event_duration) | ||||||
|  |  | ||||||
|  |     objects = EventManager() | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     def _get_app_from_request(request: HttpRequest) -> str: |     def _get_app_from_request(request: HttpRequest) -> str: | ||||||
|         if not isinstance(request, HttpRequest): |         if not isinstance(request, HttpRequest): | ||||||
| @ -193,32 +254,17 @@ class Event(SerializerModel, ExpiringModel): | |||||||
|             brand: Brand = request.brand |             brand: Brand = request.brand | ||||||
|             self.brand = sanitize_dict(model_to_dict(brand)) |             self.brand = sanitize_dict(model_to_dict(brand)) | ||||||
|         if hasattr(request, "user"): |         if hasattr(request, "user"): | ||||||
|             self.user = get_user(request.user) |             original_user = None | ||||||
|  |             if hasattr(request, "session"): | ||||||
|  |                 original_user = request.session.get(SESSION_KEY_IMPERSONATE_ORIGINAL_USER, None) | ||||||
|  |             self.user = get_user(request.user, original_user) | ||||||
|         if user: |         if user: | ||||||
|             self.user = get_user(user) |             self.user = get_user(user) | ||||||
|  |         # Check if we're currently impersonating, and add that user | ||||||
|         if hasattr(request, "session"): |         if hasattr(request, "session"): | ||||||
|             from authentik.flows.views.executor import SESSION_KEY_PLAN |  | ||||||
|  |  | ||||||
|             # Check if we're currently impersonating, and add that user |  | ||||||
|             if SESSION_KEY_IMPERSONATE_ORIGINAL_USER in request.session: |             if SESSION_KEY_IMPERSONATE_ORIGINAL_USER in request.session: | ||||||
|                 self.user = get_user(request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER]) |                 self.user = get_user(request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER]) | ||||||
|                 self.user["on_behalf_of"] = get_user(request.session[SESSION_KEY_IMPERSONATE_USER]) |                 self.user["on_behalf_of"] = get_user(request.session[SESSION_KEY_IMPERSONATE_USER]) | ||||||
|             # Special case for events that happen during a flow, the user might not be authenticated |  | ||||||
|             # yet but is a pending user instead |  | ||||||
|             if SESSION_KEY_PLAN in request.session: |  | ||||||
|                 from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlan |  | ||||||
|  |  | ||||||
|                 plan: FlowPlan = request.session[SESSION_KEY_PLAN] |  | ||||||
|                 pending_user = plan.context.get(PLAN_CONTEXT_PENDING_USER, None) |  | ||||||
|                 # Only save `authenticated_as` if there's a different pending user in the flow |  | ||||||
|                 # than the user that is authenticated |  | ||||||
|                 if pending_user and ( |  | ||||||
|                     (pending_user.pk and pending_user.pk != self.user.get("pk")) |  | ||||||
|                     or (not pending_user.pk) |  | ||||||
|                 ): |  | ||||||
|                     orig_user = self.user.copy() |  | ||||||
|  |  | ||||||
|                     self.user = {"authenticated_as": orig_user, **get_user(pending_user)} |  | ||||||
|         # User 255.255.255.255 as fallback if IP cannot be determined |         # User 255.255.255.255 as fallback if IP cannot be determined | ||||||
|         self.client_ip = ClientIPMiddleware.get_client_ip(request) |         self.client_ip = ClientIPMiddleware.get_client_ip(request) | ||||||
|         # Enrich event data |         # Enrich event data | ||||||
| @ -564,7 +610,7 @@ class NotificationRule(SerializerModel, PolicyBindingModel): | |||||||
|         default=NotificationSeverity.NOTICE, |         default=NotificationSeverity.NOTICE, | ||||||
|         help_text=_("Controls which severity level the created notifications will have."), |         help_text=_("Controls which severity level the created notifications will have."), | ||||||
|     ) |     ) | ||||||
|     destination_group = models.ForeignKey( |     group = models.ForeignKey( | ||||||
|         Group, |         Group, | ||||||
|         help_text=_( |         help_text=_( | ||||||
|             "Define which group of users this notification should be sent and shown to. " |             "Define which group of users this notification should be sent and shown to. " | ||||||
| @ -574,19 +620,6 @@ class NotificationRule(SerializerModel, PolicyBindingModel): | |||||||
|         blank=True, |         blank=True, | ||||||
|         on_delete=models.SET_NULL, |         on_delete=models.SET_NULL, | ||||||
|     ) |     ) | ||||||
|     destination_event_user = models.BooleanField( |  | ||||||
|         default=False, |  | ||||||
|         help_text=_( |  | ||||||
|             "When enabled, the notification will be sent to the user that triggered the event. " |  | ||||||
|             "When destination_group is configured, the notification is sent to both." |  | ||||||
|         ), |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     def destination_users(self, event: Event) -> Generator[User, Any]: |  | ||||||
|         if self.destination_event_user and event.user.get("pk"): |  | ||||||
|             yield User(pk=event.user.get("pk")) |  | ||||||
|         if self.destination_group: |  | ||||||
|             yield from self.destination_group.users.all() |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def serializer(self) -> type[Serializer]: |     def serializer(self) -> type[Serializer]: | ||||||
|  | |||||||
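The new `EventQuerySet.get_events_per` groups rows by how many intervals old they are, then zero-fills the series by reading counts out of a `Counter` while stepping backwards through the requested number of data points. A standalone sketch of that fill step, with a made-up `counts` mapping standing in for the aggregated queryset:

    import time
    from collections import Counter
    from datetime import datetime, timedelta

    def fill_series(now, time_since, data_points, counts):
        """Mirror the zero-fill loop: Counter returns 0 for missing intervals."""
        data = Counter(counts)
        interval_delta = time_since / data_points
        results = []
        for interval in range(1, -data_points, -1):
            results.append({
                "x_cord": time.mktime((now + interval_delta * interval).timetuple()) * 1000,
                "y_cord": data[interval * -1],
            })
        return results

    # Events seen 0 and 2 intervals ago; every other point comes back as 0
    series = fill_series(datetime(2025, 6, 1), timedelta(days=7), 7, {0: 3, 2: 5})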
| @ -68,10 +68,14 @@ def event_trigger_handler(event_uuid: str, trigger_name: str): | |||||||
|     if not result.passing: |     if not result.passing: | ||||||
|         return |         return | ||||||
|  |  | ||||||
|  |     if not trigger.group: | ||||||
|  |         LOGGER.debug("e(trigger): trigger has no group", trigger=trigger) | ||||||
|  |         return | ||||||
|  |  | ||||||
|     LOGGER.debug("e(trigger): event trigger matched", trigger=trigger) |     LOGGER.debug("e(trigger): event trigger matched", trigger=trigger) | ||||||
|     # Create the notification objects |     # Create the notification objects | ||||||
|     for transport in trigger.transports.all(): |     for transport in trigger.transports.all(): | ||||||
|         for user in trigger.destination_users(event): |         for user in trigger.group.users.all(): | ||||||
|             LOGGER.debug("created notification") |             LOGGER.debug("created notification") | ||||||
|             notification_transport.apply_async( |             notification_transport.apply_async( | ||||||
|                 args=[ |                 args=[ | ||||||
|  | |||||||
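On the left, the fan-out now draws recipients from the `destination_users` generator added in the models hunk, so a rule can notify the event's own user, the configured group, or both. A toy sketch of that generator shape with plain lists standing in for querysets (names and data are illustrative):

    def destination_users(event_username, group_members, notify_event_user):
        """Yield the event's user first (when enabled and known), then every group member."""
        if notify_event_user and event_username:
            yield event_username
        yield from group_members

    recipients = list(destination_users("jane", ["alice", "bob"], notify_event_user=True))
    # -> ["jane", "alice", "bob"]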
| @ -2,9 +2,7 @@ | |||||||
|  |  | ||||||
| from django.test import TestCase | from django.test import TestCase | ||||||
|  |  | ||||||
| from authentik.events.context_processors.base import get_context_processors |  | ||||||
| from authentik.events.context_processors.geoip import GeoIPContextProcessor | from authentik.events.context_processors.geoip import GeoIPContextProcessor | ||||||
| from authentik.events.models import Event, EventAction |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestGeoIP(TestCase): | class TestGeoIP(TestCase): | ||||||
| @ -15,7 +13,8 @@ class TestGeoIP(TestCase): | |||||||
|  |  | ||||||
|     def test_simple(self): |     def test_simple(self): | ||||||
|         """Test simple city wrapper""" |         """Test simple city wrapper""" | ||||||
|         # IPs from https://github.com/maxmind/MaxMind-DB/blob/main/source-data/GeoLite2-City-Test.json |         # IPs from | ||||||
|  |         # https://github.com/maxmind/MaxMind-DB/blob/main/source-data/GeoLite2-City-Test.json | ||||||
|         self.assertEqual( |         self.assertEqual( | ||||||
|             self.reader.city_dict("2.125.160.216"), |             self.reader.city_dict("2.125.160.216"), | ||||||
|             { |             { | ||||||
| @ -26,12 +25,3 @@ class TestGeoIP(TestCase): | |||||||
|                 "long": -1.25, |                 "long": -1.25, | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     def test_special_chars(self): |  | ||||||
|         """Test city name with special characters""" |  | ||||||
|         # IPs from https://github.com/maxmind/MaxMind-DB/blob/main/source-data/GeoLite2-City-Test.json |  | ||||||
|         event = Event.new(EventAction.LOGIN) |  | ||||||
|         event.client_ip = "89.160.20.112" |  | ||||||
|         for processor in get_context_processors(): |  | ||||||
|             processor.enrich_event(event) |  | ||||||
|         event.save() |  | ||||||
|  | |||||||
| @ -8,11 +8,9 @@ from django.views.debug import SafeExceptionReporterFilter | |||||||
| from guardian.shortcuts import get_anonymous_user | from guardian.shortcuts import get_anonymous_user | ||||||
|  |  | ||||||
| from authentik.brands.models import Brand | from authentik.brands.models import Brand | ||||||
| from authentik.core.models import Group, User | from authentik.core.models import Group | ||||||
| from authentik.core.tests.utils import create_test_user |  | ||||||
| from authentik.events.models import Event | from authentik.events.models import Event | ||||||
| from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlan | from authentik.flows.views.executor import QS_QUERY | ||||||
| from authentik.flows.views.executor import QS_QUERY, SESSION_KEY_PLAN |  | ||||||
| from authentik.lib.generators import generate_id | from authentik.lib.generators import generate_id | ||||||
| from authentik.policies.dummy.models import DummyPolicy | from authentik.policies.dummy.models import DummyPolicy | ||||||
|  |  | ||||||
| @ -118,92 +116,3 @@ class TestEvents(TestCase): | |||||||
|                 "pk": brand.pk.hex, |                 "pk": brand.pk.hex, | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     def test_from_http_flow_pending_user(self): |  | ||||||
|         """Test request from flow request with a pending user""" |  | ||||||
|         user = create_test_user() |  | ||||||
|  |  | ||||||
|         session = self.client.session |  | ||||||
|         plan = FlowPlan(generate_id()) |  | ||||||
|         plan.context[PLAN_CONTEXT_PENDING_USER] = user |  | ||||||
|         session[SESSION_KEY_PLAN] = plan |  | ||||||
|         session.save() |  | ||||||
|  |  | ||||||
|         request = self.factory.get("/") |  | ||||||
|         request.session = session |  | ||||||
|         request.user = user |  | ||||||
|  |  | ||||||
|         event = Event.new("unittest").from_http(request) |  | ||||||
|         self.assertEqual( |  | ||||||
|             event.user, |  | ||||||
|             { |  | ||||||
|                 "email": user.email, |  | ||||||
|                 "pk": user.pk, |  | ||||||
|                 "username": user.username, |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_from_http_flow_pending_user_anon(self): |  | ||||||
|         """Test request from flow request with a pending user""" |  | ||||||
|         user = create_test_user() |  | ||||||
|         anon = get_anonymous_user() |  | ||||||
|  |  | ||||||
|         session = self.client.session |  | ||||||
|         plan = FlowPlan(generate_id()) |  | ||||||
|         plan.context[PLAN_CONTEXT_PENDING_USER] = user |  | ||||||
|         session[SESSION_KEY_PLAN] = plan |  | ||||||
|         session.save() |  | ||||||
|  |  | ||||||
|         request = self.factory.get("/") |  | ||||||
|         request.session = session |  | ||||||
|         request.user = anon |  | ||||||
|  |  | ||||||
|         event = Event.new("unittest").from_http(request) |  | ||||||
|         self.assertEqual( |  | ||||||
|             event.user, |  | ||||||
|             { |  | ||||||
|                 "authenticated_as": { |  | ||||||
|                     "pk": anon.pk, |  | ||||||
|                     "is_anonymous": True, |  | ||||||
|                     "username": "AnonymousUser", |  | ||||||
|                     "email": "", |  | ||||||
|                 }, |  | ||||||
|                 "email": user.email, |  | ||||||
|                 "pk": user.pk, |  | ||||||
|                 "username": user.username, |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_from_http_flow_pending_user_fake(self): |  | ||||||
|         """Test request from flow request with a pending user""" |  | ||||||
|         user = User( |  | ||||||
|             username=generate_id(), |  | ||||||
|             email=generate_id(), |  | ||||||
|         ) |  | ||||||
|         anon = get_anonymous_user() |  | ||||||
|  |  | ||||||
|         session = self.client.session |  | ||||||
|         plan = FlowPlan(generate_id()) |  | ||||||
|         plan.context[PLAN_CONTEXT_PENDING_USER] = user |  | ||||||
|         session[SESSION_KEY_PLAN] = plan |  | ||||||
|         session.save() |  | ||||||
|  |  | ||||||
|         request = self.factory.get("/") |  | ||||||
|         request.session = session |  | ||||||
|         request.user = anon |  | ||||||
|  |  | ||||||
|         event = Event.new("unittest").from_http(request) |  | ||||||
|         self.assertEqual( |  | ||||||
|             event.user, |  | ||||||
|             { |  | ||||||
|                 "authenticated_as": { |  | ||||||
|                     "pk": anon.pk, |  | ||||||
|                     "is_anonymous": True, |  | ||||||
|                     "username": "AnonymousUser", |  | ||||||
|                     "email": "", |  | ||||||
|                 }, |  | ||||||
|                 "email": user.email, |  | ||||||
|                 "pk": user.pk, |  | ||||||
|                 "username": user.username, |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  | |||||||
| @ -6,7 +6,6 @@ from django.urls import reverse | |||||||
| from rest_framework.test import APITestCase | from rest_framework.test import APITestCase | ||||||
|  |  | ||||||
| from authentik.core.models import Group, User | from authentik.core.models import Group, User | ||||||
| from authentik.core.tests.utils import create_test_user |  | ||||||
| from authentik.events.models import ( | from authentik.events.models import ( | ||||||
|     Event, |     Event, | ||||||
|     EventAction, |     EventAction, | ||||||
| @ -35,7 +34,7 @@ class TestEventsNotifications(APITestCase): | |||||||
|     def test_trigger_empty(self): |     def test_trigger_empty(self): | ||||||
|         """Test trigger without any policies attached""" |         """Test trigger without any policies attached""" | ||||||
|         transport = NotificationTransport.objects.create(name=generate_id()) |         transport = NotificationTransport.objects.create(name=generate_id()) | ||||||
|         trigger = NotificationRule.objects.create(name=generate_id(), destination_group=self.group) |         trigger = NotificationRule.objects.create(name=generate_id(), group=self.group) | ||||||
|         trigger.transports.add(transport) |         trigger.transports.add(transport) | ||||||
|         trigger.save() |         trigger.save() | ||||||
|  |  | ||||||
| @ -47,7 +46,7 @@ class TestEventsNotifications(APITestCase): | |||||||
|     def test_trigger_single(self): |     def test_trigger_single(self): | ||||||
|         """Test simple transport triggering""" |         """Test simple transport triggering""" | ||||||
|         transport = NotificationTransport.objects.create(name=generate_id()) |         transport = NotificationTransport.objects.create(name=generate_id()) | ||||||
|         trigger = NotificationRule.objects.create(name=generate_id(), destination_group=self.group) |         trigger = NotificationRule.objects.create(name=generate_id(), group=self.group) | ||||||
|         trigger.transports.add(transport) |         trigger.transports.add(transport) | ||||||
|         trigger.save() |         trigger.save() | ||||||
|         matcher = EventMatcherPolicy.objects.create( |         matcher = EventMatcherPolicy.objects.create( | ||||||
| @ -60,25 +59,6 @@ class TestEventsNotifications(APITestCase): | |||||||
|             Event.new(EventAction.CUSTOM_PREFIX).save() |             Event.new(EventAction.CUSTOM_PREFIX).save() | ||||||
|         self.assertEqual(execute_mock.call_count, 1) |         self.assertEqual(execute_mock.call_count, 1) | ||||||
|  |  | ||||||
|     def test_trigger_event_user(self): |  | ||||||
|         """Test trigger with event user""" |  | ||||||
|         user = create_test_user() |  | ||||||
|         transport = NotificationTransport.objects.create(name=generate_id()) |  | ||||||
|         trigger = NotificationRule.objects.create(name=generate_id(), destination_event_user=True) |  | ||||||
|         trigger.transports.add(transport) |  | ||||||
|         trigger.save() |  | ||||||
|         matcher = EventMatcherPolicy.objects.create( |  | ||||||
|             name="matcher", action=EventAction.CUSTOM_PREFIX |  | ||||||
|         ) |  | ||||||
|         PolicyBinding.objects.create(target=trigger, policy=matcher, order=0) |  | ||||||
|  |  | ||||||
|         execute_mock = MagicMock() |  | ||||||
|         with patch("authentik.events.models.NotificationTransport.send", execute_mock): |  | ||||||
|             Event.new(EventAction.CUSTOM_PREFIX).set_user(user).save() |  | ||||||
|         self.assertEqual(execute_mock.call_count, 1) |  | ||||||
|         notification: Notification = execute_mock.call_args[0][0] |  | ||||||
|         self.assertEqual(notification.user, user) |  | ||||||
|  |  | ||||||
|     def test_trigger_no_group(self): |     def test_trigger_no_group(self): | ||||||
|         """Test trigger without group""" |         """Test trigger without group""" | ||||||
|         trigger = NotificationRule.objects.create(name=generate_id()) |         trigger = NotificationRule.objects.create(name=generate_id()) | ||||||
| @ -96,7 +76,7 @@ class TestEventsNotifications(APITestCase): | |||||||
|         """Test Policy error which would cause recursion""" |         """Test Policy error which would cause recursion""" | ||||||
|         transport = NotificationTransport.objects.create(name=generate_id()) |         transport = NotificationTransport.objects.create(name=generate_id()) | ||||||
|         NotificationRule.objects.filter(name__startswith="default").delete() |         NotificationRule.objects.filter(name__startswith="default").delete() | ||||||
|         trigger = NotificationRule.objects.create(name=generate_id(), destination_group=self.group) |         trigger = NotificationRule.objects.create(name=generate_id(), group=self.group) | ||||||
|         trigger.transports.add(transport) |         trigger.transports.add(transport) | ||||||
|         trigger.save() |         trigger.save() | ||||||
|         matcher = EventMatcherPolicy.objects.create( |         matcher = EventMatcherPolicy.objects.create( | ||||||
| @ -119,7 +99,7 @@ class TestEventsNotifications(APITestCase): | |||||||
|  |  | ||||||
|         transport = NotificationTransport.objects.create(name=generate_id(), send_once=True) |         transport = NotificationTransport.objects.create(name=generate_id(), send_once=True) | ||||||
|         NotificationRule.objects.filter(name__startswith="default").delete() |         NotificationRule.objects.filter(name__startswith="default").delete() | ||||||
|         trigger = NotificationRule.objects.create(name=generate_id(), destination_group=self.group) |         trigger = NotificationRule.objects.create(name=generate_id(), group=self.group) | ||||||
|         trigger.transports.add(transport) |         trigger.transports.add(transport) | ||||||
|         trigger.save() |         trigger.save() | ||||||
|         matcher = EventMatcherPolicy.objects.create( |         matcher = EventMatcherPolicy.objects.create( | ||||||
| @ -143,7 +123,7 @@ class TestEventsNotifications(APITestCase): | |||||||
|             name=generate_id(), webhook_mapping_body=mapping, mode=TransportMode.LOCAL |             name=generate_id(), webhook_mapping_body=mapping, mode=TransportMode.LOCAL | ||||||
|         ) |         ) | ||||||
|         NotificationRule.objects.filter(name__startswith="default").delete() |         NotificationRule.objects.filter(name__startswith="default").delete() | ||||||
|         trigger = NotificationRule.objects.create(name=generate_id(), destination_group=self.group) |         trigger = NotificationRule.objects.create(name=generate_id(), group=self.group) | ||||||
|         trigger.transports.add(transport) |         trigger.transports.add(transport) | ||||||
|         matcher = EventMatcherPolicy.objects.create( |         matcher = EventMatcherPolicy.objects.create( | ||||||
|             name="matcher", action=EventAction.CUSTOM_PREFIX |             name="matcher", action=EventAction.CUSTOM_PREFIX | ||||||
|  | |||||||
| @ -74,8 +74,8 @@ def model_to_dict(model: Model) -> dict[str, Any]: | |||||||
|     } |     } | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_user(user: User | AnonymousUser) -> dict[str, Any]: | def get_user(user: User | AnonymousUser, original_user: User | None = None) -> dict[str, Any]: | ||||||
|     """Convert user object to dictionary""" |     """Convert user object to dictionary, optionally including the original user""" | ||||||
|     if isinstance(user, AnonymousUser): |     if isinstance(user, AnonymousUser): | ||||||
|         try: |         try: | ||||||
|             user = get_anonymous_user() |             user = get_anonymous_user() | ||||||
| @ -88,6 +88,10 @@ def get_user(user: User | AnonymousUser) -> dict[str, Any]: | |||||||
|     } |     } | ||||||
|     if user.username == settings.ANONYMOUS_USER_NAME: |     if user.username == settings.ANONYMOUS_USER_NAME: | ||||||
|         user_data["is_anonymous"] = True |         user_data["is_anonymous"] = True | ||||||
|  |     if original_user: | ||||||
|  |         original_data = get_user(original_user) | ||||||
|  |         original_data["on_behalf_of"] = user_data | ||||||
|  |         return original_data | ||||||
|     return user_data |     return user_data | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
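With the extra `original_user` argument, `get_user` returns the impersonating user's data and nests the acting user under `on_behalf_of`. The dictionary below sketches the resulting shape; all values are made up for illustration:

    # Illustrative only: shape of the dict get_user() returns while an
    # impersonation session is active (values invented for this sketch).
    event_user = {
        "pk": 1,
        "username": "akadmin",            # the original (impersonating) user
        "email": "admin@example.com",
        "on_behalf_of": {
            "pk": 42,
            "username": "jane",           # the user being impersonated
            "email": "jane@example.com",
        },
    }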
| @ -1,18 +0,0 @@ | |||||||
| # Generated by Django 5.1.9 on 2025-05-27 12:52 |  | ||||||
|  |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_flows", "0027_auto_20231028_1424"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="flowtoken", |  | ||||||
|             name="revoke_on_execution", |  | ||||||
|             field=models.BooleanField(default=True), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -303,10 +303,9 @@ class FlowToken(Token): | |||||||
|  |  | ||||||
|     flow = models.ForeignKey(Flow, on_delete=models.CASCADE) |     flow = models.ForeignKey(Flow, on_delete=models.CASCADE) | ||||||
|     _plan = models.TextField() |     _plan = models.TextField() | ||||||
|     revoke_on_execution = models.BooleanField(default=True) |  | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     def pickle(plan: "FlowPlan") -> str: |     def pickle(plan) -> str: | ||||||
|         """Pickle into string""" |         """Pickle into string""" | ||||||
|         data = dumps(plan) |         data = dumps(plan) | ||||||
|         return b64encode(data).decode() |         return b64encode(data).decode() | ||||||
|  | |||||||
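`FlowToken.pickle` stores the serialized plan as base64 text; restoring it simply reverses the two steps. A minimal standalone sketch of the same round-trip with an arbitrary picklable object (not the actual `FlowPlan` class):

    from base64 import b64decode, b64encode
    from pickle import dumps, loads  # only ever applied to server-generated data

    def to_token_text(plan: object) -> str:
        """Pickle into a base64 string, as FlowToken.pickle does."""
        return b64encode(dumps(plan)).decode()

    def from_token_text(text: str) -> object:
        """Reverse of to_token_text."""
        return loads(b64decode(text))

    restored = from_token_text(to_token_text({"context": {"foo": "bar"}}))
    assert restored == {"context": {"foo": "bar"}}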
| @ -99,10 +99,9 @@ class ChallengeStageView(StageView): | |||||||
|             self.logger.debug("Got StageInvalidException", exc=exc) |             self.logger.debug("Got StageInvalidException", exc=exc) | ||||||
|             return self.executor.stage_invalid() |             return self.executor.stage_invalid() | ||||||
|         if not challenge.is_valid(): |         if not challenge.is_valid(): | ||||||
|             self.logger.error( |             self.logger.warning( | ||||||
|                 "f(ch): Invalid challenge", |                 "f(ch): Invalid challenge", | ||||||
|                 errors=challenge.errors, |                 errors=challenge.errors, | ||||||
|                 challenge=challenge.data, |  | ||||||
|             ) |             ) | ||||||
|         return HttpChallengeResponse(challenge) |         return HttpChallengeResponse(challenge) | ||||||
|  |  | ||||||
|  | |||||||
| @ -4,10 +4,8 @@ from unittest.mock import MagicMock, PropertyMock, patch | |||||||
| from urllib.parse import urlencode | from urllib.parse import urlencode | ||||||
|  |  | ||||||
| from django.http import HttpRequest, HttpResponse | from django.http import HttpRequest, HttpResponse | ||||||
| from django.test import override_settings |  | ||||||
| from django.test.client import RequestFactory | from django.test.client import RequestFactory | ||||||
| from django.urls import reverse | from django.urls import reverse | ||||||
| from rest_framework.exceptions import ParseError |  | ||||||
|  |  | ||||||
| from authentik.core.models import Group, User | from authentik.core.models import Group, User | ||||||
| from authentik.core.tests.utils import create_test_flow, create_test_user | from authentik.core.tests.utils import create_test_flow, create_test_user | ||||||
| @ -650,25 +648,3 @@ class TestFlowExecutor(FlowTestCase): | |||||||
|             self.assertStageResponse(response, flow, component="ak-stage-identification") |             self.assertStageResponse(response, flow, component="ak-stage-identification") | ||||||
|             response = self.client.post(exec_url, {"uid_field": user_other.username}, follow=True) |             response = self.client.post(exec_url, {"uid_field": user_other.username}, follow=True) | ||||||
|             self.assertStageResponse(response, flow, component="ak-stage-access-denied") |             self.assertStageResponse(response, flow, component="ak-stage-access-denied") | ||||||
|  |  | ||||||
|     @patch( |  | ||||||
|         "authentik.flows.views.executor.to_stage_response", |  | ||||||
|         TO_STAGE_RESPONSE_MOCK, |  | ||||||
|     ) |  | ||||||
|     def test_invalid_json(self): |  | ||||||
|         """Test invalid JSON body""" |  | ||||||
|         flow = create_test_flow() |  | ||||||
|         FlowStageBinding.objects.create( |  | ||||||
|             target=flow, stage=DummyStage.objects.create(name=generate_id()), order=0 |  | ||||||
|         ) |  | ||||||
|         url = reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}) |  | ||||||
|  |  | ||||||
|         with override_settings(TEST=False, DEBUG=False): |  | ||||||
|             self.client.logout() |  | ||||||
|             response = self.client.post(url, data="{", content_type="application/json") |  | ||||||
|             self.assertEqual(response.status_code, 200) |  | ||||||
|  |  | ||||||
|         with self.assertRaises(ParseError): |  | ||||||
|             self.client.logout() |  | ||||||
|             response = self.client.post(url, data="{", content_type="application/json") |  | ||||||
|             self.assertEqual(response.status_code, 200) |  | ||||||
|  | |||||||
| @ -55,7 +55,7 @@ from authentik.flows.planner import ( | |||||||
|     FlowPlanner, |     FlowPlanner, | ||||||
| ) | ) | ||||||
| from authentik.flows.stage import AccessDeniedStage, StageView | from authentik.flows.stage import AccessDeniedStage, StageView | ||||||
| from authentik.lib.sentry import SentryIgnoredException, should_ignore_exception | from authentik.lib.sentry import SentryIgnoredException | ||||||
| from authentik.lib.utils.errors import exception_to_string | from authentik.lib.utils.errors import exception_to_string | ||||||
| from authentik.lib.utils.reflection import all_subclasses, class_to_path | from authentik.lib.utils.reflection import all_subclasses, class_to_path | ||||||
| from authentik.lib.utils.urls import is_url_absolute, redirect_with_qs | from authentik.lib.utils.urls import is_url_absolute, redirect_with_qs | ||||||
| @ -146,8 +146,7 @@ class FlowExecutorView(APIView): | |||||||
|         except (AttributeError, EOFError, ImportError, IndexError) as exc: |         except (AttributeError, EOFError, ImportError, IndexError) as exc: | ||||||
|             LOGGER.warning("f(exec): Failed to restore token plan", exc=exc) |             LOGGER.warning("f(exec): Failed to restore token plan", exc=exc) | ||||||
|         finally: |         finally: | ||||||
|             if token.revoke_on_execution: |             token.delete() | ||||||
|                 token.delete() |  | ||||||
|         if not isinstance(plan, FlowPlan): |         if not isinstance(plan, FlowPlan): | ||||||
|             return None |             return None | ||||||
|         plan.context[PLAN_CONTEXT_IS_RESTORED] = token |         plan.context[PLAN_CONTEXT_IS_RESTORED] = token | ||||||
| @ -234,13 +233,12 @@ class FlowExecutorView(APIView): | |||||||
|         """Handle exception in stage execution""" |         """Handle exception in stage execution""" | ||||||
|         if settings.DEBUG or settings.TEST: |         if settings.DEBUG or settings.TEST: | ||||||
|             raise exc |             raise exc | ||||||
|  |         capture_exception(exc) | ||||||
|         self._logger.warning(exc) |         self._logger.warning(exc) | ||||||
|         if not should_ignore_exception(exc): |         Event.new( | ||||||
|             capture_exception(exc) |             action=EventAction.SYSTEM_EXCEPTION, | ||||||
|             Event.new( |             message=exception_to_string(exc), | ||||||
|                 action=EventAction.SYSTEM_EXCEPTION, |         ).from_http(self.request) | ||||||
|                 message=exception_to_string(exc), |  | ||||||
|             ).from_http(self.request) |  | ||||||
|         challenge = FlowErrorChallenge(self.request, exc) |         challenge = FlowErrorChallenge(self.request, exc) | ||||||
|         challenge.is_valid(raise_exception=True) |         challenge.is_valid(raise_exception=True) | ||||||
|         return to_stage_response(self.request, HttpChallengeResponse(challenge)) |         return to_stage_response(self.request, HttpChallengeResponse(challenge)) | ||||||
|  | |||||||
| @ -8,12 +8,12 @@ | |||||||
| # make gen-dev-config | # make gen-dev-config | ||||||
| # ``` | # ``` | ||||||
| # | # | ||||||
| # You may edit the generated file to override the configuration below. | # You may edit the generated file to override the configuration below.   | ||||||
| # | # | ||||||
| # When modifying the default configuration file, | # When modifying the default configuration file, | ||||||
| # ensure that the corresponding documentation is updated to match. | # ensure that the corresponding documentation is updated to match. | ||||||
| # | # | ||||||
| # @see {@link ../../docs/topics/install-config/configuration/configuration.mdx Configuration documentation} for more information. | # @see {@link ../../website/docs/install-config/configuration/configuration.mdx Configuration documentation} for more information. | ||||||
|  |  | ||||||
| postgresql: | postgresql: | ||||||
|   host: localhost |   host: localhost | ||||||
| @ -81,6 +81,7 @@ debugger: false | |||||||
|  |  | ||||||
| log_level: info | log_level: info | ||||||
|  |  | ||||||
|  | session_storage: cache | ||||||
| sessions: | sessions: | ||||||
|   unauthenticated_age: days=1 |   unauthenticated_age: days=1 | ||||||
|  |  | ||||||
|  | |||||||
| @ -14,7 +14,6 @@ from django_redis.exceptions import ConnectionInterrupted | |||||||
| from docker.errors import DockerException | from docker.errors import DockerException | ||||||
| from h11 import LocalProtocolError | from h11 import LocalProtocolError | ||||||
| from ldap3.core.exceptions import LDAPException | from ldap3.core.exceptions import LDAPException | ||||||
| from psycopg.errors import Error |  | ||||||
| from redis.exceptions import ConnectionError as RedisConnectionError | from redis.exceptions import ConnectionError as RedisConnectionError | ||||||
| from redis.exceptions import RedisError, ResponseError | from redis.exceptions import RedisError, ResponseError | ||||||
| from rest_framework.exceptions import APIException | from rest_framework.exceptions import APIException | ||||||
| @ -45,49 +44,6 @@ class SentryIgnoredException(Exception): | |||||||
|     """Base Class for all errors that are suppressed, and not sent to sentry.""" |     """Base Class for all errors that are suppressed, and not sent to sentry.""" | ||||||
|  |  | ||||||
|  |  | ||||||
| ignored_classes = ( |  | ||||||
|     # Inbuilt types |  | ||||||
|     KeyboardInterrupt, |  | ||||||
|     ConnectionResetError, |  | ||||||
|     OSError, |  | ||||||
|     PermissionError, |  | ||||||
|     # Django Errors |  | ||||||
|     Error, |  | ||||||
|     ImproperlyConfigured, |  | ||||||
|     DatabaseError, |  | ||||||
|     OperationalError, |  | ||||||
|     InternalError, |  | ||||||
|     ProgrammingError, |  | ||||||
|     SuspiciousOperation, |  | ||||||
|     ValidationError, |  | ||||||
|     # Redis errors |  | ||||||
|     RedisConnectionError, |  | ||||||
|     ConnectionInterrupted, |  | ||||||
|     RedisError, |  | ||||||
|     ResponseError, |  | ||||||
|     # websocket errors |  | ||||||
|     ChannelFull, |  | ||||||
|     WebSocketException, |  | ||||||
|     LocalProtocolError, |  | ||||||
|     # rest_framework error |  | ||||||
|     APIException, |  | ||||||
|     # celery errors |  | ||||||
|     WorkerLostError, |  | ||||||
|     CeleryError, |  | ||||||
|     SoftTimeLimitExceeded, |  | ||||||
|     # custom baseclass |  | ||||||
|     SentryIgnoredException, |  | ||||||
|     # ldap errors |  | ||||||
|     LDAPException, |  | ||||||
|     # Docker errors |  | ||||||
|     DockerException, |  | ||||||
|     # End-user errors |  | ||||||
|     Http404, |  | ||||||
|     # AsyncIO |  | ||||||
|     CancelledError, |  | ||||||
| ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class SentryTransport(HttpTransport): | class SentryTransport(HttpTransport): | ||||||
|     """Custom sentry transport with custom user-agent""" |     """Custom sentry transport with custom user-agent""" | ||||||
|  |  | ||||||
| @ -145,17 +101,56 @@ def traces_sampler(sampling_context: dict) -> float: | |||||||
|     return float(CONFIG.get("error_reporting.sample_rate", 0.1)) |     return float(CONFIG.get("error_reporting.sample_rate", 0.1)) | ||||||
|  |  | ||||||
|  |  | ||||||
| def should_ignore_exception(exc: Exception) -> bool: |  | ||||||
|     """Check if an exception should be dropped""" |  | ||||||
|     return isinstance(exc, ignored_classes) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def before_send(event: dict, hint: dict) -> dict | None: | def before_send(event: dict, hint: dict) -> dict | None: | ||||||
|     """Check if error is database error, and ignore if so""" |     """Check if error is database error, and ignore if so""" | ||||||
|  |  | ||||||
|  |     from psycopg.errors import Error | ||||||
|  |  | ||||||
|  |     ignored_classes = ( | ||||||
|  |         # Inbuilt types | ||||||
|  |         KeyboardInterrupt, | ||||||
|  |         ConnectionResetError, | ||||||
|  |         OSError, | ||||||
|  |         PermissionError, | ||||||
|  |         # Django Errors | ||||||
|  |         Error, | ||||||
|  |         ImproperlyConfigured, | ||||||
|  |         DatabaseError, | ||||||
|  |         OperationalError, | ||||||
|  |         InternalError, | ||||||
|  |         ProgrammingError, | ||||||
|  |         SuspiciousOperation, | ||||||
|  |         ValidationError, | ||||||
|  |         # Redis errors | ||||||
|  |         RedisConnectionError, | ||||||
|  |         ConnectionInterrupted, | ||||||
|  |         RedisError, | ||||||
|  |         ResponseError, | ||||||
|  |         # websocket errors | ||||||
|  |         ChannelFull, | ||||||
|  |         WebSocketException, | ||||||
|  |         LocalProtocolError, | ||||||
|  |         # rest_framework error | ||||||
|  |         APIException, | ||||||
|  |         # celery errors | ||||||
|  |         WorkerLostError, | ||||||
|  |         CeleryError, | ||||||
|  |         SoftTimeLimitExceeded, | ||||||
|  |         # custom baseclass | ||||||
|  |         SentryIgnoredException, | ||||||
|  |         # ldap errors | ||||||
|  |         LDAPException, | ||||||
|  |         # Docker errors | ||||||
|  |         DockerException, | ||||||
|  |         # End-user errors | ||||||
|  |         Http404, | ||||||
|  |         # AsyncIO | ||||||
|  |         CancelledError, | ||||||
|  |     ) | ||||||
|     exc_value = None |     exc_value = None | ||||||
|     if "exc_info" in hint: |     if "exc_info" in hint: | ||||||
|         _, exc_value, _ = hint["exc_info"] |         _, exc_value, _ = hint["exc_info"] | ||||||
|         if should_ignore_exception(exc_value): |         if isinstance(exc_value, ignored_classes): | ||||||
|             LOGGER.debug("dropping exception", exc=exc_value) |             LOGGER.debug("dropping exception", exc=exc_value) | ||||||
|             return None |             return None | ||||||
|     if "logger" in event: |     if "logger" in event: | ||||||
|  | |||||||
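The left-hand side of the sentry.py hunk above factors the ignore list out of `before_send` into a module-level tuple plus a `should_ignore_exception()` helper, so other callers (such as the flow executor's `handle_exception` above) can reuse the same check. A minimal sketch of that pattern, with the class list trimmed for brevity (the real module lists many more Redis, Celery, LDAP, and Docker exception types):

```python
# Minimal sketch of the helper-based pattern from the left column of the diff.
ignored_classes = (
    KeyboardInterrupt,
    ConnectionResetError,
    OSError,
    PermissionError,
)


def should_ignore_exception(exc: Exception) -> bool:
    """Check if an exception should be dropped instead of reported to Sentry."""
    return isinstance(exc, ignored_classes)


def before_send(event: dict, hint: dict) -> dict | None:
    """Drop events whose exception type is in the ignore list."""
    if "exc_info" in hint:
        _, exc_value, _ = hint["exc_info"]
        if should_ignore_exception(exc_value):
            return None
    return event
```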
| @ -1,7 +1,6 @@ | |||||||
| from collections.abc import Callable | from collections.abc import Callable | ||||||
| from dataclasses import asdict | from dataclasses import asdict | ||||||
|  |  | ||||||
| from celery import group |  | ||||||
| from celery.exceptions import Retry | from celery.exceptions import Retry | ||||||
| from celery.result import allow_join_result | from celery.result import allow_join_result | ||||||
| from django.core.paginator import Paginator | from django.core.paginator import Paginator | ||||||
| @ -83,41 +82,21 @@ class SyncTasks: | |||||||
|                 self.logger.debug("Failed to acquire sync lock, skipping", provider=provider.name) |                 self.logger.debug("Failed to acquire sync lock, skipping", provider=provider.name) | ||||||
|                 return |                 return | ||||||
|             try: |             try: | ||||||
|                 messages.append(_("Syncing users")) |                 for page in users_paginator.page_range: | ||||||
|                 user_results = ( |                     messages.append(_("Syncing page {page} of users".format(page=page))) | ||||||
|                     group( |                     for msg in sync_objects.apply_async( | ||||||
|                         [ |                         args=(class_to_path(User), page, provider_pk), | ||||||
|                             sync_objects.signature( |                         time_limit=PAGE_TIMEOUT, | ||||||
|                                 args=(class_to_path(User), page, provider_pk), |                         soft_time_limit=PAGE_TIMEOUT, | ||||||
|                                 time_limit=PAGE_TIMEOUT, |                     ).get(): | ||||||
|                                 soft_time_limit=PAGE_TIMEOUT, |  | ||||||
|                             ) |  | ||||||
|                             for page in users_paginator.page_range |  | ||||||
|                         ] |  | ||||||
|                     ) |  | ||||||
|                     .apply_async() |  | ||||||
|                     .get() |  | ||||||
|                 ) |  | ||||||
|                 for result in user_results: |  | ||||||
|                     for msg in result: |  | ||||||
|                         messages.append(LogEvent(**msg)) |                         messages.append(LogEvent(**msg)) | ||||||
|                 messages.append(_("Syncing groups")) |                 for page in groups_paginator.page_range: | ||||||
|                 group_results = ( |                     messages.append(_("Syncing page {page} of groups".format(page=page))) | ||||||
|                     group( |                     for msg in sync_objects.apply_async( | ||||||
|                         [ |                         args=(class_to_path(Group), page, provider_pk), | ||||||
|                             sync_objects.signature( |                         time_limit=PAGE_TIMEOUT, | ||||||
|                                 args=(class_to_path(Group), page, provider_pk), |                         soft_time_limit=PAGE_TIMEOUT, | ||||||
|                                 time_limit=PAGE_TIMEOUT, |                     ).get(): | ||||||
|                                 soft_time_limit=PAGE_TIMEOUT, |  | ||||||
|                             ) |  | ||||||
|                             for page in groups_paginator.page_range |  | ||||||
|                         ] |  | ||||||
|                     ) |  | ||||||
|                     .apply_async() |  | ||||||
|                     .get() |  | ||||||
|                 ) |  | ||||||
|                 for result in group_results: |  | ||||||
|                     for msg in result: |  | ||||||
|                         messages.append(LogEvent(**msg)) |                         messages.append(LogEvent(**msg)) | ||||||
|             except TransientSyncException as exc: |             except TransientSyncException as exc: | ||||||
|                 self.logger.warning("transient sync exception", exc=exc) |                 self.logger.warning("transient sync exception", exc=exc) | ||||||
| @ -130,7 +109,7 @@ class SyncTasks: | |||||||
|     def sync_objects( |     def sync_objects( | ||||||
|         self, object_type: str, page: int, provider_pk: int, override_dry_run=False, **filter |         self, object_type: str, page: int, provider_pk: int, override_dry_run=False, **filter | ||||||
|     ): |     ): | ||||||
|         _object_type: type[Model] = path_to_class(object_type) |         _object_type = path_to_class(object_type) | ||||||
|         self.logger = get_logger().bind( |         self.logger = get_logger().bind( | ||||||
|             provider_type=class_to_path(self._provider_model), |             provider_type=class_to_path(self._provider_model), | ||||||
|             provider_pk=provider_pk, |             provider_pk=provider_pk, | ||||||
| @ -153,19 +132,6 @@ class SyncTasks: | |||||||
|             self.logger.debug("starting discover") |             self.logger.debug("starting discover") | ||||||
|             client.discover() |             client.discover() | ||||||
|         self.logger.debug("starting sync for page", page=page) |         self.logger.debug("starting sync for page", page=page) | ||||||
|         messages.append( |  | ||||||
|             asdict( |  | ||||||
|                 LogEvent( |  | ||||||
|                     _( |  | ||||||
|                         "Syncing page {page} of {object_type}".format( |  | ||||||
|                             page=page, object_type=_object_type._meta.verbose_name_plural |  | ||||||
|                         ) |  | ||||||
|                     ), |  | ||||||
|                     log_level="info", |  | ||||||
|                     logger=f"{provider._meta.verbose_name}@{object_type}", |  | ||||||
|                 ) |  | ||||||
|             ) |  | ||||||
|         ) |  | ||||||
|         for obj in paginator.page(page).object_list: |         for obj in paginator.page(page).object_list: | ||||||
|             obj: Model |             obj: Model | ||||||
|             try: |             try: | ||||||
|  | |||||||
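In the SyncTasks hunk above, the left-hand version replaces the sequential per-page `sync_objects.apply_async(...).get()` loop with a Celery `group` that fans out one signature per paginator page and collects all page results at once. A stripped-down sketch of that fan-out, assuming an existing `sync_objects` task (in the real task the `.get()` call runs inside `allow_join_result()`):

```python
from celery import group


def fan_out_pages(sync_objects, object_path: str, pages, provider_pk: int, timeout: int):
    """Hypothetical helper mirroring the left-hand fan-out: one signature per
    page, dispatched as a single group, results flattened into one list."""
    results = (
        group(
            [
                sync_objects.signature(
                    args=(object_path, page, provider_pk),
                    time_limit=timeout,
                    soft_time_limit=timeout,
                )
                for page in pages
            ]
        )
        .apply_async()
        .get()
    )
    return [msg for page_result in results for msg in page_result]
```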
| @ -2,7 +2,7 @@ | |||||||
|  |  | ||||||
| from django.test import TestCase | from django.test import TestCase | ||||||
|  |  | ||||||
| from authentik.lib.sentry import SentryIgnoredException, should_ignore_exception | from authentik.lib.sentry import SentryIgnoredException, before_send | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestSentry(TestCase): | class TestSentry(TestCase): | ||||||
| @ -10,8 +10,8 @@ class TestSentry(TestCase): | |||||||
|  |  | ||||||
|     def test_error_not_sent(self): |     def test_error_not_sent(self): | ||||||
|         """Test SentryIgnoredError not sent""" |         """Test SentryIgnoredError not sent""" | ||||||
|         self.assertTrue(should_ignore_exception(SentryIgnoredException())) |         self.assertIsNone(before_send({}, {"exc_info": (0, SentryIgnoredException(), 0)})) | ||||||
|  |  | ||||||
|     def test_error_sent(self): |     def test_error_sent(self): | ||||||
|         """Test error sent""" |         """Test error sent""" | ||||||
|         self.assertFalse(should_ignore_exception(ValueError())) |         self.assertEqual({}, before_send({}, {"exc_info": (0, ValueError(), 0)})) | ||||||
|  | |||||||
| @ -37,9 +37,6 @@ class WebsocketMessageInstruction(IntEnum): | |||||||
|     # Provider specific message |     # Provider specific message | ||||||
|     PROVIDER_SPECIFIC = 3 |     PROVIDER_SPECIFIC = 3 | ||||||
|  |  | ||||||
|     # Session ended |  | ||||||
|     SESSION_END = 4 |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass(slots=True) | @dataclass(slots=True) | ||||||
| class WebsocketMessage: | class WebsocketMessage: | ||||||
| @ -148,14 +145,6 @@ class OutpostConsumer(JsonWebsocketConsumer): | |||||||
|             asdict(WebsocketMessage(instruction=WebsocketMessageInstruction.TRIGGER_UPDATE)) |             asdict(WebsocketMessage(instruction=WebsocketMessageInstruction.TRIGGER_UPDATE)) | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     def event_session_end(self, event): |  | ||||||
|         """Event handler which is called when a session is ended""" |  | ||||||
|         self.send_json( |  | ||||||
|             asdict( |  | ||||||
|                 WebsocketMessage(instruction=WebsocketMessageInstruction.SESSION_END, args=event) |  | ||||||
|             ) |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def event_provider_specific(self, event): |     def event_provider_specific(self, event): | ||||||
|         """Event handler which can be called by provider-specific |         """Event handler which can be called by provider-specific | ||||||
|         implementations to send specific messages to the outpost""" |         implementations to send specific messages to the outpost""" | ||||||
|  | |||||||
| @ -7,16 +7,11 @@ from django.dispatch import receiver | |||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| from authentik.brands.models import Brand | from authentik.brands.models import Brand | ||||||
| from authentik.core.models import AuthenticatedSession, Provider | from authentik.core.models import Provider | ||||||
| from authentik.crypto.models import CertificateKeyPair | from authentik.crypto.models import CertificateKeyPair | ||||||
| from authentik.lib.utils.reflection import class_to_path | from authentik.lib.utils.reflection import class_to_path | ||||||
| from authentik.outposts.models import Outpost, OutpostServiceConnection | from authentik.outposts.models import Outpost, OutpostServiceConnection | ||||||
| from authentik.outposts.tasks import ( | from authentik.outposts.tasks import CACHE_KEY_OUTPOST_DOWN, outpost_controller, outpost_post_save | ||||||
|     CACHE_KEY_OUTPOST_DOWN, |  | ||||||
|     outpost_controller, |  | ||||||
|     outpost_post_save, |  | ||||||
|     outpost_session_end, |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| LOGGER = get_logger() | LOGGER = get_logger() | ||||||
| UPDATE_TRIGGERING_MODELS = ( | UPDATE_TRIGGERING_MODELS = ( | ||||||
| @ -78,9 +73,3 @@ def pre_delete_cleanup(sender, instance: Outpost, **_): | |||||||
|     instance.user.delete() |     instance.user.delete() | ||||||
|     cache.set(CACHE_KEY_OUTPOST_DOWN % instance.pk.hex, instance) |     cache.set(CACHE_KEY_OUTPOST_DOWN % instance.pk.hex, instance) | ||||||
|     outpost_controller.delay(instance.pk.hex, action="down", from_cache=True) |     outpost_controller.delay(instance.pk.hex, action="down", from_cache=True) | ||||||
|  |  | ||||||
|  |  | ||||||
| @receiver(pre_delete, sender=AuthenticatedSession) |  | ||||||
| def logout_revoke(sender: type[AuthenticatedSession], instance: AuthenticatedSession, **_): |  | ||||||
|     """Catch logout by expiring sessions being deleted""" |  | ||||||
|     outpost_session_end.delay(instance.session.session_key) |  | ||||||
|  | |||||||
Some files were not shown because too many files have changed in this diff.