Compare commits
	
		
			1 Commits
		
	
	
		
			celery-2-d
			...
			permission
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| ff787a0f59 | 
| @ -1,5 +1,5 @@ | ||||
| [bumpversion] | ||||
| current_version = 2025.6.3 | ||||
| current_version = 2025.2.1 | ||||
| tag = True | ||||
| commit = True | ||||
| parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? | ||||
| @ -17,12 +17,8 @@ optional_value = final | ||||
|  | ||||
| [bumpversion:file:pyproject.toml] | ||||
|  | ||||
| [bumpversion:file:uv.lock] | ||||
|  | ||||
| [bumpversion:file:package.json] | ||||
|  | ||||
| [bumpversion:file:package-lock.json] | ||||
|  | ||||
| [bumpversion:file:docker-compose.yml] | ||||
|  | ||||
| [bumpversion:file:schema.yml] | ||||
| @ -33,4 +29,6 @@ optional_value = final | ||||
|  | ||||
| [bumpversion:file:internal/constants/constants.go] | ||||
|  | ||||
| [bumpversion:file:web/src/common/constants.ts] | ||||
|  | ||||
| [bumpversion:file:lifecycle/aws/template.yaml] | ||||
|  | ||||
| @ -5,10 +5,8 @@ dist/** | ||||
| build/** | ||||
| build_docs/** | ||||
| *Dockerfile | ||||
| **/*Dockerfile | ||||
| blueprints/local | ||||
| .git | ||||
| !gen-ts-api/node_modules | ||||
| !gen-ts-api/dist/** | ||||
| !gen-go-api/ | ||||
| .venv | ||||
|  | ||||
| @ -7,9 +7,6 @@ charset = utf-8 | ||||
| trim_trailing_whitespace = true | ||||
| insert_final_newline = true | ||||
|  | ||||
| [*.toml] | ||||
| indent_size = 2 | ||||
|  | ||||
| [*.html] | ||||
| indent_size = 2 | ||||
|  | ||||
|  | ||||
							
								
								
									
										22
									
								
								.github/ISSUE_TEMPLATE/docs_issue.md
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										22
									
								
								.github/ISSUE_TEMPLATE/docs_issue.md
									
									
									
									
										vendored
									
									
								
							| @ -1,22 +0,0 @@ | ||||
| --- | ||||
| name: Documentation issue | ||||
| about: Suggest an improvement or report a problem | ||||
| title: "" | ||||
| labels: documentation | ||||
| assignees: "" | ||||
| --- | ||||
|  | ||||
| **Do you see an area that can be clarified or expanded, a technical inaccuracy, or a broken link? Please describe.** | ||||
| A clear and concise description of what the problem is, or where the document can be improved. Ex. I believe we need more details about [...] | ||||
|  | ||||
| **Provide the URL or link to the exact page in the documentation to which you are referring.** | ||||
| If there are multiple pages, list them all, and be sure to state the header or section where the content is. | ||||
|  | ||||
| **Describe the solution you'd like** | ||||
| A clear and concise description of what you want to happen. | ||||
|  | ||||
| **Additional context** | ||||
| Add any other context or screenshots about the documentation issue here. | ||||
|  | ||||
| **Consider opening a PR!** | ||||
| If the issue is one that you can fix, or even make a good pass at, we'd appreciate a PR. For more information about making a contribution to the docs, and using our Style Guide and our templates, refer to ["Writing documentation"](https://docs.goauthentik.io/docs/developer-docs/docs/writing-documentation). | ||||
| @ -44,6 +44,7 @@ if is_release: | ||||
|         ] | ||||
|         if not prerelease: | ||||
|             image_tags += [ | ||||
|                 f"{name}:latest", | ||||
|                 f"{name}:{version_family}", | ||||
|             ] | ||||
| else: | ||||
|  | ||||
							
								
								
									
										18
									
								
								.github/actions/setup/action.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										18
									
								
								.github/actions/setup/action.yml
									
									
									
									
										vendored
									
									
								
							| @ -9,22 +9,17 @@ inputs: | ||||
| runs: | ||||
|   using: "composite" | ||||
|   steps: | ||||
|     - name: Install apt deps | ||||
|     - name: Install poetry & deps | ||||
|       shell: bash | ||||
|       run: | | ||||
|         pipx install poetry || true | ||||
|         sudo apt-get update | ||||
|         sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext libkrb5-dev krb5-kdc krb5-user krb5-admin-server | ||||
|     - name: Install uv | ||||
|       uses: astral-sh/setup-uv@v5 | ||||
|       with: | ||||
|         enable-cache: true | ||||
|     - name: Setup python | ||||
|     - name: Setup python and restore poetry | ||||
|       uses: actions/setup-python@v5 | ||||
|       with: | ||||
|         python-version-file: "pyproject.toml" | ||||
|     - name: Install Python deps | ||||
|       shell: bash | ||||
|       run: uv sync --all-extras --dev --frozen | ||||
|         cache: "poetry" | ||||
|     - name: Setup node | ||||
|       uses: actions/setup-node@v4 | ||||
|       with: | ||||
| @ -36,7 +31,7 @@ runs: | ||||
|       with: | ||||
|         go-version-file: "go.mod" | ||||
|     - name: Setup docker cache | ||||
|       uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7 | ||||
|       uses: ScribeMD/docker-cache@0.5.0 | ||||
|       with: | ||||
|         key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }} | ||||
|     - name: Setup dependencies | ||||
| @ -44,9 +39,10 @@ runs: | ||||
|       run: | | ||||
|         export PSQL_TAG=${{ inputs.postgresql_version }} | ||||
|         docker compose -f .github/actions/setup/docker-compose.yml up -d | ||||
|         poetry sync | ||||
|         cd web && npm ci | ||||
|     - name: Generate config | ||||
|       shell: uv run python {0} | ||||
|       shell: poetry run python {0} | ||||
|       run: | | ||||
|         from authentik.lib.generators import generate_id | ||||
|         from yaml import safe_dump | ||||
|  | ||||
							
								
								
									
										35
									
								
								.github/dependabot.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										35
									
								
								.github/dependabot.yml
									
									
									
									
										vendored
									
									
								
							| @ -23,13 +23,7 @@ updates: | ||||
|   - package-ecosystem: npm | ||||
|     directories: | ||||
|       - "/web" | ||||
|       - "/web/packages/sfe" | ||||
|       - "/web/packages/core" | ||||
|       - "/web/packages/esbuild-plugin-live-reload" | ||||
|       - "/packages/prettier-config" | ||||
|       - "/packages/tsconfig" | ||||
|       - "/packages/docusaurus-config" | ||||
|       - "/packages/eslint-config" | ||||
|       - "/web/sfe" | ||||
|     schedule: | ||||
|       interval: daily | ||||
|       time: "04:00" | ||||
| @ -74,9 +68,6 @@ updates: | ||||
|       wdio: | ||||
|         patterns: | ||||
|           - "@wdio/*" | ||||
|       goauthentik: | ||||
|         patterns: | ||||
|           - "@goauthentik/*" | ||||
|   - package-ecosystem: npm | ||||
|     directory: "/website" | ||||
|     schedule: | ||||
| @ -97,16 +88,6 @@ updates: | ||||
|           - "swc-*" | ||||
|           - "lightningcss*" | ||||
|           - "@rspack/binding*" | ||||
|       goauthentik: | ||||
|         patterns: | ||||
|           - "@goauthentik/*" | ||||
|       eslint: | ||||
|         patterns: | ||||
|           - "@eslint/*" | ||||
|           - "@typescript-eslint/*" | ||||
|           - "eslint-*" | ||||
|           - "eslint" | ||||
|           - "typescript-eslint" | ||||
|   - package-ecosystem: npm | ||||
|     directory: "/lifecycle/aws" | ||||
|     schedule: | ||||
| @ -117,7 +98,7 @@ updates: | ||||
|       prefix: "lifecycle/aws:" | ||||
|     labels: | ||||
|       - dependencies | ||||
|   - package-ecosystem: uv | ||||
|   - package-ecosystem: pip | ||||
|     directory: "/" | ||||
|     schedule: | ||||
|       interval: daily | ||||
| @ -137,15 +118,3 @@ updates: | ||||
|       prefix: "core:" | ||||
|     labels: | ||||
|       - dependencies | ||||
|   - package-ecosystem: docker-compose | ||||
|     directories: | ||||
|       # - /scripts # Maybe | ||||
|       - /tests/e2e | ||||
|     schedule: | ||||
|       interval: daily | ||||
|       time: "04:00" | ||||
|     open-pull-requests-limit: 10 | ||||
|     commit-message: | ||||
|       prefix: "core:" | ||||
|     labels: | ||||
|       - dependencies | ||||
|  | ||||
| @ -38,8 +38,6 @@ jobs: | ||||
|       # Needed for attestation | ||||
|       id-token: write | ||||
|       attestations: write | ||||
|       # Needed for checkout | ||||
|       contents: read | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: docker/setup-qemu-action@v3.6.0 | ||||
|  | ||||
							
								
								
									
										1
									
								
								.github/workflows/api-py-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.github/workflows/api-py-publish.yml
									
									
									
									
										vendored
									
									
								
							| @ -30,6 +30,7 @@ jobs: | ||||
|         uses: actions/setup-python@v5 | ||||
|         with: | ||||
|           python-version-file: "pyproject.toml" | ||||
|           cache: "poetry" | ||||
|       - name: Generate API Client | ||||
|         run: make gen-client-py | ||||
|       - name: Publish package | ||||
|  | ||||
							
								
								
									
										1
									
								
								.github/workflows/api-ts-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.github/workflows/api-ts-publish.yml
									
									
									
									
										vendored
									
									
								
							| @ -53,7 +53,6 @@ jobs: | ||||
|           signoff: true | ||||
|           # ID from https://api.github.com/users/authentik-automation[bot] | ||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> | ||||
|           labels: dependencies | ||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||
|         with: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
|  | ||||
							
								
								
									
										2
									
								
								.github/workflows/ci-aws-cfn.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/workflows/ci-aws-cfn.yml
									
									
									
									
										vendored
									
									
								
							| @ -33,7 +33,7 @@ jobs: | ||||
|           npm ci | ||||
|       - name: Check changes have been applied | ||||
|         run: | | ||||
|           uv run make aws-cfn | ||||
|           poetry run make aws-cfn | ||||
|           git diff --exit-code | ||||
|   ci-aws-cfn-mark: | ||||
|     if: always() | ||||
|  | ||||
							
								
								
									
										3
									
								
								.github/workflows/ci-main-daily.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										3
									
								
								.github/workflows/ci-main-daily.yml
									
									
									
									
										vendored
									
									
								
							| @ -9,15 +9,14 @@ on: | ||||
|  | ||||
| jobs: | ||||
|   test-container: | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     runs-on: ubuntu-latest | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         version: | ||||
|           - docs | ||||
|           - version-2025-4 | ||||
|           - version-2025-2 | ||||
|           - version-2024-12 | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - run: | | ||||
|  | ||||
							
								
								
									
										33
									
								
								.github/workflows/ci-main.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										33
									
								
								.github/workflows/ci-main.yml
									
									
									
									
										vendored
									
									
								
							| @ -34,7 +34,7 @@ jobs: | ||||
|       - name: Setup authentik env | ||||
|         uses: ./.github/actions/setup | ||||
|       - name: run job | ||||
|         run: uv run make ci-${{ matrix.job }} | ||||
|         run: poetry run make ci-${{ matrix.job }} | ||||
|   test-migrations: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
| @ -42,7 +42,7 @@ jobs: | ||||
|       - name: Setup authentik env | ||||
|         uses: ./.github/actions/setup | ||||
|       - name: run migrations | ||||
|         run: uv run python -m lifecycle.migrate | ||||
|         run: poetry run python -m lifecycle.migrate | ||||
|   test-make-seed: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
| @ -62,7 +62,6 @@ jobs: | ||||
|         psql: | ||||
|           - 15-alpine | ||||
|           - 16-alpine | ||||
|           - 17-alpine | ||||
|         run_id: [1, 2, 3, 4, 5] | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
| @ -70,6 +69,8 @@ jobs: | ||||
|           fetch-depth: 0 | ||||
|       - name: checkout stable | ||||
|         run: | | ||||
|           # Delete all poetry envs | ||||
|           rm -rf /home/runner/.cache/pypoetry | ||||
|           # Copy current, latest config to local | ||||
|           cp authentik/lib/default.yml local.env.yml | ||||
|           cp -R .github .. | ||||
| @ -82,7 +83,7 @@ jobs: | ||||
|         with: | ||||
|           postgresql_version: ${{ matrix.psql }} | ||||
|       - name: run migrations to stable | ||||
|         run: uv run python -m lifecycle.migrate | ||||
|         run: poetry run python -m lifecycle.migrate | ||||
|       - name: checkout current code | ||||
|         run: | | ||||
|           set -x | ||||
| @ -90,13 +91,15 @@ jobs: | ||||
|           git reset --hard HEAD | ||||
|           git clean -d -fx . | ||||
|           git checkout $GITHUB_SHA | ||||
|           # Delete previous poetry env | ||||
|           rm -rf /home/runner/.cache/pypoetry/virtualenvs/* | ||||
|       - name: Setup authentik env (ensure latest deps are installed) | ||||
|         uses: ./.github/actions/setup | ||||
|         with: | ||||
|           postgresql_version: ${{ matrix.psql }} | ||||
|       - name: migrate to latest | ||||
|         run: | | ||||
|           uv run python -m lifecycle.migrate | ||||
|           poetry run python -m lifecycle.migrate | ||||
|       - name: run tests | ||||
|         env: | ||||
|           # Test in the main database that we just migrated from the previous stable version | ||||
| @ -105,7 +108,7 @@ jobs: | ||||
|           CI_RUN_ID: ${{ matrix.run_id }} | ||||
|           CI_TOTAL_RUNS: "5" | ||||
|         run: | | ||||
|           uv run make ci-test | ||||
|           poetry run make ci-test | ||||
|   test-unittest: | ||||
|     name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5 | ||||
|     runs-on: ubuntu-latest | ||||
| @ -117,7 +120,6 @@ jobs: | ||||
|         psql: | ||||
|           - 15-alpine | ||||
|           - 16-alpine | ||||
|           - 17-alpine | ||||
|         run_id: [1, 2, 3, 4, 5] | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
| @ -131,7 +133,7 @@ jobs: | ||||
|           CI_RUN_ID: ${{ matrix.run_id }} | ||||
|           CI_TOTAL_RUNS: "5" | ||||
|         run: | | ||||
|           uv run make ci-test | ||||
|           poetry run make ci-test | ||||
|       - if: ${{ always() }} | ||||
|         uses: codecov/codecov-action@v5 | ||||
|         with: | ||||
| @ -154,8 +156,8 @@ jobs: | ||||
|         uses: helm/kind-action@v1.12.0 | ||||
|       - name: run integration | ||||
|         run: | | ||||
|           uv run coverage run manage.py test tests/integration | ||||
|           uv run coverage xml | ||||
|           poetry run coverage run manage.py test tests/integration | ||||
|           poetry run coverage xml | ||||
|       - if: ${{ always() }} | ||||
|         uses: codecov/codecov-action@v5 | ||||
|         with: | ||||
| @ -202,7 +204,7 @@ jobs: | ||||
|         uses: actions/cache@v4 | ||||
|         with: | ||||
|           path: web/dist | ||||
|           key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b | ||||
|           key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }} | ||||
|       - name: prepare web ui | ||||
|         if: steps.cache-web.outputs.cache-hit != 'true' | ||||
|         working-directory: web | ||||
| @ -210,11 +212,10 @@ jobs: | ||||
|           npm ci | ||||
|           make -C .. gen-client-ts | ||||
|           npm run build | ||||
|           npm run build:sfe | ||||
|       - name: run e2e | ||||
|         run: | | ||||
|           uv run coverage run manage.py test ${{ matrix.job.glob }} | ||||
|           uv run coverage xml | ||||
|           poetry run coverage run manage.py test ${{ matrix.job.glob }} | ||||
|           poetry run coverage xml | ||||
|       - if: ${{ always() }} | ||||
|         uses: codecov/codecov-action@v5 | ||||
|         with: | ||||
| @ -247,13 +248,11 @@ jobs: | ||||
|       # Needed for attestation | ||||
|       id-token: write | ||||
|       attestations: write | ||||
|       # Needed for checkout | ||||
|       contents: read | ||||
|     needs: ci-core-mark | ||||
|     uses: ./.github/workflows/_reusable-docker-build.yaml | ||||
|     secrets: inherit | ||||
|     with: | ||||
|       image_name: ${{ github.repository == 'goauthentik/authentik-internal' && 'ghcr.io/goauthentik/internal-server' || 'ghcr.io/goauthentik/dev-server' }} | ||||
|       image_name: ghcr.io/goauthentik/dev-server | ||||
|       release: false | ||||
|   pr-comment: | ||||
|     needs: | ||||
|  | ||||
							
								
								
									
										3
									
								
								.github/workflows/ci-outpost.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										3
									
								
								.github/workflows/ci-outpost.yml
									
									
									
									
										vendored
									
									
								
							| @ -29,7 +29,7 @@ jobs: | ||||
|       - name: Generate API | ||||
|         run: make gen-client-go | ||||
|       - name: golangci-lint | ||||
|         uses: golangci/golangci-lint-action@v8 | ||||
|         uses: golangci/golangci-lint-action@v6 | ||||
|         with: | ||||
|           version: latest | ||||
|           args: --timeout 5000s --verbose | ||||
| @ -59,7 +59,6 @@ jobs: | ||||
|         with: | ||||
|           jobs: ${{ toJSON(needs) }} | ||||
|   build-container: | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     timeout-minutes: 120 | ||||
|     needs: | ||||
|       - ci-outpost-mark | ||||
|  | ||||
							
								
								
									
										52
									
								
								.github/workflows/ci-website.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										52
									
								
								.github/workflows/ci-website.yml
									
									
									
									
										vendored
									
									
								
							| @ -49,7 +49,6 @@ jobs: | ||||
|       matrix: | ||||
|         job: | ||||
|           - build | ||||
|           - build:integrations | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: actions/setup-node@v4 | ||||
| @ -62,65 +61,14 @@ jobs: | ||||
|       - name: build | ||||
|         working-directory: website/ | ||||
|         run: npm run ${{ matrix.job }} | ||||
|   build-container: | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
|       # Needed to upload container images to ghcr.io | ||||
|       packages: write | ||||
|       # Needed for attestation | ||||
|       id-token: write | ||||
|       attestations: write | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           ref: ${{ github.event.pull_request.head.sha }} | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.6.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/dev-docs | ||||
|       - name: Login to Container Registry | ||||
|         if: ${{ steps.ev.outputs.shouldPush == 'true' }} | ||||
|         uses: docker/login-action@v3 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
|           password: ${{ secrets.GITHUB_TOKEN }} | ||||
|       - name: Build Docker Image | ||||
|         id: push | ||||
|         uses: docker/build-push-action@v6 | ||||
|         with: | ||||
|           tags: ${{ steps.ev.outputs.imageTags }} | ||||
|           file: website/Dockerfile | ||||
|           push: ${{ steps.ev.outputs.shouldPush == 'true' }} | ||||
|           platforms: linux/amd64,linux/arm64 | ||||
|           context: . | ||||
|           cache-from: type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache | ||||
|           cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache,mode=max' || '' }} | ||||
|       - uses: actions/attest-build-provenance@v2 | ||||
|         id: attest | ||||
|         if: ${{ steps.ev.outputs.shouldPush == 'true' }} | ||||
|         with: | ||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} | ||||
|           subject-digest: ${{ steps.push.outputs.digest }} | ||||
|           push-to-registry: true | ||||
|   ci-website-mark: | ||||
|     if: always() | ||||
|     needs: | ||||
|       - lint | ||||
|       - test | ||||
|       - build | ||||
|       - build-container | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: re-actors/alls-green@release/v1 | ||||
|         with: | ||||
|           jobs: ${{ toJSON(needs) }} | ||||
|           allowed-skips: ${{ github.repository == 'goauthentik/authentik-internal' && 'build-container' || '[]' }} | ||||
|  | ||||
							
								
								
									
										2
									
								
								.github/workflows/codeql-analysis.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/workflows/codeql-analysis.yml
									
									
									
									
										vendored
									
									
								
							| @ -2,7 +2,7 @@ name: "CodeQL" | ||||
|  | ||||
| on: | ||||
|   push: | ||||
|     branches: [main, next, version*] | ||||
|     branches: [main, "*", next, version*] | ||||
|   pull_request: | ||||
|     branches: [main] | ||||
|   schedule: | ||||
|  | ||||
| @ -2,7 +2,7 @@ name: authentik-gen-update-webauthn-mds | ||||
| on: | ||||
|   workflow_dispatch: | ||||
|   schedule: | ||||
|     - cron: "30 1 1,15 * *" | ||||
|     - cron: '30 1 1,15 * *' | ||||
|  | ||||
| env: | ||||
|   POSTGRES_DB: authentik | ||||
| @ -24,7 +24,7 @@ jobs: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
|       - name: Setup authentik env | ||||
|         uses: ./.github/actions/setup | ||||
|       - run: uv run ak update_webauthn_mds | ||||
|       - run: poetry run ak update_webauthn_mds | ||||
|       - uses: peter-evans/create-pull-request@v7 | ||||
|         id: cpr | ||||
|         with: | ||||
| @ -37,7 +37,6 @@ jobs: | ||||
|           signoff: true | ||||
|           # ID from https://api.github.com/users/authentik-automation[bot] | ||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> | ||||
|           labels: dependencies | ||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||
|         with: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
|  | ||||
							
								
								
									
										1
									
								
								.github/workflows/image-compress.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.github/workflows/image-compress.yml
									
									
									
									
										vendored
									
									
								
							| @ -53,7 +53,6 @@ jobs: | ||||
|           body: ${{ steps.compress.outputs.markdown }} | ||||
|           delete-branch: true | ||||
|           signoff: true | ||||
|           labels: dependencies | ||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||
|         if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}" | ||||
|         with: | ||||
|  | ||||
							
								
								
									
										47
									
								
								.github/workflows/packages-npm-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										47
									
								
								.github/workflows/packages-npm-publish.yml
									
									
									
									
										vendored
									
									
								
							| @ -1,47 +0,0 @@ | ||||
| name: authentik-packages-npm-publish | ||||
| on: | ||||
|   push: | ||||
|     branches: [main] | ||||
|     paths: | ||||
|       - packages/docusaurus-config/** | ||||
|       - packages/eslint-config/** | ||||
|       - packages/prettier-config/** | ||||
|       - packages/tsconfig/** | ||||
|       - web/packages/esbuild-plugin-live-reload/** | ||||
|   workflow_dispatch: | ||||
| jobs: | ||||
|   publish: | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     runs-on: ubuntu-latest | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         package: | ||||
|           - packages/docusaurus-config | ||||
|           - packages/eslint-config | ||||
|           - packages/prettier-config | ||||
|           - packages/tsconfig | ||||
|           - web/packages/esbuild-plugin-live-reload | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           fetch-depth: 2 | ||||
|       - uses: actions/setup-node@v4 | ||||
|         with: | ||||
|           node-version-file: ${{ matrix.package }}/package.json | ||||
|           registry-url: "https://registry.npmjs.org" | ||||
|       - name: Get changed files | ||||
|         id: changed-files | ||||
|         uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c | ||||
|         with: | ||||
|           files: | | ||||
|             ${{ matrix.package }}/package.json | ||||
|       - name: Publish package | ||||
|         if: steps.changed-files.outputs.any_changed == 'true' | ||||
|         working-directory: ${{ matrix.package }} | ||||
|         run: | | ||||
|           npm ci | ||||
|           npm run build | ||||
|           npm publish | ||||
|         env: | ||||
|           NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }} | ||||
							
								
								
									
										4
									
								
								.github/workflows/publish-source-docs.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										4
									
								
								.github/workflows/publish-source-docs.yml
									
									
									
									
										vendored
									
									
								
							| @ -21,8 +21,8 @@ jobs: | ||||
|         uses: ./.github/actions/setup | ||||
|       - name: generate docs | ||||
|         run: | | ||||
|           uv run make migrate | ||||
|           uv run ak build_source_docs | ||||
|           poetry run make migrate | ||||
|           poetry run ak build_source_docs | ||||
|       - name: Publish | ||||
|         uses: netlify/actions/cli@master | ||||
|         with: | ||||
|  | ||||
							
								
								
									
										45
									
								
								.github/workflows/release-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										45
									
								
								.github/workflows/release-publish.yml
									
									
									
									
										vendored
									
									
								
							| @ -20,49 +20,6 @@ jobs: | ||||
|       release: true | ||||
|       registry_dockerhub: true | ||||
|       registry_ghcr: true | ||||
|   build-docs: | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
|       # Needed to upload container images to ghcr.io | ||||
|       packages: write | ||||
|       # Needed for attestation | ||||
|       id-token: write | ||||
|       attestations: write | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.6.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/docs | ||||
|       - name: Login to GitHub Container Registry | ||||
|         uses: docker/login-action@v3 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
|           password: ${{ secrets.GITHUB_TOKEN }} | ||||
|       - name: Build Docker Image | ||||
|         id: push | ||||
|         uses: docker/build-push-action@v6 | ||||
|         with: | ||||
|           tags: ${{ steps.ev.outputs.imageTags }} | ||||
|           file: website/Dockerfile | ||||
|           push: true | ||||
|           platforms: linux/amd64,linux/arm64 | ||||
|           context: . | ||||
|       - uses: actions/attest-build-provenance@v2 | ||||
|         id: attest | ||||
|         if: true | ||||
|         with: | ||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} | ||||
|           subject-digest: ${{ steps.push.outputs.digest }} | ||||
|           push-to-registry: true | ||||
|   build-outpost: | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
| @ -236,6 +193,6 @@ jobs: | ||||
|           SENTRY_ORG: authentik-security-inc | ||||
|           SENTRY_PROJECT: authentik | ||||
|         with: | ||||
|           release: authentik@${{ steps.ev.outputs.version }} | ||||
|           version: authentik@${{ steps.ev.outputs.version }} | ||||
|           sourcemaps: "./web/dist" | ||||
|           url_prefix: "~/static/dist" | ||||
|  | ||||
							
								
								
									
										21
									
								
								.github/workflows/repo-mirror-cleanup.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										21
									
								
								.github/workflows/repo-mirror-cleanup.yml
									
									
									
									
										vendored
									
									
								
							| @ -1,21 +0,0 @@ | ||||
| name: "authentik-repo-mirror-cleanup" | ||||
|  | ||||
| on: | ||||
|   workflow_dispatch: | ||||
|  | ||||
| jobs: | ||||
|   to_internal: | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           fetch-depth: 0 | ||||
|       - if: ${{ env.MIRROR_KEY != '' }} | ||||
|         uses: BeryJu/repository-mirroring-action@5cf300935bc2e068f73ea69bcc411a8a997208eb | ||||
|         with: | ||||
|           target_repo_url: git@github.com:goauthentik/authentik-internal.git | ||||
|           ssh_private_key: ${{ secrets.GH_MIRROR_KEY }} | ||||
|           args: --tags --force --prune | ||||
|         env: | ||||
|           MIRROR_KEY: ${{ secrets.GH_MIRROR_KEY }} | ||||
							
								
								
									
										9
									
								
								.github/workflows/repo-mirror.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										9
									
								
								.github/workflows/repo-mirror.yml
									
									
									
									
										vendored
									
									
								
							| @ -11,10 +11,11 @@ jobs: | ||||
|         with: | ||||
|           fetch-depth: 0 | ||||
|       - if: ${{ env.MIRROR_KEY != '' }} | ||||
|         uses: BeryJu/repository-mirroring-action@5cf300935bc2e068f73ea69bcc411a8a997208eb | ||||
|         uses: pixta-dev/repository-mirroring-action@v1 | ||||
|         with: | ||||
|           target_repo_url: git@github.com:goauthentik/authentik-internal.git | ||||
|           ssh_private_key: ${{ secrets.GH_MIRROR_KEY }} | ||||
|           args: --tags --force | ||||
|           target_repo_url: | ||||
|             git@github.com:goauthentik/authentik-internal.git | ||||
|           ssh_private_key: | ||||
|             ${{ secrets.GH_MIRROR_KEY }} | ||||
|         env: | ||||
|           MIRROR_KEY: ${{ secrets.GH_MIRROR_KEY }} | ||||
|  | ||||
							
								
								
									
										27
									
								
								.github/workflows/semgrep.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										27
									
								
								.github/workflows/semgrep.yml
									
									
									
									
										vendored
									
									
								
							| @ -1,27 +0,0 @@ | ||||
| name: authentik-semgrep | ||||
| on: | ||||
|   workflow_dispatch: {} | ||||
|   pull_request: {} | ||||
|   push: | ||||
|     branches: | ||||
|       - main | ||||
|       - master | ||||
|     paths: | ||||
|       - .github/workflows/semgrep.yml | ||||
|   schedule: | ||||
|     # random HH:MM to avoid a load spike on GitHub Actions at 00:00 | ||||
|     - cron: '12 15 * * *' | ||||
| jobs: | ||||
|   semgrep: | ||||
|     name: semgrep/ci | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
|       contents: read | ||||
|     env: | ||||
|       SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }} | ||||
|     container: | ||||
|       image: semgrep/semgrep | ||||
|     if: (github.actor != 'dependabot[bot]') | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - run: semgrep ci | ||||
| @ -16,7 +16,6 @@ env: | ||||
|  | ||||
| jobs: | ||||
|   compile: | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - id: generate_token | ||||
| @ -37,10 +36,10 @@ jobs: | ||||
|         run: make gen-client-ts | ||||
|       - name: run extract | ||||
|         run: | | ||||
|           uv run make i18n-extract | ||||
|           poetry run make i18n-extract | ||||
|       - name: run compile | ||||
|         run: | | ||||
|           uv run ak compilemessages | ||||
|           poetry run ak compilemessages | ||||
|           make web-check-compile | ||||
|       - name: Create Pull Request | ||||
|         if: ${{ github.event_name != 'pull_request' }} | ||||
| @ -53,6 +52,3 @@ jobs: | ||||
|           body: "core, web: update translations" | ||||
|           delete-branch: true | ||||
|           signoff: true | ||||
|           labels: dependencies | ||||
|           # ID from https://api.github.com/users/authentik-automation[bot] | ||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> | ||||
|  | ||||
							
								
								
									
										15
									
								
								.github/workflows/translation-rename.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										15
									
								
								.github/workflows/translation-rename.yml
									
									
									
									
										vendored
									
									
								
							| @ -15,7 +15,6 @@ jobs: | ||||
|     runs-on: ubuntu-latest | ||||
|     if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}} | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - id: generate_token | ||||
|         uses: tibdex/github-app-token@v2 | ||||
|         with: | ||||
| @ -26,13 +25,23 @@ jobs: | ||||
|         env: | ||||
|           GH_TOKEN: ${{ steps.generate_token.outputs.token }} | ||||
|         run: | | ||||
|           title=$(gh pr view ${{ github.event.pull_request.number }} --json  "title" -q ".title") | ||||
|           title=$(curl -q -L \ | ||||
|             -H "Accept: application/vnd.github+json" \ | ||||
|             -H "Authorization: Bearer ${GH_TOKEN}" \ | ||||
|             -H "X-GitHub-Api-Version: 2022-11-28" \ | ||||
|             https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} | jq -r .title) | ||||
|           echo "title=${title}" >> "$GITHUB_OUTPUT" | ||||
|       - name: Rename | ||||
|         env: | ||||
|           GH_TOKEN: ${{ steps.generate_token.outputs.token }} | ||||
|         run: | | ||||
|           gh pr edit ${{ github.event.pull_request.number }} -t "translate: ${{ steps.title.outputs.title }}" --add-label dependencies | ||||
|           curl -L \ | ||||
|             -X PATCH \ | ||||
|             -H "Accept: application/vnd.github+json" \ | ||||
|             -H "Authorization: Bearer ${GH_TOKEN}" \ | ||||
|             -H "X-GitHub-Api-Version: 2022-11-28" \ | ||||
|             https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} \ | ||||
|             -d "{\"title\":\"translate: ${{ steps.title.outputs.title }}\"}" | ||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||
|         with: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
|  | ||||
							
								
								
									
										10
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										10
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @ -11,10 +11,6 @@ local_settings.py | ||||
| db.sqlite3 | ||||
| media | ||||
|  | ||||
| # Node | ||||
|  | ||||
| node_modules | ||||
|  | ||||
| # If your build process includes running collectstatic, then you probably don't need or want to include staticfiles/ | ||||
| # in your Git repository. Update and uncomment the following line accordingly. | ||||
| # <django-project-name>/staticfiles/ | ||||
| @ -37,7 +33,6 @@ eggs/ | ||||
| lib64/ | ||||
| parts/ | ||||
| dist/ | ||||
| out/ | ||||
| sdist/ | ||||
| var/ | ||||
| wheels/ | ||||
| @ -100,6 +95,9 @@ ipython_config.py | ||||
| # pyenv | ||||
| .python-version | ||||
|  | ||||
| # celery beat schedule file | ||||
| celerybeat-schedule | ||||
|  | ||||
| # SageMath parsed files | ||||
| *.sage.py | ||||
|  | ||||
| @ -163,6 +161,8 @@ dmypy.json | ||||
|  | ||||
| # pyenv | ||||
|  | ||||
| # celery beat schedule file | ||||
|  | ||||
| # SageMath parsed files | ||||
|  | ||||
| # Environments | ||||
|  | ||||
| @ -1,47 +0,0 @@ | ||||
| # Prettier Ignorefile | ||||
|  | ||||
| ## Static Files | ||||
| **/LICENSE | ||||
|  | ||||
| authentik/stages/**/* | ||||
|  | ||||
| ## Build asset directories | ||||
| coverage | ||||
| dist | ||||
| out | ||||
| .docusaurus | ||||
| website/docs/developer-docs/api/**/* | ||||
|  | ||||
| ## Environment | ||||
| *.env | ||||
|  | ||||
| ## Secrets | ||||
| *.secrets | ||||
|  | ||||
| ## Yarn | ||||
| .yarn/**/* | ||||
|  | ||||
| ## Node | ||||
| node_modules | ||||
| coverage | ||||
|  | ||||
| ## Configs | ||||
| *.log | ||||
| *.yaml | ||||
| *.yml | ||||
|  | ||||
| # Templates | ||||
| # TODO: Rename affected files to *.template.* or similar. | ||||
| *.html | ||||
| *.mdx | ||||
| *.md | ||||
|  | ||||
| ## Import order matters | ||||
| poly.ts | ||||
| src/locale-codes.ts | ||||
| src/locales/ | ||||
|  | ||||
| # Storybook | ||||
| storybook-static/ | ||||
| .storybook/css-import-maps* | ||||
|  | ||||
							
								
								
									
										10
									
								
								.vscode/settings.json
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										10
									
								
								.vscode/settings.json
									
									
									
									
										vendored
									
									
								
							| @ -6,19 +6,17 @@ | ||||
|         "!Context scalar", | ||||
|         "!Enumerate sequence", | ||||
|         "!Env scalar", | ||||
|         "!Env sequence", | ||||
|         "!Find sequence", | ||||
|         "!Format sequence", | ||||
|         "!If sequence", | ||||
|         "!Index scalar", | ||||
|         "!KeyOf scalar", | ||||
|         "!Value scalar", | ||||
|         "!AtIndex scalar", | ||||
|         "!ParseJSON scalar" | ||||
|         "!AtIndex scalar" | ||||
|     ], | ||||
|     "typescript.preferences.importModuleSpecifier": "non-relative", | ||||
|     "typescript.preferences.importModuleSpecifierEnding": "index", | ||||
|     "typescript.tsdk": "./node_modules/typescript/lib", | ||||
|     "typescript.tsdk": "./web/node_modules/typescript/lib", | ||||
|     "typescript.enablePromptUseWorkspaceTsdk": true, | ||||
|     "yaml.schemas": { | ||||
|         "./blueprints/schema.json": "blueprints/**/*.yaml" | ||||
| @ -32,5 +30,7 @@ | ||||
|         } | ||||
|     ], | ||||
|     "go.testFlags": ["-count=1"], | ||||
|     "github-actions.workflows.pinned.workflows": [".github/workflows/ci-main.yml"] | ||||
|     "github-actions.workflows.pinned.workflows": [ | ||||
|         ".github/workflows/ci-main.yml" | ||||
|     ] | ||||
| } | ||||
|  | ||||
							
								
								
									
										46
									
								
								.vscode/tasks.json
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										46
									
								
								.vscode/tasks.json
									
									
									
									
										vendored
									
									
								
							| @ -3,13 +3,8 @@ | ||||
|     "tasks": [ | ||||
|         { | ||||
|             "label": "authentik/core: make", | ||||
|             "command": "uv", | ||||
|             "args": [ | ||||
|                 "run", | ||||
|                 "make", | ||||
|                 "lint-fix", | ||||
|                 "lint" | ||||
|             ], | ||||
|             "command": "poetry", | ||||
|             "args": ["run", "make", "lint-fix", "lint"], | ||||
|             "presentation": { | ||||
|                 "panel": "new" | ||||
|             }, | ||||
| @ -17,12 +12,8 @@ | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/core: run", | ||||
|             "command": "uv", | ||||
|             "args": [ | ||||
|                 "run", | ||||
|                 "ak", | ||||
|                 "server" | ||||
|             ], | ||||
|             "command": "poetry", | ||||
|             "args": ["run", "ak", "server"], | ||||
|             "group": "build", | ||||
|             "presentation": { | ||||
|                 "panel": "dedicated", | ||||
| @ -32,17 +23,13 @@ | ||||
|         { | ||||
|             "label": "authentik/web: make", | ||||
|             "command": "make", | ||||
|             "args": [ | ||||
|                 "web" | ||||
|             ], | ||||
|             "args": ["web"], | ||||
|             "group": "build" | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/web: watch", | ||||
|             "command": "make", | ||||
|             "args": [ | ||||
|                 "web-watch" | ||||
|             ], | ||||
|             "args": ["web-watch"], | ||||
|             "group": "build", | ||||
|             "presentation": { | ||||
|                 "panel": "dedicated", | ||||
| @ -52,26 +39,19 @@ | ||||
|         { | ||||
|             "label": "authentik: install", | ||||
|             "command": "make", | ||||
|             "args": [ | ||||
|                 "install", | ||||
|                 "-j4" | ||||
|             ], | ||||
|             "args": ["install", "-j4"], | ||||
|             "group": "build" | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/website: make", | ||||
|             "command": "make", | ||||
|             "args": [ | ||||
|                 "website" | ||||
|             ], | ||||
|             "args": ["website"], | ||||
|             "group": "build" | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/website: watch", | ||||
|             "command": "make", | ||||
|             "args": [ | ||||
|                 "website-watch" | ||||
|             ], | ||||
|             "args": ["website-watch"], | ||||
|             "group": "build", | ||||
|             "presentation": { | ||||
|                 "panel": "dedicated", | ||||
| @ -80,12 +60,8 @@ | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/api: generate", | ||||
|             "command": "uv", | ||||
|             "args": [ | ||||
|                 "run", | ||||
|                 "make", | ||||
|                 "gen" | ||||
|             ], | ||||
|             "command": "poetry", | ||||
|             "args": ["run", "make", "gen"], | ||||
|             "group": "build" | ||||
|         } | ||||
|     ] | ||||
|  | ||||
| @ -10,7 +10,7 @@ schemas/                        @goauthentik/backend | ||||
| scripts/                        @goauthentik/backend | ||||
| tests/                          @goauthentik/backend | ||||
| pyproject.toml                  @goauthentik/backend | ||||
| uv.lock                         @goauthentik/backend | ||||
| poetry.lock                     @goauthentik/backend | ||||
| go.mod                          @goauthentik/backend | ||||
| go.sum                          @goauthentik/backend | ||||
| # Infrastructure | ||||
| @ -23,8 +23,6 @@ docker-compose.yml              @goauthentik/infrastructure | ||||
| Makefile                        @goauthentik/infrastructure | ||||
| .editorconfig                   @goauthentik/infrastructure | ||||
| CODEOWNERS                      @goauthentik/infrastructure | ||||
| # Web packages | ||||
| packages/                       @goauthentik/frontend | ||||
| # Web | ||||
| web/                            @goauthentik/frontend | ||||
| tests/wdio/                     @goauthentik/frontend | ||||
|  | ||||
							
								
								
									
										136
									
								
								Dockerfile
									
									
									
									
									
								
							
							
						
						
									
										136
									
								
								Dockerfile
									
									
									
									
									
								
							| @ -1,7 +1,27 @@ | ||||
| # syntax=docker/dockerfile:1 | ||||
|  | ||||
| # Stage 1: Build webui | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:24-slim AS node-builder | ||||
| # Stage 1: Build website | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS website-builder | ||||
|  | ||||
| ENV NODE_ENV=production \ | ||||
|     GIT_UNAVAILABLE=true | ||||
|  | ||||
| WORKDIR /work/website | ||||
|  | ||||
| RUN --mount=type=bind,target=/work/website/package.json,src=./website/package.json \ | ||||
|     --mount=type=bind,target=/work/website/package-lock.json,src=./website/package-lock.json \ | ||||
|     --mount=type=cache,id=npm-website,sharing=shared,target=/root/.npm \ | ||||
|     npm ci --include=dev | ||||
|  | ||||
| COPY ./website /work/website/ | ||||
| COPY ./blueprints /work/blueprints/ | ||||
| COPY ./schema.yml /work/ | ||||
| COPY ./SECURITY.md /work/ | ||||
|  | ||||
| RUN npm run build-bundled | ||||
|  | ||||
| # Stage 2: Build webui | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS web-builder | ||||
|  | ||||
| ARG GIT_BUILD_HASH | ||||
| ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ||||
| @ -13,7 +33,7 @@ RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \ | ||||
|     --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \ | ||||
|     --mount=type=bind,target=/work/web/packages/sfe/package.json,src=./web/packages/sfe/package.json \ | ||||
|     --mount=type=bind,target=/work/web/scripts,src=./web/scripts \ | ||||
|     --mount=type=cache,id=npm-ak,sharing=shared,target=/root/.npm \ | ||||
|     --mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \ | ||||
|     npm ci --include=dev | ||||
|  | ||||
| COPY ./package.json /work | ||||
| @ -21,11 +41,10 @@ COPY ./web /work/web/ | ||||
| COPY ./website /work/website/ | ||||
| COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api | ||||
|  | ||||
| RUN npm run build && \ | ||||
|     npm run build:sfe | ||||
| RUN npm run build | ||||
|  | ||||
| # Stage 2: Build go proxy | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.24-bookworm AS go-builder | ||||
| # Stage 3: Build go proxy | ||||
| FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/oss/go/microsoft/golang:1.23-fips-bookworm AS go-builder | ||||
|  | ||||
| ARG TARGETOS | ||||
| ARG TARGETARCH | ||||
| @ -49,8 +68,8 @@ RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \ | ||||
| COPY ./cmd /go/src/goauthentik.io/cmd | ||||
| COPY ./authentik/lib /go/src/goauthentik.io/authentik/lib | ||||
| COPY ./web/static.go /go/src/goauthentik.io/web/static.go | ||||
| COPY --from=node-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt | ||||
| COPY --from=node-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt | ||||
| COPY --from=web-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt | ||||
| COPY --from=web-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt | ||||
| COPY ./internal /go/src/goauthentik.io/internal | ||||
| COPY ./go.mod /go/src/goauthentik.io/go.mod | ||||
| COPY ./go.sum /go/src/goauthentik.io/go.sum | ||||
| @ -58,76 +77,70 @@ COPY ./go.sum /go/src/goauthentik.io/go.sum | ||||
| RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \ | ||||
|     --mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \ | ||||
|     if [ "$TARGETARCH" = "arm64" ]; then export CC=aarch64-linux-gnu-gcc && export CC_FOR_TARGET=gcc-aarch64-linux-gnu; fi && \ | ||||
|     CGO_ENABLED=1 GOFIPS140=latest GOARM="${TARGETVARIANT#v}" \ | ||||
|     CGO_ENABLED=1 GOEXPERIMENT="systemcrypto" GOFLAGS="-tags=requirefips" GOARM="${TARGETVARIANT#v}" \ | ||||
|     go build -o /go/authentik ./cmd/server | ||||
|  | ||||
| # Stage 3: MaxMind GeoIP | ||||
| # Stage 4: MaxMind GeoIP | ||||
| FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip | ||||
|  | ||||
| ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN" | ||||
| ENV GEOIPUPDATE_VERBOSE="1" | ||||
| ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID" | ||||
| ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY" | ||||
|  | ||||
| USER root | ||||
| RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ | ||||
|     --mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \ | ||||
|     mkdir -p /usr/share/GeoIP && \ | ||||
|     /bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" | ||||
|     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" | ||||
|  | ||||
| # Stage 4: Download uv | ||||
| FROM ghcr.io/astral-sh/uv:0.7.17 AS uv | ||||
| # Stage 5: Base python image | ||||
| FROM ghcr.io/goauthentik/fips-python:3.13.5-slim-bookworm-fips AS python-base | ||||
|  | ||||
| ENV VENV_PATH="/ak-root/.venv" \ | ||||
|     PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \ | ||||
|     UV_COMPILE_BYTECODE=1 \ | ||||
|     UV_LINK_MODE=copy \ | ||||
|     UV_NATIVE_TLS=1 \ | ||||
|     UV_PYTHON_DOWNLOADS=0 | ||||
|  | ||||
| WORKDIR /ak-root/ | ||||
|  | ||||
| COPY --from=uv /uv /uvx /bin/ | ||||
|  | ||||
| # Stage 6: Python dependencies | ||||
| FROM python-base AS python-deps | ||||
| # Stage 5: Python dependencies | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS python-deps | ||||
|  | ||||
| ARG TARGETARCH | ||||
| ARG TARGETVARIANT | ||||
|  | ||||
| RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache | ||||
| WORKDIR /ak-root/poetry | ||||
|  | ||||
| ENV PATH="/root/.cargo/bin:$PATH" | ||||
| ENV VENV_PATH="/ak-root/venv" \ | ||||
|     POETRY_VIRTUALENVS_CREATE=false \ | ||||
|     PATH="/ak-root/venv/bin:$PATH" | ||||
|  | ||||
| RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache | ||||
|  | ||||
| RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \ | ||||
|     apt-get update && \ | ||||
|     # Required for installing pip packages | ||||
|     apt-get install -y --no-install-recommends build-essential pkg-config libpq-dev libkrb5-dev | ||||
|  | ||||
| RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \ | ||||
|     --mount=type=bind,target=./poetry.lock,src=./poetry.lock \ | ||||
|     --mount=type=cache,target=/root/.cache/pip \ | ||||
|     --mount=type=cache,target=/root/.cache/pypoetry \ | ||||
|     pip install --no-cache cffi && \ | ||||
|     apt-get update && \ | ||||
|     apt-get install -y --no-install-recommends \ | ||||
|     # Build essentials | ||||
|     build-essential pkg-config libffi-dev git \ | ||||
|     # cryptography | ||||
|     curl \ | ||||
|     # libxml | ||||
|     libxslt-dev zlib1g-dev \ | ||||
|     # postgresql | ||||
|     libpq-dev \ | ||||
|     # python-kadmin-rs | ||||
|     clang libkrb5-dev sccache \ | ||||
|     # xmlsec | ||||
|     libltdl-dev && \ | ||||
|     curl https://sh.rustup.rs -sSf | sh -s -- -y | ||||
|         build-essential libffi-dev \ | ||||
|         # Required for cryptography | ||||
|         curl pkg-config \ | ||||
|         # Required for lxml | ||||
|         libxslt-dev zlib1g-dev \ | ||||
|         # Required for xmlsec | ||||
|         libltdl-dev \ | ||||
|         # Required for kadmin | ||||
|         sccache clang && \ | ||||
|     curl https://sh.rustup.rs -sSf | sh -s -- -y && \ | ||||
|     . "$HOME/.cargo/env" && \ | ||||
|     python -m venv /ak-root/venv/ && \ | ||||
|     bash -c "source ${VENV_PATH}/bin/activate && \ | ||||
|     pip3 install --upgrade pip poetry && \ | ||||
|     poetry config --local installer.no-binary cryptography,xmlsec,lxml,python-kadmin-rs && \ | ||||
|     poetry install --only=main --no-ansi --no-interaction --no-root && \ | ||||
|     pip uninstall cryptography -y && \ | ||||
|     poetry install --only=main --no-ansi --no-interaction --no-root" | ||||
|  | ||||
| ENV UV_NO_BINARY_PACKAGE="cryptography lxml python-kadmin-rs xmlsec" | ||||
|  | ||||
| RUN --mount=type=bind,target=pyproject.toml,src=pyproject.toml \ | ||||
|     --mount=type=bind,target=uv.lock,src=uv.lock \ | ||||
|     --mount=type=bind,target=packages,src=packages \ | ||||
|     --mount=type=cache,target=/root/.cache/uv \ | ||||
|     uv sync --frozen --no-install-project --no-dev | ||||
|  | ||||
| # Stage 7: Run | ||||
| FROM python-base AS final-image | ||||
| # Stage 6: Run | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS final-image | ||||
|  | ||||
| ARG VERSION | ||||
| ARG GIT_BUILD_HASH | ||||
| @ -159,7 +172,7 @@ RUN apt-get update && \ | ||||
|  | ||||
| COPY ./authentik/ /authentik | ||||
| COPY ./pyproject.toml / | ||||
| COPY ./uv.lock / | ||||
| COPY ./poetry.lock / | ||||
| COPY ./schemas /schemas | ||||
| COPY ./locale /locale | ||||
| COPY ./tests /tests | ||||
| @ -168,10 +181,10 @@ COPY ./blueprints /blueprints | ||||
| COPY ./lifecycle/ /lifecycle | ||||
| COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf | ||||
| COPY --from=go-builder /go/authentik /bin/authentik | ||||
| COPY ./packages/ /ak-root/packages | ||||
| COPY --from=python-deps /ak-root/.venv /ak-root/.venv | ||||
| COPY --from=node-builder /work/web/dist/ /web/dist/ | ||||
| COPY --from=node-builder /work/web/authentik/ /web/authentik/ | ||||
| COPY --from=python-deps /ak-root/venv /ak-root/venv | ||||
| COPY --from=web-builder /work/web/dist/ /web/dist/ | ||||
| COPY --from=web-builder /work/web/authentik/ /web/authentik/ | ||||
| COPY --from=website-builder /work/website/build/ /website/help/ | ||||
| COPY --from=geoip /usr/share/GeoIP /geoip | ||||
|  | ||||
| USER 1000 | ||||
| @ -179,6 +192,9 @@ USER 1000 | ||||
| ENV TMPDIR=/dev/shm/ \ | ||||
|     PYTHONDONTWRITEBYTECODE=1 \ | ||||
|     PYTHONUNBUFFERED=1 \ | ||||
|     PATH="/ak-root/venv/bin:/lifecycle:$PATH" \ | ||||
|     VENV_PATH="/ak-root/venv" \ | ||||
|     POETRY_VIRTUALENVS_CREATE=false \ | ||||
|     GOFIPS=1 | ||||
|  | ||||
| HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ] | ||||
|  | ||||
							
								
								
									
										110
									
								
								Makefile
									
									
									
									
									
								
							
							
						
						
									
										110
									
								
								Makefile
									
									
									
									
									
								
							| @ -1,21 +1,20 @@ | ||||
| .PHONY: gen dev-reset all clean test web website | ||||
|  | ||||
| SHELL := /usr/bin/env bash | ||||
| .SHELLFLAGS += ${SHELLFLAGS} -e -o pipefail | ||||
| .SHELLFLAGS += ${SHELLFLAGS} -e | ||||
| PWD = $(shell pwd) | ||||
| UID = $(shell id -u) | ||||
| GID = $(shell id -g) | ||||
| NPM_VERSION = $(shell python -m scripts.generate_semver) | ||||
| PY_SOURCES = authentik packages tests scripts lifecycle .github | ||||
| PY_SOURCES = authentik tests scripts lifecycle .github | ||||
| DOCKER_IMAGE ?= "authentik:test" | ||||
|  | ||||
| GEN_API_TS = gen-ts-api | ||||
| GEN_API_PY = gen-py-api | ||||
| GEN_API_GO = gen-go-api | ||||
| GEN_API_TS = "gen-ts-api" | ||||
| GEN_API_PY = "gen-py-api" | ||||
| GEN_API_GO = "gen-go-api" | ||||
|  | ||||
| pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null) | ||||
| pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null) | ||||
| pg_name := $(shell uv run python -m authentik.lib.config postgresql.name 2>/dev/null) | ||||
| pg_user := $(shell poetry run python -m authentik.lib.config postgresql.user 2>/dev/null) | ||||
| pg_host := $(shell poetry run python -m authentik.lib.config postgresql.host 2>/dev/null) | ||||
| pg_name := $(shell poetry run python -m authentik.lib.config postgresql.name 2>/dev/null) | ||||
|  | ||||
| all: lint-fix lint test gen web  ## Lint, build, and test everything | ||||
|  | ||||
| @ -33,37 +32,34 @@ go-test: | ||||
| 	go test -timeout 0 -v -race -cover ./... | ||||
|  | ||||
| test: ## Run the server tests and produce a coverage report (locally) | ||||
| 	uv run coverage run manage.py test --keepdb authentik | ||||
| 	uv run coverage html | ||||
| 	uv run coverage report | ||||
| 	poetry run coverage run manage.py test --keepdb authentik | ||||
| 	poetry run coverage html | ||||
| 	poetry run coverage report | ||||
|  | ||||
| lint-fix: lint-codespell  ## Lint and automatically fix errors in the python source code. Reports spelling errors. | ||||
| 	uv run black $(PY_SOURCES) | ||||
| 	uv run ruff check --fix $(PY_SOURCES) | ||||
| 	poetry run black $(PY_SOURCES) | ||||
| 	poetry run ruff check --fix $(PY_SOURCES) | ||||
|  | ||||
| lint-codespell:  ## Reports spelling errors. | ||||
| 	uv run codespell -w | ||||
| 	poetry run codespell -w | ||||
|  | ||||
| lint: ## Lint the python and golang sources | ||||
| 	uv run bandit -c pyproject.toml -r $(PY_SOURCES) | ||||
| 	poetry run bandit -c pyproject.toml -r $(PY_SOURCES) | ||||
| 	golangci-lint run -v | ||||
|  | ||||
| core-install: | ||||
| 	uv sync --frozen | ||||
| 	poetry install | ||||
|  | ||||
| migrate: ## Run the Authentik Django server's migrations | ||||
| 	uv run python -m lifecycle.migrate | ||||
| 	poetry run python -m lifecycle.migrate | ||||
|  | ||||
| i18n-extract: core-i18n-extract web-i18n-extract  ## Extract strings that require translation into files to send to a translation service | ||||
|  | ||||
| aws-cfn: | ||||
| 	cd lifecycle/aws && npm run aws-cfn | ||||
|  | ||||
| run:  ## Run the main authentik server process | ||||
| 	uv run ak server | ||||
|  | ||||
| core-i18n-extract: | ||||
| 	uv run ak makemessages \ | ||||
| 	poetry run ak makemessages \ | ||||
| 		--add-location file \ | ||||
| 		--no-obsolete \ | ||||
| 		--ignore web \ | ||||
| @ -86,10 +82,6 @@ dev-create-db: | ||||
|  | ||||
| dev-reset: dev-drop-db dev-create-db migrate  ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state. | ||||
|  | ||||
| update-test-mmdb:  ## Update test GeoIP and ASN Databases | ||||
| 	curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-ASN-Test.mmdb -o ${PWD}/tests/GeoLite2-ASN-Test.mmdb | ||||
| 	curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-City-Test.mmdb -o ${PWD}/tests/GeoLite2-City-Test.mmdb | ||||
|  | ||||
| ######################### | ||||
| ## API Schema | ||||
| ######################### | ||||
| @ -98,11 +90,11 @@ gen-build:  ## Extract the schema from the database | ||||
| 	AUTHENTIK_DEBUG=true \ | ||||
| 		AUTHENTIK_TENANTS__ENABLED=true \ | ||||
| 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | ||||
| 		uv run ak make_blueprint_schema --file blueprints/schema.json | ||||
| 		poetry run ak make_blueprint_schema > blueprints/schema.json | ||||
| 	AUTHENTIK_DEBUG=true \ | ||||
| 		AUTHENTIK_TENANTS__ENABLED=true \ | ||||
| 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | ||||
| 		uv run ak spectacular --file schema.yml | ||||
| 		poetry run ak spectacular --file schema.yml | ||||
|  | ||||
| gen-changelog:  ## (Release) generate the changelog based from the commits since the last tag | ||||
| 	git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md | ||||
| @ -122,19 +114,14 @@ gen-diff:  ## (Release) generate the changelog diff between the current schema a | ||||
| 	npx prettier --write diff.md | ||||
|  | ||||
| gen-clean-ts:  ## Remove generated API client for Typescript | ||||
| 	rm -rf ${PWD}/${GEN_API_TS}/ | ||||
| 	rm -rf ${PWD}/web/node_modules/@goauthentik/api/ | ||||
| 	rm -rf ./${GEN_API_TS}/ | ||||
| 	rm -rf ./web/node_modules/@goauthentik/api/ | ||||
|  | ||||
| gen-clean-go:  ## Remove generated API client for Go | ||||
| 	mkdir -p ${PWD}/${GEN_API_GO} | ||||
| ifneq ($(wildcard ${PWD}/${GEN_API_GO}/.*),) | ||||
| 	make -C ${PWD}/${GEN_API_GO} clean | ||||
| else | ||||
| 	rm -rf ${PWD}/${GEN_API_GO} | ||||
| endif | ||||
| 	rm -rf ./${GEN_API_GO}/ | ||||
|  | ||||
| gen-clean-py:  ## Remove generated API client for Python | ||||
| 	rm -rf ${PWD}/${GEN_API_PY}/ | ||||
| 	rm -rf ./${GEN_API_PY}/ | ||||
|  | ||||
| gen-clean: gen-clean-ts gen-clean-go gen-clean-py  ## Remove generated API clients | ||||
|  | ||||
| @ -150,9 +137,9 @@ gen-client-ts: gen-clean-ts  ## Build and install the authentik API for Typescri | ||||
| 		--additional-properties=npmVersion=${NPM_VERSION} \ | ||||
| 		--git-repo-id authentik \ | ||||
| 		--git-user-id goauthentik | ||||
|  | ||||
| 	cd ${PWD}/${GEN_API_TS} && npm link | ||||
| 	cd ${PWD}/web && npm link @goauthentik/api | ||||
| 	mkdir -p web/node_modules/@goauthentik/api | ||||
| 	cd ./${GEN_API_TS} && npm i | ||||
| 	\cp -rf ./${GEN_API_TS}/* web/node_modules/@goauthentik/api | ||||
|  | ||||
| gen-client-py: gen-clean-py ## Build and install the authentik API for Python | ||||
| 	docker run \ | ||||
| @ -166,20 +153,27 @@ gen-client-py: gen-clean-py ## Build and install the authentik API for Python | ||||
| 		--additional-properties=packageVersion=${NPM_VERSION} \ | ||||
| 		--git-repo-id authentik \ | ||||
| 		--git-user-id goauthentik | ||||
| 	pip install ./${GEN_API_PY} | ||||
|  | ||||
| gen-client-go: gen-clean-go  ## Build and install the authentik API for Golang | ||||
| 	mkdir -p ${PWD}/${GEN_API_GO} | ||||
| ifeq ($(wildcard ${PWD}/${GEN_API_GO}/.*),) | ||||
| 	git clone --depth 1 https://github.com/goauthentik/client-go.git ${PWD}/${GEN_API_GO} | ||||
| else | ||||
| 	cd ${PWD}/${GEN_API_GO} && git pull | ||||
| endif | ||||
| 	cp ${PWD}/schema.yml ${PWD}/${GEN_API_GO} | ||||
| 	make -C ${PWD}/${GEN_API_GO} build | ||||
| 	mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates | ||||
| 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./${GEN_API_GO}/config.yaml | ||||
| 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./${GEN_API_GO}/templates/README.mustache | ||||
| 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./${GEN_API_GO}/templates/go.mod.mustache | ||||
| 	cp schema.yml ./${GEN_API_GO}/ | ||||
| 	docker run \ | ||||
| 		--rm -v ${PWD}/${GEN_API_GO}:/local \ | ||||
| 		--user ${UID}:${GID} \ | ||||
| 		docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \ | ||||
| 		-i /local/schema.yml \ | ||||
| 		-g go \ | ||||
| 		-o /local/ \ | ||||
| 		-c /local/config.yaml | ||||
| 	go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO} | ||||
| 	rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/ | ||||
|  | ||||
| gen-dev-config:  ## Generate a local development config file | ||||
| 	uv run scripts/generate_config.py | ||||
| 	poetry run scripts/generate_config.py | ||||
|  | ||||
| gen: gen-build gen-client-ts | ||||
|  | ||||
| @ -247,7 +241,7 @@ docker:  ## Build a docker image of the current source tree | ||||
| 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} | ||||
|  | ||||
| test-docker: | ||||
| 	BUILD=true ${PWD}/scripts/test_docker.sh | ||||
| 	BUILD=true ./scripts/test_docker.sh | ||||
|  | ||||
| ######################### | ||||
| ## CI | ||||
| @ -260,21 +254,21 @@ ci--meta-debug: | ||||
| 	node --version | ||||
|  | ||||
| ci-black: ci--meta-debug | ||||
| 	uv run black --check $(PY_SOURCES) | ||||
| 	poetry run black --check $(PY_SOURCES) | ||||
|  | ||||
| ci-ruff: ci--meta-debug | ||||
| 	uv run ruff check $(PY_SOURCES) | ||||
| 	poetry run ruff check $(PY_SOURCES) | ||||
|  | ||||
| ci-codespell: ci--meta-debug | ||||
| 	uv run codespell -s | ||||
| 	poetry run codespell -s | ||||
|  | ||||
| ci-bandit: ci--meta-debug | ||||
| 	uv run bandit -r $(PY_SOURCES) | ||||
| 	poetry run bandit -r $(PY_SOURCES) | ||||
|  | ||||
| ci-pending-migrations: ci--meta-debug | ||||
| 	uv run ak makemigrations --check | ||||
| 	poetry run ak makemigrations --check | ||||
|  | ||||
| ci-test: ci--meta-debug | ||||
| 	uv run coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik | ||||
| 	uv run coverage report | ||||
| 	uv run coverage xml | ||||
| 	poetry run coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik | ||||
| 	poetry run coverage report | ||||
| 	poetry run coverage xml | ||||
|  | ||||
| @ -42,4 +42,4 @@ See [SECURITY.md](SECURITY.md) | ||||
|  | ||||
| ## Adoption and Contributions | ||||
|  | ||||
| Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [contribution guide](https://docs.goauthentik.io/docs/developer-docs?utm_source=github). | ||||
| Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md). | ||||
|  | ||||
| @ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni | ||||
|  | ||||
| | Version   | Supported | | ||||
| | --------- | --------- | | ||||
| | 2025.4.x  | ✅        | | ||||
| | 2025.6.x  | ✅        | | ||||
| | 2024.12.x | ✅        | | ||||
| | 2025.2.x  | ✅        | | ||||
|  | ||||
| ## Reporting a Vulnerability | ||||
|  | ||||
|  | ||||
| @ -2,7 +2,7 @@ | ||||
|  | ||||
| from os import environ | ||||
|  | ||||
| __version__ = "2025.6.3" | ||||
| __version__ = "2025.2.1" | ||||
| ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ||||
|  | ||||
|  | ||||
|  | ||||
							
								
								
									
										79
									
								
								authentik/admin/api/metrics.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										79
									
								
								authentik/admin/api/metrics.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,79 @@ | ||||
| """authentik administration metrics""" | ||||
|  | ||||
| from datetime import timedelta | ||||
|  | ||||
| from django.db.models.functions import ExtractHour | ||||
| from drf_spectacular.utils import extend_schema, extend_schema_field | ||||
| from guardian.shortcuts import get_objects_for_user | ||||
| from rest_framework.fields import IntegerField, SerializerMethodField | ||||
| from rest_framework.permissions import IsAuthenticated | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.views import APIView | ||||
|  | ||||
| from authentik.core.api.utils import PassiveSerializer | ||||
| from authentik.events.models import EventAction | ||||
|  | ||||
|  | ||||
| class CoordinateSerializer(PassiveSerializer): | ||||
|     """Coordinates for diagrams""" | ||||
|  | ||||
|     x_cord = IntegerField(read_only=True) | ||||
|     y_cord = IntegerField(read_only=True) | ||||
|  | ||||
|  | ||||
| class LoginMetricsSerializer(PassiveSerializer): | ||||
|     """Login Metrics per 1h""" | ||||
|  | ||||
|     logins = SerializerMethodField() | ||||
|     logins_failed = SerializerMethodField() | ||||
|     authorizations = SerializerMethodField() | ||||
|  | ||||
|     @extend_schema_field(CoordinateSerializer(many=True)) | ||||
|     def get_logins(self, _): | ||||
|         """Get successful logins per 8 hours for the last 7 days""" | ||||
|         user = self.context["user"] | ||||
|         return ( | ||||
|             get_objects_for_user(user, "authentik_events.view_event").filter( | ||||
|                 action=EventAction.LOGIN | ||||
|             ) | ||||
|             # 3 data points per day, so 8 hour spans | ||||
|             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||
|         ) | ||||
|  | ||||
|     @extend_schema_field(CoordinateSerializer(many=True)) | ||||
|     def get_logins_failed(self, _): | ||||
|         """Get failed logins per 8 hours for the last 7 days""" | ||||
|         user = self.context["user"] | ||||
|         return ( | ||||
|             get_objects_for_user(user, "authentik_events.view_event").filter( | ||||
|                 action=EventAction.LOGIN_FAILED | ||||
|             ) | ||||
|             # 3 data points per day, so 8 hour spans | ||||
|             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||
|         ) | ||||
|  | ||||
|     @extend_schema_field(CoordinateSerializer(many=True)) | ||||
|     def get_authorizations(self, _): | ||||
|         """Get successful authorizations per 8 hours for the last 7 days""" | ||||
|         user = self.context["user"] | ||||
|         return ( | ||||
|             get_objects_for_user(user, "authentik_events.view_event").filter( | ||||
|                 action=EventAction.AUTHORIZE_APPLICATION | ||||
|             ) | ||||
|             # 3 data points per day, so 8 hour spans | ||||
|             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class AdministrationMetricsViewSet(APIView): | ||||
|     """Login Metrics per 1h""" | ||||
|  | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     @extend_schema(responses={200: LoginMetricsSerializer(many=False)}) | ||||
|     def get(self, request: Request) -> Response: | ||||
|         """Login Metrics per 1h""" | ||||
|         serializer = LoginMetricsSerializer(True) | ||||
|         serializer.context["user"] = request.user | ||||
|         return Response(serializer.data) | ||||
| @ -59,7 +59,7 @@ class SystemInfoSerializer(PassiveSerializer): | ||||
|             if not isinstance(value, str): | ||||
|                 continue | ||||
|             actual_value = value | ||||
|             if raw_session is not None and raw_session in actual_value: | ||||
|             if raw_session in actual_value: | ||||
|                 actual_value = actual_value.replace( | ||||
|                     raw_session, SafeExceptionReporterFilter.cleansed_substitute | ||||
|                 ) | ||||
|  | ||||
| @ -1,7 +1,6 @@ | ||||
| """authentik administration overview""" | ||||
|  | ||||
| from django.core.cache import cache | ||||
| from django_tenants.utils import get_public_schema_name | ||||
| from drf_spectacular.utils import extend_schema | ||||
| from packaging.version import parse | ||||
| from rest_framework.fields import SerializerMethodField | ||||
| @ -14,7 +13,6 @@ from authentik import __version__, get_build_hash | ||||
| from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version | ||||
| from authentik.core.api.utils import PassiveSerializer | ||||
| from authentik.outposts.models import Outpost | ||||
| from authentik.tenants.utils import get_current_tenant | ||||
|  | ||||
|  | ||||
| class VersionSerializer(PassiveSerializer): | ||||
| @ -37,11 +35,9 @@ class VersionSerializer(PassiveSerializer): | ||||
|  | ||||
|     def get_version_latest(self, _) -> str: | ||||
|         """Get latest version from cache""" | ||||
|         if get_current_tenant().schema_name == get_public_schema_name(): | ||||
|             return __version__ | ||||
|         version_in_cache = cache.get(VERSION_CACHE_KEY) | ||||
|         if not version_in_cache:  # pragma: no cover | ||||
|             update_latest_version.send() | ||||
|             update_latest_version.delay() | ||||
|             return __version__ | ||||
|         return version_in_cache | ||||
|  | ||||
|  | ||||
							
								
								
									
										57
									
								
								authentik/admin/api/workers.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										57
									
								
								authentik/admin/api/workers.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,57 @@ | ||||
| """authentik administration overview""" | ||||
|  | ||||
| from socket import gethostname | ||||
|  | ||||
| from django.conf import settings | ||||
| from drf_spectacular.utils import extend_schema, inline_serializer | ||||
| from packaging.version import parse | ||||
| from rest_framework.fields import BooleanField, CharField | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.views import APIView | ||||
|  | ||||
| from authentik import get_full_version | ||||
| from authentik.rbac.permissions import HasPermission | ||||
| from authentik.root.celery import CELERY_APP | ||||
|  | ||||
|  | ||||
| class WorkerView(APIView): | ||||
|     """Get currently connected worker count.""" | ||||
|  | ||||
|     permission_classes = [HasPermission("authentik_rbac.view_system_info")] | ||||
|  | ||||
|     @extend_schema( | ||||
|         responses=inline_serializer( | ||||
|             "Worker", | ||||
|             fields={ | ||||
|                 "worker_id": CharField(), | ||||
|                 "version": CharField(), | ||||
|                 "version_matching": BooleanField(), | ||||
|             }, | ||||
|             many=True, | ||||
|         ) | ||||
|     ) | ||||
|     def get(self, request: Request) -> Response: | ||||
|         """Get currently connected worker count.""" | ||||
|         raw: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5) | ||||
|         our_version = parse(get_full_version()) | ||||
|         response = [] | ||||
|         for worker in raw: | ||||
|             key = list(worker.keys())[0] | ||||
|             version = worker[key].get("version") | ||||
|             version_matching = False | ||||
|             if version: | ||||
|                 version_matching = parse(version) == our_version | ||||
|             response.append( | ||||
|                 {"worker_id": key, "version": version, "version_matching": version_matching} | ||||
|             ) | ||||
|         # In debug we run with `task_always_eager`, so tasks are ran on the main process | ||||
|         if settings.DEBUG:  # pragma: no cover | ||||
|             response.append( | ||||
|                 { | ||||
|                     "worker_id": f"authentik-debug@{gethostname()}", | ||||
|                     "version": get_full_version(), | ||||
|                     "version_matching": True, | ||||
|                 } | ||||
|             ) | ||||
|         return Response(response) | ||||
| @ -3,9 +3,6 @@ | ||||
| from prometheus_client import Info | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.lib.utils.time import fqdn_rand | ||||
| from authentik.tasks.schedules.lib import ScheduleSpec | ||||
|  | ||||
| PROM_INFO = Info("authentik_version", "Currently running authentik version") | ||||
|  | ||||
| @ -17,31 +14,3 @@ class AuthentikAdminConfig(ManagedAppConfig): | ||||
|     label = "authentik_admin" | ||||
|     verbose_name = "authentik Admin" | ||||
|     default = True | ||||
|  | ||||
|     @ManagedAppConfig.reconcile_global | ||||
|     def clear_update_notifications(self): | ||||
|         """Clear update notifications on startup if the notification was for the version | ||||
|         we're running now.""" | ||||
|         from packaging.version import parse | ||||
|  | ||||
|         from authentik.admin.tasks import LOCAL_VERSION | ||||
|         from authentik.events.models import EventAction, Notification | ||||
|  | ||||
|         for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE): | ||||
|             if "new_version" not in notification.event.context: | ||||
|                 continue | ||||
|             notification_version = notification.event.context["new_version"] | ||||
|             if LOCAL_VERSION >= parse(notification_version): | ||||
|                 notification.delete() | ||||
|  | ||||
|     @property | ||||
|     def global_schedule_specs(self) -> list[ScheduleSpec]: | ||||
|         from authentik.admin.tasks import update_latest_version | ||||
|  | ||||
|         return [ | ||||
|             ScheduleSpec( | ||||
|                 actor=update_latest_version, | ||||
|                 crontab=f"{fqdn_rand('admin_latest_version')} * * * *", | ||||
|                 paused=CONFIG.get_bool("disable_update_check"), | ||||
|             ), | ||||
|         ] | ||||
|  | ||||
							
								
								
									
										13
									
								
								authentik/admin/settings.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										13
									
								
								authentik/admin/settings.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,13 @@ | ||||
| """authentik admin settings""" | ||||
|  | ||||
| from celery.schedules import crontab | ||||
|  | ||||
| from authentik.lib.utils.time import fqdn_rand | ||||
|  | ||||
| CELERY_BEAT_SCHEDULE = { | ||||
|     "admin_latest_version": { | ||||
|         "task": "authentik.admin.tasks.update_latest_version", | ||||
|         "schedule": crontab(minute=fqdn_rand("admin_latest_version"), hour="*"), | ||||
|         "options": {"queue": "authentik_scheduled"}, | ||||
|     } | ||||
| } | ||||
							
								
								
									
										35
									
								
								authentik/admin/signals.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										35
									
								
								authentik/admin/signals.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,35 @@ | ||||
| """admin signals""" | ||||
|  | ||||
| from django.dispatch import receiver | ||||
| from packaging.version import parse | ||||
| from prometheus_client import Gauge | ||||
|  | ||||
| from authentik import get_full_version | ||||
| from authentik.root.celery import CELERY_APP | ||||
| from authentik.root.monitoring import monitoring_set | ||||
|  | ||||
| GAUGE_WORKERS = Gauge( | ||||
|     "authentik_admin_workers", | ||||
|     "Currently connected workers, their versions and if they are the same version as authentik", | ||||
|     ["version", "version_matched"], | ||||
| ) | ||||
|  | ||||
|  | ||||
| _version = parse(get_full_version()) | ||||
|  | ||||
|  | ||||
| @receiver(monitoring_set) | ||||
| def monitoring_set_workers(sender, **kwargs): | ||||
|     """Set worker gauge""" | ||||
|     raw: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5) | ||||
|     worker_version_count = {} | ||||
|     for worker in raw: | ||||
|         key = list(worker.keys())[0] | ||||
|         version = worker[key].get("version") | ||||
|         version_matching = False | ||||
|         if version: | ||||
|             version_matching = parse(version) == _version | ||||
|         worker_version_count.setdefault(version, {"count": 0, "matching": version_matching}) | ||||
|         worker_version_count[version]["count"] += 1 | ||||
|     for version, stats in worker_version_count.items(): | ||||
|         GAUGE_WORKERS.labels(version, stats["matching"]).set(stats["count"]) | ||||
| @ -1,19 +1,19 @@ | ||||
| """authentik admin tasks""" | ||||
|  | ||||
| from django.core.cache import cache | ||||
| from django.db import DatabaseError, InternalError, ProgrammingError | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from django_dramatiq_postgres.middleware import CurrentTask | ||||
| from dramatiq import actor | ||||
| from packaging.version import parse | ||||
| from requests import RequestException | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik import __version__, get_build_hash | ||||
| from authentik.admin.apps import PROM_INFO | ||||
| from authentik.events.models import Event, EventAction | ||||
| from authentik.events.models import Event, EventAction, Notification | ||||
| from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.lib.utils.http import get_http_session | ||||
| from authentik.tasks.models import Task | ||||
| from authentik.root.celery import CELERY_APP | ||||
|  | ||||
| LOGGER = get_logger() | ||||
| VERSION_NULL = "0.0.0" | ||||
| @ -33,12 +33,27 @@ def _set_prom_info(): | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @actor(description=_("Update latest version info.")) | ||||
| def update_latest_version(): | ||||
|     self: Task = CurrentTask.get_task() | ||||
| @CELERY_APP.task( | ||||
|     throws=(DatabaseError, ProgrammingError, InternalError), | ||||
| ) | ||||
| def clear_update_notifications(): | ||||
|     """Clear update notifications on startup if the notification was for the version | ||||
|     we're running now.""" | ||||
|     for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE): | ||||
|         if "new_version" not in notification.event.context: | ||||
|             continue | ||||
|         notification_version = notification.event.context["new_version"] | ||||
|         if LOCAL_VERSION >= parse(notification_version): | ||||
|             notification.delete() | ||||
|  | ||||
|  | ||||
| @CELERY_APP.task(bind=True, base=SystemTask) | ||||
| @prefill_task | ||||
| def update_latest_version(self: SystemTask): | ||||
|     """Update latest version info""" | ||||
|     if CONFIG.get_bool("disable_update_check"): | ||||
|         cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT) | ||||
|         self.info("Version check disabled.") | ||||
|         self.set_status(TaskStatus.WARNING, "Version check disabled.") | ||||
|         return | ||||
|     try: | ||||
|         response = get_http_session().get( | ||||
| @ -48,7 +63,7 @@ def update_latest_version(): | ||||
|         data = response.json() | ||||
|         upstream_version = data.get("stable", {}).get("version") | ||||
|         cache.set(VERSION_CACHE_KEY, upstream_version, VERSION_CACHE_TIMEOUT) | ||||
|         self.info("Successfully updated latest Version") | ||||
|         self.set_status(TaskStatus.SUCCESSFUL, "Successfully updated latest Version") | ||||
|         _set_prom_info() | ||||
|         # Check if upstream version is newer than what we're running, | ||||
|         # and if no event exists yet, create one. | ||||
| @ -71,7 +86,7 @@ def update_latest_version(): | ||||
|             ).save() | ||||
|     except (RequestException, IndexError) as exc: | ||||
|         cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT) | ||||
|         raise exc | ||||
|         self.set_error(exc) | ||||
|  | ||||
|  | ||||
| _set_prom_info() | ||||
|  | ||||
| @ -29,6 +29,18 @@ class TestAdminAPI(TestCase): | ||||
|         body = loads(response.content) | ||||
|         self.assertEqual(body["version_current"], __version__) | ||||
|  | ||||
|     def test_workers(self): | ||||
|         """Test Workers API""" | ||||
|         response = self.client.get(reverse("authentik_api:admin_workers")) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         body = loads(response.content) | ||||
|         self.assertEqual(len(body), 0) | ||||
|  | ||||
|     def test_metrics(self): | ||||
|         """Test metrics API""" | ||||
|         response = self.client.get(reverse("authentik_api:admin_metrics")) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|  | ||||
|     def test_apps(self): | ||||
|         """Test apps API""" | ||||
|         response = self.client.get(reverse("authentik_api:apps-list")) | ||||
|  | ||||
| @ -1,12 +1,12 @@ | ||||
| """test admin tasks""" | ||||
|  | ||||
| from django.apps import apps | ||||
| from django.core.cache import cache | ||||
| from django.test import TestCase | ||||
| from requests_mock import Mocker | ||||
|  | ||||
| from authentik.admin.tasks import ( | ||||
|     VERSION_CACHE_KEY, | ||||
|     clear_update_notifications, | ||||
|     update_latest_version, | ||||
| ) | ||||
| from authentik.events.models import Event, EventAction | ||||
| @ -30,7 +30,7 @@ class TestAdminTasks(TestCase): | ||||
|         """Test Update checker with valid response""" | ||||
|         with Mocker() as mocker, CONFIG.patch("disable_update_check", False): | ||||
|             mocker.get("https://version.goauthentik.io/version.json", json=RESPONSE_VALID) | ||||
|             update_latest_version.send() | ||||
|             update_latest_version.delay().get() | ||||
|             self.assertEqual(cache.get(VERSION_CACHE_KEY), "99999999.9999999") | ||||
|             self.assertTrue( | ||||
|                 Event.objects.filter( | ||||
| @ -40,7 +40,7 @@ class TestAdminTasks(TestCase): | ||||
|                 ).exists() | ||||
|             ) | ||||
|             # test that a consecutive check doesn't create a duplicate event | ||||
|             update_latest_version.send() | ||||
|             update_latest_version.delay().get() | ||||
|             self.assertEqual( | ||||
|                 len( | ||||
|                     Event.objects.filter( | ||||
| @ -56,7 +56,7 @@ class TestAdminTasks(TestCase): | ||||
|         """Test Update checker with invalid response""" | ||||
|         with Mocker() as mocker: | ||||
|             mocker.get("https://version.goauthentik.io/version.json", status_code=400) | ||||
|             update_latest_version.send() | ||||
|             update_latest_version.delay().get() | ||||
|             self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0") | ||||
|             self.assertFalse( | ||||
|                 Event.objects.filter( | ||||
| @ -67,19 +67,17 @@ class TestAdminTasks(TestCase): | ||||
|     def test_version_disabled(self): | ||||
|         """Test Update checker while its disabled""" | ||||
|         with CONFIG.patch("disable_update_check", True): | ||||
|             update_latest_version.send() | ||||
|             update_latest_version.delay().get() | ||||
|             self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0") | ||||
|  | ||||
|     def test_clear_update_notifications(self): | ||||
|         """Test clear of previous notification""" | ||||
|         admin_config = apps.get_app_config("authentik_admin") | ||||
|         Event.objects.create( | ||||
|             action=EventAction.UPDATE_AVAILABLE, | ||||
|             context={"new_version": "99999999.9999999.9999999"}, | ||||
|             action=EventAction.UPDATE_AVAILABLE, context={"new_version": "99999999.9999999.9999999"} | ||||
|         ) | ||||
|         Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={"new_version": "1.1.1"}) | ||||
|         Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={}) | ||||
|         admin_config.clear_update_notifications() | ||||
|         clear_update_notifications() | ||||
|         self.assertFalse( | ||||
|             Event.objects.filter( | ||||
|                 action=EventAction.UPDATE_AVAILABLE, context__new_version="1.1" | ||||
|  | ||||
| @ -3,14 +3,22 @@ | ||||
| from django.urls import path | ||||
|  | ||||
| from authentik.admin.api.meta import AppsViewSet, ModelViewSet | ||||
| from authentik.admin.api.metrics import AdministrationMetricsViewSet | ||||
| from authentik.admin.api.system import SystemView | ||||
| from authentik.admin.api.version import VersionView | ||||
| from authentik.admin.api.version_history import VersionHistoryViewSet | ||||
| from authentik.admin.api.workers import WorkerView | ||||
|  | ||||
| api_urlpatterns = [ | ||||
|     ("admin/apps", AppsViewSet, "apps"), | ||||
|     ("admin/models", ModelViewSet, "models"), | ||||
|     path( | ||||
|         "admin/metrics/", | ||||
|         AdministrationMetricsViewSet.as_view(), | ||||
|         name="admin_metrics", | ||||
|     ), | ||||
|     path("admin/version/", VersionView.as_view(), name="admin_version"), | ||||
|     ("admin/version/history", VersionHistoryViewSet, "version_history"), | ||||
|     path("admin/workers/", WorkerView.as_view(), name="admin_workers"), | ||||
|     path("admin/system/", SystemView.as_view(), name="admin_system"), | ||||
| ] | ||||
|  | ||||
| @ -1,13 +1,12 @@ | ||||
| """authentik API AppConfig""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikAPIConfig(ManagedAppConfig): | ||||
| class AuthentikAPIConfig(AppConfig): | ||||
|     """authentik API Config""" | ||||
|  | ||||
|     name = "authentik.api" | ||||
|     label = "authentik_api" | ||||
|     mountpoint = "api/" | ||||
|     verbose_name = "authentik API" | ||||
|     default = True | ||||
|  | ||||
| @ -1,12 +1,9 @@ | ||||
| """API Authentication""" | ||||
|  | ||||
| from hmac import compare_digest | ||||
| from pathlib import Path | ||||
| from tempfile import gettempdir | ||||
| from typing import Any | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.contrib.auth.models import AnonymousUser | ||||
| from drf_spectacular.extensions import OpenApiAuthenticationExtension | ||||
| from rest_framework.authentication import BaseAuthentication, get_authorization_header | ||||
| from rest_framework.exceptions import AuthenticationFailed | ||||
| @ -14,17 +11,11 @@ from rest_framework.request import Request | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.core.middleware import CTX_AUTH_VIA | ||||
| from authentik.core.models import Token, TokenIntents, User, UserTypes | ||||
| from authentik.core.models import Token, TokenIntents, User | ||||
| from authentik.outposts.models import Outpost | ||||
| from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API | ||||
|  | ||||
| LOGGER = get_logger() | ||||
| _tmp = Path(gettempdir()) | ||||
| try: | ||||
|     with open(_tmp / "authentik-core-ipc.key") as _f: | ||||
|         ipc_key = _f.read() | ||||
| except OSError: | ||||
|     ipc_key = None | ||||
|  | ||||
|  | ||||
| def validate_auth(header: bytes) -> str | None: | ||||
| @ -82,11 +73,6 @@ def auth_user_lookup(raw_header: bytes) -> User | None: | ||||
|     if user: | ||||
|         CTX_AUTH_VIA.set("secret_key") | ||||
|         return user | ||||
|     # then try to auth via secret key (for embedded outpost/etc) | ||||
|     user = token_ipc(auth_credentials) | ||||
|     if user: | ||||
|         CTX_AUTH_VIA.set("ipc") | ||||
|         return user | ||||
|     raise AuthenticationFailed("Token invalid/expired") | ||||
|  | ||||
|  | ||||
| @ -104,43 +90,6 @@ def token_secret_key(value: str) -> User | None: | ||||
|     return outpost.user | ||||
|  | ||||
|  | ||||
| class IPCUser(AnonymousUser): | ||||
|     """'Virtual' user for IPC communication between authentik core and the authentik router""" | ||||
|  | ||||
|     username = "authentik:system" | ||||
|     is_active = True | ||||
|     is_superuser = True | ||||
|  | ||||
|     @property | ||||
|     def type(self): | ||||
|         return UserTypes.INTERNAL_SERVICE_ACCOUNT | ||||
|  | ||||
|     def has_perm(self, perm, obj=None): | ||||
|         return True | ||||
|  | ||||
|     def has_perms(self, perm_list, obj=None): | ||||
|         return True | ||||
|  | ||||
|     def has_module_perms(self, module): | ||||
|         return True | ||||
|  | ||||
|     @property | ||||
|     def is_anonymous(self): | ||||
|         return False | ||||
|  | ||||
|     @property | ||||
|     def is_authenticated(self): | ||||
|         return True | ||||
|  | ||||
|  | ||||
| def token_ipc(value: str) -> User | None: | ||||
|     """Check if the token is the secret key | ||||
|     and return the service account for the managed outpost""" | ||||
|     if not ipc_key or not compare_digest(value, ipc_key): | ||||
|         return None | ||||
|     return IPCUser() | ||||
|  | ||||
|  | ||||
| class TokenAuthentication(BaseAuthentication): | ||||
|     """Token-based authentication using HTTP Bearer authentication""" | ||||
|  | ||||
|  | ||||
| @ -54,7 +54,7 @@ def create_component(generator: SchemaGenerator, name, schema, type_=ResolvedCom | ||||
|     return component | ||||
|  | ||||
|  | ||||
| def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs): | ||||
| def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):  # noqa: W0613 | ||||
|     """Workaround to set a default response for endpoints. | ||||
|     Workaround suggested at | ||||
|     <https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357> | ||||
|  | ||||
| @ -7,7 +7,7 @@ from rest_framework.exceptions import ValidationError | ||||
| from rest_framework.fields import CharField, DateTimeField | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.serializers import ListSerializer | ||||
| from rest_framework.serializers import ListSerializer, ModelSerializer | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
|  | ||||
| from authentik.blueprints.models import BlueprintInstance | ||||
| @ -15,7 +15,7 @@ from authentik.blueprints.v1.importer import Importer | ||||
| from authentik.blueprints.v1.oci import OCI_PREFIX | ||||
| from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer | ||||
| from authentik.core.api.utils import JSONDictField, PassiveSerializer | ||||
| from authentik.rbac.decorators import permission_required | ||||
|  | ||||
|  | ||||
| @ -39,7 +39,7 @@ class BlueprintInstanceSerializer(ModelSerializer): | ||||
|         """Ensure the path (if set) specified is retrievable""" | ||||
|         if path == "" or path.startswith(OCI_PREFIX): | ||||
|             return path | ||||
|         files: list[dict] = blueprints_find_dict.send().get_result(block=True) | ||||
|         files: list[dict] = blueprints_find_dict.delay().get() | ||||
|         if path not in [file["path"] for file in files]: | ||||
|             raise ValidationError(_("Blueprint file does not exist")) | ||||
|         return path | ||||
| @ -115,7 +115,7 @@ class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet): | ||||
|     @action(detail=False, pagination_class=None, filter_backends=[]) | ||||
|     def available(self, request: Request) -> Response: | ||||
|         """Get blueprints""" | ||||
|         files: list[dict] = blueprints_find_dict.send().get_result(block=True) | ||||
|         files: list[dict] = blueprints_find_dict.delay().get() | ||||
|         return Response(files) | ||||
|  | ||||
|     @permission_required("authentik_blueprints.view_blueprintinstance") | ||||
| @ -129,5 +129,5 @@ class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet): | ||||
|     def apply(self, request: Request, *args, **kwargs) -> Response: | ||||
|         """Apply a blueprint""" | ||||
|         blueprint = self.get_object() | ||||
|         apply_blueprint.send_with_options(args=(blueprint.pk,), rel_obj=blueprint) | ||||
|         apply_blueprint.delay(str(blueprint.pk)).get() | ||||
|         return self.retrieve(request, *args, **kwargs) | ||||
|  | ||||
| @ -6,12 +6,9 @@ from inspect import ismethod | ||||
|  | ||||
| from django.apps import AppConfig | ||||
| from django.db import DatabaseError, InternalError, ProgrammingError | ||||
| from dramatiq.broker import get_broker | ||||
| from structlog.stdlib import BoundLogger, get_logger | ||||
|  | ||||
| from authentik.lib.utils.time import fqdn_rand | ||||
| from authentik.root.signals import startup | ||||
| from authentik.tasks.schedules.lib import ScheduleSpec | ||||
|  | ||||
|  | ||||
| class ManagedAppConfig(AppConfig): | ||||
| @ -37,7 +34,7 @@ class ManagedAppConfig(AppConfig): | ||||
|  | ||||
|     def import_related(self): | ||||
|         """Automatically import related modules which rely on just being imported | ||||
|         to register themselves (mainly django signals and tasks)""" | ||||
|         to register themselves (mainly django signals and celery tasks)""" | ||||
|  | ||||
|         def import_relative(rel_module: str): | ||||
|             try: | ||||
| @ -83,16 +80,6 @@ class ManagedAppConfig(AppConfig): | ||||
|         func._authentik_managed_reconcile = ManagedAppConfig.RECONCILE_GLOBAL_CATEGORY | ||||
|         return func | ||||
|  | ||||
|     @property | ||||
|     def tenant_schedule_specs(self) -> list[ScheduleSpec]: | ||||
|         """Get a list of schedule specs that must exist in each tenant""" | ||||
|         return [] | ||||
|  | ||||
|     @property | ||||
|     def global_schedule_specs(self) -> list[ScheduleSpec]: | ||||
|         """Get a list of schedule specs that must exist in the default tenant""" | ||||
|         return [] | ||||
|  | ||||
|     def _reconcile_tenant(self) -> None: | ||||
|         """reconcile ourselves for tenanted methods""" | ||||
|         from authentik.tenants.models import Tenant | ||||
| @ -113,12 +100,8 @@ class ManagedAppConfig(AppConfig): | ||||
|         """ | ||||
|         from django_tenants.utils import get_public_schema_name, schema_context | ||||
|  | ||||
|         try: | ||||
|             with schema_context(get_public_schema_name()): | ||||
|                 self._reconcile(self.RECONCILE_GLOBAL_CATEGORY) | ||||
|         except (DatabaseError, ProgrammingError, InternalError) as exc: | ||||
|             self.logger.debug("Failed to access database to run reconcile", exc=exc) | ||||
|             return | ||||
|         with schema_context(get_public_schema_name()): | ||||
|             self._reconcile(self.RECONCILE_GLOBAL_CATEGORY) | ||||
|  | ||||
|  | ||||
| class AuthentikBlueprintsConfig(ManagedAppConfig): | ||||
| @ -129,29 +112,19 @@ class AuthentikBlueprintsConfig(ManagedAppConfig): | ||||
|     verbose_name = "authentik Blueprints" | ||||
|     default = True | ||||
|  | ||||
|     @ManagedAppConfig.reconcile_global | ||||
|     def load_blueprints_v1_tasks(self): | ||||
|         """Load v1 tasks""" | ||||
|         self.import_module("authentik.blueprints.v1.tasks") | ||||
|  | ||||
|     @ManagedAppConfig.reconcile_tenant | ||||
|     def blueprints_discovery(self): | ||||
|         """Run blueprint discovery""" | ||||
|         from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints | ||||
|  | ||||
|         blueprints_discovery.delay() | ||||
|         clear_failed_blueprints.delay() | ||||
|  | ||||
|     def import_models(self): | ||||
|         super().import_models() | ||||
|         self.import_module("authentik.blueprints.v1.meta.apply_blueprint") | ||||
|  | ||||
|     @ManagedAppConfig.reconcile_global | ||||
|     def tasks_middlewares(self): | ||||
|         from authentik.blueprints.v1.tasks import BlueprintWatcherMiddleware | ||||
|  | ||||
|         get_broker().add_middleware(BlueprintWatcherMiddleware()) | ||||
|  | ||||
|     @property | ||||
|     def tenant_schedule_specs(self) -> list[ScheduleSpec]: | ||||
|         from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints | ||||
|  | ||||
|         return [ | ||||
|             ScheduleSpec( | ||||
|                 actor=blueprints_discovery, | ||||
|                 crontab=f"{fqdn_rand('blueprints_v1_discover')} * * * *", | ||||
|                 send_on_startup=True, | ||||
|             ), | ||||
|             ScheduleSpec( | ||||
|                 actor=clear_failed_blueprints, | ||||
|                 crontab=f"{fqdn_rand('blueprints_v1_cleanup')} * * * *", | ||||
|                 send_on_startup=True, | ||||
|             ), | ||||
|         ] | ||||
|  | ||||
| @ -72,33 +72,20 @@ class Command(BaseCommand): | ||||
|                     "additionalProperties": True, | ||||
|                 }, | ||||
|                 "entries": { | ||||
|                     "anyOf": [ | ||||
|                         { | ||||
|                             "type": "array", | ||||
|                             "items": {"$ref": "#/$defs/blueprint_entry"}, | ||||
|                         }, | ||||
|                         { | ||||
|                             "type": "object", | ||||
|                             "additionalProperties": { | ||||
|                                 "type": "array", | ||||
|                                 "items": {"$ref": "#/$defs/blueprint_entry"}, | ||||
|                             }, | ||||
|                         }, | ||||
|                     ], | ||||
|                     "type": "array", | ||||
|                     "items": { | ||||
|                         "oneOf": [], | ||||
|                     }, | ||||
|                 }, | ||||
|             }, | ||||
|             "$defs": {"blueprint_entry": {"oneOf": []}}, | ||||
|             "$defs": {}, | ||||
|         } | ||||
|  | ||||
|     def add_arguments(self, parser): | ||||
|         parser.add_argument("--file", type=str) | ||||
|  | ||||
|     @no_translations | ||||
|     def handle(self, *args, file: str, **options): | ||||
|     def handle(self, *args, **options): | ||||
|         """Generate JSON Schema for blueprints""" | ||||
|         self.build() | ||||
|         with open(file, "w") as _schema: | ||||
|             _schema.write(dumps(self.schema, indent=4, default=Command.json_default)) | ||||
|         self.stdout.write(dumps(self.schema, indent=4, default=Command.json_default)) | ||||
|  | ||||
|     @staticmethod | ||||
|     def json_default(value: Any) -> Any: | ||||
| @ -125,7 +112,7 @@ class Command(BaseCommand): | ||||
|                 } | ||||
|             ) | ||||
|             model_path = f"{model._meta.app_label}.{model._meta.model_name}" | ||||
|             self.schema["$defs"]["blueprint_entry"]["oneOf"].append( | ||||
|             self.schema["properties"]["entries"]["items"]["oneOf"].append( | ||||
|                 self.template_entry(model_path, model, serializer) | ||||
|             ) | ||||
|  | ||||
| @ -147,7 +134,7 @@ class Command(BaseCommand): | ||||
|                 "id": {"type": "string"}, | ||||
|                 "state": { | ||||
|                     "type": "string", | ||||
|                     "enum": sorted([s.value for s in BlueprintEntryDesiredState]), | ||||
|                     "enum": [s.value for s in BlueprintEntryDesiredState], | ||||
|                     "default": "present", | ||||
|                 }, | ||||
|                 "conditions": {"type": "array", "items": {"type": "boolean"}}, | ||||
| @ -218,7 +205,7 @@ class Command(BaseCommand): | ||||
|                 "type": "object", | ||||
|                 "required": ["permission"], | ||||
|                 "properties": { | ||||
|                     "permission": {"type": "string", "enum": sorted(perms)}, | ||||
|                     "permission": {"type": "string", "enum": perms}, | ||||
|                     "user": {"type": "integer"}, | ||||
|                     "role": {"type": "string"}, | ||||
|                 }, | ||||
|  | ||||
| @ -3,7 +3,6 @@ | ||||
| from pathlib import Path | ||||
| from uuid import uuid4 | ||||
|  | ||||
| from django.contrib.contenttypes.fields import GenericRelation | ||||
| from django.contrib.postgres.fields import ArrayField | ||||
| from django.db import models | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| @ -72,13 +71,6 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel): | ||||
|     enabled = models.BooleanField(default=True) | ||||
|     managed_models = ArrayField(models.TextField(), default=list) | ||||
|  | ||||
|     # Manual link to tasks instead of using TasksModel because of loop imports | ||||
|     tasks = GenericRelation( | ||||
|         "authentik_tasks.Task", | ||||
|         content_type_field="rel_obj_content_type", | ||||
|         object_id_field="rel_obj_id", | ||||
|     ) | ||||
|  | ||||
|     class Meta: | ||||
|         verbose_name = _("Blueprint Instance") | ||||
|         verbose_name_plural = _("Blueprint Instances") | ||||
|  | ||||
							
								
								
									
										18
									
								
								authentik/blueprints/settings.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										18
									
								
								authentik/blueprints/settings.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,18 @@ | ||||
| """blueprint Settings""" | ||||
|  | ||||
| from celery.schedules import crontab | ||||
|  | ||||
| from authentik.lib.utils.time import fqdn_rand | ||||
|  | ||||
| CELERY_BEAT_SCHEDULE = { | ||||
|     "blueprints_v1_discover": { | ||||
|         "task": "authentik.blueprints.v1.tasks.blueprints_discovery", | ||||
|         "schedule": crontab(minute=fqdn_rand("blueprints_v1_discover"), hour="*"), | ||||
|         "options": {"queue": "authentik_scheduled"}, | ||||
|     }, | ||||
|     "blueprints_v1_cleanup": { | ||||
|         "task": "authentik.blueprints.v1.tasks.clear_failed_blueprints", | ||||
|         "schedule": crontab(minute=fqdn_rand("blueprints_v1_cleanup"), hour="*"), | ||||
|         "options": {"queue": "authentik_scheduled"}, | ||||
|     }, | ||||
| } | ||||
| @ -1,2 +0,0 @@ | ||||
| # Import all v1 tasks for auto task discovery | ||||
| from authentik.blueprints.v1.tasks import *  # noqa: F403 | ||||
| @ -1,11 +1,10 @@ | ||||
| version: 1 | ||||
| entries: | ||||
|   foo: | ||||
|       - identifiers: | ||||
|             name: "%(id)s" | ||||
|             slug: "%(id)s" | ||||
|         model: authentik_flows.flow | ||||
|         state: present | ||||
|         attrs: | ||||
|             designation: stage_configuration | ||||
|             title: foo | ||||
|     - identifiers: | ||||
|           name: "%(id)s" | ||||
|           slug: "%(id)s" | ||||
|       model: authentik_flows.flow | ||||
|       state: present | ||||
|       attrs: | ||||
|           designation: stage_configuration | ||||
|           title: foo | ||||
|  | ||||
| @ -37,7 +37,6 @@ entries: | ||||
|     - attrs: | ||||
|           attributes: | ||||
|               env_null: !Env [bar-baz, null] | ||||
|               json_parse: !ParseJSON '{"foo": "bar"}' | ||||
|               policy_pk1: | ||||
|                   !Format [ | ||||
|                       "%s-%s", | ||||
|  | ||||
| @ -1,14 +0,0 @@ | ||||
| from django.test import TestCase | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from authentik.enterprise.apps import EnterpriseConfig | ||||
| from authentik.lib.utils.reflection import get_apps | ||||
|  | ||||
|  | ||||
| class TestManagedAppConfig(TestCase): | ||||
|     def test_apps_use_managed_app_config(self): | ||||
|         for app in get_apps(): | ||||
|             if app.name.startswith("authentik.enterprise"): | ||||
|                 self.assertIn(EnterpriseConfig, app.__class__.__bases__) | ||||
|             else: | ||||
|                 self.assertIn(ManagedAppConfig, app.__class__.__bases__) | ||||
| @ -35,6 +35,6 @@ def blueprint_tester(file_name: Path) -> Callable: | ||||
|  | ||||
|  | ||||
| for blueprint_file in Path("blueprints/").glob("**/*.yaml"): | ||||
|     if "local" in str(blueprint_file) or "testing" in str(blueprint_file): | ||||
|     if "local" in str(blueprint_file): | ||||
|         continue | ||||
|     setattr(TestPackaged, f"test_blueprint_{blueprint_file}", blueprint_tester(blueprint_file)) | ||||
|  | ||||
| @ -5,6 +5,7 @@ from collections.abc import Callable | ||||
| from django.apps import apps | ||||
| from django.test import TestCase | ||||
|  | ||||
| from authentik.blueprints.v1.importer import is_model_allowed | ||||
| from authentik.lib.models import SerializerModel | ||||
| from authentik.providers.oauth2.models import RefreshToken | ||||
|  | ||||
| @ -21,13 +22,10 @@ def serializer_tester_factory(test_model: type[SerializerModel]) -> Callable: | ||||
|             return | ||||
|         model_class = test_model() | ||||
|         self.assertTrue(isinstance(model_class, SerializerModel)) | ||||
|         # Models that have subclasses don't have to have a serializer | ||||
|         if len(test_model.__subclasses__()) > 0: | ||||
|             return | ||||
|         self.assertIsNotNone(model_class.serializer) | ||||
|         if model_class.serializer.Meta().model == RefreshToken: | ||||
|             return | ||||
|         self.assertTrue(issubclass(test_model, model_class.serializer.Meta().model)) | ||||
|         self.assertEqual(model_class.serializer.Meta().model, test_model) | ||||
|  | ||||
|     return tester | ||||
|  | ||||
| @ -36,6 +34,6 @@ for app in apps.get_app_configs(): | ||||
|     if not app.label.startswith("authentik"): | ||||
|         continue | ||||
|     for model in app.get_models(): | ||||
|         if not issubclass(model, SerializerModel): | ||||
|         if not is_model_allowed(model): | ||||
|             continue | ||||
|         setattr(TestModels, f"test_{app.label}_{model.__name__}", serializer_tester_factory(model)) | ||||
|  | ||||
| @ -215,7 +215,6 @@ class TestBlueprintsV1(TransactionTestCase): | ||||
|                     }, | ||||
|                     "nested_context": "context-nested-value", | ||||
|                     "env_null": None, | ||||
|                     "json_parse": {"foo": "bar"}, | ||||
|                     "at_index_sequence": "foo", | ||||
|                     "at_index_sequence_default": "non existent", | ||||
|                     "at_index_mapping": 2, | ||||
|  | ||||
| @ -54,7 +54,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase): | ||||
|             file.seek(0) | ||||
|             file_hash = sha512(file.read().encode()).hexdigest() | ||||
|             file.flush() | ||||
|             blueprints_discovery.send() | ||||
|             blueprints_discovery() | ||||
|             instance = BlueprintInstance.objects.filter(name=blueprint_id).first() | ||||
|             self.assertEqual(instance.last_applied_hash, file_hash) | ||||
|             self.assertEqual( | ||||
| @ -82,7 +82,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase): | ||||
|                 ) | ||||
|             ) | ||||
|             file.flush() | ||||
|             blueprints_discovery.send() | ||||
|             blueprints_discovery() | ||||
|             blueprint = BlueprintInstance.objects.filter(name="foo").first() | ||||
|             self.assertEqual( | ||||
|                 blueprint.last_applied_hash, | ||||
| @ -107,7 +107,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase): | ||||
|                 ) | ||||
|             ) | ||||
|             file.flush() | ||||
|             blueprints_discovery.send() | ||||
|             blueprints_discovery() | ||||
|             blueprint.refresh_from_db() | ||||
|             self.assertEqual( | ||||
|                 blueprint.last_applied_hash, | ||||
|  | ||||
| @ -6,7 +6,6 @@ from copy import copy | ||||
| from dataclasses import asdict, dataclass, field, is_dataclass | ||||
| from enum import Enum | ||||
| from functools import reduce | ||||
| from json import JSONDecodeError, loads | ||||
| from operator import ixor | ||||
| from os import getenv | ||||
| from typing import Any, Literal, Union | ||||
| @ -165,7 +164,9 @@ class BlueprintEntry: | ||||
|         """Get the blueprint model, with yaml tags resolved if present""" | ||||
|         return str(self.tag_resolver(self.model, blueprint)) | ||||
|  | ||||
|     def get_permissions(self, blueprint: "Blueprint") -> Generator[BlueprintEntryPermission]: | ||||
|     def get_permissions( | ||||
|         self, blueprint: "Blueprint" | ||||
|     ) -> Generator[BlueprintEntryPermission, None, None]: | ||||
|         """Get permissions of this entry, with all yaml tags resolved""" | ||||
|         for perm in self.permissions: | ||||
|             yield BlueprintEntryPermission( | ||||
| @ -192,18 +193,11 @@ class Blueprint: | ||||
|     """Dataclass used for a full export""" | ||||
|  | ||||
|     version: int = field(default=1) | ||||
|     entries: list[BlueprintEntry] | dict[str, list[BlueprintEntry]] = field(default_factory=list) | ||||
|     entries: list[BlueprintEntry] = field(default_factory=list) | ||||
|     context: dict = field(default_factory=dict) | ||||
|  | ||||
|     metadata: BlueprintMetadata | None = field(default=None) | ||||
|  | ||||
|     def iter_entries(self) -> Iterable[BlueprintEntry]: | ||||
|         if isinstance(self.entries, dict): | ||||
|             for _section, entries in self.entries.items(): | ||||
|                 yield from entries | ||||
|         else: | ||||
|             yield from self.entries | ||||
|  | ||||
|  | ||||
| class YAMLTag: | ||||
|     """Base class for all YAML Tags""" | ||||
| @ -234,7 +228,7 @@ class KeyOf(YAMLTag): | ||||
|         self.id_from = node.value | ||||
|  | ||||
|     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: | ||||
|         for _entry in blueprint.iter_entries(): | ||||
|         for _entry in blueprint.entries: | ||||
|             if _entry.id == self.id_from and _entry._state.instance: | ||||
|                 # Special handling for PolicyBindingModels, as they'll have a different PK | ||||
|                 # which is used when creating policy bindings | ||||
| @ -292,22 +286,6 @@ class Context(YAMLTag): | ||||
|         return value | ||||
|  | ||||
|  | ||||
| class ParseJSON(YAMLTag): | ||||
|     """Parse JSON from context/env/etc value""" | ||||
|  | ||||
|     raw: str | ||||
|  | ||||
|     def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None: | ||||
|         super().__init__() | ||||
|         self.raw = node.value | ||||
|  | ||||
|     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: | ||||
|         try: | ||||
|             return loads(self.raw) | ||||
|         except JSONDecodeError as exc: | ||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc | ||||
|  | ||||
|  | ||||
| class Format(YAMLTag): | ||||
|     """Format a string""" | ||||
|  | ||||
| @ -683,7 +661,6 @@ class BlueprintLoader(SafeLoader): | ||||
|         self.add_constructor("!Value", Value) | ||||
|         self.add_constructor("!Index", Index) | ||||
|         self.add_constructor("!AtIndex", AtIndex) | ||||
|         self.add_constructor("!ParseJSON", ParseJSON) | ||||
|  | ||||
|  | ||||
| class EntryInvalidError(SentryIgnoredException): | ||||
|  | ||||
| @ -36,7 +36,6 @@ from authentik.core.models import ( | ||||
|     GroupSourceConnection, | ||||
|     PropertyMapping, | ||||
|     Provider, | ||||
|     Session, | ||||
|     Source, | ||||
|     User, | ||||
|     UserSourceConnection, | ||||
| @ -57,6 +56,7 @@ from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import ( | ||||
|     EndpointDeviceConnection, | ||||
| ) | ||||
| from authentik.events.logs import LogEvent, capture_logs | ||||
| from authentik.events.models import SystemTask | ||||
| from authentik.events.utils import cleanse_dict | ||||
| from authentik.flows.models import FlowToken, Stage | ||||
| from authentik.lib.models import SerializerModel | ||||
| @ -76,7 +76,6 @@ from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser | ||||
| from authentik.rbac.models import Role | ||||
| from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser | ||||
| from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType | ||||
| from authentik.tasks.models import Task | ||||
| from authentik.tenants.models import Tenant | ||||
|  | ||||
| # Context set when the serializer is created in a blueprint context | ||||
| @ -109,7 +108,6 @@ def excluded_models() -> list[type[Model]]: | ||||
|         Policy, | ||||
|         PolicyBindingModel, | ||||
|         # Classes that have other dependencies | ||||
|         Session, | ||||
|         AuthenticatedSession, | ||||
|         # Classes which are only internally managed | ||||
|         # FIXME: these shouldn't need to be explicitly listed, but rather based off of a mixin | ||||
| @ -118,7 +116,7 @@ def excluded_models() -> list[type[Model]]: | ||||
|         SCIMProviderGroup, | ||||
|         SCIMProviderUser, | ||||
|         Tenant, | ||||
|         Task, | ||||
|         SystemTask, | ||||
|         ConnectionToken, | ||||
|         AuthorizationCode, | ||||
|         AccessToken, | ||||
| @ -384,7 +382,7 @@ class Importer: | ||||
|     def _apply_models(self, raise_errors=False) -> bool: | ||||
|         """Apply (create/update) models yaml""" | ||||
|         self.__pk_map = {} | ||||
|         for entry in self._import.iter_entries(): | ||||
|         for entry in self._import.entries: | ||||
|             model_app_label, model_name = entry.get_model(self._import).split(".") | ||||
|             try: | ||||
|                 model: type[SerializerModel] = registry.get_model(model_app_label, model_name) | ||||
|  | ||||
| @ -44,7 +44,7 @@ class ApplyBlueprintMetaSerializer(PassiveSerializer): | ||||
|             return MetaResult() | ||||
|         LOGGER.debug("Applying blueprint from meta model", blueprint=self.blueprint_instance) | ||||
|  | ||||
|         apply_blueprint(self.blueprint_instance.pk) | ||||
|         apply_blueprint(str(self.blueprint_instance.pk)) | ||||
|         return MetaResult() | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -47,7 +47,7 @@ class MetaModelRegistry: | ||||
|         models = apps.get_models() | ||||
|         for _, value in self.models.items(): | ||||
|             models.append(value) | ||||
|         return sorted(models, key=str) | ||||
|         return models | ||||
|  | ||||
|     def get_model(self, app_label: str, model_id: str) -> type[Model]: | ||||
|         """Get model checks if any virtual models are registered, and falls back | ||||
|  | ||||
| @ -4,17 +4,12 @@ from dataclasses import asdict, dataclass, field | ||||
| from hashlib import sha512 | ||||
| from pathlib import Path | ||||
| from sys import platform | ||||
| from uuid import UUID | ||||
|  | ||||
| from dacite.core import from_dict | ||||
| from django.conf import settings | ||||
| from django.db import DatabaseError, InternalError, ProgrammingError | ||||
| from django.utils.text import slugify | ||||
| from django.utils.timezone import now | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from django_dramatiq_postgres.middleware import CurrentTask, CurrentTaskNotFound | ||||
| from dramatiq.actor import actor | ||||
| from dramatiq.middleware import Middleware | ||||
| from structlog.stdlib import get_logger | ||||
| from watchdog.events import ( | ||||
|     FileCreatedEvent, | ||||
| @ -36,13 +31,15 @@ from authentik.blueprints.v1.importer import Importer | ||||
| from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE | ||||
| from authentik.blueprints.v1.oci import OCI_PREFIX | ||||
| from authentik.events.logs import capture_logs | ||||
| from authentik.events.models import TaskStatus | ||||
| from authentik.events.system_tasks import SystemTask, prefill_task | ||||
| from authentik.events.utils import sanitize_dict | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.tasks.models import Task | ||||
| from authentik.tasks.schedules.models import Schedule | ||||
| from authentik.root.celery import CELERY_APP | ||||
| from authentik.tenants.models import Tenant | ||||
|  | ||||
| LOGGER = get_logger() | ||||
| _file_watcher_started = False | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
| @ -56,21 +53,22 @@ class BlueprintFile: | ||||
|     meta: BlueprintMetadata | None = field(default=None) | ||||
|  | ||||
|  | ||||
| class BlueprintWatcherMiddleware(Middleware): | ||||
|     def start_blueprint_watcher(self): | ||||
|         """Start blueprint watcher""" | ||||
|         observer = Observer() | ||||
|         kwargs = {} | ||||
|         if platform.startswith("linux"): | ||||
|             kwargs["event_filter"] = (FileCreatedEvent, FileModifiedEvent) | ||||
|         observer.schedule( | ||||
|             BlueprintEventHandler(), CONFIG.get("blueprints_dir"), recursive=True, **kwargs | ||||
|         ) | ||||
|         observer.start() | ||||
| def start_blueprint_watcher(): | ||||
|     """Start blueprint watcher, if it's not running already.""" | ||||
|     # This function might be called twice since it's called on celery startup | ||||
|  | ||||
|     def after_worker_boot(self, broker, worker): | ||||
|         if not settings.TEST: | ||||
|             self.start_blueprint_watcher() | ||||
|     global _file_watcher_started  # noqa: PLW0603 | ||||
|     if _file_watcher_started: | ||||
|         return | ||||
|     observer = Observer() | ||||
|     kwargs = {} | ||||
|     if platform.startswith("linux"): | ||||
|         kwargs["event_filter"] = (FileCreatedEvent, FileModifiedEvent) | ||||
|     observer.schedule( | ||||
|         BlueprintEventHandler(), CONFIG.get("blueprints_dir"), recursive=True, **kwargs | ||||
|     ) | ||||
|     observer.start() | ||||
|     _file_watcher_started = True | ||||
|  | ||||
|  | ||||
| class BlueprintEventHandler(FileSystemEventHandler): | ||||
| @ -94,7 +92,7 @@ class BlueprintEventHandler(FileSystemEventHandler): | ||||
|         LOGGER.debug("new blueprint file created, starting discovery") | ||||
|         for tenant in Tenant.objects.filter(ready=True): | ||||
|             with tenant: | ||||
|                 Schedule.dispatch_by_actor(blueprints_discovery) | ||||
|                 blueprints_discovery.delay() | ||||
|  | ||||
|     def on_modified(self, event: FileSystemEvent): | ||||
|         """Process file modification""" | ||||
| @ -105,14 +103,14 @@ class BlueprintEventHandler(FileSystemEventHandler): | ||||
|             with tenant: | ||||
|                 for instance in BlueprintInstance.objects.filter(path=rel_path, enabled=True): | ||||
|                     LOGGER.debug("modified blueprint file, starting apply", instance=instance) | ||||
|                     apply_blueprint.send_with_options(args=(instance.pk,), rel_obj=instance) | ||||
|                     apply_blueprint.delay(instance.pk.hex) | ||||
|  | ||||
|  | ||||
| @actor( | ||||
|     description=_("Find blueprints as `blueprints_find` does, but return a safe dict."), | ||||
| @CELERY_APP.task( | ||||
|     throws=(DatabaseError, ProgrammingError, InternalError), | ||||
| ) | ||||
| def blueprints_find_dict(): | ||||
|     """Find blueprints as `blueprints_find` does, but return a safe dict""" | ||||
|     blueprints = [] | ||||
|     for blueprint in blueprints_find(): | ||||
|         blueprints.append(sanitize_dict(asdict(blueprint))) | ||||
| @ -148,19 +146,21 @@ def blueprints_find() -> list[BlueprintFile]: | ||||
|     return blueprints | ||||
|  | ||||
|  | ||||
| @actor( | ||||
|     description=_("Find blueprints and check if they need to be created in the database."), | ||||
|     throws=(DatabaseError, ProgrammingError, InternalError), | ||||
| @CELERY_APP.task( | ||||
|     throws=(DatabaseError, ProgrammingError, InternalError), base=SystemTask, bind=True | ||||
| ) | ||||
| def blueprints_discovery(path: str | None = None): | ||||
|     self: Task = CurrentTask.get_task() | ||||
| @prefill_task | ||||
| def blueprints_discovery(self: SystemTask, path: str | None = None): | ||||
|     """Find blueprints and check if they need to be created in the database""" | ||||
|     count = 0 | ||||
|     for blueprint in blueprints_find(): | ||||
|         if path and blueprint.path != path: | ||||
|             continue | ||||
|         check_blueprint_v1_file(blueprint) | ||||
|         count += 1 | ||||
|     self.info(f"Successfully imported {count} files.") | ||||
|     self.set_status( | ||||
|         TaskStatus.SUCCESSFUL, _("Successfully imported {count} files.".format(count=count)) | ||||
|     ) | ||||
|  | ||||
|  | ||||
| def check_blueprint_v1_file(blueprint: BlueprintFile): | ||||
| @ -187,26 +187,22 @@ def check_blueprint_v1_file(blueprint: BlueprintFile): | ||||
|         ) | ||||
|     if instance.last_applied_hash != blueprint.hash: | ||||
|         LOGGER.info("Applying blueprint due to changed file", instance=instance, path=instance.path) | ||||
|         apply_blueprint.send_with_options(args=(instance.pk,), rel_obj=instance) | ||||
|         apply_blueprint.delay(str(instance.pk)) | ||||
|  | ||||
|  | ||||
| @actor(description=_("Apply single blueprint.")) | ||||
| def apply_blueprint(instance_pk: UUID): | ||||
|     try: | ||||
|         self: Task = CurrentTask.get_task() | ||||
|     except CurrentTaskNotFound: | ||||
|         self = Task() | ||||
|     self.set_uid(str(instance_pk)) | ||||
| @CELERY_APP.task( | ||||
|     bind=True, | ||||
|     base=SystemTask, | ||||
| ) | ||||
| def apply_blueprint(self: SystemTask, instance_pk: str): | ||||
|     """Apply single blueprint""" | ||||
|     self.save_on_success = False | ||||
|     instance: BlueprintInstance | None = None | ||||
|     try: | ||||
|         instance: BlueprintInstance = BlueprintInstance.objects.filter(pk=instance_pk).first() | ||||
|         if not instance: | ||||
|             self.warning(f"Could not find blueprint {instance_pk}, skipping") | ||||
|         if not instance or not instance.enabled: | ||||
|             return | ||||
|         self.set_uid(slugify(instance.name)) | ||||
|         if not instance.enabled: | ||||
|             self.info(f"Blueprint {instance.name} is disabled, skipping") | ||||
|             return | ||||
|         blueprint_content = instance.retrieve() | ||||
|         file_hash = sha512(blueprint_content.encode()).hexdigest() | ||||
|         importer = Importer.from_string(blueprint_content, instance.context) | ||||
| @ -216,18 +212,19 @@ def apply_blueprint(instance_pk: UUID): | ||||
|         if not valid: | ||||
|             instance.status = BlueprintInstanceStatus.ERROR | ||||
|             instance.save() | ||||
|             self.logs(logs) | ||||
|             self.set_status(TaskStatus.ERROR, *logs) | ||||
|             return | ||||
|         with capture_logs() as logs: | ||||
|             applied = importer.apply() | ||||
|             if not applied: | ||||
|                 instance.status = BlueprintInstanceStatus.ERROR | ||||
|                 instance.save() | ||||
|                 self.logs(logs) | ||||
|                 self.set_status(TaskStatus.ERROR, *logs) | ||||
|                 return | ||||
|         instance.status = BlueprintInstanceStatus.SUCCESSFUL | ||||
|         instance.last_applied_hash = file_hash | ||||
|         instance.last_applied = now() | ||||
|         self.set_status(TaskStatus.SUCCESSFUL) | ||||
|     except ( | ||||
|         OSError, | ||||
|         DatabaseError, | ||||
| @ -238,14 +235,15 @@ def apply_blueprint(instance_pk: UUID): | ||||
|     ) as exc: | ||||
|         if instance: | ||||
|             instance.status = BlueprintInstanceStatus.ERROR | ||||
|         self.error(exc) | ||||
|         self.set_error(exc) | ||||
|     finally: | ||||
|         if instance: | ||||
|             instance.save() | ||||
|  | ||||
|  | ||||
| @actor(description=_("Remove blueprints which couldn't be fetched.")) | ||||
| @CELERY_APP.task() | ||||
| def clear_failed_blueprints(): | ||||
|     """Remove blueprints which couldn't be fetched""" | ||||
|     # Exclude OCI blueprints as those might be temporarily unavailable | ||||
|     for blueprint in BlueprintInstance.objects.exclude(path__startswith=OCI_PREFIX): | ||||
|         try: | ||||
|  | ||||
| @ -49,8 +49,6 @@ class BrandSerializer(ModelSerializer): | ||||
|             "branding_title", | ||||
|             "branding_logo", | ||||
|             "branding_favicon", | ||||
|             "branding_custom_css", | ||||
|             "branding_default_flow_background", | ||||
|             "flow_authentication", | ||||
|             "flow_invalidation", | ||||
|             "flow_recovery", | ||||
| @ -59,7 +57,6 @@ class BrandSerializer(ModelSerializer): | ||||
|             "flow_device_code", | ||||
|             "default_application", | ||||
|             "web_certificate", | ||||
|             "client_certificates", | ||||
|             "attributes", | ||||
|         ] | ||||
|         extra_kwargs = { | ||||
| @ -89,7 +86,6 @@ class CurrentBrandSerializer(PassiveSerializer): | ||||
|     branding_title = CharField() | ||||
|     branding_logo = CharField(source="branding_logo_url") | ||||
|     branding_favicon = CharField(source="branding_favicon_url") | ||||
|     branding_custom_css = CharField() | ||||
|     ui_footer_links = ListField( | ||||
|         child=FooterLinkSerializer(), | ||||
|         read_only=True, | ||||
| @ -121,7 +117,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet): | ||||
|         "domain", | ||||
|         "branding_title", | ||||
|         "web_certificate__name", | ||||
|         "client_certificates__name", | ||||
|     ] | ||||
|     filterset_fields = [ | ||||
|         "brand_uuid", | ||||
| @ -130,7 +125,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet): | ||||
|         "branding_title", | ||||
|         "branding_logo", | ||||
|         "branding_favicon", | ||||
|         "branding_default_flow_background", | ||||
|         "flow_authentication", | ||||
|         "flow_invalidation", | ||||
|         "flow_recovery", | ||||
| @ -138,7 +132,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet): | ||||
|         "flow_user_settings", | ||||
|         "flow_device_code", | ||||
|         "web_certificate", | ||||
|         "client_certificates", | ||||
|     ] | ||||
|     ordering = ["domain"] | ||||
|  | ||||
|  | ||||
| @ -1,16 +1,14 @@ | ||||
| """authentik brands app""" | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class AuthentikBrandsConfig(ManagedAppConfig): | ||||
| class AuthentikBrandsConfig(AppConfig): | ||||
|     """authentik Brand app""" | ||||
|  | ||||
|     name = "authentik.brands" | ||||
|     label = "authentik_brands" | ||||
|     verbose_name = "authentik Brands" | ||||
|     default = True | ||||
|     mountpoints = { | ||||
|         "authentik.brands.urls_root": "", | ||||
|     } | ||||
|     default = True | ||||
|  | ||||
| @ -1,35 +0,0 @@ | ||||
| # Generated by Django 5.0.12 on 2025-02-22 01:51 | ||||
|  | ||||
| from pathlib import Path | ||||
| from django.db import migrations, models | ||||
| from django.apps.registry import Apps | ||||
|  | ||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||
|  | ||||
|  | ||||
| def migrate_custom_css(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||
|     Brand = apps.get_model("authentik_brands", "brand") | ||||
|  | ||||
|     db_alias = schema_editor.connection.alias | ||||
|  | ||||
|     path = Path("/web/dist/custom.css") | ||||
|     if not path.exists(): | ||||
|         return | ||||
|     css = path.read_text() | ||||
|     Brand.objects.using(db_alias).all().update(branding_custom_css=css) | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_brands", "0007_brand_default_application"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="brand", | ||||
|             name="branding_custom_css", | ||||
|             field=models.TextField(blank=True, default=""), | ||||
|         ), | ||||
|         migrations.RunPython(migrate_custom_css), | ||||
|     ] | ||||
| @ -1,18 +0,0 @@ | ||||
| # Generated by Django 5.0.13 on 2025-03-19 22:54 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_brands", "0008_brand_branding_custom_css"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="brand", | ||||
|             name="branding_default_flow_background", | ||||
|             field=models.TextField(default="/static/dist/assets/images/flow_background.jpg"), | ||||
|         ), | ||||
|     ] | ||||
| @ -1,37 +0,0 @@ | ||||
| # Generated by Django 5.1.9 on 2025-05-19 15:09 | ||||
|  | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_brands", "0009_brand_branding_default_flow_background"), | ||||
|         ("authentik_crypto", "0004_alter_certificatekeypair_name"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="brand", | ||||
|             name="client_certificates", | ||||
|             field=models.ManyToManyField( | ||||
|                 blank=True, | ||||
|                 default=None, | ||||
|                 help_text="Certificates used for client authentication.", | ||||
|                 to="authentik_crypto.certificatekeypair", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="brand", | ||||
|             name="web_certificate", | ||||
|             field=models.ForeignKey( | ||||
|                 default=None, | ||||
|                 help_text="Web Certificate used by the authentik Core webserver.", | ||||
|                 null=True, | ||||
|                 on_delete=django.db.models.deletion.SET_DEFAULT, | ||||
|                 related_name="+", | ||||
|                 to="authentik_crypto.certificatekeypair", | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @ -33,10 +33,6 @@ class Brand(SerializerModel): | ||||
|  | ||||
|     branding_logo = models.TextField(default="/static/dist/assets/icons/icon_left_brand.svg") | ||||
|     branding_favicon = models.TextField(default="/static/dist/assets/icons/icon.png") | ||||
|     branding_custom_css = models.TextField(default="", blank=True) | ||||
|     branding_default_flow_background = models.TextField( | ||||
|         default="/static/dist/assets/images/flow_background.jpg" | ||||
|     ) | ||||
|  | ||||
|     flow_authentication = models.ForeignKey( | ||||
|         Flow, null=True, on_delete=models.SET_NULL, related_name="brand_authentication" | ||||
| @ -73,13 +69,6 @@ class Brand(SerializerModel): | ||||
|         default=None, | ||||
|         on_delete=models.SET_DEFAULT, | ||||
|         help_text=_("Web Certificate used by the authentik Core webserver."), | ||||
|         related_name="+", | ||||
|     ) | ||||
|     client_certificates = models.ManyToManyField( | ||||
|         CertificateKeyPair, | ||||
|         default=None, | ||||
|         blank=True, | ||||
|         help_text=_("Certificates used for client authentication."), | ||||
|     ) | ||||
|     attributes = models.JSONField(default=dict, blank=True) | ||||
|  | ||||
| @ -95,12 +84,6 @@ class Brand(SerializerModel): | ||||
|             return CONFIG.get("web.path", "/")[:-1] + self.branding_favicon | ||||
|         return self.branding_favicon | ||||
|  | ||||
|     def branding_default_flow_background_url(self) -> str: | ||||
|         """Get branding_default_flow_background with the correct prefix""" | ||||
|         if self.branding_default_flow_background.startswith("/static"): | ||||
|             return CONFIG.get("web.path", "/")[:-1] + self.branding_default_flow_background | ||||
|         return self.branding_default_flow_background | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> Serializer: | ||||
|         from authentik.brands.api import BrandSerializer | ||||
|  | ||||
| @ -24,7 +24,6 @@ class TestBrands(APITestCase): | ||||
|                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", | ||||
|                 "branding_favicon": "/static/dist/assets/icons/icon.png", | ||||
|                 "branding_title": "authentik", | ||||
|                 "branding_custom_css": "", | ||||
|                 "matched_domain": brand.domain, | ||||
|                 "ui_footer_links": [], | ||||
|                 "ui_theme": Themes.AUTOMATIC, | ||||
| @ -44,7 +43,6 @@ class TestBrands(APITestCase): | ||||
|                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", | ||||
|                 "branding_favicon": "/static/dist/assets/icons/icon.png", | ||||
|                 "branding_title": "custom", | ||||
|                 "branding_custom_css": "", | ||||
|                 "matched_domain": "bar.baz", | ||||
|                 "ui_footer_links": [], | ||||
|                 "ui_theme": Themes.AUTOMATIC, | ||||
| @ -61,7 +59,6 @@ class TestBrands(APITestCase): | ||||
|                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", | ||||
|                 "branding_favicon": "/static/dist/assets/icons/icon.png", | ||||
|                 "branding_title": "authentik", | ||||
|                 "branding_custom_css": "", | ||||
|                 "matched_domain": "fallback", | ||||
|                 "ui_footer_links": [], | ||||
|                 "ui_theme": Themes.AUTOMATIC, | ||||
| @ -124,38 +121,3 @@ class TestBrands(APITestCase): | ||||
|                 "subject": None, | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_branding_url(self): | ||||
|         """Test branding attributes return correct values""" | ||||
|         brand = create_test_brand() | ||||
|         brand.branding_default_flow_background = "https://goauthentik.io/img/icon.png" | ||||
|         brand.branding_favicon = "https://goauthentik.io/img/icon.png" | ||||
|         brand.branding_logo = "https://goauthentik.io/img/icon.png" | ||||
|         brand.save() | ||||
|         self.assertEqual( | ||||
|             brand.branding_default_flow_background_url(), "https://goauthentik.io/img/icon.png" | ||||
|         ) | ||||
|         self.assertJSONEqual( | ||||
|             self.client.get(reverse("authentik_api:brand-current")).content.decode(), | ||||
|             { | ||||
|                 "branding_logo": "https://goauthentik.io/img/icon.png", | ||||
|                 "branding_favicon": "https://goauthentik.io/img/icon.png", | ||||
|                 "branding_title": "authentik", | ||||
|                 "branding_custom_css": "", | ||||
|                 "matched_domain": brand.domain, | ||||
|                 "ui_footer_links": [], | ||||
|                 "ui_theme": Themes.AUTOMATIC, | ||||
|                 "default_locale": "", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_custom_css(self): | ||||
|         """Test custom_css""" | ||||
|         brand = create_test_brand() | ||||
|         brand.branding_custom_css = """* { | ||||
|             font-family: "Foo bar"; | ||||
|         }""" | ||||
|         brand.save() | ||||
|         res = self.client.get(reverse("authentik_core:if-user")) | ||||
|         self.assertEqual(res.status_code, 200) | ||||
|         self.assertIn(brand.branding_custom_css, res.content.decode()) | ||||
|  | ||||
| @ -5,12 +5,10 @@ from typing import Any | ||||
| from django.db.models import F, Q | ||||
| from django.db.models import Value as V | ||||
| from django.http.request import HttpRequest | ||||
| from django.utils.html import _json_script_escapes | ||||
| from django.utils.safestring import mark_safe | ||||
| from sentry_sdk import get_current_span | ||||
|  | ||||
| from authentik import get_full_version | ||||
| from authentik.brands.models import Brand | ||||
| from authentik.lib.sentry import get_http_meta | ||||
| from authentik.tenants.models import Tenant | ||||
|  | ||||
| _q_default = Q(default=True) | ||||
| @ -34,14 +32,13 @@ def context_processor(request: HttpRequest) -> dict[str, Any]: | ||||
|     """Context Processor that injects brand object into every template""" | ||||
|     brand = getattr(request, "brand", DEFAULT_BRAND) | ||||
|     tenant = getattr(request, "tenant", Tenant()) | ||||
|     # similarly to `json_script` we escape everything HTML-related, however django | ||||
|     # only directly exposes this as a function that also wraps it in a <script> tag | ||||
|     # which we dont want for CSS | ||||
|     brand_css = mark_safe(str(brand.branding_custom_css).translate(_json_script_escapes))  # nosec | ||||
|     trace = "" | ||||
|     span = get_current_span() | ||||
|     if span: | ||||
|         trace = span.to_traceparent() | ||||
|     return { | ||||
|         "brand": brand, | ||||
|         "brand_css": brand_css, | ||||
|         "footer_links": tenant.footer_links, | ||||
|         "html_meta": {**get_http_meta()}, | ||||
|         "sentry_trace": trace, | ||||
|         "version": get_full_version(), | ||||
|     } | ||||
|  | ||||
| @ -2,9 +2,11 @@ | ||||
|  | ||||
| from collections.abc import Iterator | ||||
| from copy import copy | ||||
| from datetime import timedelta | ||||
|  | ||||
| from django.core.cache import cache | ||||
| from django.db.models import QuerySet | ||||
| from django.db.models.functions import ExtractHour | ||||
| from django.shortcuts import get_object_or_404 | ||||
| from drf_spectacular.types import OpenApiTypes | ||||
| from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema | ||||
| @ -18,6 +20,7 @@ from rest_framework.response import Response | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.admin.api.metrics import CoordinateSerializer | ||||
| from authentik.api.pagination import Pagination | ||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | ||||
| from authentik.core.api.providers import ProviderSerializer | ||||
| @ -25,6 +28,7 @@ from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import ModelSerializer | ||||
| from authentik.core.models import Application, User | ||||
| from authentik.events.logs import LogEventSerializer, capture_logs | ||||
| from authentik.events.models import EventAction | ||||
| from authentik.lib.utils.file import ( | ||||
|     FilePathSerializer, | ||||
|     FileUploadSerializer, | ||||
| @ -42,7 +46,7 @@ LOGGER = get_logger() | ||||
|  | ||||
| def user_app_cache_key(user_pk: str, page_number: int | None = None) -> str: | ||||
|     """Cache key where application list for user is saved""" | ||||
|     key = f"{CACHE_PREFIX}app_access/{user_pk}" | ||||
|     key = f"{CACHE_PREFIX}/app_access/{user_pk}" | ||||
|     if page_number: | ||||
|         key += f"/{page_number}" | ||||
|     return key | ||||
| @ -317,3 +321,18 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet): | ||||
|         """Set application icon (as URL)""" | ||||
|         app: Application = self.get_object() | ||||
|         return set_file_url(request, app, "meta_icon") | ||||
|  | ||||
|     @permission_required("authentik_core.view_application", ["authentik_events.view_event"]) | ||||
|     @extend_schema(responses={200: CoordinateSerializer(many=True)}) | ||||
|     @action(detail=True, pagination_class=None, filter_backends=[]) | ||||
|     def metrics(self, request: Request, slug: str): | ||||
|         """Metrics for application logins""" | ||||
|         app = self.get_object() | ||||
|         return Response( | ||||
|             get_objects_for_user(request.user, "authentik_events.view_event").filter( | ||||
|                 action=EventAction.AUTHORIZE_APPLICATION, | ||||
|                 context__authorized_application__pk=app.pk.hex, | ||||
|             ) | ||||
|             # 3 data points per day, so 8 hour spans | ||||
|             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||
|         ) | ||||
|  | ||||
| @ -5,7 +5,6 @@ from typing import TypedDict | ||||
| from rest_framework import mixins | ||||
| from rest_framework.fields import SerializerMethodField | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.serializers import CharField, DateTimeField, IPAddressField | ||||
| from rest_framework.viewsets import GenericViewSet | ||||
| from ua_parser import user_agent_parser | ||||
|  | ||||
| @ -55,11 +54,6 @@ class UserAgentDict(TypedDict): | ||||
| class AuthenticatedSessionSerializer(ModelSerializer): | ||||
|     """AuthenticatedSession Serializer""" | ||||
|  | ||||
|     expires = DateTimeField(source="session.expires", read_only=True) | ||||
|     last_ip = IPAddressField(source="session.last_ip", read_only=True) | ||||
|     last_user_agent = CharField(source="session.last_user_agent", read_only=True) | ||||
|     last_used = DateTimeField(source="session.last_used", read_only=True) | ||||
|  | ||||
|     current = SerializerMethodField() | ||||
|     user_agent = SerializerMethodField() | ||||
|     geo_ip = SerializerMethodField() | ||||
| @ -68,19 +62,19 @@ class AuthenticatedSessionSerializer(ModelSerializer): | ||||
|     def get_current(self, instance: AuthenticatedSession) -> bool: | ||||
|         """Check if session is currently active session""" | ||||
|         request: Request = self.context["request"] | ||||
|         return request._request.session.session_key == instance.session.session_key | ||||
|         return request._request.session.session_key == instance.session_key | ||||
|  | ||||
|     def get_user_agent(self, instance: AuthenticatedSession) -> UserAgentDict: | ||||
|         """Get parsed user agent""" | ||||
|         return user_agent_parser.Parse(instance.session.last_user_agent) | ||||
|         return user_agent_parser.Parse(instance.last_user_agent) | ||||
|  | ||||
|     def get_geo_ip(self, instance: AuthenticatedSession) -> GeoIPDict | None:  # pragma: no cover | ||||
|         """Get GeoIP Data""" | ||||
|         return GEOIP_CONTEXT_PROCESSOR.city_dict(instance.session.last_ip) | ||||
|         return GEOIP_CONTEXT_PROCESSOR.city_dict(instance.last_ip) | ||||
|  | ||||
|     def get_asn(self, instance: AuthenticatedSession) -> ASNDict | None:  # pragma: no cover | ||||
|         """Get ASN Data""" | ||||
|         return ASN_CONTEXT_PROCESSOR.asn_dict(instance.session.last_ip) | ||||
|         return ASN_CONTEXT_PROCESSOR.asn_dict(instance.last_ip) | ||||
|  | ||||
|     class Meta: | ||||
|         model = AuthenticatedSession | ||||
| @ -96,7 +90,6 @@ class AuthenticatedSessionSerializer(ModelSerializer): | ||||
|             "last_used", | ||||
|             "expires", | ||||
|         ] | ||||
|         extra_args = {"uuid": {"read_only": True}} | ||||
|  | ||||
|  | ||||
| class AuthenticatedSessionViewSet( | ||||
| @ -108,10 +101,9 @@ class AuthenticatedSessionViewSet( | ||||
| ): | ||||
|     """AuthenticatedSession Viewset""" | ||||
|  | ||||
|     lookup_field = "uuid" | ||||
|     queryset = AuthenticatedSession.objects.select_related("session").all() | ||||
|     queryset = AuthenticatedSession.objects.all() | ||||
|     serializer_class = AuthenticatedSessionSerializer | ||||
|     search_fields = ["user__username", "session__last_ip", "session__last_user_agent"] | ||||
|     filterset_fields = ["user__username", "session__last_ip", "session__last_user_agent"] | ||||
|     search_fields = ["user__username", "last_ip", "last_user_agent"] | ||||
|     filterset_fields = ["user__username", "last_ip", "last_user_agent"] | ||||
|     ordering = ["user__username"] | ||||
|     owner_field = "user" | ||||
|  | ||||
| @ -1,6 +1,8 @@ | ||||
| """Authenticator Devices API Views""" | ||||
|  | ||||
| from drf_spectacular.utils import extend_schema | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from drf_spectacular.types import OpenApiTypes | ||||
| from drf_spectacular.utils import OpenApiParameter, extend_schema | ||||
| from guardian.shortcuts import get_objects_for_user | ||||
| from rest_framework.fields import ( | ||||
|     BooleanField, | ||||
| @ -13,7 +15,6 @@ from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.viewsets import ViewSet | ||||
|  | ||||
| from authentik.core.api.users import ParamUserSerializer | ||||
| from authentik.core.api.utils import MetaNameSerializer | ||||
| from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice | ||||
| from authentik.stages.authenticator import device_classes, devices_for_user | ||||
| @ -22,7 +23,7 @@ from authentik.stages.authenticator_webauthn.models import WebAuthnDevice | ||||
|  | ||||
|  | ||||
| class DeviceSerializer(MetaNameSerializer): | ||||
|     """Serializer for authenticator devices""" | ||||
|     """Serializer for Duo authenticator devices""" | ||||
|  | ||||
|     pk = CharField() | ||||
|     name = CharField() | ||||
| @ -32,27 +33,22 @@ class DeviceSerializer(MetaNameSerializer): | ||||
|     last_updated = DateTimeField(read_only=True) | ||||
|     last_used = DateTimeField(read_only=True, allow_null=True) | ||||
|     extra_description = SerializerMethodField() | ||||
|     external_id = SerializerMethodField() | ||||
|  | ||||
|     def get_type(self, instance: Device) -> str: | ||||
|         """Get type of device""" | ||||
|         return instance._meta.label | ||||
|  | ||||
|     def get_extra_description(self, instance: Device) -> str | None: | ||||
|     def get_extra_description(self, instance: Device) -> str: | ||||
|         """Get extra description""" | ||||
|         if isinstance(instance, WebAuthnDevice): | ||||
|             return instance.device_type.description if instance.device_type else None | ||||
|             return ( | ||||
|                 instance.device_type.description | ||||
|                 if instance.device_type | ||||
|                 else _("Extra description not available") | ||||
|             ) | ||||
|         if isinstance(instance, EndpointDevice): | ||||
|             return instance.data.get("deviceSignals", {}).get("deviceModel") | ||||
|         return None | ||||
|  | ||||
|     def get_external_id(self, instance: Device) -> str | None: | ||||
|         """Get external Device ID""" | ||||
|         if isinstance(instance, WebAuthnDevice): | ||||
|             return instance.device_type.aaguid if instance.device_type else None | ||||
|         if isinstance(instance, EndpointDevice): | ||||
|             return instance.data.get("deviceSignals", {}).get("deviceModel") | ||||
|         return None | ||||
|         return "" | ||||
|  | ||||
|  | ||||
| class DeviceViewSet(ViewSet): | ||||
| @ -61,6 +57,7 @@ class DeviceViewSet(ViewSet): | ||||
|     serializer_class = DeviceSerializer | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     @extend_schema(responses={200: DeviceSerializer(many=True)}) | ||||
|     def list(self, request: Request) -> Response: | ||||
|         """Get all devices for current user""" | ||||
|         devices = devices_for_user(request.user) | ||||
| @ -82,11 +79,18 @@ class AdminDeviceViewSet(ViewSet): | ||||
|             yield from device_set | ||||
|  | ||||
|     @extend_schema( | ||||
|         parameters=[ParamUserSerializer], | ||||
|         parameters=[ | ||||
|             OpenApiParameter( | ||||
|                 name="user", | ||||
|                 location=OpenApiParameter.QUERY, | ||||
|                 type=OpenApiTypes.INT, | ||||
|             ) | ||||
|         ], | ||||
|         responses={200: DeviceSerializer(many=True)}, | ||||
|     ) | ||||
|     def list(self, request: Request) -> Response: | ||||
|         """Get all devices for current user""" | ||||
|         args = ParamUserSerializer(data=request.query_params) | ||||
|         args.is_valid(raise_exception=True) | ||||
|         return Response(DeviceSerializer(self.get_devices(**args.validated_data), many=True).data) | ||||
|         kwargs = {} | ||||
|         if "user" in request.query_params: | ||||
|             kwargs = {"user": request.query_params["user"]} | ||||
|         return Response(DeviceSerializer(self.get_devices(**kwargs), many=True).data) | ||||
|  | ||||
| @ -99,17 +99,18 @@ class GroupSerializer(ModelSerializer): | ||||
|             if superuser | ||||
|             else "authentik_core.disable_group_superuser" | ||||
|         ) | ||||
|         if self.instance or superuser: | ||||
|             has_perm = user.has_perm(perm) or user.has_perm(perm, self.instance) | ||||
|             if not has_perm: | ||||
|                 raise ValidationError( | ||||
|                     _( | ||||
|                         ( | ||||
|                             "User does not have permission to set " | ||||
|                             "superuser status to {superuser_status}." | ||||
|                         ).format_map({"superuser_status": superuser}) | ||||
|                     ) | ||||
|         has_perm = user.has_perm(perm) | ||||
|         if self.instance and not has_perm: | ||||
|             has_perm = user.has_perm(perm, self.instance) | ||||
|         if not has_perm: | ||||
|             raise ValidationError( | ||||
|                 _( | ||||
|                     ( | ||||
|                         "User does not have permission to set " | ||||
|                         "superuser status to {superuser_status}." | ||||
|                     ).format_map({"superuser_status": superuser}) | ||||
|                 ) | ||||
|             ) | ||||
|         return superuser | ||||
|  | ||||
|     class Meta: | ||||
|  | ||||
| @ -179,13 +179,10 @@ class UserSourceConnectionSerializer(SourceSerializer): | ||||
|             "user", | ||||
|             "source", | ||||
|             "source_obj", | ||||
|             "identifier", | ||||
|             "created", | ||||
|             "last_updated", | ||||
|         ] | ||||
|         extra_kwargs = { | ||||
|             "created": {"read_only": True}, | ||||
|             "last_updated": {"read_only": True}, | ||||
|         } | ||||
|  | ||||
|  | ||||
| @ -202,7 +199,7 @@ class UserSourceConnectionViewSet( | ||||
|     queryset = UserSourceConnection.objects.all() | ||||
|     serializer_class = UserSourceConnectionSerializer | ||||
|     filterset_fields = ["user", "source__slug"] | ||||
|     search_fields = ["user__username", "source__slug", "identifier"] | ||||
|     search_fields = ["source__slug"] | ||||
|     ordering = ["source__slug", "pk"] | ||||
|     owner_field = "user" | ||||
|  | ||||
| @ -221,11 +218,9 @@ class GroupSourceConnectionSerializer(SourceSerializer): | ||||
|             "source_obj", | ||||
|             "identifier", | ||||
|             "created", | ||||
|             "last_updated", | ||||
|         ] | ||||
|         extra_kwargs = { | ||||
|             "created": {"read_only": True}, | ||||
|             "last_updated": {"read_only": True}, | ||||
|         } | ||||
|  | ||||
|  | ||||
| @ -242,5 +237,6 @@ class GroupSourceConnectionViewSet( | ||||
|     queryset = GroupSourceConnection.objects.all() | ||||
|     serializer_class = GroupSourceConnectionSerializer | ||||
|     filterset_fields = ["group", "source__slug"] | ||||
|     search_fields = ["group__name", "source__slug", "identifier"] | ||||
|     search_fields = ["source__slug"] | ||||
|     ordering = ["source__slug", "pk"] | ||||
|     owner_field = "user" | ||||
|  | ||||
| @ -6,6 +6,9 @@ from typing import Any | ||||
|  | ||||
| from django.contrib.auth import update_session_auth_hash | ||||
| from django.contrib.auth.models import Permission | ||||
| from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||
| from django.core.cache import cache | ||||
| from django.db.models.functions import ExtractHour | ||||
| from django.db.transaction import atomic | ||||
| from django.db.utils import IntegrityError | ||||
| from django.urls import reverse_lazy | ||||
| @ -51,6 +54,7 @@ from rest_framework.validators import UniqueValidator | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.admin.api.metrics import CoordinateSerializer | ||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | ||||
| from authentik.brands.models import Brand | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| @ -67,8 +71,8 @@ from authentik.core.middleware import ( | ||||
| from authentik.core.models import ( | ||||
|     USER_ATTRIBUTE_TOKEN_EXPIRING, | ||||
|     USER_PATH_SERVICE_ACCOUNT, | ||||
|     AuthenticatedSession, | ||||
|     Group, | ||||
|     Session, | ||||
|     Token, | ||||
|     TokenIntents, | ||||
|     User, | ||||
| @ -82,7 +86,6 @@ from authentik.flows.views.executor import QS_KEY_TOKEN | ||||
| from authentik.lib.avatars import get_avatar | ||||
| from authentik.rbac.decorators import permission_required | ||||
| from authentik.rbac.models import get_permission_choices | ||||
| from authentik.stages.email.flow import pickle_flow_token_for_email | ||||
| from authentik.stages.email.models import EmailStage | ||||
| from authentik.stages.email.tasks import send_mails | ||||
| from authentik.stages.email.utils import TemplateEmailMessage | ||||
| @ -90,12 +93,6 @@ from authentik.stages.email.utils import TemplateEmailMessage | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| class ParamUserSerializer(PassiveSerializer): | ||||
|     """Partial serializer for query parameters to select a user""" | ||||
|  | ||||
|     user = PrimaryKeyRelatedField(queryset=User.objects.all().exclude_anonymous(), required=False) | ||||
|  | ||||
|  | ||||
| class UserGroupSerializer(ModelSerializer): | ||||
|     """Simplified Group Serializer for user's groups""" | ||||
|  | ||||
| @ -229,7 +226,6 @@ class UserSerializer(ModelSerializer): | ||||
|             "name", | ||||
|             "is_active", | ||||
|             "last_login", | ||||
|             "date_joined", | ||||
|             "is_superuser", | ||||
|             "groups", | ||||
|             "groups_obj", | ||||
| @ -244,7 +240,6 @@ class UserSerializer(ModelSerializer): | ||||
|         ] | ||||
|         extra_kwargs = { | ||||
|             "name": {"allow_blank": True}, | ||||
|             "date_joined": {"read_only": True}, | ||||
|             "password_change_date": {"read_only": True}, | ||||
|         } | ||||
|  | ||||
| @ -321,6 +316,53 @@ class SessionUserSerializer(PassiveSerializer): | ||||
|     original = UserSelfSerializer(required=False) | ||||
|  | ||||
|  | ||||
| class UserMetricsSerializer(PassiveSerializer): | ||||
|     """User Metrics""" | ||||
|  | ||||
|     logins = SerializerMethodField() | ||||
|     logins_failed = SerializerMethodField() | ||||
|     authorizations = SerializerMethodField() | ||||
|  | ||||
|     @extend_schema_field(CoordinateSerializer(many=True)) | ||||
|     def get_logins(self, _): | ||||
|         """Get successful logins per 8 hours for the last 7 days""" | ||||
|         user = self.context["user"] | ||||
|         request = self.context["request"] | ||||
|         return ( | ||||
|             get_objects_for_user(request.user, "authentik_events.view_event").filter( | ||||
|                 action=EventAction.LOGIN, user__pk=user.pk | ||||
|             ) | ||||
|             # 3 data points per day, so 8 hour spans | ||||
|             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||
|         ) | ||||
|  | ||||
|     @extend_schema_field(CoordinateSerializer(many=True)) | ||||
|     def get_logins_failed(self, _): | ||||
|         """Get failed logins per 8 hours for the last 7 days""" | ||||
|         user = self.context["user"] | ||||
|         request = self.context["request"] | ||||
|         return ( | ||||
|             get_objects_for_user(request.user, "authentik_events.view_event").filter( | ||||
|                 action=EventAction.LOGIN_FAILED, context__username=user.username | ||||
|             ) | ||||
|             # 3 data points per day, so 8 hour spans | ||||
|             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||
|         ) | ||||
|  | ||||
|     @extend_schema_field(CoordinateSerializer(many=True)) | ||||
|     def get_authorizations(self, _): | ||||
|         """Get failed logins per 8 hours for the last 7 days""" | ||||
|         user = self.context["user"] | ||||
|         request = self.context["request"] | ||||
|         return ( | ||||
|             get_objects_for_user(request.user, "authentik_events.view_event").filter( | ||||
|                 action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk | ||||
|             ) | ||||
|             # 3 data points per day, so 8 hour spans | ||||
|             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class UsersFilter(FilterSet): | ||||
|     """Filter for users""" | ||||
|  | ||||
| @ -331,7 +373,7 @@ class UsersFilter(FilterSet): | ||||
|         method="filter_attributes", | ||||
|     ) | ||||
|  | ||||
|     is_superuser = BooleanFilter(field_name="ak_groups", method="filter_is_superuser") | ||||
|     is_superuser = BooleanFilter(field_name="ak_groups", lookup_expr="is_superuser") | ||||
|     uuid = UUIDFilter(field_name="uuid") | ||||
|  | ||||
|     path = CharFilter(field_name="path") | ||||
| @ -349,11 +391,6 @@ class UsersFilter(FilterSet): | ||||
|         queryset=Group.objects.all().order_by("name"), | ||||
|     ) | ||||
|  | ||||
|     def filter_is_superuser(self, queryset, name, value): | ||||
|         if value: | ||||
|             return queryset.filter(ak_groups__is_superuser=True).distinct() | ||||
|         return queryset.exclude(ak_groups__is_superuser=True).distinct() | ||||
|  | ||||
|     def filter_attributes(self, queryset, name, value): | ||||
|         """Filter attributes by query args""" | ||||
|         try: | ||||
| @ -392,23 +429,8 @@ class UserViewSet(UsedByMixin, ModelViewSet): | ||||
|     queryset = User.objects.none() | ||||
|     ordering = ["username"] | ||||
|     serializer_class = UserSerializer | ||||
|     filterset_class = UsersFilter | ||||
|     search_fields = ["username", "name", "is_active", "email", "uuid", "attributes"] | ||||
|  | ||||
|     def get_ql_fields(self): | ||||
|         from djangoql.schema import BoolField, StrField | ||||
|  | ||||
|         from authentik.enterprise.search.fields import ChoiceSearchField, JSONSearchField | ||||
|  | ||||
|         return [ | ||||
|             StrField(User, "username"), | ||||
|             StrField(User, "name"), | ||||
|             StrField(User, "email"), | ||||
|             StrField(User, "path"), | ||||
|             BoolField(User, "is_active", nullable=True), | ||||
|             ChoiceSearchField(User, "type"), | ||||
|             JSONSearchField(User, "attributes", suggest_nested=False), | ||||
|         ] | ||||
|     filterset_class = UsersFilter | ||||
|  | ||||
|     def get_queryset(self): | ||||
|         base_qs = User.objects.all().exclude_anonymous() | ||||
| @ -424,7 +446,7 @@ class UserViewSet(UsedByMixin, ModelViewSet): | ||||
|     def list(self, request, *args, **kwargs): | ||||
|         return super().list(request, *args, **kwargs) | ||||
|  | ||||
|     def _create_recovery_link(self, for_email=False) -> tuple[str, Token]: | ||||
|     def _create_recovery_link(self) -> tuple[str, Token]: | ||||
|         """Create a recovery link (when the current brand has a recovery flow set), | ||||
|         that can either be shown to an admin or sent to the user directly""" | ||||
|         brand: Brand = self.request._request.brand | ||||
| @ -446,16 +468,12 @@ class UserViewSet(UsedByMixin, ModelViewSet): | ||||
|             raise ValidationError( | ||||
|                 {"non_field_errors": "Recovery flow not applicable to user"} | ||||
|             ) from None | ||||
|         _plan = FlowToken.pickle(plan) | ||||
|         if for_email: | ||||
|             _plan = pickle_flow_token_for_email(plan) | ||||
|         token, __ = FlowToken.objects.update_or_create( | ||||
|             identifier=f"{user.uid}-password-reset", | ||||
|             defaults={ | ||||
|                 "user": user, | ||||
|                 "flow": flow, | ||||
|                 "_plan": _plan, | ||||
|                 "revoke_on_execution": not for_email, | ||||
|                 "_plan": FlowToken.pickle(plan), | ||||
|             }, | ||||
|         ) | ||||
|         querystring = urlencode({QS_KEY_TOKEN: token.key}) | ||||
| @ -579,6 +597,17 @@ class UserViewSet(UsedByMixin, ModelViewSet): | ||||
|             update_session_auth_hash(self.request, user) | ||||
|         return Response(status=204) | ||||
|  | ||||
|     @permission_required("authentik_core.view_user", ["authentik_events.view_event"]) | ||||
|     @extend_schema(responses={200: UserMetricsSerializer(many=False)}) | ||||
|     @action(detail=True, pagination_class=None, filter_backends=[]) | ||||
|     def metrics(self, request: Request, pk: int) -> Response: | ||||
|         """User metrics per 1h""" | ||||
|         user: User = self.get_object() | ||||
|         serializer = UserMetricsSerializer(instance={}) | ||||
|         serializer.context["user"] = user | ||||
|         serializer.context["request"] = request | ||||
|         return Response(serializer.data) | ||||
|  | ||||
|     @permission_required("authentik_core.reset_user_password") | ||||
|     @extend_schema( | ||||
|         responses={ | ||||
| @ -614,7 +643,7 @@ class UserViewSet(UsedByMixin, ModelViewSet): | ||||
|         if for_user.email == "": | ||||
|             LOGGER.debug("User doesn't have an email address") | ||||
|             raise ValidationError({"non_field_errors": "User does not have an email address set."}) | ||||
|         link, token = self._create_recovery_link(for_email=True) | ||||
|         link, token = self._create_recovery_link() | ||||
|         # Lookup the email stage to assure the current user can access it | ||||
|         stages = get_objects_for_user( | ||||
|             request.user, "authentik_stages_email.view_emailstage" | ||||
| @ -738,6 +767,9 @@ class UserViewSet(UsedByMixin, ModelViewSet): | ||||
|         response = super().partial_update(request, *args, **kwargs) | ||||
|         instance: User = self.get_object() | ||||
|         if not instance.is_active: | ||||
|             Session.objects.filter(authenticatedsession__user=instance).delete() | ||||
|             sessions = AuthenticatedSession.objects.filter(user=instance) | ||||
|             session_ids = sessions.values_list("session_key", flat=True) | ||||
|             cache.delete_many(f"{KEY_PREFIX}{session}" for session in session_ids) | ||||
|             sessions.delete() | ||||
|             LOGGER.debug("Deleted user's sessions", user=instance.username) | ||||
|         return response | ||||
|  | ||||
| @ -2,7 +2,6 @@ | ||||
|  | ||||
| from typing import Any | ||||
|  | ||||
| from django.db import models | ||||
| from django.db.models import Model | ||||
| from drf_spectacular.extensions import OpenApiSerializerFieldExtension | ||||
| from drf_spectacular.plumbing import build_basic_type | ||||
| @ -21,8 +20,6 @@ from rest_framework.serializers import ( | ||||
|     raise_errors_on_nested_writes, | ||||
| ) | ||||
|  | ||||
| from authentik.rbac.permissions import assign_initial_permissions | ||||
|  | ||||
|  | ||||
| def is_dict(value: Any): | ||||
|     """Ensure a value is a dictionary, useful for JSONFields""" | ||||
| @ -31,36 +28,8 @@ def is_dict(value: Any): | ||||
|     raise ValidationError("Value must be a dictionary, and not have any duplicate keys.") | ||||
|  | ||||
|  | ||||
| class JSONDictField(JSONField): | ||||
|     """JSON Field which only allows dictionaries""" | ||||
|  | ||||
|     default_validators = [is_dict] | ||||
|  | ||||
|  | ||||
| class JSONExtension(OpenApiSerializerFieldExtension): | ||||
|     """Generate API Schema for JSON fields as""" | ||||
|  | ||||
|     target_class = "authentik.core.api.utils.JSONDictField" | ||||
|  | ||||
|     def map_serializer_field(self, auto_schema, direction): | ||||
|         return build_basic_type(OpenApiTypes.OBJECT) | ||||
|  | ||||
|  | ||||
| class ModelSerializer(BaseModelSerializer): | ||||
|  | ||||
|     # By default, JSON fields we have are used to store dictionaries | ||||
|     serializer_field_mapping = BaseModelSerializer.serializer_field_mapping.copy() | ||||
|     serializer_field_mapping[models.JSONField] = JSONDictField | ||||
|  | ||||
|     def create(self, validated_data): | ||||
|         instance = super().create(validated_data) | ||||
|  | ||||
|         request = self.context.get("request") | ||||
|         if request and hasattr(request, "user") and not request.user.is_anonymous: | ||||
|             assign_initial_permissions(request.user, instance) | ||||
|  | ||||
|         return instance | ||||
|  | ||||
|     def update(self, instance: Model, validated_data): | ||||
|         raise_errors_on_nested_writes("update", self, validated_data) | ||||
|         info = model_meta.get_field_info(instance) | ||||
| @ -92,6 +61,21 @@ class ModelSerializer(BaseModelSerializer): | ||||
|         return instance | ||||
|  | ||||
|  | ||||
| class JSONDictField(JSONField): | ||||
|     """JSON Field which only allows dictionaries""" | ||||
|  | ||||
|     default_validators = [is_dict] | ||||
|  | ||||
|  | ||||
| class JSONExtension(OpenApiSerializerFieldExtension): | ||||
|     """Generate API Schema for JSON fields as""" | ||||
|  | ||||
|     target_class = "authentik.core.api.utils.JSONDictField" | ||||
|  | ||||
|     def map_serializer_field(self, auto_schema, direction): | ||||
|         return build_basic_type(OpenApiTypes.OBJECT) | ||||
|  | ||||
|  | ||||
| class PassiveSerializer(Serializer): | ||||
|     """Base serializer class which doesn't implement create/update methods""" | ||||
|  | ||||
|  | ||||
| @ -1,7 +1,8 @@ | ||||
| """authentik core app config""" | ||||
|  | ||||
| from django.conf import settings | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| from authentik.tasks.schedules.lib import ScheduleSpec | ||||
|  | ||||
|  | ||||
| class AuthentikCoreConfig(ManagedAppConfig): | ||||
| @ -13,6 +14,14 @@ class AuthentikCoreConfig(ManagedAppConfig): | ||||
|     mountpoint = "" | ||||
|     default = True | ||||
|  | ||||
|     @ManagedAppConfig.reconcile_global | ||||
|     def debug_worker_hook(self): | ||||
|         """Dispatch startup tasks inline when debugging""" | ||||
|         if settings.DEBUG: | ||||
|             from authentik.root.celery import worker_ready_hook | ||||
|  | ||||
|             worker_ready_hook() | ||||
|  | ||||
|     @ManagedAppConfig.reconcile_tenant | ||||
|     def source_inbuilt(self): | ||||
|         """Reconcile inbuilt source""" | ||||
| @ -25,18 +34,3 @@ class AuthentikCoreConfig(ManagedAppConfig): | ||||
|             }, | ||||
|             managed=Source.MANAGED_INBUILT, | ||||
|         ) | ||||
|  | ||||
|     @property | ||||
|     def tenant_schedule_specs(self) -> list[ScheduleSpec]: | ||||
|         from authentik.core.tasks import clean_expired_models, clean_temporary_users | ||||
|  | ||||
|         return [ | ||||
|             ScheduleSpec( | ||||
|                 actor=clean_expired_models, | ||||
|                 crontab="2-59/5 * * * *", | ||||
|             ), | ||||
|             ScheduleSpec( | ||||
|                 actor=clean_temporary_users, | ||||
|                 crontab="9-59/5 * * * *", | ||||
|             ), | ||||
|         ] | ||||
|  | ||||
| @ -24,15 +24,6 @@ class InbuiltBackend(ModelBackend): | ||||
|         self.set_method("password", request) | ||||
|         return user | ||||
|  | ||||
|     async def aauthenticate( | ||||
|         self, request: HttpRequest, username: str | None, password: str | None, **kwargs: Any | ||||
|     ) -> User | None: | ||||
|         user = await super().aauthenticate(request, username=username, password=password, **kwargs) | ||||
|         if not user: | ||||
|             return None | ||||
|         self.set_method("password", request) | ||||
|         return user | ||||
|  | ||||
|     def set_method(self, method: str, request: HttpRequest | None, **kwargs): | ||||
|         """Set method data on current flow, if possbiel""" | ||||
|         if not request: | ||||
|  | ||||
							
								
								
									
										21
									
								
								authentik/core/management/commands/bootstrap_tasks.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										21
									
								
								authentik/core/management/commands/bootstrap_tasks.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,21 @@ | ||||
| """Run bootstrap tasks""" | ||||
|  | ||||
| from django.core.management.base import BaseCommand | ||||
| from django_tenants.utils import get_public_schema_name | ||||
|  | ||||
| from authentik.root.celery import _get_startup_tasks_all_tenants, _get_startup_tasks_default_tenant | ||||
| from authentik.tenants.models import Tenant | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     """Run bootstrap tasks to ensure certain objects are created""" | ||||
|  | ||||
|     def handle(self, **options): | ||||
|         for task in _get_startup_tasks_default_tenant(): | ||||
|             with Tenant.objects.get(schema_name=get_public_schema_name()): | ||||
|                 task() | ||||
|  | ||||
|         for task in _get_startup_tasks_all_tenants(): | ||||
|             for tenant in Tenant.objects.filter(ready=True): | ||||
|                 with tenant: | ||||
|                     task() | ||||
| @ -13,6 +13,7 @@ class Command(TenantCommand): | ||||
|         parser.add_argument("usernames", nargs="*", type=str) | ||||
|  | ||||
|     def handle_per_tenant(self, **options): | ||||
|         print(options) | ||||
|         new_type = UserTypes(options["type"]) | ||||
|         qs = ( | ||||
|             User.objects.exclude_anonymous() | ||||
|  | ||||
| @ -1,15 +0,0 @@ | ||||
| """Change user type""" | ||||
|  | ||||
| from importlib import import_module | ||||
|  | ||||
| from django.conf import settings | ||||
|  | ||||
| from authentik.tenants.management import TenantCommand | ||||
|  | ||||
|  | ||||
| class Command(TenantCommand): | ||||
|     """Delete all sessions""" | ||||
|  | ||||
|     def handle_per_tenant(self, **options): | ||||
|         engine = import_module(settings.SESSION_ENGINE) | ||||
|         engine.SessionStore.clear_expired() | ||||
| @ -2,7 +2,6 @@ | ||||
|  | ||||
| from django.apps import apps | ||||
| from django.contrib.auth.management import create_permissions | ||||
| from django.core.management import call_command | ||||
| from django.core.management.base import BaseCommand, no_translations | ||||
| from guardian.management import create_anonymous_user | ||||
|  | ||||
| @ -17,10 +16,6 @@ class Command(BaseCommand): | ||||
|         """Check permissions for all apps""" | ||||
|         for tenant in Tenant.objects.filter(ready=True): | ||||
|             with tenant: | ||||
|                 # See https://code.djangoproject.com/ticket/28417 | ||||
|                 # Remove potential lingering old permissions | ||||
|                 call_command("remove_stale_contenttypes", "--no-input") | ||||
|  | ||||
|                 for app in apps.get_app_configs(): | ||||
|                     self.stdout.write(f"Checking app {app.name} ({app.label})\n") | ||||
|                     create_permissions(app, verbosity=0) | ||||
|  | ||||
							
								
								
									
										47
									
								
								authentik/core/management/commands/worker.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										47
									
								
								authentik/core/management/commands/worker.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,47 @@ | ||||
| """Run worker""" | ||||
|  | ||||
| from sys import exit as sysexit | ||||
| from tempfile import tempdir | ||||
|  | ||||
| from celery.apps.worker import Worker | ||||
| from django.core.management.base import BaseCommand | ||||
| from django.db import close_old_connections | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.lib.debug import start_debug_server | ||||
| from authentik.root.celery import CELERY_APP | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     """Run worker""" | ||||
|  | ||||
|     def add_arguments(self, parser): | ||||
|         parser.add_argument( | ||||
|             "-b", | ||||
|             "--beat", | ||||
|             action="store_false", | ||||
|             help="When set, this worker will _not_ run Beat (scheduled) tasks", | ||||
|         ) | ||||
|  | ||||
|     def handle(self, **options): | ||||
|         LOGGER.debug("Celery options", **options) | ||||
|         close_old_connections() | ||||
|         start_debug_server() | ||||
|         worker: Worker = CELERY_APP.Worker( | ||||
|             no_color=False, | ||||
|             quiet=True, | ||||
|             optimization="fair", | ||||
|             autoscale=(CONFIG.get_int("worker.concurrency"), 1), | ||||
|             task_events=True, | ||||
|             beat=options.get("beat", True), | ||||
|             schedule_filename=f"{tempdir}/celerybeat-schedule", | ||||
|             queues=["authentik", "authentik_scheduled", "authentik_events"], | ||||
|         ) | ||||
|         for task in CELERY_APP.tasks: | ||||
|             LOGGER.debug("Registered task", task=task) | ||||
|  | ||||
|         worker.start() | ||||
|         sysexit(worker.exitcode) | ||||
| @ -2,14 +2,9 @@ | ||||
|  | ||||
| from collections.abc import Callable | ||||
| from contextvars import ContextVar | ||||
| from functools import partial | ||||
| from uuid import uuid4 | ||||
|  | ||||
| from django.contrib.auth.models import AnonymousUser | ||||
| from django.core.exceptions import ImproperlyConfigured | ||||
| from django.http import HttpRequest, HttpResponse | ||||
| from django.utils.deprecation import MiddlewareMixin | ||||
| from django.utils.functional import SimpleLazyObject | ||||
| from django.utils.translation import override | ||||
| from sentry_sdk.api import set_tag | ||||
| from structlog.contextvars import STRUCTLOG_KEY_PREFIX | ||||
| @ -25,40 +20,6 @@ CTX_HOST = ContextVar[str | None](STRUCTLOG_KEY_PREFIX + "host", default=None) | ||||
| CTX_AUTH_VIA = ContextVar[str | None](STRUCTLOG_KEY_PREFIX + KEY_AUTH_VIA, default=None) | ||||
|  | ||||
|  | ||||
| def get_user(request): | ||||
|     if not hasattr(request, "_cached_user"): | ||||
|         user = None | ||||
|         if (authenticated_session := request.session.get("authenticatedsession", None)) is not None: | ||||
|             user = authenticated_session.user | ||||
|         request._cached_user = user or AnonymousUser() | ||||
|     return request._cached_user | ||||
|  | ||||
|  | ||||
| async def aget_user(request): | ||||
|     if not hasattr(request, "_cached_user"): | ||||
|         user = None | ||||
|         if ( | ||||
|             authenticated_session := await request.session.aget("authenticatedsession", None) | ||||
|         ) is not None: | ||||
|             user = authenticated_session.user | ||||
|         request._cached_user = user or AnonymousUser() | ||||
|     return request._cached_user | ||||
|  | ||||
|  | ||||
| class AuthenticationMiddleware(MiddlewareMixin): | ||||
|     def process_request(self, request): | ||||
|         if not hasattr(request, "session"): | ||||
|             raise ImproperlyConfigured( | ||||
|                 "The Django authentication middleware requires session " | ||||
|                 "middleware to be installed. Edit your MIDDLEWARE setting to " | ||||
|                 "insert " | ||||
|                 "'authentik.root.middleware.SessionMiddleware' before " | ||||
|                 "'authentik.core.middleware.AuthenticationMiddleware'." | ||||
|             ) | ||||
|         request.user = SimpleLazyObject(lambda: get_user(request)) | ||||
|         request.auser = partial(aget_user, request) | ||||
|  | ||||
|  | ||||
| class ImpersonateMiddleware: | ||||
|     """Middleware to impersonate users""" | ||||
|  | ||||
|  | ||||
| @ -1,19 +0,0 @@ | ||||
| # Generated by Django 5.0.13 on 2025-04-07 14:04 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_core", "0043_alter_group_options"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="usersourceconnection", | ||||
|             name="new_identifier", | ||||
|             field=models.TextField(default=""), | ||||
|             preserve_default=False, | ||||
|         ), | ||||
|     ] | ||||
| @ -1,30 +0,0 @@ | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_core", "0044_usersourceconnection_new_identifier"), | ||||
|         ("authentik_sources_kerberos", "0003_migrate_userkerberossourceconnection_identifier"), | ||||
|         ("authentik_sources_oauth", "0009_migrate_useroauthsourceconnection_identifier"), | ||||
|         ("authentik_sources_plex", "0005_migrate_userplexsourceconnection_identifier"), | ||||
|         ("authentik_sources_saml", "0019_migrate_usersamlsourceconnection_identifier"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RenameField( | ||||
|             model_name="usersourceconnection", | ||||
|             old_name="new_identifier", | ||||
|             new_name="identifier", | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="usersourceconnection", | ||||
|             index=models.Index(fields=["identifier"], name="authentik_c_identif_59226f_idx"), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="usersourceconnection", | ||||
|             index=models.Index( | ||||
|                 fields=["source", "identifier"], name="authentik_c_source__649e04_idx" | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @ -1,242 +0,0 @@ | ||||
| # Generated by Django 5.0.11 on 2025-01-27 12:58 | ||||
|  | ||||
| import uuid | ||||
| import pickle  # nosec | ||||
| from django.core import signing | ||||
| from django.contrib.auth import BACKEND_SESSION_KEY, HASH_SESSION_KEY, SESSION_KEY | ||||
| from django.db import migrations, models | ||||
| import django.db.models.deletion | ||||
| from django.conf import settings | ||||
| from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||
| from django.utils.timezone import now, timedelta | ||||
| from authentik.lib.migrations import progress_bar | ||||
| from authentik.root.middleware import ClientIPMiddleware | ||||
|  | ||||
|  | ||||
| SESSION_CACHE_ALIAS = "default" | ||||
|  | ||||
|  | ||||
| class PickleSerializer: | ||||
|     """ | ||||
|     Simple wrapper around pickle to be used in signing.dumps()/loads() and | ||||
|     cache backends. | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, protocol=None): | ||||
|         self.protocol = pickle.HIGHEST_PROTOCOL if protocol is None else protocol | ||||
|  | ||||
|     def dumps(self, obj): | ||||
|         """Pickle data to be stored in redis""" | ||||
|         return pickle.dumps(obj, self.protocol) | ||||
|  | ||||
|     def loads(self, data): | ||||
|         """Unpickle data to be loaded from redis""" | ||||
|         try: | ||||
|             return pickle.loads(data)  # nosec | ||||
|         except Exception: | ||||
|             return {} | ||||
|  | ||||
|  | ||||
| def _migrate_session( | ||||
|     apps, | ||||
|     db_alias, | ||||
|     session_key, | ||||
|     session_data, | ||||
|     expires, | ||||
| ): | ||||
|     Session = apps.get_model("authentik_core", "Session") | ||||
|     OldAuthenticatedSession = apps.get_model("authentik_core", "OldAuthenticatedSession") | ||||
|     AuthenticatedSession = apps.get_model("authentik_core", "AuthenticatedSession") | ||||
|  | ||||
|     old_auth_session = ( | ||||
|         OldAuthenticatedSession.objects.using(db_alias).filter(session_key=session_key).first() | ||||
|     ) | ||||
|  | ||||
|     args = { | ||||
|         "session_key": session_key, | ||||
|         "expires": expires, | ||||
|         "last_ip": ClientIPMiddleware.default_ip, | ||||
|         "last_user_agent": "", | ||||
|         "session_data": {}, | ||||
|     } | ||||
|     for k, v in session_data.items(): | ||||
|         if k == "authentik/stages/user_login/last_ip": | ||||
|             args["last_ip"] = v | ||||
|         elif k in ["last_user_agent", "last_used"]: | ||||
|             args[k] = v | ||||
|         elif args in [SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY]: | ||||
|             pass | ||||
|         else: | ||||
|             args["session_data"][k] = v | ||||
|     if old_auth_session: | ||||
|         args["last_user_agent"] = old_auth_session.last_user_agent | ||||
|         args["last_used"] = old_auth_session.last_used | ||||
|  | ||||
|     args["session_data"] = pickle.dumps(args["session_data"]) | ||||
|     session = Session.objects.using(db_alias).create(**args) | ||||
|  | ||||
|     if old_auth_session: | ||||
|         AuthenticatedSession.objects.using(db_alias).create( | ||||
|             session=session, | ||||
|             user=old_auth_session.user, | ||||
|             uuid=old_auth_session.uuid, | ||||
|         ) | ||||
|  | ||||
|  | ||||
| def migrate_redis_sessions(apps, schema_editor): | ||||
|     from django.core.cache import caches | ||||
|  | ||||
|     db_alias = schema_editor.connection.alias | ||||
|     cache = caches[SESSION_CACHE_ALIAS] | ||||
|  | ||||
|     # Not a redis cache, skipping | ||||
|     if not hasattr(cache, "keys"): | ||||
|         return | ||||
|  | ||||
|     print("\nMigrating Redis sessions to database, this might take a couple of minutes...") | ||||
|     for key, session_data in progress_bar(cache.get_many(cache.keys(f"{KEY_PREFIX}*")).items()): | ||||
|         _migrate_session( | ||||
|             apps=apps, | ||||
|             db_alias=db_alias, | ||||
|             session_key=key.removeprefix(KEY_PREFIX), | ||||
|             session_data=session_data, | ||||
|             expires=now() + timedelta(seconds=cache.ttl(key)), | ||||
|         ) | ||||
|  | ||||
|  | ||||
def migrate_database_sessions(apps, schema_editor):
    """Copy legacy django_session rows into the new database Session model."""
    DjangoSession = apps.get_model("sessions", "Session")
    db_alias = schema_editor.connection.alias

    # Fix: message previously read "Migration database sessions"; align the
    # wording with the Redis variant's "Migrating ..." progress message.
    print("\nMigrating database sessions, this might take a couple of minutes...")
    for django_session in progress_bar(DjangoSession.objects.using(db_alias).all()):
        # Legacy rows are signed pickles written by Django's SessionStore;
        # decode with the same salt and serializer it used.
        session_data = signing.loads(
            django_session.session_data,
            salt="django.contrib.sessions.SessionStore",
            serializer=PickleSerializer,
        )
        _migrate_session(
            apps=apps,
            db_alias=db_alias,
            session_key=django_session.session_key,
            session_data=session_data,
            expires=django_session.expire_date,
        )
|  | ||||
|  | ||||
class Migration(migrations.Migration):
    """Introduce authentik's own Session model and migrate existing sessions.

    Renames AuthenticatedSession to OldAuthenticatedSession (keeping its rows),
    creates the new Session and AuthenticatedSession models, then copies
    existing sessions over from both the Redis cache and the django_session
    table via the RunPython data migrations at the end.
    """

    dependencies = [
        ("sessions", "0001_initial"),
        ("authentik_core", "0045_rename_new_identifier_usersourceconnection_identifier_and_more"),
        ("authentik_providers_oauth2", "0027_accesstoken_authentik_p_expires_9f24a5_idx_and_more"),
        ("authentik_providers_rac", "0006_connectiontoken_authentik_p_expires_91f148_idx_and_more"),
    ]

    operations = [
        # Rename AuthenticatedSession to OldAuthenticatedSession
        migrations.RenameModel(
            old_name="AuthenticatedSession",
            new_name="OldAuthenticatedSession",
        ),
        # Move the old model's indexes aside so the models created below can
        # use fresh index names without colliding.
        migrations.RenameIndex(
            model_name="oldauthenticatedsession",
            new_name="authentik_c_expires_cf4f72_idx",
            old_name="authentik_c_expires_08251d_idx",
        ),
        migrations.RenameIndex(
            model_name="oldauthenticatedsession",
            new_name="authentik_c_expirin_c1f17f_idx",
            old_name="authentik_c_expirin_9cd839_idx",
        ),
        migrations.RenameIndex(
            model_name="oldauthenticatedsession",
            new_name="authentik_c_expirin_e04f5d_idx",
            old_name="authentik_c_expirin_195a84_idx",
        ),
        migrations.RenameIndex(
            model_name="oldauthenticatedsession",
            new_name="authentik_c_session_a44819_idx",
            old_name="authentik_c_session_d0f005_idx",
        ),
        # The user FK index was created implicitly (not via Meta.indexes), so
        # RenameIndex cannot target it; rename it with raw SQL instead.
        migrations.RunSQL(
            sql="ALTER INDEX authentik_core_authenticatedsession_user_id_5055b6cf RENAME TO authentik_core_oldauthenticatedsession_user_id_5055b6cf",
            reverse_sql="ALTER INDEX authentik_core_oldauthenticatedsession_user_id_5055b6cf RENAME TO authentik_core_authenticatedsession_user_id_5055b6cf",
        ),
        # Create new Session and AuthenticatedSession models
        migrations.CreateModel(
            name="Session",
            fields=[
                (
                    "session_key",
                    models.CharField(
                        max_length=40, primary_key=True, serialize=False, verbose_name="session key"
                    ),
                ),
                ("expires", models.DateTimeField(default=None, null=True)),
                ("expiring", models.BooleanField(default=True)),
                ("session_data", models.BinaryField(verbose_name="session data")),
                ("last_ip", models.GenericIPAddressField()),
                ("last_user_agent", models.TextField(blank=True)),
                ("last_used", models.DateTimeField(auto_now=True)),
            ],
            options={
                "default_permissions": [],
                "verbose_name": "Session",
                "verbose_name_plural": "Sessions",
            },
        ),
        migrations.AddIndex(
            model_name="session",
            index=models.Index(fields=["expires"], name="authentik_c_expires_d2f607_idx"),
        ),
        migrations.AddIndex(
            model_name="session",
            index=models.Index(fields=["expiring"], name="authentik_c_expirin_7c2cfb_idx"),
        ),
        migrations.AddIndex(
            model_name="session",
            index=models.Index(
                fields=["expiring", "expires"], name="authentik_c_expirin_1ab2e4_idx"
            ),
        ),
        migrations.AddIndex(
            model_name="session",
            index=models.Index(
                fields=["expires", "session_key"], name="authentik_c_expires_c49143_idx"
            ),
        ),
        migrations.CreateModel(
            name="AuthenticatedSession",
            fields=[
                (
                    "session",
                    models.OneToOneField(
                        on_delete=django.db.models.deletion.CASCADE,
                        primary_key=True,
                        serialize=False,
                        to="authentik_core.session",
                    ),
                ),
                ("uuid", models.UUIDField(default=uuid.uuid4, unique=True)),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
            ],
            options={
                "verbose_name": "Authenticated Session",
                "verbose_name_plural": "Authenticated Sessions",
            },
        ),
        # Data migrations: copy sessions from both legacy storage backends.
        migrations.RunPython(
            code=migrate_redis_sessions,
            reverse_code=migrations.RunPython.noop,
        ),
        migrations.RunPython(
            code=migrate_database_sessions,
            reverse_code=migrations.RunPython.noop,
        ),
    ]
| @ -1,18 +0,0 @@ | ||||
| # Generated by Django 5.0.11 on 2025-01-27 13:02 | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
class Migration(migrations.Migration):
    """Drop the transitional OldAuthenticatedSession model.

    Only safe once the session data migrations listed in the dependencies
    below have copied everything into the new models.
    """

    dependencies = [
        ("authentik_core", "0046_session_and_more"),
        ("authentik_providers_rac", "0007_migrate_session"),
        ("authentik_providers_oauth2", "0028_migrate_session"),
    ]

    operations = [
        migrations.DeleteModel(
            name="OldAuthenticatedSession",
        ),
    ]
| @ -1,103 +0,0 @@ | ||||
| # Generated by Django 5.1.9 on 2025-05-14 11:15 | ||||
|  | ||||
| from django.apps.registry import Apps, apps as global_apps | ||||
| from django.db import migrations | ||||
| from django.contrib.contenttypes.management import create_contenttypes | ||||
| from django.contrib.auth.management import create_permissions | ||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||
|  | ||||
|  | ||||
def migrate_authenticated_session_permissions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
    """Migrate permissions from OldAuthenticatedSession to AuthenticatedSession.

    Re-points global (user/group) and object-level (guardian) permission
    assignments from the old content type to the new one. Missing content
    types or permissions are skipped — they should exist at this point, but
    a partially-initialized install must not break the migration.
    """
    db_alias = schema_editor.connection.alias

    # `apps` here is just an instance of `django.db.migrations.state.AppConfigStub`, we need the
    # real config for creating permissions and content types
    authentik_core_config = global_apps.get_app_config("authentik_core")
    # These are only ran by django after all migrations, but we need them right now.
    create_permissions(authentik_core_config, using=db_alias, verbosity=1)
    create_contenttypes(authentik_core_config, using=db_alias, verbosity=1)

    # But from now on, this is just a regular migration, so use `apps`
    Permission = apps.get_model("auth", "Permission")
    ContentType = apps.get_model("contenttypes", "ContentType")
    # These model lookups are loop-invariant; resolve them once instead of on
    # every permission iteration.
    User = apps.get_model("authentik_core", "User")
    DjangoGroup = apps.get_model("auth", "Group")
    UserObjectPermission = apps.get_model("guardian", "UserObjectPermission")
    GroupObjectPermission = apps.get_model("guardian", "GroupObjectPermission")

    try:
        old_ct = ContentType.objects.using(db_alias).get(
            app_label="authentik_core", model="oldauthenticatedsession"
        )
        new_ct = ContentType.objects.using(db_alias).get(
            app_label="authentik_core", model="authenticatedsession"
        )
    except ContentType.DoesNotExist:
        # This should exist at this point, but if not, let's cut our losses
        return

    # Get all permissions for the old content type
    old_perms = Permission.objects.using(db_alias).filter(content_type=old_ct)

    # Re-point every assignment of each old permission to its new equivalent
    for old_perm in old_perms:
        new_perm = (
            Permission.objects.using(db_alias)
            .filter(
                content_type=new_ct,
                codename=old_perm.codename,
            )
            .first()
        )
        if not new_perm:
            # This should exist at this point, but if not, let's cut our losses
            continue

        # Global user permissions
        User.user_permissions.through.objects.using(db_alias).filter(
            permission=old_perm
        ).update(permission=new_perm)

        # Global role permissions
        DjangoGroup.permissions.through.objects.using(db_alias).filter(
            permission=old_perm
        ).update(permission=new_perm)

        # Object user permissions
        UserObjectPermission.objects.using(db_alias).filter(permission=old_perm).update(
            permission=new_perm, content_type=new_ct
        )

        # Object role permissions
        GroupObjectPermission.objects.using(db_alias).filter(permission=old_perm).update(
            permission=new_perm, content_type=new_ct
        )
|  | ||||
|  | ||||
def remove_old_authenticated_session_content_type(
    apps: Apps, schema_editor: BaseDatabaseSchemaEditor
):
    """Delete the stale content type left behind by the removed OldAuthenticatedSession model."""
    content_type_model = apps.get_model("contenttypes", "ContentType")
    stale = content_type_model.objects.using(schema_editor.connection.alias)
    stale.filter(model="oldauthenticatedsession").delete()
|  | ||||
|  | ||||
class Migration(migrations.Migration):
    """Move permission assignments to the new AuthenticatedSession model.

    First re-points all user/group/object permissions from the removed
    OldAuthenticatedSession content type, then deletes the stale content
    type row itself.
    """

    dependencies = [
        ("authentik_core", "0047_delete_oldauthenticatedsession"),
    ]

    operations = [
        migrations.RunPython(
            code=migrate_authenticated_session_permissions,
            reverse_code=migrations.RunPython.noop,
        ),
        migrations.RunPython(
            code=remove_old_authenticated_session_content_type,
            reverse_code=migrations.RunPython.noop,
        ),
    ]
| @ -1,7 +1,6 @@ | ||||
| """authentik core models""" | ||||
|  | ||||
| from datetime import datetime | ||||
| from enum import StrEnum | ||||
| from hashlib import sha256 | ||||
| from typing import Any, Optional, Self | ||||
| from uuid import uuid4 | ||||
| @ -10,7 +9,6 @@ from deepmerge import always_merger | ||||
| from django.contrib.auth.hashers import check_password | ||||
| from django.contrib.auth.models import AbstractUser | ||||
| from django.contrib.auth.models import UserManager as DjangoUserManager | ||||
| from django.contrib.sessions.base_session import AbstractBaseSession | ||||
| from django.db import models | ||||
| from django.db.models import Q, QuerySet, options | ||||
| from django.db.models.constants import LOOKUP_SEP | ||||
| @ -18,7 +16,7 @@ from django.http import HttpRequest | ||||
| from django.utils.functional import SimpleLazyObject, cached_property | ||||
| from django.utils.timezone import now | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from django_cte import CTE, with_cte | ||||
| from django_cte import CTEQuerySet, With | ||||
| from guardian.conf import settings | ||||
| from guardian.mixins import GuardianUserMixin | ||||
| from model_utils.managers import InheritanceManager | ||||
| @ -136,7 +134,7 @@ class AttributesMixin(models.Model): | ||||
|         return instance, False | ||||
|  | ||||
|  | ||||
| class GroupQuerySet(QuerySet): | ||||
| class GroupQuerySet(CTEQuerySet): | ||||
|     def with_children_recursive(self): | ||||
|         """Recursively get all groups that have the current queryset as parents | ||||
|         or are indirectly related.""" | ||||
| @ -165,9 +163,9 @@ class GroupQuerySet(QuerySet): | ||||
|             ) | ||||
|  | ||||
|         # Build the recursive query, see above | ||||
|         cte = CTE.recursive(make_cte) | ||||
|         cte = With.recursive(make_cte) | ||||
|         # Return the result, as a usable queryset for Group. | ||||
|         return with_cte(cte, select=cte.join(Group, group_uuid=cte.col.group_uuid)) | ||||
|         return cte.join(Group, group_uuid=cte.col.group_uuid).with_cte(cte) | ||||
|  | ||||
|  | ||||
| class Group(SerializerModel, AttributesMixin): | ||||
| @ -648,30 +646,19 @@ class SourceUserMatchingModes(models.TextChoices): | ||||
|     """Different modes a source can handle new/returning users""" | ||||
|  | ||||
|     IDENTIFIER = "identifier", _("Use the source-specific identifier") | ||||
|     EMAIL_LINK = ( | ||||
|         "email_link", | ||||
|         _( | ||||
|             "Link to a user with identical email address. Can have security implications " | ||||
|             "when a source doesn't validate email addresses." | ||||
|         ), | ||||
|     EMAIL_LINK = "email_link", _( | ||||
|         "Link to a user with identical email address. Can have security implications " | ||||
|         "when a source doesn't validate email addresses." | ||||
|     ) | ||||
|     EMAIL_DENY = ( | ||||
|         "email_deny", | ||||
|         _( | ||||
|             "Use the user's email address, but deny enrollment when the email address already " | ||||
|             "exists." | ||||
|         ), | ||||
|     EMAIL_DENY = "email_deny", _( | ||||
|         "Use the user's email address, but deny enrollment when the email address already exists." | ||||
|     ) | ||||
|     USERNAME_LINK = ( | ||||
|         "username_link", | ||||
|         _( | ||||
|             "Link to a user with identical username. Can have security implications " | ||||
|             "when a username is used with another source." | ||||
|         ), | ||||
|     USERNAME_LINK = "username_link", _( | ||||
|         "Link to a user with identical username. Can have security implications " | ||||
|         "when a username is used with another source." | ||||
|     ) | ||||
|     USERNAME_DENY = ( | ||||
|         "username_deny", | ||||
|         _("Use the user's username, but deny enrollment when the username already exists."), | ||||
|     USERNAME_DENY = "username_deny", _( | ||||
|         "Use the user's username, but deny enrollment when the username already exists." | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @ -679,16 +666,12 @@ class SourceGroupMatchingModes(models.TextChoices): | ||||
|     """Different modes a source can handle new/returning groups""" | ||||
|  | ||||
|     IDENTIFIER = "identifier", _("Use the source-specific identifier") | ||||
|     NAME_LINK = ( | ||||
|         "name_link", | ||||
|         _( | ||||
|             "Link to a group with identical name. Can have security implications " | ||||
|             "when a group name is used with another source." | ||||
|         ), | ||||
|     NAME_LINK = "name_link", _( | ||||
|         "Link to a group with identical name. Can have security implications " | ||||
|         "when a group name is used with another source." | ||||
|     ) | ||||
|     NAME_DENY = ( | ||||
|         "name_deny", | ||||
|         _("Use the group name, but deny enrollment when the name already exists."), | ||||
|     NAME_DENY = "name_deny", _( | ||||
|         "Use the group name, but deny enrollment when the name already exists." | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @ -747,7 +730,8 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | ||||
|         choices=SourceGroupMatchingModes.choices, | ||||
|         default=SourceGroupMatchingModes.IDENTIFIER, | ||||
|         help_text=_( | ||||
|             "How the source determines if an existing group should be used or a new group created." | ||||
|             "How the source determines if an existing group should be used or " | ||||
|             "a new group created." | ||||
|         ), | ||||
|     ) | ||||
|  | ||||
| @ -777,17 +761,11 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | ||||
    @property
    def component(self) -> str:
        """Return component used to edit this object"""
        # The inbuilt (managed) source has no editable UI component; every
        # other source type must override this property.
        if self.managed == self.MANAGED_INBUILT:
            return ""
        raise NotImplementedError
|  | ||||
    @property
    def property_mapping_type(self) -> "type[PropertyMapping]":
        """Return property mapping type used by this object"""
        # The inbuilt source uses the generic base PropertyMapping; concrete
        # source types must override this with their own mapping model.
        if self.managed == self.MANAGED_INBUILT:
            from authentik.core.models import PropertyMapping

            return PropertyMapping
        raise NotImplementedError
|  | ||||
|     def ui_login_button(self, request: HttpRequest) -> UILoginButton | None: | ||||
| @ -802,14 +780,10 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | ||||
|  | ||||
    def get_base_user_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]:
        """Get base properties for a user to build final properties upon."""
        # The inbuilt source contributes no defaults; concrete source types
        # must override this method.
        if self.managed == self.MANAGED_INBUILT:
            return {}
        raise NotImplementedError
|  | ||||
    def get_base_group_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]:
        """Get base properties for a group to build final properties upon."""
        # The inbuilt source contributes no defaults; concrete source types
        # must override this method.
        if self.managed == self.MANAGED_INBUILT:
            return {}
        raise NotImplementedError
|  | ||||
|     def __str__(self): | ||||
| @ -840,7 +814,6 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel): | ||||
|  | ||||
|     user = models.ForeignKey(User, on_delete=models.CASCADE) | ||||
|     source = models.ForeignKey(Source, on_delete=models.CASCADE) | ||||
|     identifier = models.TextField() | ||||
|  | ||||
|     objects = InheritanceManager() | ||||
|  | ||||
| @ -854,10 +827,6 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel): | ||||
|  | ||||
|     class Meta: | ||||
|         unique_together = (("user", "source"),) | ||||
|         indexes = ( | ||||
|             models.Index(fields=("identifier",)), | ||||
|             models.Index(fields=("source", "identifier")), | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class GroupSourceConnection(SerializerModel, CreatedUpdatedModel): | ||||
| @ -1028,81 +997,45 @@ class PropertyMapping(SerializerModel, ManagedModel): | ||||
|         verbose_name_plural = _("Property Mappings") | ||||
|  | ||||
|  | ||||
| class Session(ExpiringModel, AbstractBaseSession): | ||||
|     """User session with extra fields for fast access""" | ||||
| class AuthenticatedSession(ExpiringModel): | ||||
|     """Additional session class for authenticated users. Augments the standard django session | ||||
|     to achieve the following: | ||||
|         - Make it queryable by user | ||||
|         - Have a direct connection to user objects | ||||
|         - Allow users to view their own sessions and terminate them | ||||
|         - Save structured and well-defined information. | ||||
|     """ | ||||
|  | ||||
|     # Remove upstream field because we're using our own ExpiringModel | ||||
|     expire_date = None | ||||
|     session_data = models.BinaryField(_("session data")) | ||||
|     uuid = models.UUIDField(default=uuid4, primary_key=True) | ||||
|  | ||||
|     # Keep in sync with Session.Keys | ||||
|     last_ip = models.GenericIPAddressField() | ||||
|     session_key = models.CharField(max_length=40) | ||||
|     user = models.ForeignKey(User, on_delete=models.CASCADE) | ||||
|  | ||||
|     last_ip = models.TextField() | ||||
|     last_user_agent = models.TextField(blank=True) | ||||
|     last_used = models.DateTimeField(auto_now=True) | ||||
|  | ||||
|     class Meta: | ||||
|         verbose_name = _("Session") | ||||
|         verbose_name_plural = _("Sessions") | ||||
|         indexes = ExpiringModel.Meta.indexes + [ | ||||
|             models.Index(fields=["expires", "session_key"]), | ||||
|         ] | ||||
|         default_permissions = [] | ||||
|  | ||||
|     def __str__(self): | ||||
|         return self.session_key | ||||
|  | ||||
|     class Keys(StrEnum): | ||||
|         """ | ||||
|         Keys to be set with the session interface for the fields above to be updated. | ||||
|  | ||||
|         If a field is added here that needs to be initialized when the session is initialized, | ||||
|         it must also be reflected in authentik.root.middleware.SessionMiddleware.process_request | ||||
|         and in authentik.core.sessions.SessionStore.__init__ | ||||
|         """ | ||||
|  | ||||
|         LAST_IP = "last_ip" | ||||
|         LAST_USER_AGENT = "last_user_agent" | ||||
|         LAST_USED = "last_used" | ||||
|  | ||||
|     @classmethod | ||||
|     def get_session_store_class(cls): | ||||
|         from authentik.core.sessions import SessionStore | ||||
|  | ||||
|         return SessionStore | ||||
|  | ||||
|     def get_decoded(self): | ||||
|         raise NotImplementedError | ||||
|  | ||||
|  | ||||
| class AuthenticatedSession(SerializerModel): | ||||
|     session = models.OneToOneField(Session, on_delete=models.CASCADE, primary_key=True) | ||||
|     # We use the session as primary key, but we need the API to be able to reference | ||||
|     # this object uniquely without exposing the session key | ||||
|     uuid = models.UUIDField(default=uuid4, unique=True) | ||||
|  | ||||
|     user = models.ForeignKey(User, on_delete=models.CASCADE) | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> type[Serializer]: | ||||
|         from authentik.core.api.authenticated_sessions import AuthenticatedSessionSerializer | ||||
|  | ||||
|         return AuthenticatedSessionSerializer | ||||
|  | ||||
|     class Meta: | ||||
|         verbose_name = _("Authenticated Session") | ||||
|         verbose_name_plural = _("Authenticated Sessions") | ||||
|         indexes = ExpiringModel.Meta.indexes + [ | ||||
|             models.Index(fields=["session_key"]), | ||||
|         ] | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         return f"Authenticated Session {str(self.pk)[:10]}" | ||||
|         return f"Authenticated Session {self.session_key[:10]}" | ||||
|  | ||||
|     @staticmethod | ||||
|     def from_request(request: HttpRequest, user: User) -> Optional["AuthenticatedSession"]: | ||||
|         """Create a new session from a http request""" | ||||
|         if not hasattr(request, "session") or not request.session.exists( | ||||
|             request.session.session_key | ||||
|         ): | ||||
|         from authentik.root.middleware import ClientIPMiddleware | ||||
|  | ||||
|         if not hasattr(request, "session") or not request.session.session_key: | ||||
|             return None | ||||
|         return AuthenticatedSession( | ||||
|             session=Session.objects.filter(session_key=request.session.session_key).first(), | ||||
|             session_key=request.session.session_key, | ||||
|             user=user, | ||||
|             last_ip=ClientIPMiddleware.get_client_ip(request), | ||||
|             last_user_agent=request.META.get("HTTP_USER_AGENT", ""), | ||||
|             expires=request.session.get_expiry_date(), | ||||
|         ) | ||||
|  | ||||
| @ -1,168 +0,0 @@ | ||||
| """authentik sessions engine""" | ||||
|  | ||||
| import pickle  # nosec | ||||
|  | ||||
| from django.contrib.auth import BACKEND_SESSION_KEY, HASH_SESSION_KEY, SESSION_KEY | ||||
| from django.contrib.sessions.backends.db import SessionStore as SessionBase | ||||
| from django.core.exceptions import SuspiciousOperation | ||||
| from django.utils import timezone | ||||
| from django.utils.functional import cached_property | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.root.middleware import ClientIPMiddleware | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| class SessionStore(SessionBase): | ||||
|     def __init__(self, session_key=None, last_ip=None, last_user_agent=""): | ||||
|         super().__init__(session_key) | ||||
|         self._create_kwargs = { | ||||
|             "last_ip": last_ip or ClientIPMiddleware.default_ip, | ||||
|             "last_user_agent": last_user_agent, | ||||
|         } | ||||
|  | ||||
|     @classmethod | ||||
|     def get_model_class(cls): | ||||
|         from authentik.core.models import Session | ||||
|  | ||||
|         return Session | ||||
|  | ||||
|     @cached_property | ||||
|     def model_fields(self): | ||||
|         return [k.value for k in self.model.Keys] | ||||
|  | ||||
|     def _get_session_from_db(self): | ||||
|         try: | ||||
|             return ( | ||||
|                 self.model.objects.select_related( | ||||
|                     "authenticatedsession", | ||||
|                     "authenticatedsession__user", | ||||
|                 ) | ||||
|                 .prefetch_related( | ||||
|                     "authenticatedsession__user__groups", | ||||
|                     "authenticatedsession__user__user_permissions", | ||||
|                 ) | ||||
|                 .get( | ||||
|                     session_key=self.session_key, | ||||
|                     expires__gt=timezone.now(), | ||||
|                 ) | ||||
|             ) | ||||
|         except (self.model.DoesNotExist, SuspiciousOperation) as exc: | ||||
|             if isinstance(exc, SuspiciousOperation): | ||||
|                 LOGGER.warning(str(exc)) | ||||
|             self._session_key = None | ||||
|  | ||||
|     async def _aget_session_from_db(self): | ||||
|         try: | ||||
|             return ( | ||||
|                 await self.model.objects.select_related( | ||||
|                     "authenticatedsession", | ||||
|                     "authenticatedsession__user", | ||||
|                 ) | ||||
|                 .prefetch_related( | ||||
|                     "authenticatedsession__user__groups", | ||||
|                     "authenticatedsession__user__user_permissions", | ||||
|                 ) | ||||
|                 .aget( | ||||
|                     session_key=self.session_key, | ||||
|                     expires__gt=timezone.now(), | ||||
|                 ) | ||||
|             ) | ||||
|         except (self.model.DoesNotExist, SuspiciousOperation) as exc: | ||||
|             if isinstance(exc, SuspiciousOperation): | ||||
|                 LOGGER.warning(str(exc)) | ||||
|             self._session_key = None | ||||
|  | ||||
|     def encode(self, session_dict): | ||||
|         return pickle.dumps(session_dict, protocol=pickle.HIGHEST_PROTOCOL) | ||||
|  | ||||
|     def decode(self, session_data): | ||||
|         try: | ||||
|             return pickle.loads(session_data)  # nosec | ||||
|         except pickle.PickleError: | ||||
|             # ValueError, unpickling exceptions. If any of these happen, just return an empty | ||||
|             # dictionary (an empty session) | ||||
|             pass | ||||
|         return {} | ||||
|  | ||||
|     def load(self): | ||||
|         s = self._get_session_from_db() | ||||
|         if s: | ||||
|             return { | ||||
|                 "authenticatedsession": getattr(s, "authenticatedsession", None), | ||||
|                 **{k: getattr(s, k) for k in self.model_fields}, | ||||
|                 **self.decode(s.session_data), | ||||
|             } | ||||
|         else: | ||||
|             return {} | ||||
|  | ||||
|     async def aload(self): | ||||
|         s = await self._aget_session_from_db() | ||||
|         if s: | ||||
|             return { | ||||
|                 "authenticatedsession": getattr(s, "authenticatedsession", None), | ||||
|                 **{k: getattr(s, k) for k in self.model_fields}, | ||||
|                 **self.decode(s.session_data), | ||||
|             } | ||||
|         else: | ||||
|             return {} | ||||
|  | ||||
|     def create_model_instance(self, data): | ||||
|         args = { | ||||
|             "session_key": self._get_or_create_session_key(), | ||||
|             "expires": self.get_expiry_date(), | ||||
|             "session_data": {}, | ||||
|             **self._create_kwargs, | ||||
|         } | ||||
|         for k, v in data.items(): | ||||
|             # Don't save: | ||||
|             # - unused auth data | ||||
|             # - related models | ||||
|             if k in [SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY, "authenticatedsession"]: | ||||
|                 pass | ||||
|             elif k in self.model_fields: | ||||
|                 args[k] = v | ||||
|             else: | ||||
|                 args["session_data"][k] = v | ||||
|         args["session_data"] = self.encode(args["session_data"]) | ||||
|         return self.model(**args) | ||||
|  | ||||
|     async def acreate_model_instance(self, data): | ||||
|         args = { | ||||
|             "session_key": await self._aget_or_create_session_key(), | ||||
|             "expires": await self.aget_expiry_date(), | ||||
|             "session_data": {}, | ||||
|             **self._create_kwargs, | ||||
|         } | ||||
|         for k, v in data.items(): | ||||
|             # Don't save: | ||||
|             # - unused auth data | ||||
|             # - related models | ||||
|             if k in [SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY, "authenticatedsession"]: | ||||
|                 pass | ||||
|             elif k in self.model_fields: | ||||
|                 args[k] = v | ||||
|             else: | ||||
|                 args["session_data"][k] = v | ||||
|         args["session_data"] = self.encode(args["session_data"]) | ||||
|         return self.model(**args) | ||||
|  | ||||
|     @classmethod | ||||
|     def clear_expired(cls): | ||||
|         cls.get_model_class().objects.filter(expires__lt=timezone.now()).delete() | ||||
|  | ||||
|     @classmethod | ||||
|     async def aclear_expired(cls): | ||||
|         await cls.get_model_class().objects.filter(expires__lt=timezone.now()).adelete() | ||||
|  | ||||
    def cycle_key(self):
        """Rotate the session key while preserving the current session data.

        Order matters here: a fresh session record is created first, the
        in-memory payload is restored onto it, and only then is the record
        under the old key deleted.
        """
        data = self._session
        key = self.session_key
        self.create()
        self._session_cache = data
        if key:
            self.delete(key)
        # Re-point any linked AuthenticatedSession at the new key.
        # NOTE(review): force_insert assumes the row keyed by the old
        # session id no longer exists after the delete above — confirm.
        if (authenticated_session := data.get("authenticatedsession")) is not None:
            authenticated_session.session_id = self.session_key
            authenticated_session.save(force_insert=True)
| @ -1,10 +1,11 @@ | ||||
| """authentik core signals""" | ||||
|  | ||||
| from django.contrib.auth.signals import user_logged_in | ||||
| from django.contrib.auth.signals import user_logged_in, user_logged_out | ||||
| from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||
| from django.core.cache import cache | ||||
| from django.core.signals import Signal | ||||
| from django.db.models import Model | ||||
| from django.db.models.signals import post_delete, post_save, pre_save | ||||
| from django.db.models.signals import post_save, pre_delete, pre_save | ||||
| from django.dispatch import receiver | ||||
| from django.http.request import HttpRequest | ||||
| from structlog.stdlib import get_logger | ||||
| @ -14,7 +15,6 @@ from authentik.core.models import ( | ||||
|     AuthenticatedSession, | ||||
|     BackchannelProvider, | ||||
|     ExpiringModel, | ||||
|     Session, | ||||
|     User, | ||||
|     default_token_duration, | ||||
| ) | ||||
| @ -49,10 +49,19 @@ def user_logged_in_session(sender, request: HttpRequest, user: User, **_): | ||||
|         session.save() | ||||
|  | ||||
|  | ||||
| @receiver(post_delete, sender=AuthenticatedSession) | ||||
@receiver(user_logged_out)
def user_logged_out_session(sender, request: HttpRequest, user: User, **_):
    """Delete AuthenticatedSession if it exists"""
    # Nothing to clean up for sessions that were never persisted
    # (no session object or no session key on the request).
    if not request.session or not request.session.session_key:
        return
    AuthenticatedSession.objects.filter(session_key=request.session.session_key).delete()
|  | ||||
|  | ||||
| @receiver(pre_delete, sender=AuthenticatedSession) | ||||
| def authenticated_session_delete(sender: type[Model], instance: "AuthenticatedSession", **_): | ||||
|     """Delete session when authenticated session is deleted""" | ||||
|     Session.objects.filter(session_key=instance.pk).delete() | ||||
|     cache_key = f"{KEY_PREFIX}{instance.session_key}" | ||||
|     cache.delete(cache_key) | ||||
|  | ||||
|  | ||||
| @receiver(pre_save) | ||||
|  | ||||
| @ -48,7 +48,6 @@ LOGGER = get_logger() | ||||
|  | ||||
| PLAN_CONTEXT_SOURCE_GROUPS = "source_groups" | ||||
| SESSION_KEY_SOURCE_FLOW_STAGES = "authentik/flows/source_flow_stages" | ||||
| SESSION_KEY_SOURCE_FLOW_CONTEXT = "authentik/flows/source_flow_context" | ||||
| SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token"  # nosec | ||||
|  | ||||
|  | ||||
| @ -262,7 +261,6 @@ class SourceFlowManager: | ||||
|                 plan.append_stage(stage) | ||||
|         for stage in self.request.session.get(SESSION_KEY_SOURCE_FLOW_STAGES, []): | ||||
|             plan.append_stage(stage) | ||||
|         plan.context.update(self.request.session.get(SESSION_KEY_SOURCE_FLOW_CONTEXT, {})) | ||||
|         return plan.to_redirect(self.request, flow) | ||||
|  | ||||
|     def handle_auth( | ||||
|  | ||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user
	