Compare commits
	
		
			1 Commits
		
	
	
		
			celery-2-d
			...
			permission
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| ff787a0f59 | 
| @ -1,5 +1,5 @@ | |||||||
| [bumpversion] | [bumpversion] | ||||||
| current_version = 2025.6.3 | current_version = 2025.2.1 | ||||||
| tag = True | tag = True | ||||||
| commit = True | commit = True | ||||||
| parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? | parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? | ||||||
| @ -17,12 +17,8 @@ optional_value = final | |||||||
|  |  | ||||||
| [bumpversion:file:pyproject.toml] | [bumpversion:file:pyproject.toml] | ||||||
|  |  | ||||||
| [bumpversion:file:uv.lock] |  | ||||||
|  |  | ||||||
| [bumpversion:file:package.json] | [bumpversion:file:package.json] | ||||||
|  |  | ||||||
| [bumpversion:file:package-lock.json] |  | ||||||
|  |  | ||||||
| [bumpversion:file:docker-compose.yml] | [bumpversion:file:docker-compose.yml] | ||||||
|  |  | ||||||
| [bumpversion:file:schema.yml] | [bumpversion:file:schema.yml] | ||||||
| @ -33,4 +29,6 @@ optional_value = final | |||||||
|  |  | ||||||
| [bumpversion:file:internal/constants/constants.go] | [bumpversion:file:internal/constants/constants.go] | ||||||
|  |  | ||||||
|  | [bumpversion:file:web/src/common/constants.ts] | ||||||
|  |  | ||||||
| [bumpversion:file:lifecycle/aws/template.yaml] | [bumpversion:file:lifecycle/aws/template.yaml] | ||||||
|  | |||||||
| @ -5,10 +5,8 @@ dist/** | |||||||
| build/** | build/** | ||||||
| build_docs/** | build_docs/** | ||||||
| *Dockerfile | *Dockerfile | ||||||
| **/*Dockerfile |  | ||||||
| blueprints/local | blueprints/local | ||||||
| .git | .git | ||||||
| !gen-ts-api/node_modules | !gen-ts-api/node_modules | ||||||
| !gen-ts-api/dist/** | !gen-ts-api/dist/** | ||||||
| !gen-go-api/ | !gen-go-api/ | ||||||
| .venv |  | ||||||
|  | |||||||
| @ -7,9 +7,6 @@ charset = utf-8 | |||||||
| trim_trailing_whitespace = true | trim_trailing_whitespace = true | ||||||
| insert_final_newline = true | insert_final_newline = true | ||||||
|  |  | ||||||
| [*.toml] |  | ||||||
| indent_size = 2 |  | ||||||
|  |  | ||||||
| [*.html] | [*.html] | ||||||
| indent_size = 2 | indent_size = 2 | ||||||
|  |  | ||||||
|  | |||||||
							
								
								
									
										22
									
								
								.github/ISSUE_TEMPLATE/docs_issue.md
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										22
									
								
								.github/ISSUE_TEMPLATE/docs_issue.md
									
									
									
									
										vendored
									
									
								
							| @ -1,22 +0,0 @@ | |||||||
| --- |  | ||||||
| name: Documentation issue |  | ||||||
| about: Suggest an improvement or report a problem |  | ||||||
| title: "" |  | ||||||
| labels: documentation |  | ||||||
| assignees: "" |  | ||||||
| --- |  | ||||||
|  |  | ||||||
| **Do you see an area that can be clarified or expanded, a technical inaccuracy, or a broken link? Please describe.** |  | ||||||
| A clear and concise description of what the problem is, or where the document can be improved. Ex. I believe we need more details about [...] |  | ||||||
|  |  | ||||||
| **Provide the URL or link to the exact page in the documentation to which you are referring.** |  | ||||||
| If there are multiple pages, list them all, and be sure to state the header or section where the content is. |  | ||||||
|  |  | ||||||
| **Describe the solution you'd like** |  | ||||||
| A clear and concise description of what you want to happen. |  | ||||||
|  |  | ||||||
| **Additional context** |  | ||||||
| Add any other context or screenshots about the documentation issue here. |  | ||||||
|  |  | ||||||
| **Consider opening a PR!** |  | ||||||
| If the issue is one that you can fix, or even make a good pass at, we'd appreciate a PR. For more information about making a contribution to the docs, and using our Style Guide and our templates, refer to ["Writing documentation"](https://docs.goauthentik.io/docs/developer-docs/docs/writing-documentation). |  | ||||||
| @ -44,6 +44,7 @@ if is_release: | |||||||
|         ] |         ] | ||||||
|         if not prerelease: |         if not prerelease: | ||||||
|             image_tags += [ |             image_tags += [ | ||||||
|  |                 f"{name}:latest", | ||||||
|                 f"{name}:{version_family}", |                 f"{name}:{version_family}", | ||||||
|             ] |             ] | ||||||
| else: | else: | ||||||
|  | |||||||
							
								
								
									
										18
									
								
								.github/actions/setup/action.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										18
									
								
								.github/actions/setup/action.yml
									
									
									
									
										vendored
									
									
								
							| @ -9,22 +9,17 @@ inputs: | |||||||
| runs: | runs: | ||||||
|   using: "composite" |   using: "composite" | ||||||
|   steps: |   steps: | ||||||
|     - name: Install apt deps |     - name: Install poetry & deps | ||||||
|       shell: bash |       shell: bash | ||||||
|       run: | |       run: | | ||||||
|  |         pipx install poetry || true | ||||||
|         sudo apt-get update |         sudo apt-get update | ||||||
|         sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext libkrb5-dev krb5-kdc krb5-user krb5-admin-server |         sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext libkrb5-dev krb5-kdc krb5-user krb5-admin-server | ||||||
|     - name: Install uv |     - name: Setup python and restore poetry | ||||||
|       uses: astral-sh/setup-uv@v5 |  | ||||||
|       with: |  | ||||||
|         enable-cache: true |  | ||||||
|     - name: Setup python |  | ||||||
|       uses: actions/setup-python@v5 |       uses: actions/setup-python@v5 | ||||||
|       with: |       with: | ||||||
|         python-version-file: "pyproject.toml" |         python-version-file: "pyproject.toml" | ||||||
|     - name: Install Python deps |         cache: "poetry" | ||||||
|       shell: bash |  | ||||||
|       run: uv sync --all-extras --dev --frozen |  | ||||||
|     - name: Setup node |     - name: Setup node | ||||||
|       uses: actions/setup-node@v4 |       uses: actions/setup-node@v4 | ||||||
|       with: |       with: | ||||||
| @ -36,7 +31,7 @@ runs: | |||||||
|       with: |       with: | ||||||
|         go-version-file: "go.mod" |         go-version-file: "go.mod" | ||||||
|     - name: Setup docker cache |     - name: Setup docker cache | ||||||
|       uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7 |       uses: ScribeMD/docker-cache@0.5.0 | ||||||
|       with: |       with: | ||||||
|         key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }} |         key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }} | ||||||
|     - name: Setup dependencies |     - name: Setup dependencies | ||||||
| @ -44,9 +39,10 @@ runs: | |||||||
|       run: | |       run: | | ||||||
|         export PSQL_TAG=${{ inputs.postgresql_version }} |         export PSQL_TAG=${{ inputs.postgresql_version }} | ||||||
|         docker compose -f .github/actions/setup/docker-compose.yml up -d |         docker compose -f .github/actions/setup/docker-compose.yml up -d | ||||||
|  |         poetry sync | ||||||
|         cd web && npm ci |         cd web && npm ci | ||||||
|     - name: Generate config |     - name: Generate config | ||||||
|       shell: uv run python {0} |       shell: poetry run python {0} | ||||||
|       run: | |       run: | | ||||||
|         from authentik.lib.generators import generate_id |         from authentik.lib.generators import generate_id | ||||||
|         from yaml import safe_dump |         from yaml import safe_dump | ||||||
|  | |||||||
							
								
								
									
										35
									
								
								.github/dependabot.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										35
									
								
								.github/dependabot.yml
									
									
									
									
										vendored
									
									
								
							| @ -23,13 +23,7 @@ updates: | |||||||
|   - package-ecosystem: npm |   - package-ecosystem: npm | ||||||
|     directories: |     directories: | ||||||
|       - "/web" |       - "/web" | ||||||
|       - "/web/packages/sfe" |       - "/web/sfe" | ||||||
|       - "/web/packages/core" |  | ||||||
|       - "/web/packages/esbuild-plugin-live-reload" |  | ||||||
|       - "/packages/prettier-config" |  | ||||||
|       - "/packages/tsconfig" |  | ||||||
|       - "/packages/docusaurus-config" |  | ||||||
|       - "/packages/eslint-config" |  | ||||||
|     schedule: |     schedule: | ||||||
|       interval: daily |       interval: daily | ||||||
|       time: "04:00" |       time: "04:00" | ||||||
| @ -74,9 +68,6 @@ updates: | |||||||
|       wdio: |       wdio: | ||||||
|         patterns: |         patterns: | ||||||
|           - "@wdio/*" |           - "@wdio/*" | ||||||
|       goauthentik: |  | ||||||
|         patterns: |  | ||||||
|           - "@goauthentik/*" |  | ||||||
|   - package-ecosystem: npm |   - package-ecosystem: npm | ||||||
|     directory: "/website" |     directory: "/website" | ||||||
|     schedule: |     schedule: | ||||||
| @ -97,16 +88,6 @@ updates: | |||||||
|           - "swc-*" |           - "swc-*" | ||||||
|           - "lightningcss*" |           - "lightningcss*" | ||||||
|           - "@rspack/binding*" |           - "@rspack/binding*" | ||||||
|       goauthentik: |  | ||||||
|         patterns: |  | ||||||
|           - "@goauthentik/*" |  | ||||||
|       eslint: |  | ||||||
|         patterns: |  | ||||||
|           - "@eslint/*" |  | ||||||
|           - "@typescript-eslint/*" |  | ||||||
|           - "eslint-*" |  | ||||||
|           - "eslint" |  | ||||||
|           - "typescript-eslint" |  | ||||||
|   - package-ecosystem: npm |   - package-ecosystem: npm | ||||||
|     directory: "/lifecycle/aws" |     directory: "/lifecycle/aws" | ||||||
|     schedule: |     schedule: | ||||||
| @ -117,7 +98,7 @@ updates: | |||||||
|       prefix: "lifecycle/aws:" |       prefix: "lifecycle/aws:" | ||||||
|     labels: |     labels: | ||||||
|       - dependencies |       - dependencies | ||||||
|   - package-ecosystem: uv |   - package-ecosystem: pip | ||||||
|     directory: "/" |     directory: "/" | ||||||
|     schedule: |     schedule: | ||||||
|       interval: daily |       interval: daily | ||||||
| @ -137,15 +118,3 @@ updates: | |||||||
|       prefix: "core:" |       prefix: "core:" | ||||||
|     labels: |     labels: | ||||||
|       - dependencies |       - dependencies | ||||||
|   - package-ecosystem: docker-compose |  | ||||||
|     directories: |  | ||||||
|       # - /scripts # Maybe |  | ||||||
|       - /tests/e2e |  | ||||||
|     schedule: |  | ||||||
|       interval: daily |  | ||||||
|       time: "04:00" |  | ||||||
|     open-pull-requests-limit: 10 |  | ||||||
|     commit-message: |  | ||||||
|       prefix: "core:" |  | ||||||
|     labels: |  | ||||||
|       - dependencies |  | ||||||
|  | |||||||
| @ -38,8 +38,6 @@ jobs: | |||||||
|       # Needed for attestation |       # Needed for attestation | ||||||
|       id-token: write |       id-token: write | ||||||
|       attestations: write |       attestations: write | ||||||
|       # Needed for checkout |  | ||||||
|       contents: read |  | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|       - uses: docker/setup-qemu-action@v3.6.0 |       - uses: docker/setup-qemu-action@v3.6.0 | ||||||
|  | |||||||
							
								
								
									
										1
									
								
								.github/workflows/api-py-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.github/workflows/api-py-publish.yml
									
									
									
									
										vendored
									
									
								
							| @ -30,6 +30,7 @@ jobs: | |||||||
|         uses: actions/setup-python@v5 |         uses: actions/setup-python@v5 | ||||||
|         with: |         with: | ||||||
|           python-version-file: "pyproject.toml" |           python-version-file: "pyproject.toml" | ||||||
|  |           cache: "poetry" | ||||||
|       - name: Generate API Client |       - name: Generate API Client | ||||||
|         run: make gen-client-py |         run: make gen-client-py | ||||||
|       - name: Publish package |       - name: Publish package | ||||||
|  | |||||||
							
								
								
									
										1
									
								
								.github/workflows/api-ts-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.github/workflows/api-ts-publish.yml
									
									
									
									
										vendored
									
									
								
							| @ -53,7 +53,6 @@ jobs: | |||||||
|           signoff: true |           signoff: true | ||||||
|           # ID from https://api.github.com/users/authentik-automation[bot] |           # ID from https://api.github.com/users/authentik-automation[bot] | ||||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> |           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> | ||||||
|           labels: dependencies |  | ||||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 |       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||||
|         with: |         with: | ||||||
|           token: ${{ steps.generate_token.outputs.token }} |           token: ${{ steps.generate_token.outputs.token }} | ||||||
|  | |||||||
							
								
								
									
										2
									
								
								.github/workflows/ci-aws-cfn.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/workflows/ci-aws-cfn.yml
									
									
									
									
										vendored
									
									
								
							| @ -33,7 +33,7 @@ jobs: | |||||||
|           npm ci |           npm ci | ||||||
|       - name: Check changes have been applied |       - name: Check changes have been applied | ||||||
|         run: | |         run: | | ||||||
|           uv run make aws-cfn |           poetry run make aws-cfn | ||||||
|           git diff --exit-code |           git diff --exit-code | ||||||
|   ci-aws-cfn-mark: |   ci-aws-cfn-mark: | ||||||
|     if: always() |     if: always() | ||||||
|  | |||||||
							
								
								
									
										3
									
								
								.github/workflows/ci-main-daily.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										3
									
								
								.github/workflows/ci-main-daily.yml
									
									
									
									
										vendored
									
									
								
							| @ -9,15 +9,14 @@ on: | |||||||
|  |  | ||||||
| jobs: | jobs: | ||||||
|   test-container: |   test-container: | ||||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} |  | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     strategy: |     strategy: | ||||||
|       fail-fast: false |       fail-fast: false | ||||||
|       matrix: |       matrix: | ||||||
|         version: |         version: | ||||||
|           - docs |           - docs | ||||||
|           - version-2025-4 |  | ||||||
|           - version-2025-2 |           - version-2025-2 | ||||||
|  |           - version-2024-12 | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|       - run: | |       - run: | | ||||||
|  | |||||||
							
								
								
									
										33
									
								
								.github/workflows/ci-main.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										33
									
								
								.github/workflows/ci-main.yml
									
									
									
									
										vendored
									
									
								
							| @ -34,7 +34,7 @@ jobs: | |||||||
|       - name: Setup authentik env |       - name: Setup authentik env | ||||||
|         uses: ./.github/actions/setup |         uses: ./.github/actions/setup | ||||||
|       - name: run job |       - name: run job | ||||||
|         run: uv run make ci-${{ matrix.job }} |         run: poetry run make ci-${{ matrix.job }} | ||||||
|   test-migrations: |   test-migrations: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
| @ -42,7 +42,7 @@ jobs: | |||||||
|       - name: Setup authentik env |       - name: Setup authentik env | ||||||
|         uses: ./.github/actions/setup |         uses: ./.github/actions/setup | ||||||
|       - name: run migrations |       - name: run migrations | ||||||
|         run: uv run python -m lifecycle.migrate |         run: poetry run python -m lifecycle.migrate | ||||||
|   test-make-seed: |   test-make-seed: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
| @ -62,7 +62,6 @@ jobs: | |||||||
|         psql: |         psql: | ||||||
|           - 15-alpine |           - 15-alpine | ||||||
|           - 16-alpine |           - 16-alpine | ||||||
|           - 17-alpine |  | ||||||
|         run_id: [1, 2, 3, 4, 5] |         run_id: [1, 2, 3, 4, 5] | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
| @ -70,6 +69,8 @@ jobs: | |||||||
|           fetch-depth: 0 |           fetch-depth: 0 | ||||||
|       - name: checkout stable |       - name: checkout stable | ||||||
|         run: | |         run: | | ||||||
|  |           # Delete all poetry envs | ||||||
|  |           rm -rf /home/runner/.cache/pypoetry | ||||||
|           # Copy current, latest config to local |           # Copy current, latest config to local | ||||||
|           cp authentik/lib/default.yml local.env.yml |           cp authentik/lib/default.yml local.env.yml | ||||||
|           cp -R .github .. |           cp -R .github .. | ||||||
| @ -82,7 +83,7 @@ jobs: | |||||||
|         with: |         with: | ||||||
|           postgresql_version: ${{ matrix.psql }} |           postgresql_version: ${{ matrix.psql }} | ||||||
|       - name: run migrations to stable |       - name: run migrations to stable | ||||||
|         run: uv run python -m lifecycle.migrate |         run: poetry run python -m lifecycle.migrate | ||||||
|       - name: checkout current code |       - name: checkout current code | ||||||
|         run: | |         run: | | ||||||
|           set -x |           set -x | ||||||
| @ -90,13 +91,15 @@ jobs: | |||||||
|           git reset --hard HEAD |           git reset --hard HEAD | ||||||
|           git clean -d -fx . |           git clean -d -fx . | ||||||
|           git checkout $GITHUB_SHA |           git checkout $GITHUB_SHA | ||||||
|  |           # Delete previous poetry env | ||||||
|  |           rm -rf /home/runner/.cache/pypoetry/virtualenvs/* | ||||||
|       - name: Setup authentik env (ensure latest deps are installed) |       - name: Setup authentik env (ensure latest deps are installed) | ||||||
|         uses: ./.github/actions/setup |         uses: ./.github/actions/setup | ||||||
|         with: |         with: | ||||||
|           postgresql_version: ${{ matrix.psql }} |           postgresql_version: ${{ matrix.psql }} | ||||||
|       - name: migrate to latest |       - name: migrate to latest | ||||||
|         run: | |         run: | | ||||||
|           uv run python -m lifecycle.migrate |           poetry run python -m lifecycle.migrate | ||||||
|       - name: run tests |       - name: run tests | ||||||
|         env: |         env: | ||||||
|           # Test in the main database that we just migrated from the previous stable version |           # Test in the main database that we just migrated from the previous stable version | ||||||
| @ -105,7 +108,7 @@ jobs: | |||||||
|           CI_RUN_ID: ${{ matrix.run_id }} |           CI_RUN_ID: ${{ matrix.run_id }} | ||||||
|           CI_TOTAL_RUNS: "5" |           CI_TOTAL_RUNS: "5" | ||||||
|         run: | |         run: | | ||||||
|           uv run make ci-test |           poetry run make ci-test | ||||||
|   test-unittest: |   test-unittest: | ||||||
|     name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5 |     name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5 | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
| @ -117,7 +120,6 @@ jobs: | |||||||
|         psql: |         psql: | ||||||
|           - 15-alpine |           - 15-alpine | ||||||
|           - 16-alpine |           - 16-alpine | ||||||
|           - 17-alpine |  | ||||||
|         run_id: [1, 2, 3, 4, 5] |         run_id: [1, 2, 3, 4, 5] | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
| @ -131,7 +133,7 @@ jobs: | |||||||
|           CI_RUN_ID: ${{ matrix.run_id }} |           CI_RUN_ID: ${{ matrix.run_id }} | ||||||
|           CI_TOTAL_RUNS: "5" |           CI_TOTAL_RUNS: "5" | ||||||
|         run: | |         run: | | ||||||
|           uv run make ci-test |           poetry run make ci-test | ||||||
|       - if: ${{ always() }} |       - if: ${{ always() }} | ||||||
|         uses: codecov/codecov-action@v5 |         uses: codecov/codecov-action@v5 | ||||||
|         with: |         with: | ||||||
| @ -154,8 +156,8 @@ jobs: | |||||||
|         uses: helm/kind-action@v1.12.0 |         uses: helm/kind-action@v1.12.0 | ||||||
|       - name: run integration |       - name: run integration | ||||||
|         run: | |         run: | | ||||||
|           uv run coverage run manage.py test tests/integration |           poetry run coverage run manage.py test tests/integration | ||||||
|           uv run coverage xml |           poetry run coverage xml | ||||||
|       - if: ${{ always() }} |       - if: ${{ always() }} | ||||||
|         uses: codecov/codecov-action@v5 |         uses: codecov/codecov-action@v5 | ||||||
|         with: |         with: | ||||||
| @ -202,7 +204,7 @@ jobs: | |||||||
|         uses: actions/cache@v4 |         uses: actions/cache@v4 | ||||||
|         with: |         with: | ||||||
|           path: web/dist |           path: web/dist | ||||||
|           key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b |           key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }} | ||||||
|       - name: prepare web ui |       - name: prepare web ui | ||||||
|         if: steps.cache-web.outputs.cache-hit != 'true' |         if: steps.cache-web.outputs.cache-hit != 'true' | ||||||
|         working-directory: web |         working-directory: web | ||||||
| @ -210,11 +212,10 @@ jobs: | |||||||
|           npm ci |           npm ci | ||||||
|           make -C .. gen-client-ts |           make -C .. gen-client-ts | ||||||
|           npm run build |           npm run build | ||||||
|           npm run build:sfe |  | ||||||
|       - name: run e2e |       - name: run e2e | ||||||
|         run: | |         run: | | ||||||
|           uv run coverage run manage.py test ${{ matrix.job.glob }} |           poetry run coverage run manage.py test ${{ matrix.job.glob }} | ||||||
|           uv run coverage xml |           poetry run coverage xml | ||||||
|       - if: ${{ always() }} |       - if: ${{ always() }} | ||||||
|         uses: codecov/codecov-action@v5 |         uses: codecov/codecov-action@v5 | ||||||
|         with: |         with: | ||||||
| @ -247,13 +248,11 @@ jobs: | |||||||
|       # Needed for attestation |       # Needed for attestation | ||||||
|       id-token: write |       id-token: write | ||||||
|       attestations: write |       attestations: write | ||||||
|       # Needed for checkout |  | ||||||
|       contents: read |  | ||||||
|     needs: ci-core-mark |     needs: ci-core-mark | ||||||
|     uses: ./.github/workflows/_reusable-docker-build.yaml |     uses: ./.github/workflows/_reusable-docker-build.yaml | ||||||
|     secrets: inherit |     secrets: inherit | ||||||
|     with: |     with: | ||||||
|       image_name: ${{ github.repository == 'goauthentik/authentik-internal' && 'ghcr.io/goauthentik/internal-server' || 'ghcr.io/goauthentik/dev-server' }} |       image_name: ghcr.io/goauthentik/dev-server | ||||||
|       release: false |       release: false | ||||||
|   pr-comment: |   pr-comment: | ||||||
|     needs: |     needs: | ||||||
|  | |||||||
							
								
								
									
										3
									
								
								.github/workflows/ci-outpost.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										3
									
								
								.github/workflows/ci-outpost.yml
									
									
									
									
										vendored
									
									
								
							| @ -29,7 +29,7 @@ jobs: | |||||||
|       - name: Generate API |       - name: Generate API | ||||||
|         run: make gen-client-go |         run: make gen-client-go | ||||||
|       - name: golangci-lint |       - name: golangci-lint | ||||||
|         uses: golangci/golangci-lint-action@v8 |         uses: golangci/golangci-lint-action@v6 | ||||||
|         with: |         with: | ||||||
|           version: latest |           version: latest | ||||||
|           args: --timeout 5000s --verbose |           args: --timeout 5000s --verbose | ||||||
| @ -59,7 +59,6 @@ jobs: | |||||||
|         with: |         with: | ||||||
|           jobs: ${{ toJSON(needs) }} |           jobs: ${{ toJSON(needs) }} | ||||||
|   build-container: |   build-container: | ||||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} |  | ||||||
|     timeout-minutes: 120 |     timeout-minutes: 120 | ||||||
|     needs: |     needs: | ||||||
|       - ci-outpost-mark |       - ci-outpost-mark | ||||||
|  | |||||||
							
								
								
									
										52
									
								
								.github/workflows/ci-website.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										52
									
								
								.github/workflows/ci-website.yml
									
									
									
									
										vendored
									
									
								
							| @ -49,7 +49,6 @@ jobs: | |||||||
|       matrix: |       matrix: | ||||||
|         job: |         job: | ||||||
|           - build |           - build | ||||||
|           - build:integrations |  | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|       - uses: actions/setup-node@v4 |       - uses: actions/setup-node@v4 | ||||||
| @ -62,65 +61,14 @@ jobs: | |||||||
|       - name: build |       - name: build | ||||||
|         working-directory: website/ |         working-directory: website/ | ||||||
|         run: npm run ${{ matrix.job }} |         run: npm run ${{ matrix.job }} | ||||||
|   build-container: |  | ||||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|     permissions: |  | ||||||
|       # Needed to upload container images to ghcr.io |  | ||||||
|       packages: write |  | ||||||
|       # Needed for attestation |  | ||||||
|       id-token: write |  | ||||||
|       attestations: write |  | ||||||
|     steps: |  | ||||||
|       - uses: actions/checkout@v4 |  | ||||||
|         with: |  | ||||||
|           ref: ${{ github.event.pull_request.head.sha }} |  | ||||||
|       - name: Set up QEMU |  | ||||||
|         uses: docker/setup-qemu-action@v3.6.0 |  | ||||||
|       - name: Set up Docker Buildx |  | ||||||
|         uses: docker/setup-buildx-action@v3 |  | ||||||
|       - name: prepare variables |  | ||||||
|         uses: ./.github/actions/docker-push-variables |  | ||||||
|         id: ev |  | ||||||
|         env: |  | ||||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} |  | ||||||
|         with: |  | ||||||
|           image-name: ghcr.io/goauthentik/dev-docs |  | ||||||
|       - name: Login to Container Registry |  | ||||||
|         if: ${{ steps.ev.outputs.shouldPush == 'true' }} |  | ||||||
|         uses: docker/login-action@v3 |  | ||||||
|         with: |  | ||||||
|           registry: ghcr.io |  | ||||||
|           username: ${{ github.repository_owner }} |  | ||||||
|           password: ${{ secrets.GITHUB_TOKEN }} |  | ||||||
|       - name: Build Docker Image |  | ||||||
|         id: push |  | ||||||
|         uses: docker/build-push-action@v6 |  | ||||||
|         with: |  | ||||||
|           tags: ${{ steps.ev.outputs.imageTags }} |  | ||||||
|           file: website/Dockerfile |  | ||||||
|           push: ${{ steps.ev.outputs.shouldPush == 'true' }} |  | ||||||
|           platforms: linux/amd64,linux/arm64 |  | ||||||
|           context: . |  | ||||||
|           cache-from: type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache |  | ||||||
|           cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache,mode=max' || '' }} |  | ||||||
|       - uses: actions/attest-build-provenance@v2 |  | ||||||
|         id: attest |  | ||||||
|         if: ${{ steps.ev.outputs.shouldPush == 'true' }} |  | ||||||
|         with: |  | ||||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} |  | ||||||
|           subject-digest: ${{ steps.push.outputs.digest }} |  | ||||||
|           push-to-registry: true |  | ||||||
|   ci-website-mark: |   ci-website-mark: | ||||||
|     if: always() |     if: always() | ||||||
|     needs: |     needs: | ||||||
|       - lint |       - lint | ||||||
|       - test |       - test | ||||||
|       - build |       - build | ||||||
|       - build-container |  | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - uses: re-actors/alls-green@release/v1 |       - uses: re-actors/alls-green@release/v1 | ||||||
|         with: |         with: | ||||||
|           jobs: ${{ toJSON(needs) }} |           jobs: ${{ toJSON(needs) }} | ||||||
|           allowed-skips: ${{ github.repository == 'goauthentik/authentik-internal' && 'build-container' || '[]' }} |  | ||||||
|  | |||||||
							
								
								
									
										2
									
								
								.github/workflows/codeql-analysis.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/workflows/codeql-analysis.yml
									
									
									
									
										vendored
									
									
								
							| @ -2,7 +2,7 @@ name: "CodeQL" | |||||||
|  |  | ||||||
| on: | on: | ||||||
|   push: |   push: | ||||||
|     branches: [main, next, version*] |     branches: [main, "*", next, version*] | ||||||
|   pull_request: |   pull_request: | ||||||
|     branches: [main] |     branches: [main] | ||||||
|   schedule: |   schedule: | ||||||
|  | |||||||
| @ -2,7 +2,7 @@ name: authentik-gen-update-webauthn-mds | |||||||
| on: | on: | ||||||
|   workflow_dispatch: |   workflow_dispatch: | ||||||
|   schedule: |   schedule: | ||||||
|     - cron: "30 1 1,15 * *" |     - cron: '30 1 1,15 * *' | ||||||
|  |  | ||||||
| env: | env: | ||||||
|   POSTGRES_DB: authentik |   POSTGRES_DB: authentik | ||||||
| @ -24,7 +24,7 @@ jobs: | |||||||
|           token: ${{ steps.generate_token.outputs.token }} |           token: ${{ steps.generate_token.outputs.token }} | ||||||
|       - name: Setup authentik env |       - name: Setup authentik env | ||||||
|         uses: ./.github/actions/setup |         uses: ./.github/actions/setup | ||||||
|       - run: uv run ak update_webauthn_mds |       - run: poetry run ak update_webauthn_mds | ||||||
|       - uses: peter-evans/create-pull-request@v7 |       - uses: peter-evans/create-pull-request@v7 | ||||||
|         id: cpr |         id: cpr | ||||||
|         with: |         with: | ||||||
| @ -37,7 +37,6 @@ jobs: | |||||||
|           signoff: true |           signoff: true | ||||||
|           # ID from https://api.github.com/users/authentik-automation[bot] |           # ID from https://api.github.com/users/authentik-automation[bot] | ||||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> |           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> | ||||||
|           labels: dependencies |  | ||||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 |       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||||
|         with: |         with: | ||||||
|           token: ${{ steps.generate_token.outputs.token }} |           token: ${{ steps.generate_token.outputs.token }} | ||||||
|  | |||||||
							
								
								
									
										1
									
								
								.github/workflows/image-compress.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.github/workflows/image-compress.yml
									
									
									
									
										vendored
									
									
								
							| @ -53,7 +53,6 @@ jobs: | |||||||
|           body: ${{ steps.compress.outputs.markdown }} |           body: ${{ steps.compress.outputs.markdown }} | ||||||
|           delete-branch: true |           delete-branch: true | ||||||
|           signoff: true |           signoff: true | ||||||
|           labels: dependencies |  | ||||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 |       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||||
|         if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}" |         if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}" | ||||||
|         with: |         with: | ||||||
|  | |||||||
							
								
								
									
										47
									
								
								.github/workflows/packages-npm-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										47
									
								
								.github/workflows/packages-npm-publish.yml
									
									
									
									
										vendored
									
									
								
							| @ -1,47 +0,0 @@ | |||||||
| name: authentik-packages-npm-publish |  | ||||||
| on: |  | ||||||
|   push: |  | ||||||
|     branches: [main] |  | ||||||
|     paths: |  | ||||||
|       - packages/docusaurus-config/** |  | ||||||
|       - packages/eslint-config/** |  | ||||||
|       - packages/prettier-config/** |  | ||||||
|       - packages/tsconfig/** |  | ||||||
|       - web/packages/esbuild-plugin-live-reload/** |  | ||||||
|   workflow_dispatch: |  | ||||||
| jobs: |  | ||||||
|   publish: |  | ||||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|     strategy: |  | ||||||
|       fail-fast: false |  | ||||||
|       matrix: |  | ||||||
|         package: |  | ||||||
|           - packages/docusaurus-config |  | ||||||
|           - packages/eslint-config |  | ||||||
|           - packages/prettier-config |  | ||||||
|           - packages/tsconfig |  | ||||||
|           - web/packages/esbuild-plugin-live-reload |  | ||||||
|     steps: |  | ||||||
|       - uses: actions/checkout@v4 |  | ||||||
|         with: |  | ||||||
|           fetch-depth: 2 |  | ||||||
|       - uses: actions/setup-node@v4 |  | ||||||
|         with: |  | ||||||
|           node-version-file: ${{ matrix.package }}/package.json |  | ||||||
|           registry-url: "https://registry.npmjs.org" |  | ||||||
|       - name: Get changed files |  | ||||||
|         id: changed-files |  | ||||||
|         uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c |  | ||||||
|         with: |  | ||||||
|           files: | |  | ||||||
|             ${{ matrix.package }}/package.json |  | ||||||
|       - name: Publish package |  | ||||||
|         if: steps.changed-files.outputs.any_changed == 'true' |  | ||||||
|         working-directory: ${{ matrix.package }} |  | ||||||
|         run: | |  | ||||||
|           npm ci |  | ||||||
|           npm run build |  | ||||||
|           npm publish |  | ||||||
|         env: |  | ||||||
|           NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }} |  | ||||||
							
								
								
									
										4
									
								
								.github/workflows/publish-source-docs.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										4
									
								
								.github/workflows/publish-source-docs.yml
									
									
									
									
										vendored
									
									
								
							| @ -21,8 +21,8 @@ jobs: | |||||||
|         uses: ./.github/actions/setup |         uses: ./.github/actions/setup | ||||||
|       - name: generate docs |       - name: generate docs | ||||||
|         run: | |         run: | | ||||||
|           uv run make migrate |           poetry run make migrate | ||||||
|           uv run ak build_source_docs |           poetry run ak build_source_docs | ||||||
|       - name: Publish |       - name: Publish | ||||||
|         uses: netlify/actions/cli@master |         uses: netlify/actions/cli@master | ||||||
|         with: |         with: | ||||||
|  | |||||||
							
								
								
									
										45
									
								
								.github/workflows/release-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										45
									
								
								.github/workflows/release-publish.yml
									
									
									
									
										vendored
									
									
								
							| @ -20,49 +20,6 @@ jobs: | |||||||
|       release: true |       release: true | ||||||
|       registry_dockerhub: true |       registry_dockerhub: true | ||||||
|       registry_ghcr: true |       registry_ghcr: true | ||||||
|   build-docs: |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|     permissions: |  | ||||||
|       # Needed to upload container images to ghcr.io |  | ||||||
|       packages: write |  | ||||||
|       # Needed for attestation |  | ||||||
|       id-token: write |  | ||||||
|       attestations: write |  | ||||||
|     steps: |  | ||||||
|       - uses: actions/checkout@v4 |  | ||||||
|       - name: Set up QEMU |  | ||||||
|         uses: docker/setup-qemu-action@v3.6.0 |  | ||||||
|       - name: Set up Docker Buildx |  | ||||||
|         uses: docker/setup-buildx-action@v3 |  | ||||||
|       - name: prepare variables |  | ||||||
|         uses: ./.github/actions/docker-push-variables |  | ||||||
|         id: ev |  | ||||||
|         env: |  | ||||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} |  | ||||||
|         with: |  | ||||||
|           image-name: ghcr.io/goauthentik/docs |  | ||||||
|       - name: Login to GitHub Container Registry |  | ||||||
|         uses: docker/login-action@v3 |  | ||||||
|         with: |  | ||||||
|           registry: ghcr.io |  | ||||||
|           username: ${{ github.repository_owner }} |  | ||||||
|           password: ${{ secrets.GITHUB_TOKEN }} |  | ||||||
|       - name: Build Docker Image |  | ||||||
|         id: push |  | ||||||
|         uses: docker/build-push-action@v6 |  | ||||||
|         with: |  | ||||||
|           tags: ${{ steps.ev.outputs.imageTags }} |  | ||||||
|           file: website/Dockerfile |  | ||||||
|           push: true |  | ||||||
|           platforms: linux/amd64,linux/arm64 |  | ||||||
|           context: . |  | ||||||
|       - uses: actions/attest-build-provenance@v2 |  | ||||||
|         id: attest |  | ||||||
|         if: true |  | ||||||
|         with: |  | ||||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} |  | ||||||
|           subject-digest: ${{ steps.push.outputs.digest }} |  | ||||||
|           push-to-registry: true |  | ||||||
|   build-outpost: |   build-outpost: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     permissions: |     permissions: | ||||||
| @ -236,6 +193,6 @@ jobs: | |||||||
|           SENTRY_ORG: authentik-security-inc |           SENTRY_ORG: authentik-security-inc | ||||||
|           SENTRY_PROJECT: authentik |           SENTRY_PROJECT: authentik | ||||||
|         with: |         with: | ||||||
|           release: authentik@${{ steps.ev.outputs.version }} |           version: authentik@${{ steps.ev.outputs.version }} | ||||||
|           sourcemaps: "./web/dist" |           sourcemaps: "./web/dist" | ||||||
|           url_prefix: "~/static/dist" |           url_prefix: "~/static/dist" | ||||||
|  | |||||||
							
								
								
									
										21
									
								
								.github/workflows/repo-mirror-cleanup.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										21
									
								
								.github/workflows/repo-mirror-cleanup.yml
									
									
									
									
										vendored
									
									
								
							| @ -1,21 +0,0 @@ | |||||||
| name: "authentik-repo-mirror-cleanup" |  | ||||||
|  |  | ||||||
| on: |  | ||||||
|   workflow_dispatch: |  | ||||||
|  |  | ||||||
| jobs: |  | ||||||
|   to_internal: |  | ||||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|     steps: |  | ||||||
|       - uses: actions/checkout@v4 |  | ||||||
|         with: |  | ||||||
|           fetch-depth: 0 |  | ||||||
|       - if: ${{ env.MIRROR_KEY != '' }} |  | ||||||
|         uses: BeryJu/repository-mirroring-action@5cf300935bc2e068f73ea69bcc411a8a997208eb |  | ||||||
|         with: |  | ||||||
|           target_repo_url: git@github.com:goauthentik/authentik-internal.git |  | ||||||
|           ssh_private_key: ${{ secrets.GH_MIRROR_KEY }} |  | ||||||
|           args: --tags --force --prune |  | ||||||
|         env: |  | ||||||
|           MIRROR_KEY: ${{ secrets.GH_MIRROR_KEY }} |  | ||||||
							
								
								
									
										9
									
								
								.github/workflows/repo-mirror.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										9
									
								
								.github/workflows/repo-mirror.yml
									
									
									
									
										vendored
									
									
								
							| @ -11,10 +11,11 @@ jobs: | |||||||
|         with: |         with: | ||||||
|           fetch-depth: 0 |           fetch-depth: 0 | ||||||
|       - if: ${{ env.MIRROR_KEY != '' }} |       - if: ${{ env.MIRROR_KEY != '' }} | ||||||
|         uses: BeryJu/repository-mirroring-action@5cf300935bc2e068f73ea69bcc411a8a997208eb |         uses: pixta-dev/repository-mirroring-action@v1 | ||||||
|         with: |         with: | ||||||
|           target_repo_url: git@github.com:goauthentik/authentik-internal.git |           target_repo_url: | ||||||
|           ssh_private_key: ${{ secrets.GH_MIRROR_KEY }} |             git@github.com:goauthentik/authentik-internal.git | ||||||
|           args: --tags --force |           ssh_private_key: | ||||||
|  |             ${{ secrets.GH_MIRROR_KEY }} | ||||||
|         env: |         env: | ||||||
|           MIRROR_KEY: ${{ secrets.GH_MIRROR_KEY }} |           MIRROR_KEY: ${{ secrets.GH_MIRROR_KEY }} | ||||||
|  | |||||||
							
								
								
									
										27
									
								
								.github/workflows/semgrep.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										27
									
								
								.github/workflows/semgrep.yml
									
									
									
									
										vendored
									
									
								
							| @ -1,27 +0,0 @@ | |||||||
| name: authentik-semgrep |  | ||||||
| on: |  | ||||||
|   workflow_dispatch: {} |  | ||||||
|   pull_request: {} |  | ||||||
|   push: |  | ||||||
|     branches: |  | ||||||
|       - main |  | ||||||
|       - master |  | ||||||
|     paths: |  | ||||||
|       - .github/workflows/semgrep.yml |  | ||||||
|   schedule: |  | ||||||
|     # random HH:MM to avoid a load spike on GitHub Actions at 00:00 |  | ||||||
|     - cron: '12 15 * * *' |  | ||||||
| jobs: |  | ||||||
|   semgrep: |  | ||||||
|     name: semgrep/ci |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|     permissions: |  | ||||||
|       contents: read |  | ||||||
|     env: |  | ||||||
|       SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }} |  | ||||||
|     container: |  | ||||||
|       image: semgrep/semgrep |  | ||||||
|     if: (github.actor != 'dependabot[bot]') |  | ||||||
|     steps: |  | ||||||
|       - uses: actions/checkout@v4 |  | ||||||
|       - run: semgrep ci |  | ||||||
| @ -16,7 +16,6 @@ env: | |||||||
|  |  | ||||||
| jobs: | jobs: | ||||||
|   compile: |   compile: | ||||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} |  | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - id: generate_token |       - id: generate_token | ||||||
| @ -37,10 +36,10 @@ jobs: | |||||||
|         run: make gen-client-ts |         run: make gen-client-ts | ||||||
|       - name: run extract |       - name: run extract | ||||||
|         run: | |         run: | | ||||||
|           uv run make i18n-extract |           poetry run make i18n-extract | ||||||
|       - name: run compile |       - name: run compile | ||||||
|         run: | |         run: | | ||||||
|           uv run ak compilemessages |           poetry run ak compilemessages | ||||||
|           make web-check-compile |           make web-check-compile | ||||||
|       - name: Create Pull Request |       - name: Create Pull Request | ||||||
|         if: ${{ github.event_name != 'pull_request' }} |         if: ${{ github.event_name != 'pull_request' }} | ||||||
| @ -53,6 +52,3 @@ jobs: | |||||||
|           body: "core, web: update translations" |           body: "core, web: update translations" | ||||||
|           delete-branch: true |           delete-branch: true | ||||||
|           signoff: true |           signoff: true | ||||||
|           labels: dependencies |  | ||||||
|           # ID from https://api.github.com/users/authentik-automation[bot] |  | ||||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> |  | ||||||
|  | |||||||
							
								
								
									
										15
									
								
								.github/workflows/translation-rename.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										15
									
								
								.github/workflows/translation-rename.yml
									
									
									
									
										vendored
									
									
								
							| @ -15,7 +15,6 @@ jobs: | |||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}} |     if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}} | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |  | ||||||
|       - id: generate_token |       - id: generate_token | ||||||
|         uses: tibdex/github-app-token@v2 |         uses: tibdex/github-app-token@v2 | ||||||
|         with: |         with: | ||||||
| @ -26,13 +25,23 @@ jobs: | |||||||
|         env: |         env: | ||||||
|           GH_TOKEN: ${{ steps.generate_token.outputs.token }} |           GH_TOKEN: ${{ steps.generate_token.outputs.token }} | ||||||
|         run: | |         run: | | ||||||
|           title=$(gh pr view ${{ github.event.pull_request.number }} --json  "title" -q ".title") |           title=$(curl -q -L \ | ||||||
|  |             -H "Accept: application/vnd.github+json" \ | ||||||
|  |             -H "Authorization: Bearer ${GH_TOKEN}" \ | ||||||
|  |             -H "X-GitHub-Api-Version: 2022-11-28" \ | ||||||
|  |             https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} | jq -r .title) | ||||||
|           echo "title=${title}" >> "$GITHUB_OUTPUT" |           echo "title=${title}" >> "$GITHUB_OUTPUT" | ||||||
|       - name: Rename |       - name: Rename | ||||||
|         env: |         env: | ||||||
|           GH_TOKEN: ${{ steps.generate_token.outputs.token }} |           GH_TOKEN: ${{ steps.generate_token.outputs.token }} | ||||||
|         run: | |         run: | | ||||||
|           gh pr edit ${{ github.event.pull_request.number }} -t "translate: ${{ steps.title.outputs.title }}" --add-label dependencies |           curl -L \ | ||||||
|  |             -X PATCH \ | ||||||
|  |             -H "Accept: application/vnd.github+json" \ | ||||||
|  |             -H "Authorization: Bearer ${GH_TOKEN}" \ | ||||||
|  |             -H "X-GitHub-Api-Version: 2022-11-28" \ | ||||||
|  |             https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} \ | ||||||
|  |             -d "{\"title\":\"translate: ${{ steps.title.outputs.title }}\"}" | ||||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 |       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||||
|         with: |         with: | ||||||
|           token: ${{ steps.generate_token.outputs.token }} |           token: ${{ steps.generate_token.outputs.token }} | ||||||
|  | |||||||
							
								
								
									
										10
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										10
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @ -11,10 +11,6 @@ local_settings.py | |||||||
| db.sqlite3 | db.sqlite3 | ||||||
| media | media | ||||||
|  |  | ||||||
| # Node |  | ||||||
|  |  | ||||||
| node_modules |  | ||||||
|  |  | ||||||
| # If your build process includes running collectstatic, then you probably don't need or want to include staticfiles/ | # If your build process includes running collectstatic, then you probably don't need or want to include staticfiles/ | ||||||
| # in your Git repository. Update and uncomment the following line accordingly. | # in your Git repository. Update and uncomment the following line accordingly. | ||||||
| # <django-project-name>/staticfiles/ | # <django-project-name>/staticfiles/ | ||||||
| @ -37,7 +33,6 @@ eggs/ | |||||||
| lib64/ | lib64/ | ||||||
| parts/ | parts/ | ||||||
| dist/ | dist/ | ||||||
| out/ |  | ||||||
| sdist/ | sdist/ | ||||||
| var/ | var/ | ||||||
| wheels/ | wheels/ | ||||||
| @ -100,6 +95,9 @@ ipython_config.py | |||||||
| # pyenv | # pyenv | ||||||
| .python-version | .python-version | ||||||
|  |  | ||||||
|  | # celery beat schedule file | ||||||
|  | celerybeat-schedule | ||||||
|  |  | ||||||
| # SageMath parsed files | # SageMath parsed files | ||||||
| *.sage.py | *.sage.py | ||||||
|  |  | ||||||
| @ -163,6 +161,8 @@ dmypy.json | |||||||
|  |  | ||||||
| # pyenv | # pyenv | ||||||
|  |  | ||||||
|  | # celery beat schedule file | ||||||
|  |  | ||||||
| # SageMath parsed files | # SageMath parsed files | ||||||
|  |  | ||||||
| # Environments | # Environments | ||||||
|  | |||||||
| @ -1,47 +0,0 @@ | |||||||
| # Prettier Ignorefile |  | ||||||
|  |  | ||||||
| ## Static Files |  | ||||||
| **/LICENSE |  | ||||||
|  |  | ||||||
| authentik/stages/**/* |  | ||||||
|  |  | ||||||
| ## Build asset directories |  | ||||||
| coverage |  | ||||||
| dist |  | ||||||
| out |  | ||||||
| .docusaurus |  | ||||||
| website/docs/developer-docs/api/**/* |  | ||||||
|  |  | ||||||
| ## Environment |  | ||||||
| *.env |  | ||||||
|  |  | ||||||
| ## Secrets |  | ||||||
| *.secrets |  | ||||||
|  |  | ||||||
| ## Yarn |  | ||||||
| .yarn/**/* |  | ||||||
|  |  | ||||||
| ## Node |  | ||||||
| node_modules |  | ||||||
| coverage |  | ||||||
|  |  | ||||||
| ## Configs |  | ||||||
| *.log |  | ||||||
| *.yaml |  | ||||||
| *.yml |  | ||||||
|  |  | ||||||
| # Templates |  | ||||||
| # TODO: Rename affected files to *.template.* or similar. |  | ||||||
| *.html |  | ||||||
| *.mdx |  | ||||||
| *.md |  | ||||||
|  |  | ||||||
| ## Import order matters |  | ||||||
| poly.ts |  | ||||||
| src/locale-codes.ts |  | ||||||
| src/locales/ |  | ||||||
|  |  | ||||||
| # Storybook |  | ||||||
| storybook-static/ |  | ||||||
| .storybook/css-import-maps* |  | ||||||
|  |  | ||||||
							
								
								
									
										10
									
								
								.vscode/settings.json
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										10
									
								
								.vscode/settings.json
									
									
									
									
										vendored
									
									
								
							| @ -6,19 +6,17 @@ | |||||||
|         "!Context scalar", |         "!Context scalar", | ||||||
|         "!Enumerate sequence", |         "!Enumerate sequence", | ||||||
|         "!Env scalar", |         "!Env scalar", | ||||||
|         "!Env sequence", |  | ||||||
|         "!Find sequence", |         "!Find sequence", | ||||||
|         "!Format sequence", |         "!Format sequence", | ||||||
|         "!If sequence", |         "!If sequence", | ||||||
|         "!Index scalar", |         "!Index scalar", | ||||||
|         "!KeyOf scalar", |         "!KeyOf scalar", | ||||||
|         "!Value scalar", |         "!Value scalar", | ||||||
|         "!AtIndex scalar", |         "!AtIndex scalar" | ||||||
|         "!ParseJSON scalar" |  | ||||||
|     ], |     ], | ||||||
|     "typescript.preferences.importModuleSpecifier": "non-relative", |     "typescript.preferences.importModuleSpecifier": "non-relative", | ||||||
|     "typescript.preferences.importModuleSpecifierEnding": "index", |     "typescript.preferences.importModuleSpecifierEnding": "index", | ||||||
|     "typescript.tsdk": "./node_modules/typescript/lib", |     "typescript.tsdk": "./web/node_modules/typescript/lib", | ||||||
|     "typescript.enablePromptUseWorkspaceTsdk": true, |     "typescript.enablePromptUseWorkspaceTsdk": true, | ||||||
|     "yaml.schemas": { |     "yaml.schemas": { | ||||||
|         "./blueprints/schema.json": "blueprints/**/*.yaml" |         "./blueprints/schema.json": "blueprints/**/*.yaml" | ||||||
| @ -32,5 +30,7 @@ | |||||||
|         } |         } | ||||||
|     ], |     ], | ||||||
|     "go.testFlags": ["-count=1"], |     "go.testFlags": ["-count=1"], | ||||||
|     "github-actions.workflows.pinned.workflows": [".github/workflows/ci-main.yml"] |     "github-actions.workflows.pinned.workflows": [ | ||||||
|  |         ".github/workflows/ci-main.yml" | ||||||
|  |     ] | ||||||
| } | } | ||||||
|  | |||||||
							
								
								
									
										46
									
								
								.vscode/tasks.json
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										46
									
								
								.vscode/tasks.json
									
									
									
									
										vendored
									
									
								
							| @ -3,13 +3,8 @@ | |||||||
|     "tasks": [ |     "tasks": [ | ||||||
|         { |         { | ||||||
|             "label": "authentik/core: make", |             "label": "authentik/core: make", | ||||||
|             "command": "uv", |             "command": "poetry", | ||||||
|             "args": [ |             "args": ["run", "make", "lint-fix", "lint"], | ||||||
|                 "run", |  | ||||||
|                 "make", |  | ||||||
|                 "lint-fix", |  | ||||||
|                 "lint" |  | ||||||
|             ], |  | ||||||
|             "presentation": { |             "presentation": { | ||||||
|                 "panel": "new" |                 "panel": "new" | ||||||
|             }, |             }, | ||||||
| @ -17,12 +12,8 @@ | |||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/core: run", |             "label": "authentik/core: run", | ||||||
|             "command": "uv", |             "command": "poetry", | ||||||
|             "args": [ |             "args": ["run", "ak", "server"], | ||||||
|                 "run", |  | ||||||
|                 "ak", |  | ||||||
|                 "server" |  | ||||||
|             ], |  | ||||||
|             "group": "build", |             "group": "build", | ||||||
|             "presentation": { |             "presentation": { | ||||||
|                 "panel": "dedicated", |                 "panel": "dedicated", | ||||||
| @ -32,17 +23,13 @@ | |||||||
|         { |         { | ||||||
|             "label": "authentik/web: make", |             "label": "authentik/web: make", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": [ |             "args": ["web"], | ||||||
|                 "web" |  | ||||||
|             ], |  | ||||||
|             "group": "build" |             "group": "build" | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/web: watch", |             "label": "authentik/web: watch", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": [ |             "args": ["web-watch"], | ||||||
|                 "web-watch" |  | ||||||
|             ], |  | ||||||
|             "group": "build", |             "group": "build", | ||||||
|             "presentation": { |             "presentation": { | ||||||
|                 "panel": "dedicated", |                 "panel": "dedicated", | ||||||
| @ -52,26 +39,19 @@ | |||||||
|         { |         { | ||||||
|             "label": "authentik: install", |             "label": "authentik: install", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": [ |             "args": ["install", "-j4"], | ||||||
|                 "install", |  | ||||||
|                 "-j4" |  | ||||||
|             ], |  | ||||||
|             "group": "build" |             "group": "build" | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/website: make", |             "label": "authentik/website: make", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": [ |             "args": ["website"], | ||||||
|                 "website" |  | ||||||
|             ], |  | ||||||
|             "group": "build" |             "group": "build" | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/website: watch", |             "label": "authentik/website: watch", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": [ |             "args": ["website-watch"], | ||||||
|                 "website-watch" |  | ||||||
|             ], |  | ||||||
|             "group": "build", |             "group": "build", | ||||||
|             "presentation": { |             "presentation": { | ||||||
|                 "panel": "dedicated", |                 "panel": "dedicated", | ||||||
| @ -80,12 +60,8 @@ | |||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/api: generate", |             "label": "authentik/api: generate", | ||||||
|             "command": "uv", |             "command": "poetry", | ||||||
|             "args": [ |             "args": ["run", "make", "gen"], | ||||||
|                 "run", |  | ||||||
|                 "make", |  | ||||||
|                 "gen" |  | ||||||
|             ], |  | ||||||
|             "group": "build" |             "group": "build" | ||||||
|         } |         } | ||||||
|     ] |     ] | ||||||
|  | |||||||
| @ -10,7 +10,7 @@ schemas/                        @goauthentik/backend | |||||||
| scripts/                        @goauthentik/backend | scripts/                        @goauthentik/backend | ||||||
| tests/                          @goauthentik/backend | tests/                          @goauthentik/backend | ||||||
| pyproject.toml                  @goauthentik/backend | pyproject.toml                  @goauthentik/backend | ||||||
| uv.lock                         @goauthentik/backend | poetry.lock                     @goauthentik/backend | ||||||
| go.mod                          @goauthentik/backend | go.mod                          @goauthentik/backend | ||||||
| go.sum                          @goauthentik/backend | go.sum                          @goauthentik/backend | ||||||
| # Infrastructure | # Infrastructure | ||||||
| @ -23,8 +23,6 @@ docker-compose.yml              @goauthentik/infrastructure | |||||||
| Makefile                        @goauthentik/infrastructure | Makefile                        @goauthentik/infrastructure | ||||||
| .editorconfig                   @goauthentik/infrastructure | .editorconfig                   @goauthentik/infrastructure | ||||||
| CODEOWNERS                      @goauthentik/infrastructure | CODEOWNERS                      @goauthentik/infrastructure | ||||||
| # Web packages |  | ||||||
| packages/                       @goauthentik/frontend |  | ||||||
| # Web | # Web | ||||||
| web/                            @goauthentik/frontend | web/                            @goauthentik/frontend | ||||||
| tests/wdio/                     @goauthentik/frontend | tests/wdio/                     @goauthentik/frontend | ||||||
|  | |||||||
							
								
								
									
										136
									
								
								Dockerfile
									
									
									
									
									
								
							
							
						
						
									
										136
									
								
								Dockerfile
									
									
									
									
									
								
							| @ -1,7 +1,27 @@ | |||||||
| # syntax=docker/dockerfile:1 | # syntax=docker/dockerfile:1 | ||||||
|  |  | ||||||
| # Stage 1: Build webui | # Stage 1: Build website | ||||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:24-slim AS node-builder | FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS website-builder | ||||||
|  |  | ||||||
|  | ENV NODE_ENV=production \ | ||||||
|  |     GIT_UNAVAILABLE=true | ||||||
|  |  | ||||||
|  | WORKDIR /work/website | ||||||
|  |  | ||||||
|  | RUN --mount=type=bind,target=/work/website/package.json,src=./website/package.json \ | ||||||
|  |     --mount=type=bind,target=/work/website/package-lock.json,src=./website/package-lock.json \ | ||||||
|  |     --mount=type=cache,id=npm-website,sharing=shared,target=/root/.npm \ | ||||||
|  |     npm ci --include=dev | ||||||
|  |  | ||||||
|  | COPY ./website /work/website/ | ||||||
|  | COPY ./blueprints /work/blueprints/ | ||||||
|  | COPY ./schema.yml /work/ | ||||||
|  | COPY ./SECURITY.md /work/ | ||||||
|  |  | ||||||
|  | RUN npm run build-bundled | ||||||
|  |  | ||||||
|  | # Stage 2: Build webui | ||||||
|  | FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS web-builder | ||||||
|  |  | ||||||
| ARG GIT_BUILD_HASH | ARG GIT_BUILD_HASH | ||||||
| ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ||||||
| @ -13,7 +33,7 @@ RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \ | |||||||
|     --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \ |     --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \ | ||||||
|     --mount=type=bind,target=/work/web/packages/sfe/package.json,src=./web/packages/sfe/package.json \ |     --mount=type=bind,target=/work/web/packages/sfe/package.json,src=./web/packages/sfe/package.json \ | ||||||
|     --mount=type=bind,target=/work/web/scripts,src=./web/scripts \ |     --mount=type=bind,target=/work/web/scripts,src=./web/scripts \ | ||||||
|     --mount=type=cache,id=npm-ak,sharing=shared,target=/root/.npm \ |     --mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \ | ||||||
|     npm ci --include=dev |     npm ci --include=dev | ||||||
|  |  | ||||||
| COPY ./package.json /work | COPY ./package.json /work | ||||||
| @ -21,11 +41,10 @@ COPY ./web /work/web/ | |||||||
| COPY ./website /work/website/ | COPY ./website /work/website/ | ||||||
| COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api | COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api | ||||||
|  |  | ||||||
| RUN npm run build && \ | RUN npm run build | ||||||
|     npm run build:sfe |  | ||||||
|  |  | ||||||
| # Stage 2: Build go proxy | # Stage 3: Build go proxy | ||||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.24-bookworm AS go-builder | FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/oss/go/microsoft/golang:1.23-fips-bookworm AS go-builder | ||||||
|  |  | ||||||
| ARG TARGETOS | ARG TARGETOS | ||||||
| ARG TARGETARCH | ARG TARGETARCH | ||||||
| @ -49,8 +68,8 @@ RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \ | |||||||
| COPY ./cmd /go/src/goauthentik.io/cmd | COPY ./cmd /go/src/goauthentik.io/cmd | ||||||
| COPY ./authentik/lib /go/src/goauthentik.io/authentik/lib | COPY ./authentik/lib /go/src/goauthentik.io/authentik/lib | ||||||
| COPY ./web/static.go /go/src/goauthentik.io/web/static.go | COPY ./web/static.go /go/src/goauthentik.io/web/static.go | ||||||
| COPY --from=node-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt | COPY --from=web-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt | ||||||
| COPY --from=node-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt | COPY --from=web-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt | ||||||
| COPY ./internal /go/src/goauthentik.io/internal | COPY ./internal /go/src/goauthentik.io/internal | ||||||
| COPY ./go.mod /go/src/goauthentik.io/go.mod | COPY ./go.mod /go/src/goauthentik.io/go.mod | ||||||
| COPY ./go.sum /go/src/goauthentik.io/go.sum | COPY ./go.sum /go/src/goauthentik.io/go.sum | ||||||
| @ -58,76 +77,70 @@ COPY ./go.sum /go/src/goauthentik.io/go.sum | |||||||
| RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \ | RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \ | ||||||
|     --mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \ |     --mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \ | ||||||
|     if [ "$TARGETARCH" = "arm64" ]; then export CC=aarch64-linux-gnu-gcc && export CC_FOR_TARGET=gcc-aarch64-linux-gnu; fi && \ |     if [ "$TARGETARCH" = "arm64" ]; then export CC=aarch64-linux-gnu-gcc && export CC_FOR_TARGET=gcc-aarch64-linux-gnu; fi && \ | ||||||
|     CGO_ENABLED=1 GOFIPS140=latest GOARM="${TARGETVARIANT#v}" \ |     CGO_ENABLED=1 GOEXPERIMENT="systemcrypto" GOFLAGS="-tags=requirefips" GOARM="${TARGETVARIANT#v}" \ | ||||||
|     go build -o /go/authentik ./cmd/server |     go build -o /go/authentik ./cmd/server | ||||||
|  |  | ||||||
| # Stage 3: MaxMind GeoIP | # Stage 4: MaxMind GeoIP | ||||||
| FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip | FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip | ||||||
|  |  | ||||||
| ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN" | ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN" | ||||||
| ENV GEOIPUPDATE_VERBOSE="1" | ENV GEOIPUPDATE_VERBOSE="1" | ||||||
| ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID" | ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID" | ||||||
|  | ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY" | ||||||
|  |  | ||||||
| USER root | USER root | ||||||
| RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ | RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ | ||||||
|     --mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \ |     --mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \ | ||||||
|     mkdir -p /usr/share/GeoIP && \ |     mkdir -p /usr/share/GeoIP && \ | ||||||
|     /bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" |     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" | ||||||
|  |  | ||||||
| # Stage 4: Download uv | # Stage 5: Python dependencies | ||||||
| FROM ghcr.io/astral-sh/uv:0.7.17 AS uv | FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS python-deps | ||||||
| # Stage 5: Base python image |  | ||||||
| FROM ghcr.io/goauthentik/fips-python:3.13.5-slim-bookworm-fips AS python-base |  | ||||||
|  |  | ||||||
| ENV VENV_PATH="/ak-root/.venv" \ |  | ||||||
|     PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \ |  | ||||||
|     UV_COMPILE_BYTECODE=1 \ |  | ||||||
|     UV_LINK_MODE=copy \ |  | ||||||
|     UV_NATIVE_TLS=1 \ |  | ||||||
|     UV_PYTHON_DOWNLOADS=0 |  | ||||||
|  |  | ||||||
| WORKDIR /ak-root/ |  | ||||||
|  |  | ||||||
| COPY --from=uv /uv /uvx /bin/ |  | ||||||
|  |  | ||||||
| # Stage 6: Python dependencies |  | ||||||
| FROM python-base AS python-deps |  | ||||||
|  |  | ||||||
| ARG TARGETARCH | ARG TARGETARCH | ||||||
| ARG TARGETVARIANT | ARG TARGETVARIANT | ||||||
|  |  | ||||||
| RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache | WORKDIR /ak-root/poetry | ||||||
|  |  | ||||||
| ENV PATH="/root/.cargo/bin:$PATH" | ENV VENV_PATH="/ak-root/venv" \ | ||||||
|  |     POETRY_VIRTUALENVS_CREATE=false \ | ||||||
|  |     PATH="/ak-root/venv/bin:$PATH" | ||||||
|  |  | ||||||
|  | RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache | ||||||
|  |  | ||||||
| RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \ | RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \ | ||||||
|     apt-get update && \ |     apt-get update && \ | ||||||
|     # Required for installing pip packages |     # Required for installing pip packages | ||||||
|  |     apt-get install -y --no-install-recommends build-essential pkg-config libpq-dev libkrb5-dev | ||||||
|  |  | ||||||
|  | RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \ | ||||||
|  |     --mount=type=bind,target=./poetry.lock,src=./poetry.lock \ | ||||||
|  |     --mount=type=cache,target=/root/.cache/pip \ | ||||||
|  |     --mount=type=cache,target=/root/.cache/pypoetry \ | ||||||
|  |     pip install --no-cache cffi && \ | ||||||
|  |     apt-get update && \ | ||||||
|     apt-get install -y --no-install-recommends \ |     apt-get install -y --no-install-recommends \ | ||||||
|     # Build essentials |         build-essential libffi-dev \ | ||||||
|     build-essential pkg-config libffi-dev git \ |         # Required for cryptography | ||||||
|     # cryptography |         curl pkg-config \ | ||||||
|     curl \ |         # Required for lxml | ||||||
|     # libxml |         libxslt-dev zlib1g-dev \ | ||||||
|     libxslt-dev zlib1g-dev \ |         # Required for xmlsec | ||||||
|     # postgresql |         libltdl-dev \ | ||||||
|     libpq-dev \ |         # Required for kadmin | ||||||
|     # python-kadmin-rs |         sccache clang && \ | ||||||
|     clang libkrb5-dev sccache \ |     curl https://sh.rustup.rs -sSf | sh -s -- -y && \ | ||||||
|     # xmlsec |     . "$HOME/.cargo/env" && \ | ||||||
|     libltdl-dev && \ |     python -m venv /ak-root/venv/ && \ | ||||||
|     curl https://sh.rustup.rs -sSf | sh -s -- -y |     bash -c "source ${VENV_PATH}/bin/activate && \ | ||||||
|  |     pip3 install --upgrade pip poetry && \ | ||||||
|  |     poetry config --local installer.no-binary cryptography,xmlsec,lxml,python-kadmin-rs && \ | ||||||
|  |     poetry install --only=main --no-ansi --no-interaction --no-root && \ | ||||||
|  |     pip uninstall cryptography -y && \ | ||||||
|  |     poetry install --only=main --no-ansi --no-interaction --no-root" | ||||||
|  |  | ||||||
| ENV UV_NO_BINARY_PACKAGE="cryptography lxml python-kadmin-rs xmlsec" | # Stage 6: Run | ||||||
|  | FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS final-image | ||||||
| RUN --mount=type=bind,target=pyproject.toml,src=pyproject.toml \ |  | ||||||
|     --mount=type=bind,target=uv.lock,src=uv.lock \ |  | ||||||
|     --mount=type=bind,target=packages,src=packages \ |  | ||||||
|     --mount=type=cache,target=/root/.cache/uv \ |  | ||||||
|     uv sync --frozen --no-install-project --no-dev |  | ||||||
|  |  | ||||||
| # Stage 7: Run |  | ||||||
| FROM python-base AS final-image |  | ||||||
|  |  | ||||||
| ARG VERSION | ARG VERSION | ||||||
| ARG GIT_BUILD_HASH | ARG GIT_BUILD_HASH | ||||||
| @ -159,7 +172,7 @@ RUN apt-get update && \ | |||||||
|  |  | ||||||
| COPY ./authentik/ /authentik | COPY ./authentik/ /authentik | ||||||
| COPY ./pyproject.toml / | COPY ./pyproject.toml / | ||||||
| COPY ./uv.lock / | COPY ./poetry.lock / | ||||||
| COPY ./schemas /schemas | COPY ./schemas /schemas | ||||||
| COPY ./locale /locale | COPY ./locale /locale | ||||||
| COPY ./tests /tests | COPY ./tests /tests | ||||||
| @ -168,10 +181,10 @@ COPY ./blueprints /blueprints | |||||||
| COPY ./lifecycle/ /lifecycle | COPY ./lifecycle/ /lifecycle | ||||||
| COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf | COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf | ||||||
| COPY --from=go-builder /go/authentik /bin/authentik | COPY --from=go-builder /go/authentik /bin/authentik | ||||||
| COPY ./packages/ /ak-root/packages | COPY --from=python-deps /ak-root/venv /ak-root/venv | ||||||
| COPY --from=python-deps /ak-root/.venv /ak-root/.venv | COPY --from=web-builder /work/web/dist/ /web/dist/ | ||||||
| COPY --from=node-builder /work/web/dist/ /web/dist/ | COPY --from=web-builder /work/web/authentik/ /web/authentik/ | ||||||
| COPY --from=node-builder /work/web/authentik/ /web/authentik/ | COPY --from=website-builder /work/website/build/ /website/help/ | ||||||
| COPY --from=geoip /usr/share/GeoIP /geoip | COPY --from=geoip /usr/share/GeoIP /geoip | ||||||
|  |  | ||||||
| USER 1000 | USER 1000 | ||||||
| @ -179,6 +192,9 @@ USER 1000 | |||||||
| ENV TMPDIR=/dev/shm/ \ | ENV TMPDIR=/dev/shm/ \ | ||||||
|     PYTHONDONTWRITEBYTECODE=1 \ |     PYTHONDONTWRITEBYTECODE=1 \ | ||||||
|     PYTHONUNBUFFERED=1 \ |     PYTHONUNBUFFERED=1 \ | ||||||
|  |     PATH="/ak-root/venv/bin:/lifecycle:$PATH" \ | ||||||
|  |     VENV_PATH="/ak-root/venv" \ | ||||||
|  |     POETRY_VIRTUALENVS_CREATE=false \ | ||||||
|     GOFIPS=1 |     GOFIPS=1 | ||||||
|  |  | ||||||
| HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ] | HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ] | ||||||
|  | |||||||
							
								
								
									
										110
									
								
								Makefile
									
									
									
									
									
								
							
							
						
						
									
										110
									
								
								Makefile
									
									
									
									
									
								
							| @ -1,21 +1,20 @@ | |||||||
| .PHONY: gen dev-reset all clean test web website | .PHONY: gen dev-reset all clean test web website | ||||||
|  |  | ||||||
| SHELL := /usr/bin/env bash | .SHELLFLAGS += ${SHELLFLAGS} -e | ||||||
| .SHELLFLAGS += ${SHELLFLAGS} -e -o pipefail |  | ||||||
| PWD = $(shell pwd) | PWD = $(shell pwd) | ||||||
| UID = $(shell id -u) | UID = $(shell id -u) | ||||||
| GID = $(shell id -g) | GID = $(shell id -g) | ||||||
| NPM_VERSION = $(shell python -m scripts.generate_semver) | NPM_VERSION = $(shell python -m scripts.generate_semver) | ||||||
| PY_SOURCES = authentik packages tests scripts lifecycle .github | PY_SOURCES = authentik tests scripts lifecycle .github | ||||||
| DOCKER_IMAGE ?= "authentik:test" | DOCKER_IMAGE ?= "authentik:test" | ||||||
|  |  | ||||||
| GEN_API_TS = gen-ts-api | GEN_API_TS = "gen-ts-api" | ||||||
| GEN_API_PY = gen-py-api | GEN_API_PY = "gen-py-api" | ||||||
| GEN_API_GO = gen-go-api | GEN_API_GO = "gen-go-api" | ||||||
|  |  | ||||||
| pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null) | pg_user := $(shell poetry run python -m authentik.lib.config postgresql.user 2>/dev/null) | ||||||
| pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null) | pg_host := $(shell poetry run python -m authentik.lib.config postgresql.host 2>/dev/null) | ||||||
| pg_name := $(shell uv run python -m authentik.lib.config postgresql.name 2>/dev/null) | pg_name := $(shell poetry run python -m authentik.lib.config postgresql.name 2>/dev/null) | ||||||
|  |  | ||||||
| all: lint-fix lint test gen web  ## Lint, build, and test everything | all: lint-fix lint test gen web  ## Lint, build, and test everything | ||||||
|  |  | ||||||
| @ -33,37 +32,34 @@ go-test: | |||||||
| 	go test -timeout 0 -v -race -cover ./... | 	go test -timeout 0 -v -race -cover ./... | ||||||
|  |  | ||||||
| test: ## Run the server tests and produce a coverage report (locally) | test: ## Run the server tests and produce a coverage report (locally) | ||||||
| 	uv run coverage run manage.py test --keepdb authentik | 	poetry run coverage run manage.py test --keepdb authentik | ||||||
| 	uv run coverage html | 	poetry run coverage html | ||||||
| 	uv run coverage report | 	poetry run coverage report | ||||||
|  |  | ||||||
| lint-fix: lint-codespell  ## Lint and automatically fix errors in the python source code. Reports spelling errors. | lint-fix: lint-codespell  ## Lint and automatically fix errors in the python source code. Reports spelling errors. | ||||||
| 	uv run black $(PY_SOURCES) | 	poetry run black $(PY_SOURCES) | ||||||
| 	uv run ruff check --fix $(PY_SOURCES) | 	poetry run ruff check --fix $(PY_SOURCES) | ||||||
|  |  | ||||||
| lint-codespell:  ## Reports spelling errors. | lint-codespell:  ## Reports spelling errors. | ||||||
| 	uv run codespell -w | 	poetry run codespell -w | ||||||
|  |  | ||||||
| lint: ## Lint the python and golang sources | lint: ## Lint the python and golang sources | ||||||
| 	uv run bandit -c pyproject.toml -r $(PY_SOURCES) | 	poetry run bandit -c pyproject.toml -r $(PY_SOURCES) | ||||||
| 	golangci-lint run -v | 	golangci-lint run -v | ||||||
|  |  | ||||||
| core-install: | core-install: | ||||||
| 	uv sync --frozen | 	poetry install | ||||||
|  |  | ||||||
| migrate: ## Run the Authentik Django server's migrations | migrate: ## Run the Authentik Django server's migrations | ||||||
| 	uv run python -m lifecycle.migrate | 	poetry run python -m lifecycle.migrate | ||||||
|  |  | ||||||
| i18n-extract: core-i18n-extract web-i18n-extract  ## Extract strings that require translation into files to send to a translation service | i18n-extract: core-i18n-extract web-i18n-extract  ## Extract strings that require translation into files to send to a translation service | ||||||
|  |  | ||||||
| aws-cfn: | aws-cfn: | ||||||
| 	cd lifecycle/aws && npm run aws-cfn | 	cd lifecycle/aws && npm run aws-cfn | ||||||
|  |  | ||||||
| run:  ## Run the main authentik server process |  | ||||||
| 	uv run ak server |  | ||||||
|  |  | ||||||
| core-i18n-extract: | core-i18n-extract: | ||||||
| 	uv run ak makemessages \ | 	poetry run ak makemessages \ | ||||||
| 		--add-location file \ | 		--add-location file \ | ||||||
| 		--no-obsolete \ | 		--no-obsolete \ | ||||||
| 		--ignore web \ | 		--ignore web \ | ||||||
| @ -86,10 +82,6 @@ dev-create-db: | |||||||
|  |  | ||||||
| dev-reset: dev-drop-db dev-create-db migrate  ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state. | dev-reset: dev-drop-db dev-create-db migrate  ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state. | ||||||
|  |  | ||||||
| update-test-mmdb:  ## Update test GeoIP and ASN Databases |  | ||||||
| 	curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-ASN-Test.mmdb -o ${PWD}/tests/GeoLite2-ASN-Test.mmdb |  | ||||||
| 	curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-City-Test.mmdb -o ${PWD}/tests/GeoLite2-City-Test.mmdb |  | ||||||
|  |  | ||||||
| ######################### | ######################### | ||||||
| ## API Schema | ## API Schema | ||||||
| ######################### | ######################### | ||||||
| @ -98,11 +90,11 @@ gen-build:  ## Extract the schema from the database | |||||||
| 	AUTHENTIK_DEBUG=true \ | 	AUTHENTIK_DEBUG=true \ | ||||||
| 		AUTHENTIK_TENANTS__ENABLED=true \ | 		AUTHENTIK_TENANTS__ENABLED=true \ | ||||||
| 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | ||||||
| 		uv run ak make_blueprint_schema --file blueprints/schema.json | 		poetry run ak make_blueprint_schema > blueprints/schema.json | ||||||
| 	AUTHENTIK_DEBUG=true \ | 	AUTHENTIK_DEBUG=true \ | ||||||
| 		AUTHENTIK_TENANTS__ENABLED=true \ | 		AUTHENTIK_TENANTS__ENABLED=true \ | ||||||
| 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | ||||||
| 		uv run ak spectacular --file schema.yml | 		poetry run ak spectacular --file schema.yml | ||||||
|  |  | ||||||
| gen-changelog:  ## (Release) generate the changelog based from the commits since the last tag | gen-changelog:  ## (Release) generate the changelog based from the commits since the last tag | ||||||
| 	git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md | 	git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md | ||||||
| @ -122,19 +114,14 @@ gen-diff:  ## (Release) generate the changelog diff between the current schema a | |||||||
| 	npx prettier --write diff.md | 	npx prettier --write diff.md | ||||||
|  |  | ||||||
| gen-clean-ts:  ## Remove generated API client for Typescript | gen-clean-ts:  ## Remove generated API client for Typescript | ||||||
| 	rm -rf ${PWD}/${GEN_API_TS}/ | 	rm -rf ./${GEN_API_TS}/ | ||||||
| 	rm -rf ${PWD}/web/node_modules/@goauthentik/api/ | 	rm -rf ./web/node_modules/@goauthentik/api/ | ||||||
|  |  | ||||||
| gen-clean-go:  ## Remove generated API client for Go | gen-clean-go:  ## Remove generated API client for Go | ||||||
| 	mkdir -p ${PWD}/${GEN_API_GO} | 	rm -rf ./${GEN_API_GO}/ | ||||||
| ifneq ($(wildcard ${PWD}/${GEN_API_GO}/.*),) |  | ||||||
| 	make -C ${PWD}/${GEN_API_GO} clean |  | ||||||
| else |  | ||||||
| 	rm -rf ${PWD}/${GEN_API_GO} |  | ||||||
| endif |  | ||||||
|  |  | ||||||
| gen-clean-py:  ## Remove generated API client for Python | gen-clean-py:  ## Remove generated API client for Python | ||||||
| 	rm -rf ${PWD}/${GEN_API_PY}/ | 	rm -rf ./${GEN_API_PY}/ | ||||||
|  |  | ||||||
| gen-clean: gen-clean-ts gen-clean-go gen-clean-py  ## Remove generated API clients | gen-clean: gen-clean-ts gen-clean-go gen-clean-py  ## Remove generated API clients | ||||||
|  |  | ||||||
| @ -150,9 +137,9 @@ gen-client-ts: gen-clean-ts  ## Build and install the authentik API for Typescri | |||||||
| 		--additional-properties=npmVersion=${NPM_VERSION} \ | 		--additional-properties=npmVersion=${NPM_VERSION} \ | ||||||
| 		--git-repo-id authentik \ | 		--git-repo-id authentik \ | ||||||
| 		--git-user-id goauthentik | 		--git-user-id goauthentik | ||||||
|  | 	mkdir -p web/node_modules/@goauthentik/api | ||||||
| 	cd ${PWD}/${GEN_API_TS} && npm link | 	cd ./${GEN_API_TS} && npm i | ||||||
| 	cd ${PWD}/web && npm link @goauthentik/api | 	\cp -rf ./${GEN_API_TS}/* web/node_modules/@goauthentik/api | ||||||
|  |  | ||||||
| gen-client-py: gen-clean-py ## Build and install the authentik API for Python | gen-client-py: gen-clean-py ## Build and install the authentik API for Python | ||||||
| 	docker run \ | 	docker run \ | ||||||
| @ -166,20 +153,27 @@ gen-client-py: gen-clean-py ## Build and install the authentik API for Python | |||||||
| 		--additional-properties=packageVersion=${NPM_VERSION} \ | 		--additional-properties=packageVersion=${NPM_VERSION} \ | ||||||
| 		--git-repo-id authentik \ | 		--git-repo-id authentik \ | ||||||
| 		--git-user-id goauthentik | 		--git-user-id goauthentik | ||||||
|  | 	pip install ./${GEN_API_PY} | ||||||
|  |  | ||||||
| gen-client-go: gen-clean-go  ## Build and install the authentik API for Golang | gen-client-go: gen-clean-go  ## Build and install the authentik API for Golang | ||||||
| 	mkdir -p ${PWD}/${GEN_API_GO} | 	mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates | ||||||
| ifeq ($(wildcard ${PWD}/${GEN_API_GO}/.*),) | 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./${GEN_API_GO}/config.yaml | ||||||
| 	git clone --depth 1 https://github.com/goauthentik/client-go.git ${PWD}/${GEN_API_GO} | 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./${GEN_API_GO}/templates/README.mustache | ||||||
| else | 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./${GEN_API_GO}/templates/go.mod.mustache | ||||||
| 	cd ${PWD}/${GEN_API_GO} && git pull | 	cp schema.yml ./${GEN_API_GO}/ | ||||||
| endif | 	docker run \ | ||||||
| 	cp ${PWD}/schema.yml ${PWD}/${GEN_API_GO} | 		--rm -v ${PWD}/${GEN_API_GO}:/local \ | ||||||
| 	make -C ${PWD}/${GEN_API_GO} build | 		--user ${UID}:${GID} \ | ||||||
|  | 		docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \ | ||||||
|  | 		-i /local/schema.yml \ | ||||||
|  | 		-g go \ | ||||||
|  | 		-o /local/ \ | ||||||
|  | 		-c /local/config.yaml | ||||||
| 	go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO} | 	go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO} | ||||||
|  | 	rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/ | ||||||
|  |  | ||||||
| gen-dev-config:  ## Generate a local development config file | gen-dev-config:  ## Generate a local development config file | ||||||
| 	uv run scripts/generate_config.py | 	poetry run scripts/generate_config.py | ||||||
|  |  | ||||||
| gen: gen-build gen-client-ts | gen: gen-build gen-client-ts | ||||||
|  |  | ||||||
| @ -247,7 +241,7 @@ docker:  ## Build a docker image of the current source tree | |||||||
| 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} | 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} | ||||||
|  |  | ||||||
| test-docker: | test-docker: | ||||||
| 	BUILD=true ${PWD}/scripts/test_docker.sh | 	BUILD=true ./scripts/test_docker.sh | ||||||
|  |  | ||||||
| ######################### | ######################### | ||||||
| ## CI | ## CI | ||||||
| @ -260,21 +254,21 @@ ci--meta-debug: | |||||||
| 	node --version | 	node --version | ||||||
|  |  | ||||||
| ci-black: ci--meta-debug | ci-black: ci--meta-debug | ||||||
| 	uv run black --check $(PY_SOURCES) | 	poetry run black --check $(PY_SOURCES) | ||||||
|  |  | ||||||
| ci-ruff: ci--meta-debug | ci-ruff: ci--meta-debug | ||||||
| 	uv run ruff check $(PY_SOURCES) | 	poetry run ruff check $(PY_SOURCES) | ||||||
|  |  | ||||||
| ci-codespell: ci--meta-debug | ci-codespell: ci--meta-debug | ||||||
| 	uv run codespell -s | 	poetry run codespell -s | ||||||
|  |  | ||||||
| ci-bandit: ci--meta-debug | ci-bandit: ci--meta-debug | ||||||
| 	uv run bandit -r $(PY_SOURCES) | 	poetry run bandit -r $(PY_SOURCES) | ||||||
|  |  | ||||||
| ci-pending-migrations: ci--meta-debug | ci-pending-migrations: ci--meta-debug | ||||||
| 	uv run ak makemigrations --check | 	poetry run ak makemigrations --check | ||||||
|  |  | ||||||
| ci-test: ci--meta-debug | ci-test: ci--meta-debug | ||||||
| 	uv run coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik | 	poetry run coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik | ||||||
| 	uv run coverage report | 	poetry run coverage report | ||||||
| 	uv run coverage xml | 	poetry run coverage xml | ||||||
|  | |||||||
| @ -42,4 +42,4 @@ See [SECURITY.md](SECURITY.md) | |||||||
|  |  | ||||||
| ## Adoption and Contributions | ## Adoption and Contributions | ||||||
|  |  | ||||||
| Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [contribution guide](https://docs.goauthentik.io/docs/developer-docs?utm_source=github). | Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md). | ||||||
|  | |||||||
| @ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni | |||||||
|  |  | ||||||
| | Version   | Supported | | | Version   | Supported | | ||||||
| | --------- | --------- | | | --------- | --------- | | ||||||
| | 2025.4.x  | ✅        | | | 2024.12.x | ✅        | | ||||||
| | 2025.6.x  | ✅        | | | 2025.2.x  | ✅        | | ||||||
|  |  | ||||||
| ## Reporting a Vulnerability | ## Reporting a Vulnerability | ||||||
|  |  | ||||||
|  | |||||||
| @ -2,7 +2,7 @@ | |||||||
|  |  | ||||||
| from os import environ | from os import environ | ||||||
|  |  | ||||||
| __version__ = "2025.6.3" | __version__ = "2025.2.1" | ||||||
| ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
							
								
								
									
										79
									
								
								authentik/admin/api/metrics.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										79
									
								
								authentik/admin/api/metrics.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,79 @@ | |||||||
|  | """authentik administration metrics""" | ||||||
|  |  | ||||||
|  | from datetime import timedelta | ||||||
|  |  | ||||||
|  | from django.db.models.functions import ExtractHour | ||||||
|  | from drf_spectacular.utils import extend_schema, extend_schema_field | ||||||
|  | from guardian.shortcuts import get_objects_for_user | ||||||
|  | from rest_framework.fields import IntegerField, SerializerMethodField | ||||||
|  | from rest_framework.permissions import IsAuthenticated | ||||||
|  | from rest_framework.request import Request | ||||||
|  | from rest_framework.response import Response | ||||||
|  | from rest_framework.views import APIView | ||||||
|  |  | ||||||
|  | from authentik.core.api.utils import PassiveSerializer | ||||||
|  | from authentik.events.models import EventAction | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class CoordinateSerializer(PassiveSerializer): | ||||||
|  |     """Coordinates for diagrams""" | ||||||
|  |  | ||||||
|  |     x_cord = IntegerField(read_only=True) | ||||||
|  |     y_cord = IntegerField(read_only=True) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class LoginMetricsSerializer(PassiveSerializer): | ||||||
|  |     """Login Metrics per 1h""" | ||||||
|  |  | ||||||
|  |     logins = SerializerMethodField() | ||||||
|  |     logins_failed = SerializerMethodField() | ||||||
|  |     authorizations = SerializerMethodField() | ||||||
|  |  | ||||||
|  |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|  |     def get_logins(self, _): | ||||||
|  |         """Get successful logins per 8 hours for the last 7 days""" | ||||||
|  |         user = self.context["user"] | ||||||
|  |         return ( | ||||||
|  |             get_objects_for_user(user, "authentik_events.view_event").filter( | ||||||
|  |                 action=EventAction.LOGIN | ||||||
|  |             ) | ||||||
|  |             # 3 data points per day, so 8 hour spans | ||||||
|  |             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|  |     def get_logins_failed(self, _): | ||||||
|  |         """Get failed logins per 8 hours for the last 7 days""" | ||||||
|  |         user = self.context["user"] | ||||||
|  |         return ( | ||||||
|  |             get_objects_for_user(user, "authentik_events.view_event").filter( | ||||||
|  |                 action=EventAction.LOGIN_FAILED | ||||||
|  |             ) | ||||||
|  |             # 3 data points per day, so 8 hour spans | ||||||
|  |             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|  |     def get_authorizations(self, _): | ||||||
|  |         """Get successful authorizations per 8 hours for the last 7 days""" | ||||||
|  |         user = self.context["user"] | ||||||
|  |         return ( | ||||||
|  |             get_objects_for_user(user, "authentik_events.view_event").filter( | ||||||
|  |                 action=EventAction.AUTHORIZE_APPLICATION | ||||||
|  |             ) | ||||||
|  |             # 3 data points per day, so 8 hour spans | ||||||
|  |             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |  | ||||||
class AdministrationMetricsViewSet(APIView):
    """Login Metrics per 1h"""

    permission_classes = [IsAuthenticated]

    @extend_schema(responses={200: LoginMetricsSerializer(many=False)})
    def get(self, request: Request) -> Response:
        """Login Metrics per 1h"""
        # Passive serializer: the instance argument is unused, only the
        # SerializerMethodFields (driven by context["user"]) produce data.
        metrics = LoginMetricsSerializer(True)
        metrics.context["user"] = request.user
        return Response(metrics.data)
| @ -59,7 +59,7 @@ class SystemInfoSerializer(PassiveSerializer): | |||||||
|             if not isinstance(value, str): |             if not isinstance(value, str): | ||||||
|                 continue |                 continue | ||||||
|             actual_value = value |             actual_value = value | ||||||
|             if raw_session is not None and raw_session in actual_value: |             if raw_session in actual_value: | ||||||
|                 actual_value = actual_value.replace( |                 actual_value = actual_value.replace( | ||||||
|                     raw_session, SafeExceptionReporterFilter.cleansed_substitute |                     raw_session, SafeExceptionReporterFilter.cleansed_substitute | ||||||
|                 ) |                 ) | ||||||
|  | |||||||
| @ -1,7 +1,6 @@ | |||||||
| """authentik administration overview""" | """authentik administration overview""" | ||||||
|  |  | ||||||
| from django.core.cache import cache | from django.core.cache import cache | ||||||
| from django_tenants.utils import get_public_schema_name |  | ||||||
| from drf_spectacular.utils import extend_schema | from drf_spectacular.utils import extend_schema | ||||||
| from packaging.version import parse | from packaging.version import parse | ||||||
| from rest_framework.fields import SerializerMethodField | from rest_framework.fields import SerializerMethodField | ||||||
| @ -14,7 +13,6 @@ from authentik import __version__, get_build_hash | |||||||
| from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version | from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version | ||||||
| from authentik.core.api.utils import PassiveSerializer | from authentik.core.api.utils import PassiveSerializer | ||||||
| from authentik.outposts.models import Outpost | from authentik.outposts.models import Outpost | ||||||
| from authentik.tenants.utils import get_current_tenant |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class VersionSerializer(PassiveSerializer): | class VersionSerializer(PassiveSerializer): | ||||||
| @ -37,11 +35,9 @@ class VersionSerializer(PassiveSerializer): | |||||||
|  |  | ||||||
|     def get_version_latest(self, _) -> str: |     def get_version_latest(self, _) -> str: | ||||||
|         """Get latest version from cache""" |         """Get latest version from cache""" | ||||||
|         if get_current_tenant().schema_name == get_public_schema_name(): |  | ||||||
|             return __version__ |  | ||||||
|         version_in_cache = cache.get(VERSION_CACHE_KEY) |         version_in_cache = cache.get(VERSION_CACHE_KEY) | ||||||
|         if not version_in_cache:  # pragma: no cover |         if not version_in_cache:  # pragma: no cover | ||||||
|             update_latest_version.send() |             update_latest_version.delay() | ||||||
|             return __version__ |             return __version__ | ||||||
|         return version_in_cache |         return version_in_cache | ||||||
|  |  | ||||||
|  | |||||||
							
								
								
									
										57
									
								
								authentik/admin/api/workers.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										57
									
								
								authentik/admin/api/workers.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,57 @@ | |||||||
|  | """authentik administration overview""" | ||||||
|  |  | ||||||
|  | from socket import gethostname | ||||||
|  |  | ||||||
|  | from django.conf import settings | ||||||
|  | from drf_spectacular.utils import extend_schema, inline_serializer | ||||||
|  | from packaging.version import parse | ||||||
|  | from rest_framework.fields import BooleanField, CharField | ||||||
|  | from rest_framework.request import Request | ||||||
|  | from rest_framework.response import Response | ||||||
|  | from rest_framework.views import APIView | ||||||
|  |  | ||||||
|  | from authentik import get_full_version | ||||||
|  | from authentik.rbac.permissions import HasPermission | ||||||
|  | from authentik.root.celery import CELERY_APP | ||||||
|  |  | ||||||
|  |  | ||||||
class WorkerView(APIView):
    """Get currently connected worker count."""

    permission_classes = [HasPermission("authentik_rbac.view_system_info")]

    @extend_schema(
        responses=inline_serializer(
            "Worker",
            fields={
                "worker_id": CharField(),
                "version": CharField(),
                "version_matching": BooleanField(),
            },
            many=True,
        )
    )
    def get(self, request: Request) -> Response:
        """Get currently connected worker count."""
        expected = parse(get_full_version())
        workers = []
        # Each ping reply is a single-key dict of {worker_id: metadata}.
        for reply in CELERY_APP.control.ping(timeout=0.5):
            worker_id = next(iter(reply))
            reported = reply[worker_id].get("version")
            matching = bool(reported) and parse(reported) == expected
            workers.append(
                {"worker_id": worker_id, "version": reported, "version_matching": matching}
            )
        # In debug we run with `task_always_eager`, so tasks are ran on the main process
        if settings.DEBUG:  # pragma: no cover
            workers.append(
                {
                    "worker_id": f"authentik-debug@{gethostname()}",
                    "version": get_full_version(),
                    "version_matching": True,
                }
            )
        return Response(workers)
| @ -3,9 +3,6 @@ | |||||||
| from prometheus_client import Info | from prometheus_client import Info | ||||||
|  |  | ||||||
| from authentik.blueprints.apps import ManagedAppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
| from authentik.lib.config import CONFIG |  | ||||||
| from authentik.lib.utils.time import fqdn_rand |  | ||||||
| from authentik.tasks.schedules.lib import ScheduleSpec |  | ||||||
|  |  | ||||||
| PROM_INFO = Info("authentik_version", "Currently running authentik version") | PROM_INFO = Info("authentik_version", "Currently running authentik version") | ||||||
|  |  | ||||||
| @ -17,31 +14,3 @@ class AuthentikAdminConfig(ManagedAppConfig): | |||||||
|     label = "authentik_admin" |     label = "authentik_admin" | ||||||
|     verbose_name = "authentik Admin" |     verbose_name = "authentik Admin" | ||||||
|     default = True |     default = True | ||||||
|  |  | ||||||
|     @ManagedAppConfig.reconcile_global |  | ||||||
|     def clear_update_notifications(self): |  | ||||||
|         """Clear update notifications on startup if the notification was for the version |  | ||||||
|         we're running now.""" |  | ||||||
|         from packaging.version import parse |  | ||||||
|  |  | ||||||
|         from authentik.admin.tasks import LOCAL_VERSION |  | ||||||
|         from authentik.events.models import EventAction, Notification |  | ||||||
|  |  | ||||||
|         for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE): |  | ||||||
|             if "new_version" not in notification.event.context: |  | ||||||
|                 continue |  | ||||||
|             notification_version = notification.event.context["new_version"] |  | ||||||
|             if LOCAL_VERSION >= parse(notification_version): |  | ||||||
|                 notification.delete() |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def global_schedule_specs(self) -> list[ScheduleSpec]: |  | ||||||
|         from authentik.admin.tasks import update_latest_version |  | ||||||
|  |  | ||||||
|         return [ |  | ||||||
|             ScheduleSpec( |  | ||||||
|                 actor=update_latest_version, |  | ||||||
|                 crontab=f"{fqdn_rand('admin_latest_version')} * * * *", |  | ||||||
|                 paused=CONFIG.get_bool("disable_update_check"), |  | ||||||
|             ), |  | ||||||
|         ] |  | ||||||
|  | |||||||
							
								
								
									
										13
									
								
								authentik/admin/settings.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										13
									
								
								authentik/admin/settings.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,13 @@ | |||||||
|  | """authentik admin settings""" | ||||||
|  |  | ||||||
|  | from celery.schedules import crontab | ||||||
|  |  | ||||||
|  | from authentik.lib.utils.time import fqdn_rand | ||||||
|  |  | ||||||
# Periodic task schedule for the admin app. The minute offset comes from
# fqdn_rand — presumably to stagger the version check across instances so
# they don't all hit the endpoint simultaneously (TODO confirm intent).
CELERY_BEAT_SCHEDULE = {
    "admin_latest_version": {
        "task": "authentik.admin.tasks.update_latest_version",
        "schedule": crontab(minute=fqdn_rand("admin_latest_version"), hour="*"),
        "options": {"queue": "authentik_scheduled"},
    }
}
							
								
								
									
										35
									
								
								authentik/admin/signals.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										35
									
								
								authentik/admin/signals.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,35 @@ | |||||||
|  | """admin signals""" | ||||||
|  |  | ||||||
|  | from django.dispatch import receiver | ||||||
|  | from packaging.version import parse | ||||||
|  | from prometheus_client import Gauge | ||||||
|  |  | ||||||
|  | from authentik import get_full_version | ||||||
|  | from authentik.root.celery import CELERY_APP | ||||||
|  | from authentik.root.monitoring import monitoring_set | ||||||
|  |  | ||||||
|  | GAUGE_WORKERS = Gauge( | ||||||
|  |     "authentik_admin_workers", | ||||||
|  |     "Currently connected workers, their versions and if they are the same version as authentik", | ||||||
|  |     ["version", "version_matched"], | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | _version = parse(get_full_version()) | ||||||
|  |  | ||||||
|  |  | ||||||
@receiver(monitoring_set)
def monitoring_set_workers(sender, **kwargs):
    """Set worker gauge"""
    replies: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5)
    per_version = {}
    # Each ping reply is a single-key dict of {worker_id: metadata}.
    for reply in replies:
        worker_id = next(iter(reply))
        reported = reply[worker_id].get("version")
        matching = bool(reported) and parse(reported) == _version
        if reported not in per_version:
            per_version[reported] = {"count": 0, "matching": matching}
        per_version[reported]["count"] += 1
    for reported, stats in per_version.items():
        GAUGE_WORKERS.labels(reported, stats["matching"]).set(stats["count"])
| @ -1,19 +1,19 @@ | |||||||
| """authentik admin tasks""" | """authentik admin tasks""" | ||||||
|  |  | ||||||
| from django.core.cache import cache | from django.core.cache import cache | ||||||
|  | from django.db import DatabaseError, InternalError, ProgrammingError | ||||||
| from django.utils.translation import gettext_lazy as _ | from django.utils.translation import gettext_lazy as _ | ||||||
| from django_dramatiq_postgres.middleware import CurrentTask |  | ||||||
| from dramatiq import actor |  | ||||||
| from packaging.version import parse | from packaging.version import parse | ||||||
| from requests import RequestException | from requests import RequestException | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| from authentik import __version__, get_build_hash | from authentik import __version__, get_build_hash | ||||||
| from authentik.admin.apps import PROM_INFO | from authentik.admin.apps import PROM_INFO | ||||||
| from authentik.events.models import Event, EventAction | from authentik.events.models import Event, EventAction, Notification | ||||||
|  | from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task | ||||||
| from authentik.lib.config import CONFIG | from authentik.lib.config import CONFIG | ||||||
| from authentik.lib.utils.http import get_http_session | from authentik.lib.utils.http import get_http_session | ||||||
| from authentik.tasks.models import Task | from authentik.root.celery import CELERY_APP | ||||||
|  |  | ||||||
| LOGGER = get_logger() | LOGGER = get_logger() | ||||||
| VERSION_NULL = "0.0.0" | VERSION_NULL = "0.0.0" | ||||||
| @ -33,12 +33,27 @@ def _set_prom_info(): | |||||||
|     ) |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
| @actor(description=_("Update latest version info.")) | @CELERY_APP.task( | ||||||
| def update_latest_version(): |     throws=(DatabaseError, ProgrammingError, InternalError), | ||||||
|     self: Task = CurrentTask.get_task() | ) | ||||||
def clear_update_notifications():
    """Clear update notifications on startup if the notification was for the version
    we're running now."""
    stale_candidates = Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE)
    for notification in stale_candidates:
        context = notification.event.context
        if "new_version" not in context:
            continue
        # Delete the notification once we're running that version (or newer).
        if LOCAL_VERSION >= parse(context["new_version"]):
            notification.delete()
|  |  | ||||||
|  |  | ||||||
|  | @CELERY_APP.task(bind=True, base=SystemTask) | ||||||
|  | @prefill_task | ||||||
|  | def update_latest_version(self: SystemTask): | ||||||
|  |     """Update latest version info""" | ||||||
|     if CONFIG.get_bool("disable_update_check"): |     if CONFIG.get_bool("disable_update_check"): | ||||||
|         cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT) |         cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT) | ||||||
|         self.info("Version check disabled.") |         self.set_status(TaskStatus.WARNING, "Version check disabled.") | ||||||
|         return |         return | ||||||
|     try: |     try: | ||||||
|         response = get_http_session().get( |         response = get_http_session().get( | ||||||
| @ -48,7 +63,7 @@ def update_latest_version(): | |||||||
|         data = response.json() |         data = response.json() | ||||||
|         upstream_version = data.get("stable", {}).get("version") |         upstream_version = data.get("stable", {}).get("version") | ||||||
|         cache.set(VERSION_CACHE_KEY, upstream_version, VERSION_CACHE_TIMEOUT) |         cache.set(VERSION_CACHE_KEY, upstream_version, VERSION_CACHE_TIMEOUT) | ||||||
|         self.info("Successfully updated latest Version") |         self.set_status(TaskStatus.SUCCESSFUL, "Successfully updated latest Version") | ||||||
|         _set_prom_info() |         _set_prom_info() | ||||||
|         # Check if upstream version is newer than what we're running, |         # Check if upstream version is newer than what we're running, | ||||||
|         # and if no event exists yet, create one. |         # and if no event exists yet, create one. | ||||||
| @ -71,7 +86,7 @@ def update_latest_version(): | |||||||
|             ).save() |             ).save() | ||||||
|     except (RequestException, IndexError) as exc: |     except (RequestException, IndexError) as exc: | ||||||
|         cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT) |         cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT) | ||||||
|         raise exc |         self.set_error(exc) | ||||||
|  |  | ||||||
|  |  | ||||||
| _set_prom_info() | _set_prom_info() | ||||||
|  | |||||||
| @ -29,6 +29,18 @@ class TestAdminAPI(TestCase): | |||||||
|         body = loads(response.content) |         body = loads(response.content) | ||||||
|         self.assertEqual(body["version_current"], __version__) |         self.assertEqual(body["version_current"], __version__) | ||||||
|  |  | ||||||
|  |     def test_workers(self): | ||||||
|  |         """Test Workers API""" | ||||||
|  |         response = self.client.get(reverse("authentik_api:admin_workers")) | ||||||
|  |         self.assertEqual(response.status_code, 200) | ||||||
|  |         body = loads(response.content) | ||||||
|  |         self.assertEqual(len(body), 0) | ||||||
|  |  | ||||||
|  |     def test_metrics(self): | ||||||
|  |         """Test metrics API""" | ||||||
|  |         response = self.client.get(reverse("authentik_api:admin_metrics")) | ||||||
|  |         self.assertEqual(response.status_code, 200) | ||||||
|  |  | ||||||
|     def test_apps(self): |     def test_apps(self): | ||||||
|         """Test apps API""" |         """Test apps API""" | ||||||
|         response = self.client.get(reverse("authentik_api:apps-list")) |         response = self.client.get(reverse("authentik_api:apps-list")) | ||||||
|  | |||||||
| @ -1,12 +1,12 @@ | |||||||
| """test admin tasks""" | """test admin tasks""" | ||||||
|  |  | ||||||
| from django.apps import apps |  | ||||||
| from django.core.cache import cache | from django.core.cache import cache | ||||||
| from django.test import TestCase | from django.test import TestCase | ||||||
| from requests_mock import Mocker | from requests_mock import Mocker | ||||||
|  |  | ||||||
| from authentik.admin.tasks import ( | from authentik.admin.tasks import ( | ||||||
|     VERSION_CACHE_KEY, |     VERSION_CACHE_KEY, | ||||||
|  |     clear_update_notifications, | ||||||
|     update_latest_version, |     update_latest_version, | ||||||
| ) | ) | ||||||
| from authentik.events.models import Event, EventAction | from authentik.events.models import Event, EventAction | ||||||
| @ -30,7 +30,7 @@ class TestAdminTasks(TestCase): | |||||||
|         """Test Update checker with valid response""" |         """Test Update checker with valid response""" | ||||||
|         with Mocker() as mocker, CONFIG.patch("disable_update_check", False): |         with Mocker() as mocker, CONFIG.patch("disable_update_check", False): | ||||||
|             mocker.get("https://version.goauthentik.io/version.json", json=RESPONSE_VALID) |             mocker.get("https://version.goauthentik.io/version.json", json=RESPONSE_VALID) | ||||||
|             update_latest_version.send() |             update_latest_version.delay().get() | ||||||
|             self.assertEqual(cache.get(VERSION_CACHE_KEY), "99999999.9999999") |             self.assertEqual(cache.get(VERSION_CACHE_KEY), "99999999.9999999") | ||||||
|             self.assertTrue( |             self.assertTrue( | ||||||
|                 Event.objects.filter( |                 Event.objects.filter( | ||||||
| @ -40,7 +40,7 @@ class TestAdminTasks(TestCase): | |||||||
|                 ).exists() |                 ).exists() | ||||||
|             ) |             ) | ||||||
|             # test that a consecutive check doesn't create a duplicate event |             # test that a consecutive check doesn't create a duplicate event | ||||||
|             update_latest_version.send() |             update_latest_version.delay().get() | ||||||
|             self.assertEqual( |             self.assertEqual( | ||||||
|                 len( |                 len( | ||||||
|                     Event.objects.filter( |                     Event.objects.filter( | ||||||
| @ -56,7 +56,7 @@ class TestAdminTasks(TestCase): | |||||||
|         """Test Update checker with invalid response""" |         """Test Update checker with invalid response""" | ||||||
|         with Mocker() as mocker: |         with Mocker() as mocker: | ||||||
|             mocker.get("https://version.goauthentik.io/version.json", status_code=400) |             mocker.get("https://version.goauthentik.io/version.json", status_code=400) | ||||||
|             update_latest_version.send() |             update_latest_version.delay().get() | ||||||
|             self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0") |             self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0") | ||||||
|             self.assertFalse( |             self.assertFalse( | ||||||
|                 Event.objects.filter( |                 Event.objects.filter( | ||||||
| @ -67,19 +67,17 @@ class TestAdminTasks(TestCase): | |||||||
|     def test_version_disabled(self): |     def test_version_disabled(self): | ||||||
|         """Test Update checker while its disabled""" |         """Test Update checker while its disabled""" | ||||||
|         with CONFIG.patch("disable_update_check", True): |         with CONFIG.patch("disable_update_check", True): | ||||||
|             update_latest_version.send() |             update_latest_version.delay().get() | ||||||
|             self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0") |             self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0") | ||||||
|  |  | ||||||
|     def test_clear_update_notifications(self): |     def test_clear_update_notifications(self): | ||||||
|         """Test clear of previous notification""" |         """Test clear of previous notification""" | ||||||
|         admin_config = apps.get_app_config("authentik_admin") |  | ||||||
|         Event.objects.create( |         Event.objects.create( | ||||||
|             action=EventAction.UPDATE_AVAILABLE, |             action=EventAction.UPDATE_AVAILABLE, context={"new_version": "99999999.9999999.9999999"} | ||||||
|             context={"new_version": "99999999.9999999.9999999"}, |  | ||||||
|         ) |         ) | ||||||
|         Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={"new_version": "1.1.1"}) |         Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={"new_version": "1.1.1"}) | ||||||
|         Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={}) |         Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={}) | ||||||
|         admin_config.clear_update_notifications() |         clear_update_notifications() | ||||||
|         self.assertFalse( |         self.assertFalse( | ||||||
|             Event.objects.filter( |             Event.objects.filter( | ||||||
|                 action=EventAction.UPDATE_AVAILABLE, context__new_version="1.1" |                 action=EventAction.UPDATE_AVAILABLE, context__new_version="1.1" | ||||||
|  | |||||||
| @ -3,14 +3,22 @@ | |||||||
| from django.urls import path | from django.urls import path | ||||||
|  |  | ||||||
| from authentik.admin.api.meta import AppsViewSet, ModelViewSet | from authentik.admin.api.meta import AppsViewSet, ModelViewSet | ||||||
|  | from authentik.admin.api.metrics import AdministrationMetricsViewSet | ||||||
| from authentik.admin.api.system import SystemView | from authentik.admin.api.system import SystemView | ||||||
| from authentik.admin.api.version import VersionView | from authentik.admin.api.version import VersionView | ||||||
| from authentik.admin.api.version_history import VersionHistoryViewSet | from authentik.admin.api.version_history import VersionHistoryViewSet | ||||||
|  | from authentik.admin.api.workers import WorkerView | ||||||
|  |  | ||||||
| api_urlpatterns = [ | api_urlpatterns = [ | ||||||
|     ("admin/apps", AppsViewSet, "apps"), |     ("admin/apps", AppsViewSet, "apps"), | ||||||
|     ("admin/models", ModelViewSet, "models"), |     ("admin/models", ModelViewSet, "models"), | ||||||
|  |     path( | ||||||
|  |         "admin/metrics/", | ||||||
|  |         AdministrationMetricsViewSet.as_view(), | ||||||
|  |         name="admin_metrics", | ||||||
|  |     ), | ||||||
|     path("admin/version/", VersionView.as_view(), name="admin_version"), |     path("admin/version/", VersionView.as_view(), name="admin_version"), | ||||||
|     ("admin/version/history", VersionHistoryViewSet, "version_history"), |     ("admin/version/history", VersionHistoryViewSet, "version_history"), | ||||||
|  |     path("admin/workers/", WorkerView.as_view(), name="admin_workers"), | ||||||
|     path("admin/system/", SystemView.as_view(), name="admin_system"), |     path("admin/system/", SystemView.as_view(), name="admin_system"), | ||||||
| ] | ] | ||||||
|  | |||||||
| @ -1,13 +1,12 @@ | |||||||
| """authentik API AppConfig""" | """authentik API AppConfig""" | ||||||
|  |  | ||||||
| from authentik.blueprints.apps import ManagedAppConfig | from django.apps import AppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikAPIConfig(ManagedAppConfig): | class AuthentikAPIConfig(AppConfig): | ||||||
|     """authentik API Config""" |     """authentik API Config""" | ||||||
|  |  | ||||||
|     name = "authentik.api" |     name = "authentik.api" | ||||||
|     label = "authentik_api" |     label = "authentik_api" | ||||||
|     mountpoint = "api/" |     mountpoint = "api/" | ||||||
|     verbose_name = "authentik API" |     verbose_name = "authentik API" | ||||||
|     default = True |  | ||||||
|  | |||||||
| @ -1,12 +1,9 @@ | |||||||
| """API Authentication""" | """API Authentication""" | ||||||
|  |  | ||||||
| from hmac import compare_digest | from hmac import compare_digest | ||||||
| from pathlib import Path |  | ||||||
| from tempfile import gettempdir |  | ||||||
| from typing import Any | from typing import Any | ||||||
|  |  | ||||||
| from django.conf import settings | from django.conf import settings | ||||||
| from django.contrib.auth.models import AnonymousUser |  | ||||||
| from drf_spectacular.extensions import OpenApiAuthenticationExtension | from drf_spectacular.extensions import OpenApiAuthenticationExtension | ||||||
| from rest_framework.authentication import BaseAuthentication, get_authorization_header | from rest_framework.authentication import BaseAuthentication, get_authorization_header | ||||||
| from rest_framework.exceptions import AuthenticationFailed | from rest_framework.exceptions import AuthenticationFailed | ||||||
| @ -14,17 +11,11 @@ from rest_framework.request import Request | |||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| from authentik.core.middleware import CTX_AUTH_VIA | from authentik.core.middleware import CTX_AUTH_VIA | ||||||
| from authentik.core.models import Token, TokenIntents, User, UserTypes | from authentik.core.models import Token, TokenIntents, User | ||||||
| from authentik.outposts.models import Outpost | from authentik.outposts.models import Outpost | ||||||
| from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API | from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API | ||||||
|  |  | ||||||
| LOGGER = get_logger() | LOGGER = get_logger() | ||||||
| _tmp = Path(gettempdir()) |  | ||||||
| try: |  | ||||||
|     with open(_tmp / "authentik-core-ipc.key") as _f: |  | ||||||
|         ipc_key = _f.read() |  | ||||||
| except OSError: |  | ||||||
|     ipc_key = None |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def validate_auth(header: bytes) -> str | None: | def validate_auth(header: bytes) -> str | None: | ||||||
| @ -82,11 +73,6 @@ def auth_user_lookup(raw_header: bytes) -> User | None: | |||||||
|     if user: |     if user: | ||||||
|         CTX_AUTH_VIA.set("secret_key") |         CTX_AUTH_VIA.set("secret_key") | ||||||
|         return user |         return user | ||||||
|     # then try to auth via secret key (for embedded outpost/etc) |  | ||||||
|     user = token_ipc(auth_credentials) |  | ||||||
|     if user: |  | ||||||
|         CTX_AUTH_VIA.set("ipc") |  | ||||||
|         return user |  | ||||||
|     raise AuthenticationFailed("Token invalid/expired") |     raise AuthenticationFailed("Token invalid/expired") | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -104,43 +90,6 @@ def token_secret_key(value: str) -> User | None: | |||||||
|     return outpost.user |     return outpost.user | ||||||
|  |  | ||||||
|  |  | ||||||
class IPCUser(AnonymousUser):
    """'Virtual' user for IPC communication between authentik core and the authentik router"""

    username = "authentik:system"
    is_active = True
    is_superuser = True

    # This pseudo-user presents as a fully authenticated internal account.
    @property
    def is_authenticated(self):
        return True

    @property
    def is_anonymous(self):
        return False

    @property
    def type(self):
        return UserTypes.INTERNAL_SERVICE_ACCOUNT

    # IPC callers are fully trusted: every permission check passes.
    def has_perm(self, perm, obj=None):
        return True

    def has_perms(self, perm_list, obj=None):
        return True

    def has_module_perms(self, module):
        return True
|  |  | ||||||
|  |  | ||||||
def token_ipc(value: str) -> User | None:
    """Validate the given token against the IPC key file; return the virtual
    IPC user on a match, None otherwise."""
    # compare_digest avoids timing side-channels on the key comparison.
    if ipc_key and compare_digest(value, ipc_key):
        return IPCUser()
    return None
|  |  | ||||||
|  |  | ||||||
| class TokenAuthentication(BaseAuthentication): | class TokenAuthentication(BaseAuthentication): | ||||||
|     """Token-based authentication using HTTP Bearer authentication""" |     """Token-based authentication using HTTP Bearer authentication""" | ||||||
|  |  | ||||||
|  | |||||||
| @ -54,7 +54,7 @@ def create_component(generator: SchemaGenerator, name, schema, type_=ResolvedCom | |||||||
|     return component |     return component | ||||||
|  |  | ||||||
|  |  | ||||||
| def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs): | def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):  # noqa: W0613 | ||||||
|     """Workaround to set a default response for endpoints. |     """Workaround to set a default response for endpoints. | ||||||
|     Workaround suggested at |     Workaround suggested at | ||||||
|     <https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357> |     <https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357> | ||||||
|  | |||||||
| @ -7,7 +7,7 @@ from rest_framework.exceptions import ValidationError | |||||||
| from rest_framework.fields import CharField, DateTimeField | from rest_framework.fields import CharField, DateTimeField | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| from rest_framework.response import Response | from rest_framework.response import Response | ||||||
| from rest_framework.serializers import ListSerializer | from rest_framework.serializers import ListSerializer, ModelSerializer | ||||||
| from rest_framework.viewsets import ModelViewSet | from rest_framework.viewsets import ModelViewSet | ||||||
|  |  | ||||||
| from authentik.blueprints.models import BlueprintInstance | from authentik.blueprints.models import BlueprintInstance | ||||||
| @ -15,7 +15,7 @@ from authentik.blueprints.v1.importer import Importer | |||||||
| from authentik.blueprints.v1.oci import OCI_PREFIX | from authentik.blueprints.v1.oci import OCI_PREFIX | ||||||
| from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict | from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer | from authentik.core.api.utils import JSONDictField, PassiveSerializer | ||||||
| from authentik.rbac.decorators import permission_required | from authentik.rbac.decorators import permission_required | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -39,7 +39,7 @@ class BlueprintInstanceSerializer(ModelSerializer): | |||||||
|         """Ensure the path (if set) specified is retrievable""" |         """Ensure the path (if set) specified is retrievable""" | ||||||
|         if path == "" or path.startswith(OCI_PREFIX): |         if path == "" or path.startswith(OCI_PREFIX): | ||||||
|             return path |             return path | ||||||
|         files: list[dict] = blueprints_find_dict.send().get_result(block=True) |         files: list[dict] = blueprints_find_dict.delay().get() | ||||||
|         if path not in [file["path"] for file in files]: |         if path not in [file["path"] for file in files]: | ||||||
|             raise ValidationError(_("Blueprint file does not exist")) |             raise ValidationError(_("Blueprint file does not exist")) | ||||||
|         return path |         return path | ||||||
| @ -115,7 +115,7 @@ class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet): | |||||||
|     @action(detail=False, pagination_class=None, filter_backends=[]) |     @action(detail=False, pagination_class=None, filter_backends=[]) | ||||||
|     def available(self, request: Request) -> Response: |     def available(self, request: Request) -> Response: | ||||||
|         """Get blueprints""" |         """Get blueprints""" | ||||||
|         files: list[dict] = blueprints_find_dict.send().get_result(block=True) |         files: list[dict] = blueprints_find_dict.delay().get() | ||||||
|         return Response(files) |         return Response(files) | ||||||
|  |  | ||||||
|     @permission_required("authentik_blueprints.view_blueprintinstance") |     @permission_required("authentik_blueprints.view_blueprintinstance") | ||||||
| @ -129,5 +129,5 @@ class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet): | |||||||
|     def apply(self, request: Request, *args, **kwargs) -> Response: |     def apply(self, request: Request, *args, **kwargs) -> Response: | ||||||
|         """Apply a blueprint""" |         """Apply a blueprint""" | ||||||
|         blueprint = self.get_object() |         blueprint = self.get_object() | ||||||
|         apply_blueprint.send_with_options(args=(blueprint.pk,), rel_obj=blueprint) |         apply_blueprint.delay(str(blueprint.pk)).get() | ||||||
|         return self.retrieve(request, *args, **kwargs) |         return self.retrieve(request, *args, **kwargs) | ||||||
|  | |||||||
| @ -6,12 +6,9 @@ from inspect import ismethod | |||||||
|  |  | ||||||
| from django.apps import AppConfig | from django.apps import AppConfig | ||||||
| from django.db import DatabaseError, InternalError, ProgrammingError | from django.db import DatabaseError, InternalError, ProgrammingError | ||||||
| from dramatiq.broker import get_broker |  | ||||||
| from structlog.stdlib import BoundLogger, get_logger | from structlog.stdlib import BoundLogger, get_logger | ||||||
|  |  | ||||||
| from authentik.lib.utils.time import fqdn_rand |  | ||||||
| from authentik.root.signals import startup | from authentik.root.signals import startup | ||||||
| from authentik.tasks.schedules.lib import ScheduleSpec |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ManagedAppConfig(AppConfig): | class ManagedAppConfig(AppConfig): | ||||||
| @ -37,7 +34,7 @@ class ManagedAppConfig(AppConfig): | |||||||
|  |  | ||||||
|     def import_related(self): |     def import_related(self): | ||||||
|         """Automatically import related modules which rely on just being imported |         """Automatically import related modules which rely on just being imported | ||||||
|         to register themselves (mainly django signals and tasks)""" |         to register themselves (mainly django signals and celery tasks)""" | ||||||
|  |  | ||||||
|         def import_relative(rel_module: str): |         def import_relative(rel_module: str): | ||||||
|             try: |             try: | ||||||
| @ -83,16 +80,6 @@ class ManagedAppConfig(AppConfig): | |||||||
|         func._authentik_managed_reconcile = ManagedAppConfig.RECONCILE_GLOBAL_CATEGORY |         func._authentik_managed_reconcile = ManagedAppConfig.RECONCILE_GLOBAL_CATEGORY | ||||||
|         return func |         return func | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def tenant_schedule_specs(self) -> list[ScheduleSpec]: |  | ||||||
|         """Get a list of schedule specs that must exist in each tenant""" |  | ||||||
|         return [] |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def global_schedule_specs(self) -> list[ScheduleSpec]: |  | ||||||
|         """Get a list of schedule specs that must exist in the default tenant""" |  | ||||||
|         return [] |  | ||||||
|  |  | ||||||
|     def _reconcile_tenant(self) -> None: |     def _reconcile_tenant(self) -> None: | ||||||
|         """reconcile ourselves for tenanted methods""" |         """reconcile ourselves for tenanted methods""" | ||||||
|         from authentik.tenants.models import Tenant |         from authentik.tenants.models import Tenant | ||||||
| @ -113,12 +100,8 @@ class ManagedAppConfig(AppConfig): | |||||||
|         """ |         """ | ||||||
|         from django_tenants.utils import get_public_schema_name, schema_context |         from django_tenants.utils import get_public_schema_name, schema_context | ||||||
|  |  | ||||||
|         try: |         with schema_context(get_public_schema_name()): | ||||||
|             with schema_context(get_public_schema_name()): |             self._reconcile(self.RECONCILE_GLOBAL_CATEGORY) | ||||||
|                 self._reconcile(self.RECONCILE_GLOBAL_CATEGORY) |  | ||||||
|         except (DatabaseError, ProgrammingError, InternalError) as exc: |  | ||||||
|             self.logger.debug("Failed to access database to run reconcile", exc=exc) |  | ||||||
|             return |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikBlueprintsConfig(ManagedAppConfig): | class AuthentikBlueprintsConfig(ManagedAppConfig): | ||||||
| @ -129,29 +112,19 @@ class AuthentikBlueprintsConfig(ManagedAppConfig): | |||||||
|     verbose_name = "authentik Blueprints" |     verbose_name = "authentik Blueprints" | ||||||
|     default = True |     default = True | ||||||
|  |  | ||||||
|  |     @ManagedAppConfig.reconcile_global | ||||||
|  |     def load_blueprints_v1_tasks(self): | ||||||
|  |         """Load v1 tasks""" | ||||||
|  |         self.import_module("authentik.blueprints.v1.tasks") | ||||||
|  |  | ||||||
|  |     @ManagedAppConfig.reconcile_tenant | ||||||
|  |     def blueprints_discovery(self): | ||||||
|  |         """Run blueprint discovery""" | ||||||
|  |         from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints | ||||||
|  |  | ||||||
|  |         blueprints_discovery.delay() | ||||||
|  |         clear_failed_blueprints.delay() | ||||||
|  |  | ||||||
|     def import_models(self): |     def import_models(self): | ||||||
|         super().import_models() |         super().import_models() | ||||||
|         self.import_module("authentik.blueprints.v1.meta.apply_blueprint") |         self.import_module("authentik.blueprints.v1.meta.apply_blueprint") | ||||||
|  |  | ||||||
|     @ManagedAppConfig.reconcile_global |  | ||||||
|     def tasks_middlewares(self): |  | ||||||
|         from authentik.blueprints.v1.tasks import BlueprintWatcherMiddleware |  | ||||||
|  |  | ||||||
|         get_broker().add_middleware(BlueprintWatcherMiddleware()) |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def tenant_schedule_specs(self) -> list[ScheduleSpec]: |  | ||||||
|         from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints |  | ||||||
|  |  | ||||||
|         return [ |  | ||||||
|             ScheduleSpec( |  | ||||||
|                 actor=blueprints_discovery, |  | ||||||
|                 crontab=f"{fqdn_rand('blueprints_v1_discover')} * * * *", |  | ||||||
|                 send_on_startup=True, |  | ||||||
|             ), |  | ||||||
|             ScheduleSpec( |  | ||||||
|                 actor=clear_failed_blueprints, |  | ||||||
|                 crontab=f"{fqdn_rand('blueprints_v1_cleanup')} * * * *", |  | ||||||
|                 send_on_startup=True, |  | ||||||
|             ), |  | ||||||
|         ] |  | ||||||
|  | |||||||
| @ -72,33 +72,20 @@ class Command(BaseCommand): | |||||||
|                     "additionalProperties": True, |                     "additionalProperties": True, | ||||||
|                 }, |                 }, | ||||||
|                 "entries": { |                 "entries": { | ||||||
|                     "anyOf": [ |                     "type": "array", | ||||||
|                         { |                     "items": { | ||||||
|                             "type": "array", |                         "oneOf": [], | ||||||
|                             "items": {"$ref": "#/$defs/blueprint_entry"}, |                     }, | ||||||
|                         }, |  | ||||||
|                         { |  | ||||||
|                             "type": "object", |  | ||||||
|                             "additionalProperties": { |  | ||||||
|                                 "type": "array", |  | ||||||
|                                 "items": {"$ref": "#/$defs/blueprint_entry"}, |  | ||||||
|                             }, |  | ||||||
|                         }, |  | ||||||
|                     ], |  | ||||||
|                 }, |                 }, | ||||||
|             }, |             }, | ||||||
|             "$defs": {"blueprint_entry": {"oneOf": []}}, |             "$defs": {}, | ||||||
|         } |         } | ||||||
|  |  | ||||||
|     def add_arguments(self, parser): |  | ||||||
|         parser.add_argument("--file", type=str) |  | ||||||
|  |  | ||||||
|     @no_translations |     @no_translations | ||||||
|     def handle(self, *args, file: str, **options): |     def handle(self, *args, **options): | ||||||
|         """Generate JSON Schema for blueprints""" |         """Generate JSON Schema for blueprints""" | ||||||
|         self.build() |         self.build() | ||||||
|         with open(file, "w") as _schema: |         self.stdout.write(dumps(self.schema, indent=4, default=Command.json_default)) | ||||||
|             _schema.write(dumps(self.schema, indent=4, default=Command.json_default)) |  | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     def json_default(value: Any) -> Any: |     def json_default(value: Any) -> Any: | ||||||
| @ -125,7 +112,7 @@ class Command(BaseCommand): | |||||||
|                 } |                 } | ||||||
|             ) |             ) | ||||||
|             model_path = f"{model._meta.app_label}.{model._meta.model_name}" |             model_path = f"{model._meta.app_label}.{model._meta.model_name}" | ||||||
|             self.schema["$defs"]["blueprint_entry"]["oneOf"].append( |             self.schema["properties"]["entries"]["items"]["oneOf"].append( | ||||||
|                 self.template_entry(model_path, model, serializer) |                 self.template_entry(model_path, model, serializer) | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
| @ -147,7 +134,7 @@ class Command(BaseCommand): | |||||||
|                 "id": {"type": "string"}, |                 "id": {"type": "string"}, | ||||||
|                 "state": { |                 "state": { | ||||||
|                     "type": "string", |                     "type": "string", | ||||||
|                     "enum": sorted([s.value for s in BlueprintEntryDesiredState]), |                     "enum": [s.value for s in BlueprintEntryDesiredState], | ||||||
|                     "default": "present", |                     "default": "present", | ||||||
|                 }, |                 }, | ||||||
|                 "conditions": {"type": "array", "items": {"type": "boolean"}}, |                 "conditions": {"type": "array", "items": {"type": "boolean"}}, | ||||||
| @ -218,7 +205,7 @@ class Command(BaseCommand): | |||||||
|                 "type": "object", |                 "type": "object", | ||||||
|                 "required": ["permission"], |                 "required": ["permission"], | ||||||
|                 "properties": { |                 "properties": { | ||||||
|                     "permission": {"type": "string", "enum": sorted(perms)}, |                     "permission": {"type": "string", "enum": perms}, | ||||||
|                     "user": {"type": "integer"}, |                     "user": {"type": "integer"}, | ||||||
|                     "role": {"type": "string"}, |                     "role": {"type": "string"}, | ||||||
|                 }, |                 }, | ||||||
|  | |||||||
| @ -3,7 +3,6 @@ | |||||||
| from pathlib import Path | from pathlib import Path | ||||||
| from uuid import uuid4 | from uuid import uuid4 | ||||||
|  |  | ||||||
| from django.contrib.contenttypes.fields import GenericRelation |  | ||||||
| from django.contrib.postgres.fields import ArrayField | from django.contrib.postgres.fields import ArrayField | ||||||
| from django.db import models | from django.db import models | ||||||
| from django.utils.translation import gettext_lazy as _ | from django.utils.translation import gettext_lazy as _ | ||||||
| @ -72,13 +71,6 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel): | |||||||
|     enabled = models.BooleanField(default=True) |     enabled = models.BooleanField(default=True) | ||||||
|     managed_models = ArrayField(models.TextField(), default=list) |     managed_models = ArrayField(models.TextField(), default=list) | ||||||
|  |  | ||||||
|     # Manual link to tasks instead of using TasksModel because of loop imports |  | ||||||
|     tasks = GenericRelation( |  | ||||||
|         "authentik_tasks.Task", |  | ||||||
|         content_type_field="rel_obj_content_type", |  | ||||||
|         object_id_field="rel_obj_id", |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|         verbose_name = _("Blueprint Instance") |         verbose_name = _("Blueprint Instance") | ||||||
|         verbose_name_plural = _("Blueprint Instances") |         verbose_name_plural = _("Blueprint Instances") | ||||||
|  | |||||||
							
								
								
									
										18
									
								
								authentik/blueprints/settings.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										18
									
								
								authentik/blueprints/settings.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,18 @@ | |||||||
|  | """blueprint Settings""" | ||||||
|  |  | ||||||
|  | from celery.schedules import crontab | ||||||
|  |  | ||||||
|  | from authentik.lib.utils.time import fqdn_rand | ||||||
|  |  | ||||||
|  | CELERY_BEAT_SCHEDULE = { | ||||||
|  |     "blueprints_v1_discover": { | ||||||
|  |         "task": "authentik.blueprints.v1.tasks.blueprints_discovery", | ||||||
|  |         "schedule": crontab(minute=fqdn_rand("blueprints_v1_discover"), hour="*"), | ||||||
|  |         "options": {"queue": "authentik_scheduled"}, | ||||||
|  |     }, | ||||||
|  |     "blueprints_v1_cleanup": { | ||||||
|  |         "task": "authentik.blueprints.v1.tasks.clear_failed_blueprints", | ||||||
|  |         "schedule": crontab(minute=fqdn_rand("blueprints_v1_cleanup"), hour="*"), | ||||||
|  |         "options": {"queue": "authentik_scheduled"}, | ||||||
|  |     }, | ||||||
|  | } | ||||||
| @ -1,2 +0,0 @@ | |||||||
| # Import all v1 tasks for auto task discovery |  | ||||||
| from authentik.blueprints.v1.tasks import *  # noqa: F403 |  | ||||||
| @ -1,11 +1,10 @@ | |||||||
| version: 1 | version: 1 | ||||||
| entries: | entries: | ||||||
|   foo: |     - identifiers: | ||||||
|       - identifiers: |           name: "%(id)s" | ||||||
|             name: "%(id)s" |           slug: "%(id)s" | ||||||
|             slug: "%(id)s" |       model: authentik_flows.flow | ||||||
|         model: authentik_flows.flow |       state: present | ||||||
|         state: present |       attrs: | ||||||
|         attrs: |           designation: stage_configuration | ||||||
|             designation: stage_configuration |           title: foo | ||||||
|             title: foo |  | ||||||
|  | |||||||
| @ -37,7 +37,6 @@ entries: | |||||||
|     - attrs: |     - attrs: | ||||||
|           attributes: |           attributes: | ||||||
|               env_null: !Env [bar-baz, null] |               env_null: !Env [bar-baz, null] | ||||||
|               json_parse: !ParseJSON '{"foo": "bar"}' |  | ||||||
|               policy_pk1: |               policy_pk1: | ||||||
|                   !Format [ |                   !Format [ | ||||||
|                       "%s-%s", |                       "%s-%s", | ||||||
|  | |||||||
| @ -1,14 +0,0 @@ | |||||||
| from django.test import TestCase |  | ||||||
|  |  | ||||||
| from authentik.blueprints.apps import ManagedAppConfig |  | ||||||
| from authentik.enterprise.apps import EnterpriseConfig |  | ||||||
| from authentik.lib.utils.reflection import get_apps |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestManagedAppConfig(TestCase): |  | ||||||
|     def test_apps_use_managed_app_config(self): |  | ||||||
|         for app in get_apps(): |  | ||||||
|             if app.name.startswith("authentik.enterprise"): |  | ||||||
|                 self.assertIn(EnterpriseConfig, app.__class__.__bases__) |  | ||||||
|             else: |  | ||||||
|                 self.assertIn(ManagedAppConfig, app.__class__.__bases__) |  | ||||||
| @ -35,6 +35,6 @@ def blueprint_tester(file_name: Path) -> Callable: | |||||||
|  |  | ||||||
|  |  | ||||||
| for blueprint_file in Path("blueprints/").glob("**/*.yaml"): | for blueprint_file in Path("blueprints/").glob("**/*.yaml"): | ||||||
|     if "local" in str(blueprint_file) or "testing" in str(blueprint_file): |     if "local" in str(blueprint_file): | ||||||
|         continue |         continue | ||||||
|     setattr(TestPackaged, f"test_blueprint_{blueprint_file}", blueprint_tester(blueprint_file)) |     setattr(TestPackaged, f"test_blueprint_{blueprint_file}", blueprint_tester(blueprint_file)) | ||||||
|  | |||||||
| @ -5,6 +5,7 @@ from collections.abc import Callable | |||||||
| from django.apps import apps | from django.apps import apps | ||||||
| from django.test import TestCase | from django.test import TestCase | ||||||
|  |  | ||||||
|  | from authentik.blueprints.v1.importer import is_model_allowed | ||||||
| from authentik.lib.models import SerializerModel | from authentik.lib.models import SerializerModel | ||||||
| from authentik.providers.oauth2.models import RefreshToken | from authentik.providers.oauth2.models import RefreshToken | ||||||
|  |  | ||||||
| @ -21,13 +22,10 @@ def serializer_tester_factory(test_model: type[SerializerModel]) -> Callable: | |||||||
|             return |             return | ||||||
|         model_class = test_model() |         model_class = test_model() | ||||||
|         self.assertTrue(isinstance(model_class, SerializerModel)) |         self.assertTrue(isinstance(model_class, SerializerModel)) | ||||||
|         # Models that have subclasses don't have to have a serializer |  | ||||||
|         if len(test_model.__subclasses__()) > 0: |  | ||||||
|             return |  | ||||||
|         self.assertIsNotNone(model_class.serializer) |         self.assertIsNotNone(model_class.serializer) | ||||||
|         if model_class.serializer.Meta().model == RefreshToken: |         if model_class.serializer.Meta().model == RefreshToken: | ||||||
|             return |             return | ||||||
|         self.assertTrue(issubclass(test_model, model_class.serializer.Meta().model)) |         self.assertEqual(model_class.serializer.Meta().model, test_model) | ||||||
|  |  | ||||||
|     return tester |     return tester | ||||||
|  |  | ||||||
| @ -36,6 +34,6 @@ for app in apps.get_app_configs(): | |||||||
|     if not app.label.startswith("authentik"): |     if not app.label.startswith("authentik"): | ||||||
|         continue |         continue | ||||||
|     for model in app.get_models(): |     for model in app.get_models(): | ||||||
|         if not issubclass(model, SerializerModel): |         if not is_model_allowed(model): | ||||||
|             continue |             continue | ||||||
|         setattr(TestModels, f"test_{app.label}_{model.__name__}", serializer_tester_factory(model)) |         setattr(TestModels, f"test_{app.label}_{model.__name__}", serializer_tester_factory(model)) | ||||||
|  | |||||||
| @ -215,7 +215,6 @@ class TestBlueprintsV1(TransactionTestCase): | |||||||
|                     }, |                     }, | ||||||
|                     "nested_context": "context-nested-value", |                     "nested_context": "context-nested-value", | ||||||
|                     "env_null": None, |                     "env_null": None, | ||||||
|                     "json_parse": {"foo": "bar"}, |  | ||||||
|                     "at_index_sequence": "foo", |                     "at_index_sequence": "foo", | ||||||
|                     "at_index_sequence_default": "non existent", |                     "at_index_sequence_default": "non existent", | ||||||
|                     "at_index_mapping": 2, |                     "at_index_mapping": 2, | ||||||
|  | |||||||
| @ -54,7 +54,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase): | |||||||
|             file.seek(0) |             file.seek(0) | ||||||
|             file_hash = sha512(file.read().encode()).hexdigest() |             file_hash = sha512(file.read().encode()).hexdigest() | ||||||
|             file.flush() |             file.flush() | ||||||
|             blueprints_discovery.send() |             blueprints_discovery() | ||||||
|             instance = BlueprintInstance.objects.filter(name=blueprint_id).first() |             instance = BlueprintInstance.objects.filter(name=blueprint_id).first() | ||||||
|             self.assertEqual(instance.last_applied_hash, file_hash) |             self.assertEqual(instance.last_applied_hash, file_hash) | ||||||
|             self.assertEqual( |             self.assertEqual( | ||||||
| @ -82,7 +82,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase): | |||||||
|                 ) |                 ) | ||||||
|             ) |             ) | ||||||
|             file.flush() |             file.flush() | ||||||
|             blueprints_discovery.send() |             blueprints_discovery() | ||||||
|             blueprint = BlueprintInstance.objects.filter(name="foo").first() |             blueprint = BlueprintInstance.objects.filter(name="foo").first() | ||||||
|             self.assertEqual( |             self.assertEqual( | ||||||
|                 blueprint.last_applied_hash, |                 blueprint.last_applied_hash, | ||||||
| @ -107,7 +107,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase): | |||||||
|                 ) |                 ) | ||||||
|             ) |             ) | ||||||
|             file.flush() |             file.flush() | ||||||
|             blueprints_discovery.send() |             blueprints_discovery() | ||||||
|             blueprint.refresh_from_db() |             blueprint.refresh_from_db() | ||||||
|             self.assertEqual( |             self.assertEqual( | ||||||
|                 blueprint.last_applied_hash, |                 blueprint.last_applied_hash, | ||||||
|  | |||||||
| @ -6,7 +6,6 @@ from copy import copy | |||||||
| from dataclasses import asdict, dataclass, field, is_dataclass | from dataclasses import asdict, dataclass, field, is_dataclass | ||||||
| from enum import Enum | from enum import Enum | ||||||
| from functools import reduce | from functools import reduce | ||||||
| from json import JSONDecodeError, loads |  | ||||||
| from operator import ixor | from operator import ixor | ||||||
| from os import getenv | from os import getenv | ||||||
| from typing import Any, Literal, Union | from typing import Any, Literal, Union | ||||||
| @ -165,7 +164,9 @@ class BlueprintEntry: | |||||||
|         """Get the blueprint model, with yaml tags resolved if present""" |         """Get the blueprint model, with yaml tags resolved if present""" | ||||||
|         return str(self.tag_resolver(self.model, blueprint)) |         return str(self.tag_resolver(self.model, blueprint)) | ||||||
|  |  | ||||||
|     def get_permissions(self, blueprint: "Blueprint") -> Generator[BlueprintEntryPermission]: |     def get_permissions( | ||||||
|  |         self, blueprint: "Blueprint" | ||||||
|  |     ) -> Generator[BlueprintEntryPermission, None, None]: | ||||||
|         """Get permissions of this entry, with all yaml tags resolved""" |         """Get permissions of this entry, with all yaml tags resolved""" | ||||||
|         for perm in self.permissions: |         for perm in self.permissions: | ||||||
|             yield BlueprintEntryPermission( |             yield BlueprintEntryPermission( | ||||||
| @ -192,18 +193,11 @@ class Blueprint: | |||||||
|     """Dataclass used for a full export""" |     """Dataclass used for a full export""" | ||||||
|  |  | ||||||
|     version: int = field(default=1) |     version: int = field(default=1) | ||||||
|     entries: list[BlueprintEntry] | dict[str, list[BlueprintEntry]] = field(default_factory=list) |     entries: list[BlueprintEntry] = field(default_factory=list) | ||||||
|     context: dict = field(default_factory=dict) |     context: dict = field(default_factory=dict) | ||||||
|  |  | ||||||
|     metadata: BlueprintMetadata | None = field(default=None) |     metadata: BlueprintMetadata | None = field(default=None) | ||||||
|  |  | ||||||
|     def iter_entries(self) -> Iterable[BlueprintEntry]: |  | ||||||
|         if isinstance(self.entries, dict): |  | ||||||
|             for _section, entries in self.entries.items(): |  | ||||||
|                 yield from entries |  | ||||||
|         else: |  | ||||||
|             yield from self.entries |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class YAMLTag: | class YAMLTag: | ||||||
|     """Base class for all YAML Tags""" |     """Base class for all YAML Tags""" | ||||||
| @ -234,7 +228,7 @@ class KeyOf(YAMLTag): | |||||||
|         self.id_from = node.value |         self.id_from = node.value | ||||||
|  |  | ||||||
|     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: |     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: | ||||||
|         for _entry in blueprint.iter_entries(): |         for _entry in blueprint.entries: | ||||||
|             if _entry.id == self.id_from and _entry._state.instance: |             if _entry.id == self.id_from and _entry._state.instance: | ||||||
|                 # Special handling for PolicyBindingModels, as they'll have a different PK |                 # Special handling for PolicyBindingModels, as they'll have a different PK | ||||||
|                 # which is used when creating policy bindings |                 # which is used when creating policy bindings | ||||||
| @ -292,22 +286,6 @@ class Context(YAMLTag): | |||||||
|         return value |         return value | ||||||
|  |  | ||||||
|  |  | ||||||
| class ParseJSON(YAMLTag): |  | ||||||
|     """Parse JSON from context/env/etc value""" |  | ||||||
|  |  | ||||||
|     raw: str |  | ||||||
|  |  | ||||||
|     def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None: |  | ||||||
|         super().__init__() |  | ||||||
|         self.raw = node.value |  | ||||||
|  |  | ||||||
|     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: |  | ||||||
|         try: |  | ||||||
|             return loads(self.raw) |  | ||||||
|         except JSONDecodeError as exc: |  | ||||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Format(YAMLTag): | class Format(YAMLTag): | ||||||
|     """Format a string""" |     """Format a string""" | ||||||
|  |  | ||||||
| @ -683,7 +661,6 @@ class BlueprintLoader(SafeLoader): | |||||||
|         self.add_constructor("!Value", Value) |         self.add_constructor("!Value", Value) | ||||||
|         self.add_constructor("!Index", Index) |         self.add_constructor("!Index", Index) | ||||||
|         self.add_constructor("!AtIndex", AtIndex) |         self.add_constructor("!AtIndex", AtIndex) | ||||||
|         self.add_constructor("!ParseJSON", ParseJSON) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class EntryInvalidError(SentryIgnoredException): | class EntryInvalidError(SentryIgnoredException): | ||||||
|  | |||||||
| @ -36,7 +36,6 @@ from authentik.core.models import ( | |||||||
|     GroupSourceConnection, |     GroupSourceConnection, | ||||||
|     PropertyMapping, |     PropertyMapping, | ||||||
|     Provider, |     Provider, | ||||||
|     Session, |  | ||||||
|     Source, |     Source, | ||||||
|     User, |     User, | ||||||
|     UserSourceConnection, |     UserSourceConnection, | ||||||
| @ -57,6 +56,7 @@ from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import ( | |||||||
|     EndpointDeviceConnection, |     EndpointDeviceConnection, | ||||||
| ) | ) | ||||||
| from authentik.events.logs import LogEvent, capture_logs | from authentik.events.logs import LogEvent, capture_logs | ||||||
|  | from authentik.events.models import SystemTask | ||||||
| from authentik.events.utils import cleanse_dict | from authentik.events.utils import cleanse_dict | ||||||
| from authentik.flows.models import FlowToken, Stage | from authentik.flows.models import FlowToken, Stage | ||||||
| from authentik.lib.models import SerializerModel | from authentik.lib.models import SerializerModel | ||||||
| @ -76,7 +76,6 @@ from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser | |||||||
| from authentik.rbac.models import Role | from authentik.rbac.models import Role | ||||||
| from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser | from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser | ||||||
| from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType | from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType | ||||||
| from authentik.tasks.models import Task |  | ||||||
| from authentik.tenants.models import Tenant | from authentik.tenants.models import Tenant | ||||||
|  |  | ||||||
| # Context set when the serializer is created in a blueprint context | # Context set when the serializer is created in a blueprint context | ||||||
| @ -109,7 +108,6 @@ def excluded_models() -> list[type[Model]]: | |||||||
|         Policy, |         Policy, | ||||||
|         PolicyBindingModel, |         PolicyBindingModel, | ||||||
|         # Classes that have other dependencies |         # Classes that have other dependencies | ||||||
|         Session, |  | ||||||
|         AuthenticatedSession, |         AuthenticatedSession, | ||||||
|         # Classes which are only internally managed |         # Classes which are only internally managed | ||||||
|         # FIXME: these shouldn't need to be explicitly listed, but rather based off of a mixin |         # FIXME: these shouldn't need to be explicitly listed, but rather based off of a mixin | ||||||
| @ -118,7 +116,7 @@ def excluded_models() -> list[type[Model]]: | |||||||
|         SCIMProviderGroup, |         SCIMProviderGroup, | ||||||
|         SCIMProviderUser, |         SCIMProviderUser, | ||||||
|         Tenant, |         Tenant, | ||||||
|         Task, |         SystemTask, | ||||||
|         ConnectionToken, |         ConnectionToken, | ||||||
|         AuthorizationCode, |         AuthorizationCode, | ||||||
|         AccessToken, |         AccessToken, | ||||||
| @ -384,7 +382,7 @@ class Importer: | |||||||
|     def _apply_models(self, raise_errors=False) -> bool: |     def _apply_models(self, raise_errors=False) -> bool: | ||||||
|         """Apply (create/update) models yaml""" |         """Apply (create/update) models yaml""" | ||||||
|         self.__pk_map = {} |         self.__pk_map = {} | ||||||
|         for entry in self._import.iter_entries(): |         for entry in self._import.entries: | ||||||
|             model_app_label, model_name = entry.get_model(self._import).split(".") |             model_app_label, model_name = entry.get_model(self._import).split(".") | ||||||
|             try: |             try: | ||||||
|                 model: type[SerializerModel] = registry.get_model(model_app_label, model_name) |                 model: type[SerializerModel] = registry.get_model(model_app_label, model_name) | ||||||
|  | |||||||
| @ -44,7 +44,7 @@ class ApplyBlueprintMetaSerializer(PassiveSerializer): | |||||||
|             return MetaResult() |             return MetaResult() | ||||||
|         LOGGER.debug("Applying blueprint from meta model", blueprint=self.blueprint_instance) |         LOGGER.debug("Applying blueprint from meta model", blueprint=self.blueprint_instance) | ||||||
|  |  | ||||||
|         apply_blueprint(self.blueprint_instance.pk) |         apply_blueprint(str(self.blueprint_instance.pk)) | ||||||
|         return MetaResult() |         return MetaResult() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
| @ -47,7 +47,7 @@ class MetaModelRegistry: | |||||||
|         models = apps.get_models() |         models = apps.get_models() | ||||||
|         for _, value in self.models.items(): |         for _, value in self.models.items(): | ||||||
|             models.append(value) |             models.append(value) | ||||||
|         return sorted(models, key=str) |         return models | ||||||
|  |  | ||||||
|     def get_model(self, app_label: str, model_id: str) -> type[Model]: |     def get_model(self, app_label: str, model_id: str) -> type[Model]: | ||||||
|         """Get model checks if any virtual models are registered, and falls back |         """Get model checks if any virtual models are registered, and falls back | ||||||
|  | |||||||
| @ -4,17 +4,12 @@ from dataclasses import asdict, dataclass, field | |||||||
| from hashlib import sha512 | from hashlib import sha512 | ||||||
| from pathlib import Path | from pathlib import Path | ||||||
| from sys import platform | from sys import platform | ||||||
| from uuid import UUID |  | ||||||
|  |  | ||||||
| from dacite.core import from_dict | from dacite.core import from_dict | ||||||
| from django.conf import settings |  | ||||||
| from django.db import DatabaseError, InternalError, ProgrammingError | from django.db import DatabaseError, InternalError, ProgrammingError | ||||||
| from django.utils.text import slugify | from django.utils.text import slugify | ||||||
| from django.utils.timezone import now | from django.utils.timezone import now | ||||||
| from django.utils.translation import gettext_lazy as _ | from django.utils.translation import gettext_lazy as _ | ||||||
| from django_dramatiq_postgres.middleware import CurrentTask, CurrentTaskNotFound |  | ||||||
| from dramatiq.actor import actor |  | ||||||
| from dramatiq.middleware import Middleware |  | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
| from watchdog.events import ( | from watchdog.events import ( | ||||||
|     FileCreatedEvent, |     FileCreatedEvent, | ||||||
| @ -36,13 +31,15 @@ from authentik.blueprints.v1.importer import Importer | |||||||
| from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE | from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE | ||||||
| from authentik.blueprints.v1.oci import OCI_PREFIX | from authentik.blueprints.v1.oci import OCI_PREFIX | ||||||
| from authentik.events.logs import capture_logs | from authentik.events.logs import capture_logs | ||||||
|  | from authentik.events.models import TaskStatus | ||||||
|  | from authentik.events.system_tasks import SystemTask, prefill_task | ||||||
| from authentik.events.utils import sanitize_dict | from authentik.events.utils import sanitize_dict | ||||||
| from authentik.lib.config import CONFIG | from authentik.lib.config import CONFIG | ||||||
| from authentik.tasks.models import Task | from authentik.root.celery import CELERY_APP | ||||||
| from authentik.tasks.schedules.models import Schedule |  | ||||||
| from authentik.tenants.models import Tenant | from authentik.tenants.models import Tenant | ||||||
|  |  | ||||||
| LOGGER = get_logger() | LOGGER = get_logger() | ||||||
|  | _file_watcher_started = False | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass | @dataclass | ||||||
| @ -56,21 +53,22 @@ class BlueprintFile: | |||||||
|     meta: BlueprintMetadata | None = field(default=None) |     meta: BlueprintMetadata | None = field(default=None) | ||||||
|  |  | ||||||
|  |  | ||||||
| class BlueprintWatcherMiddleware(Middleware): | def start_blueprint_watcher(): | ||||||
|     def start_blueprint_watcher(self): |     """Start blueprint watcher, if it's not running already.""" | ||||||
|         """Start blueprint watcher""" |     # This function might be called twice since it's called on celery startup | ||||||
|         observer = Observer() |  | ||||||
|         kwargs = {} |  | ||||||
|         if platform.startswith("linux"): |  | ||||||
|             kwargs["event_filter"] = (FileCreatedEvent, FileModifiedEvent) |  | ||||||
|         observer.schedule( |  | ||||||
|             BlueprintEventHandler(), CONFIG.get("blueprints_dir"), recursive=True, **kwargs |  | ||||||
|         ) |  | ||||||
|         observer.start() |  | ||||||
|  |  | ||||||
|     def after_worker_boot(self, broker, worker): |     global _file_watcher_started  # noqa: PLW0603 | ||||||
|         if not settings.TEST: |     if _file_watcher_started: | ||||||
|             self.start_blueprint_watcher() |         return | ||||||
|  |     observer = Observer() | ||||||
|  |     kwargs = {} | ||||||
|  |     if platform.startswith("linux"): | ||||||
|  |         kwargs["event_filter"] = (FileCreatedEvent, FileModifiedEvent) | ||||||
|  |     observer.schedule( | ||||||
|  |         BlueprintEventHandler(), CONFIG.get("blueprints_dir"), recursive=True, **kwargs | ||||||
|  |     ) | ||||||
|  |     observer.start() | ||||||
|  |     _file_watcher_started = True | ||||||
|  |  | ||||||
|  |  | ||||||
| class BlueprintEventHandler(FileSystemEventHandler): | class BlueprintEventHandler(FileSystemEventHandler): | ||||||
| @ -94,7 +92,7 @@ class BlueprintEventHandler(FileSystemEventHandler): | |||||||
|         LOGGER.debug("new blueprint file created, starting discovery") |         LOGGER.debug("new blueprint file created, starting discovery") | ||||||
|         for tenant in Tenant.objects.filter(ready=True): |         for tenant in Tenant.objects.filter(ready=True): | ||||||
|             with tenant: |             with tenant: | ||||||
|                 Schedule.dispatch_by_actor(blueprints_discovery) |                 blueprints_discovery.delay() | ||||||
|  |  | ||||||
|     def on_modified(self, event: FileSystemEvent): |     def on_modified(self, event: FileSystemEvent): | ||||||
|         """Process file modification""" |         """Process file modification""" | ||||||
| @ -105,14 +103,14 @@ class BlueprintEventHandler(FileSystemEventHandler): | |||||||
|             with tenant: |             with tenant: | ||||||
|                 for instance in BlueprintInstance.objects.filter(path=rel_path, enabled=True): |                 for instance in BlueprintInstance.objects.filter(path=rel_path, enabled=True): | ||||||
|                     LOGGER.debug("modified blueprint file, starting apply", instance=instance) |                     LOGGER.debug("modified blueprint file, starting apply", instance=instance) | ||||||
|                     apply_blueprint.send_with_options(args=(instance.pk,), rel_obj=instance) |                     apply_blueprint.delay(instance.pk.hex) | ||||||
|  |  | ||||||
|  |  | ||||||
| @actor( | @CELERY_APP.task( | ||||||
|     description=_("Find blueprints as `blueprints_find` does, but return a safe dict."), |  | ||||||
|     throws=(DatabaseError, ProgrammingError, InternalError), |     throws=(DatabaseError, ProgrammingError, InternalError), | ||||||
| ) | ) | ||||||
| def blueprints_find_dict(): | def blueprints_find_dict(): | ||||||
|  |     """Find blueprints as `blueprints_find` does, but return a safe dict""" | ||||||
|     blueprints = [] |     blueprints = [] | ||||||
|     for blueprint in blueprints_find(): |     for blueprint in blueprints_find(): | ||||||
|         blueprints.append(sanitize_dict(asdict(blueprint))) |         blueprints.append(sanitize_dict(asdict(blueprint))) | ||||||
| @ -148,19 +146,21 @@ def blueprints_find() -> list[BlueprintFile]: | |||||||
|     return blueprints |     return blueprints | ||||||
|  |  | ||||||
|  |  | ||||||
| @actor( | @CELERY_APP.task( | ||||||
|     description=_("Find blueprints and check if they need to be created in the database."), |     throws=(DatabaseError, ProgrammingError, InternalError), base=SystemTask, bind=True | ||||||
|     throws=(DatabaseError, ProgrammingError, InternalError), |  | ||||||
| ) | ) | ||||||
| def blueprints_discovery(path: str | None = None): | @prefill_task | ||||||
|     self: Task = CurrentTask.get_task() | def blueprints_discovery(self: SystemTask, path: str | None = None): | ||||||
|  |     """Find blueprints and check if they need to be created in the database""" | ||||||
|     count = 0 |     count = 0 | ||||||
|     for blueprint in blueprints_find(): |     for blueprint in blueprints_find(): | ||||||
|         if path and blueprint.path != path: |         if path and blueprint.path != path: | ||||||
|             continue |             continue | ||||||
|         check_blueprint_v1_file(blueprint) |         check_blueprint_v1_file(blueprint) | ||||||
|         count += 1 |         count += 1 | ||||||
|     self.info(f"Successfully imported {count} files.") |     self.set_status( | ||||||
|  |         TaskStatus.SUCCESSFUL, _("Successfully imported {count} files.".format(count=count)) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
| def check_blueprint_v1_file(blueprint: BlueprintFile): | def check_blueprint_v1_file(blueprint: BlueprintFile): | ||||||
| @ -187,26 +187,22 @@ def check_blueprint_v1_file(blueprint: BlueprintFile): | |||||||
|         ) |         ) | ||||||
|     if instance.last_applied_hash != blueprint.hash: |     if instance.last_applied_hash != blueprint.hash: | ||||||
|         LOGGER.info("Applying blueprint due to changed file", instance=instance, path=instance.path) |         LOGGER.info("Applying blueprint due to changed file", instance=instance, path=instance.path) | ||||||
|         apply_blueprint.send_with_options(args=(instance.pk,), rel_obj=instance) |         apply_blueprint.delay(str(instance.pk)) | ||||||
|  |  | ||||||
|  |  | ||||||
| @actor(description=_("Apply single blueprint.")) | @CELERY_APP.task( | ||||||
| def apply_blueprint(instance_pk: UUID): |     bind=True, | ||||||
|     try: |     base=SystemTask, | ||||||
|         self: Task = CurrentTask.get_task() | ) | ||||||
|     except CurrentTaskNotFound: | def apply_blueprint(self: SystemTask, instance_pk: str): | ||||||
|         self = Task() |     """Apply single blueprint""" | ||||||
|     self.set_uid(str(instance_pk)) |     self.save_on_success = False | ||||||
|     instance: BlueprintInstance | None = None |     instance: BlueprintInstance | None = None | ||||||
|     try: |     try: | ||||||
|         instance: BlueprintInstance = BlueprintInstance.objects.filter(pk=instance_pk).first() |         instance: BlueprintInstance = BlueprintInstance.objects.filter(pk=instance_pk).first() | ||||||
|         if not instance: |         if not instance or not instance.enabled: | ||||||
|             self.warning(f"Could not find blueprint {instance_pk}, skipping") |  | ||||||
|             return |             return | ||||||
|         self.set_uid(slugify(instance.name)) |         self.set_uid(slugify(instance.name)) | ||||||
|         if not instance.enabled: |  | ||||||
|             self.info(f"Blueprint {instance.name} is disabled, skipping") |  | ||||||
|             return |  | ||||||
|         blueprint_content = instance.retrieve() |         blueprint_content = instance.retrieve() | ||||||
|         file_hash = sha512(blueprint_content.encode()).hexdigest() |         file_hash = sha512(blueprint_content.encode()).hexdigest() | ||||||
|         importer = Importer.from_string(blueprint_content, instance.context) |         importer = Importer.from_string(blueprint_content, instance.context) | ||||||
| @ -216,18 +212,19 @@ def apply_blueprint(instance_pk: UUID): | |||||||
|         if not valid: |         if not valid: | ||||||
|             instance.status = BlueprintInstanceStatus.ERROR |             instance.status = BlueprintInstanceStatus.ERROR | ||||||
|             instance.save() |             instance.save() | ||||||
|             self.logs(logs) |             self.set_status(TaskStatus.ERROR, *logs) | ||||||
|             return |             return | ||||||
|         with capture_logs() as logs: |         with capture_logs() as logs: | ||||||
|             applied = importer.apply() |             applied = importer.apply() | ||||||
|             if not applied: |             if not applied: | ||||||
|                 instance.status = BlueprintInstanceStatus.ERROR |                 instance.status = BlueprintInstanceStatus.ERROR | ||||||
|                 instance.save() |                 instance.save() | ||||||
|                 self.logs(logs) |                 self.set_status(TaskStatus.ERROR, *logs) | ||||||
|                 return |                 return | ||||||
|         instance.status = BlueprintInstanceStatus.SUCCESSFUL |         instance.status = BlueprintInstanceStatus.SUCCESSFUL | ||||||
|         instance.last_applied_hash = file_hash |         instance.last_applied_hash = file_hash | ||||||
|         instance.last_applied = now() |         instance.last_applied = now() | ||||||
|  |         self.set_status(TaskStatus.SUCCESSFUL) | ||||||
|     except ( |     except ( | ||||||
|         OSError, |         OSError, | ||||||
|         DatabaseError, |         DatabaseError, | ||||||
| @ -238,14 +235,15 @@ def apply_blueprint(instance_pk: UUID): | |||||||
|     ) as exc: |     ) as exc: | ||||||
|         if instance: |         if instance: | ||||||
|             instance.status = BlueprintInstanceStatus.ERROR |             instance.status = BlueprintInstanceStatus.ERROR | ||||||
|         self.error(exc) |         self.set_error(exc) | ||||||
|     finally: |     finally: | ||||||
|         if instance: |         if instance: | ||||||
|             instance.save() |             instance.save() | ||||||
|  |  | ||||||
|  |  | ||||||
| @actor(description=_("Remove blueprints which couldn't be fetched.")) | @CELERY_APP.task() | ||||||
| def clear_failed_blueprints(): | def clear_failed_blueprints(): | ||||||
|  |     """Remove blueprints which couldn't be fetched""" | ||||||
|     # Exclude OCI blueprints as those might be temporarily unavailable |     # Exclude OCI blueprints as those might be temporarily unavailable | ||||||
|     for blueprint in BlueprintInstance.objects.exclude(path__startswith=OCI_PREFIX): |     for blueprint in BlueprintInstance.objects.exclude(path__startswith=OCI_PREFIX): | ||||||
|         try: |         try: | ||||||
|  | |||||||
| @ -49,8 +49,6 @@ class BrandSerializer(ModelSerializer): | |||||||
|             "branding_title", |             "branding_title", | ||||||
|             "branding_logo", |             "branding_logo", | ||||||
|             "branding_favicon", |             "branding_favicon", | ||||||
|             "branding_custom_css", |  | ||||||
|             "branding_default_flow_background", |  | ||||||
|             "flow_authentication", |             "flow_authentication", | ||||||
|             "flow_invalidation", |             "flow_invalidation", | ||||||
|             "flow_recovery", |             "flow_recovery", | ||||||
| @ -59,7 +57,6 @@ class BrandSerializer(ModelSerializer): | |||||||
|             "flow_device_code", |             "flow_device_code", | ||||||
|             "default_application", |             "default_application", | ||||||
|             "web_certificate", |             "web_certificate", | ||||||
|             "client_certificates", |  | ||||||
|             "attributes", |             "attributes", | ||||||
|         ] |         ] | ||||||
|         extra_kwargs = { |         extra_kwargs = { | ||||||
| @ -89,7 +86,6 @@ class CurrentBrandSerializer(PassiveSerializer): | |||||||
|     branding_title = CharField() |     branding_title = CharField() | ||||||
|     branding_logo = CharField(source="branding_logo_url") |     branding_logo = CharField(source="branding_logo_url") | ||||||
|     branding_favicon = CharField(source="branding_favicon_url") |     branding_favicon = CharField(source="branding_favicon_url") | ||||||
|     branding_custom_css = CharField() |  | ||||||
|     ui_footer_links = ListField( |     ui_footer_links = ListField( | ||||||
|         child=FooterLinkSerializer(), |         child=FooterLinkSerializer(), | ||||||
|         read_only=True, |         read_only=True, | ||||||
| @ -121,7 +117,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet): | |||||||
|         "domain", |         "domain", | ||||||
|         "branding_title", |         "branding_title", | ||||||
|         "web_certificate__name", |         "web_certificate__name", | ||||||
|         "client_certificates__name", |  | ||||||
|     ] |     ] | ||||||
|     filterset_fields = [ |     filterset_fields = [ | ||||||
|         "brand_uuid", |         "brand_uuid", | ||||||
| @ -130,7 +125,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet): | |||||||
|         "branding_title", |         "branding_title", | ||||||
|         "branding_logo", |         "branding_logo", | ||||||
|         "branding_favicon", |         "branding_favicon", | ||||||
|         "branding_default_flow_background", |  | ||||||
|         "flow_authentication", |         "flow_authentication", | ||||||
|         "flow_invalidation", |         "flow_invalidation", | ||||||
|         "flow_recovery", |         "flow_recovery", | ||||||
| @ -138,7 +132,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet): | |||||||
|         "flow_user_settings", |         "flow_user_settings", | ||||||
|         "flow_device_code", |         "flow_device_code", | ||||||
|         "web_certificate", |         "web_certificate", | ||||||
|         "client_certificates", |  | ||||||
|     ] |     ] | ||||||
|     ordering = ["domain"] |     ordering = ["domain"] | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,16 +1,14 @@ | |||||||
| """authentik brands app""" | """authentik brands app""" | ||||||
|  |  | ||||||
| from authentik.blueprints.apps import ManagedAppConfig | from django.apps import AppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikBrandsConfig(ManagedAppConfig): | class AuthentikBrandsConfig(AppConfig): | ||||||
|     """authentik Brand app""" |     """authentik Brand app""" | ||||||
|  |  | ||||||
|     name = "authentik.brands" |     name = "authentik.brands" | ||||||
|     label = "authentik_brands" |     label = "authentik_brands" | ||||||
|     verbose_name = "authentik Brands" |     verbose_name = "authentik Brands" | ||||||
|     default = True |  | ||||||
|     mountpoints = { |     mountpoints = { | ||||||
|         "authentik.brands.urls_root": "", |         "authentik.brands.urls_root": "", | ||||||
|     } |     } | ||||||
|     default = True |  | ||||||
|  | |||||||
| @ -1,35 +0,0 @@ | |||||||
| # Generated by Django 5.0.12 on 2025-02-22 01:51 |  | ||||||
|  |  | ||||||
| from pathlib import Path |  | ||||||
| from django.db import migrations, models |  | ||||||
| from django.apps.registry import Apps |  | ||||||
|  |  | ||||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def migrate_custom_css(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): |  | ||||||
|     Brand = apps.get_model("authentik_brands", "brand") |  | ||||||
|  |  | ||||||
|     db_alias = schema_editor.connection.alias |  | ||||||
|  |  | ||||||
|     path = Path("/web/dist/custom.css") |  | ||||||
|     if not path.exists(): |  | ||||||
|         return |  | ||||||
|     css = path.read_text() |  | ||||||
|     Brand.objects.using(db_alias).all().update(branding_custom_css=css) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_brands", "0007_brand_default_application"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="brand", |  | ||||||
|             name="branding_custom_css", |  | ||||||
|             field=models.TextField(blank=True, default=""), |  | ||||||
|         ), |  | ||||||
|         migrations.RunPython(migrate_custom_css), |  | ||||||
|     ] |  | ||||||
| @ -1,18 +0,0 @@ | |||||||
| # Generated by Django 5.0.13 on 2025-03-19 22:54 |  | ||||||
|  |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_brands", "0008_brand_branding_custom_css"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="brand", |  | ||||||
|             name="branding_default_flow_background", |  | ||||||
|             field=models.TextField(default="/static/dist/assets/images/flow_background.jpg"), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -1,37 +0,0 @@ | |||||||
| # Generated by Django 5.1.9 on 2025-05-19 15:09 |  | ||||||
|  |  | ||||||
| import django.db.models.deletion |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_brands", "0009_brand_branding_default_flow_background"), |  | ||||||
|         ("authentik_crypto", "0004_alter_certificatekeypair_name"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="brand", |  | ||||||
|             name="client_certificates", |  | ||||||
|             field=models.ManyToManyField( |  | ||||||
|                 blank=True, |  | ||||||
|                 default=None, |  | ||||||
|                 help_text="Certificates used for client authentication.", |  | ||||||
|                 to="authentik_crypto.certificatekeypair", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AlterField( |  | ||||||
|             model_name="brand", |  | ||||||
|             name="web_certificate", |  | ||||||
|             field=models.ForeignKey( |  | ||||||
|                 default=None, |  | ||||||
|                 help_text="Web Certificate used by the authentik Core webserver.", |  | ||||||
|                 null=True, |  | ||||||
|                 on_delete=django.db.models.deletion.SET_DEFAULT, |  | ||||||
|                 related_name="+", |  | ||||||
|                 to="authentik_crypto.certificatekeypair", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -33,10 +33,6 @@ class Brand(SerializerModel): | |||||||
|  |  | ||||||
|     branding_logo = models.TextField(default="/static/dist/assets/icons/icon_left_brand.svg") |     branding_logo = models.TextField(default="/static/dist/assets/icons/icon_left_brand.svg") | ||||||
|     branding_favicon = models.TextField(default="/static/dist/assets/icons/icon.png") |     branding_favicon = models.TextField(default="/static/dist/assets/icons/icon.png") | ||||||
|     branding_custom_css = models.TextField(default="", blank=True) |  | ||||||
|     branding_default_flow_background = models.TextField( |  | ||||||
|         default="/static/dist/assets/images/flow_background.jpg" |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     flow_authentication = models.ForeignKey( |     flow_authentication = models.ForeignKey( | ||||||
|         Flow, null=True, on_delete=models.SET_NULL, related_name="brand_authentication" |         Flow, null=True, on_delete=models.SET_NULL, related_name="brand_authentication" | ||||||
| @ -73,13 +69,6 @@ class Brand(SerializerModel): | |||||||
|         default=None, |         default=None, | ||||||
|         on_delete=models.SET_DEFAULT, |         on_delete=models.SET_DEFAULT, | ||||||
|         help_text=_("Web Certificate used by the authentik Core webserver."), |         help_text=_("Web Certificate used by the authentik Core webserver."), | ||||||
|         related_name="+", |  | ||||||
|     ) |  | ||||||
|     client_certificates = models.ManyToManyField( |  | ||||||
|         CertificateKeyPair, |  | ||||||
|         default=None, |  | ||||||
|         blank=True, |  | ||||||
|         help_text=_("Certificates used for client authentication."), |  | ||||||
|     ) |     ) | ||||||
|     attributes = models.JSONField(default=dict, blank=True) |     attributes = models.JSONField(default=dict, blank=True) | ||||||
|  |  | ||||||
| @ -95,12 +84,6 @@ class Brand(SerializerModel): | |||||||
|             return CONFIG.get("web.path", "/")[:-1] + self.branding_favicon |             return CONFIG.get("web.path", "/")[:-1] + self.branding_favicon | ||||||
|         return self.branding_favicon |         return self.branding_favicon | ||||||
|  |  | ||||||
|     def branding_default_flow_background_url(self) -> str: |  | ||||||
|         """Get branding_default_flow_background with the correct prefix""" |  | ||||||
|         if self.branding_default_flow_background.startswith("/static"): |  | ||||||
|             return CONFIG.get("web.path", "/")[:-1] + self.branding_default_flow_background |  | ||||||
|         return self.branding_default_flow_background |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def serializer(self) -> Serializer: |     def serializer(self) -> Serializer: | ||||||
|         from authentik.brands.api import BrandSerializer |         from authentik.brands.api import BrandSerializer | ||||||
|  | |||||||
| @ -24,7 +24,6 @@ class TestBrands(APITestCase): | |||||||
|                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", |                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", | ||||||
|                 "branding_favicon": "/static/dist/assets/icons/icon.png", |                 "branding_favicon": "/static/dist/assets/icons/icon.png", | ||||||
|                 "branding_title": "authentik", |                 "branding_title": "authentik", | ||||||
|                 "branding_custom_css": "", |  | ||||||
|                 "matched_domain": brand.domain, |                 "matched_domain": brand.domain, | ||||||
|                 "ui_footer_links": [], |                 "ui_footer_links": [], | ||||||
|                 "ui_theme": Themes.AUTOMATIC, |                 "ui_theme": Themes.AUTOMATIC, | ||||||
| @ -44,7 +43,6 @@ class TestBrands(APITestCase): | |||||||
|                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", |                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", | ||||||
|                 "branding_favicon": "/static/dist/assets/icons/icon.png", |                 "branding_favicon": "/static/dist/assets/icons/icon.png", | ||||||
|                 "branding_title": "custom", |                 "branding_title": "custom", | ||||||
|                 "branding_custom_css": "", |  | ||||||
|                 "matched_domain": "bar.baz", |                 "matched_domain": "bar.baz", | ||||||
|                 "ui_footer_links": [], |                 "ui_footer_links": [], | ||||||
|                 "ui_theme": Themes.AUTOMATIC, |                 "ui_theme": Themes.AUTOMATIC, | ||||||
| @ -61,7 +59,6 @@ class TestBrands(APITestCase): | |||||||
|                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", |                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", | ||||||
|                 "branding_favicon": "/static/dist/assets/icons/icon.png", |                 "branding_favicon": "/static/dist/assets/icons/icon.png", | ||||||
|                 "branding_title": "authentik", |                 "branding_title": "authentik", | ||||||
|                 "branding_custom_css": "", |  | ||||||
|                 "matched_domain": "fallback", |                 "matched_domain": "fallback", | ||||||
|                 "ui_footer_links": [], |                 "ui_footer_links": [], | ||||||
|                 "ui_theme": Themes.AUTOMATIC, |                 "ui_theme": Themes.AUTOMATIC, | ||||||
| @ -124,38 +121,3 @@ class TestBrands(APITestCase): | |||||||
|                 "subject": None, |                 "subject": None, | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     def test_branding_url(self): |  | ||||||
|         """Test branding attributes return correct values""" |  | ||||||
|         brand = create_test_brand() |  | ||||||
|         brand.branding_default_flow_background = "https://goauthentik.io/img/icon.png" |  | ||||||
|         brand.branding_favicon = "https://goauthentik.io/img/icon.png" |  | ||||||
|         brand.branding_logo = "https://goauthentik.io/img/icon.png" |  | ||||||
|         brand.save() |  | ||||||
|         self.assertEqual( |  | ||||||
|             brand.branding_default_flow_background_url(), "https://goauthentik.io/img/icon.png" |  | ||||||
|         ) |  | ||||||
|         self.assertJSONEqual( |  | ||||||
|             self.client.get(reverse("authentik_api:brand-current")).content.decode(), |  | ||||||
|             { |  | ||||||
|                 "branding_logo": "https://goauthentik.io/img/icon.png", |  | ||||||
|                 "branding_favicon": "https://goauthentik.io/img/icon.png", |  | ||||||
|                 "branding_title": "authentik", |  | ||||||
|                 "branding_custom_css": "", |  | ||||||
|                 "matched_domain": brand.domain, |  | ||||||
|                 "ui_footer_links": [], |  | ||||||
|                 "ui_theme": Themes.AUTOMATIC, |  | ||||||
|                 "default_locale": "", |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_custom_css(self): |  | ||||||
|         """Test custom_css""" |  | ||||||
|         brand = create_test_brand() |  | ||||||
|         brand.branding_custom_css = """* { |  | ||||||
|             font-family: "Foo bar"; |  | ||||||
|         }""" |  | ||||||
|         brand.save() |  | ||||||
|         res = self.client.get(reverse("authentik_core:if-user")) |  | ||||||
|         self.assertEqual(res.status_code, 200) |  | ||||||
|         self.assertIn(brand.branding_custom_css, res.content.decode()) |  | ||||||
|  | |||||||
| @ -5,12 +5,10 @@ from typing import Any | |||||||
| from django.db.models import F, Q | from django.db.models import F, Q | ||||||
| from django.db.models import Value as V | from django.db.models import Value as V | ||||||
| from django.http.request import HttpRequest | from django.http.request import HttpRequest | ||||||
| from django.utils.html import _json_script_escapes | from sentry_sdk import get_current_span | ||||||
| from django.utils.safestring import mark_safe |  | ||||||
|  |  | ||||||
| from authentik import get_full_version | from authentik import get_full_version | ||||||
| from authentik.brands.models import Brand | from authentik.brands.models import Brand | ||||||
| from authentik.lib.sentry import get_http_meta |  | ||||||
| from authentik.tenants.models import Tenant | from authentik.tenants.models import Tenant | ||||||
|  |  | ||||||
| _q_default = Q(default=True) | _q_default = Q(default=True) | ||||||
| @ -34,14 +32,13 @@ def context_processor(request: HttpRequest) -> dict[str, Any]: | |||||||
|     """Context Processor that injects brand object into every template""" |     """Context Processor that injects brand object into every template""" | ||||||
|     brand = getattr(request, "brand", DEFAULT_BRAND) |     brand = getattr(request, "brand", DEFAULT_BRAND) | ||||||
|     tenant = getattr(request, "tenant", Tenant()) |     tenant = getattr(request, "tenant", Tenant()) | ||||||
|     # similarly to `json_script` we escape everything HTML-related, however django |     trace = "" | ||||||
|     # only directly exposes this as a function that also wraps it in a <script> tag |     span = get_current_span() | ||||||
|     # which we dont want for CSS |     if span: | ||||||
|     brand_css = mark_safe(str(brand.branding_custom_css).translate(_json_script_escapes))  # nosec |         trace = span.to_traceparent() | ||||||
|     return { |     return { | ||||||
|         "brand": brand, |         "brand": brand, | ||||||
|         "brand_css": brand_css, |  | ||||||
|         "footer_links": tenant.footer_links, |         "footer_links": tenant.footer_links, | ||||||
|         "html_meta": {**get_http_meta()}, |         "sentry_trace": trace, | ||||||
|         "version": get_full_version(), |         "version": get_full_version(), | ||||||
|     } |     } | ||||||
|  | |||||||
| @ -2,9 +2,11 @@ | |||||||
|  |  | ||||||
| from collections.abc import Iterator | from collections.abc import Iterator | ||||||
| from copy import copy | from copy import copy | ||||||
|  | from datetime import timedelta | ||||||
|  |  | ||||||
| from django.core.cache import cache | from django.core.cache import cache | ||||||
| from django.db.models import QuerySet | from django.db.models import QuerySet | ||||||
|  | from django.db.models.functions import ExtractHour | ||||||
| from django.shortcuts import get_object_or_404 | from django.shortcuts import get_object_or_404 | ||||||
| from drf_spectacular.types import OpenApiTypes | from drf_spectacular.types import OpenApiTypes | ||||||
| from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema | from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema | ||||||
| @ -18,6 +20,7 @@ from rest_framework.response import Response | |||||||
| from rest_framework.viewsets import ModelViewSet | from rest_framework.viewsets import ModelViewSet | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
|  | from authentik.admin.api.metrics import CoordinateSerializer | ||||||
| from authentik.api.pagination import Pagination | from authentik.api.pagination import Pagination | ||||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | ||||||
| from authentik.core.api.providers import ProviderSerializer | from authentik.core.api.providers import ProviderSerializer | ||||||
| @ -25,6 +28,7 @@ from authentik.core.api.used_by import UsedByMixin | |||||||
| from authentik.core.api.utils import ModelSerializer | from authentik.core.api.utils import ModelSerializer | ||||||
| from authentik.core.models import Application, User | from authentik.core.models import Application, User | ||||||
| from authentik.events.logs import LogEventSerializer, capture_logs | from authentik.events.logs import LogEventSerializer, capture_logs | ||||||
|  | from authentik.events.models import EventAction | ||||||
| from authentik.lib.utils.file import ( | from authentik.lib.utils.file import ( | ||||||
|     FilePathSerializer, |     FilePathSerializer, | ||||||
|     FileUploadSerializer, |     FileUploadSerializer, | ||||||
| @ -42,7 +46,7 @@ LOGGER = get_logger() | |||||||
|  |  | ||||||
| def user_app_cache_key(user_pk: str, page_number: int | None = None) -> str: | def user_app_cache_key(user_pk: str, page_number: int | None = None) -> str: | ||||||
|     """Cache key where application list for user is saved""" |     """Cache key where application list for user is saved""" | ||||||
|     key = f"{CACHE_PREFIX}app_access/{user_pk}" |     key = f"{CACHE_PREFIX}/app_access/{user_pk}" | ||||||
|     if page_number: |     if page_number: | ||||||
|         key += f"/{page_number}" |         key += f"/{page_number}" | ||||||
|     return key |     return key | ||||||
| @ -317,3 +321,18 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet): | |||||||
|         """Set application icon (as URL)""" |         """Set application icon (as URL)""" | ||||||
|         app: Application = self.get_object() |         app: Application = self.get_object() | ||||||
|         return set_file_url(request, app, "meta_icon") |         return set_file_url(request, app, "meta_icon") | ||||||
|  |  | ||||||
|  |     @permission_required("authentik_core.view_application", ["authentik_events.view_event"]) | ||||||
|  |     @extend_schema(responses={200: CoordinateSerializer(many=True)}) | ||||||
|  |     @action(detail=True, pagination_class=None, filter_backends=[]) | ||||||
|  |     def metrics(self, request: Request, slug: str): | ||||||
|  |         """Metrics for application logins""" | ||||||
|  |         app = self.get_object() | ||||||
|  |         return Response( | ||||||
|  |             get_objects_for_user(request.user, "authentik_events.view_event").filter( | ||||||
|  |                 action=EventAction.AUTHORIZE_APPLICATION, | ||||||
|  |                 context__authorized_application__pk=app.pk.hex, | ||||||
|  |             ) | ||||||
|  |             # 3 data points per day, so 8 hour spans | ||||||
|  |             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||||
|  |         ) | ||||||
|  | |||||||
| @ -5,7 +5,6 @@ from typing import TypedDict | |||||||
| from rest_framework import mixins | from rest_framework import mixins | ||||||
| from rest_framework.fields import SerializerMethodField | from rest_framework.fields import SerializerMethodField | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| from rest_framework.serializers import CharField, DateTimeField, IPAddressField |  | ||||||
| from rest_framework.viewsets import GenericViewSet | from rest_framework.viewsets import GenericViewSet | ||||||
| from ua_parser import user_agent_parser | from ua_parser import user_agent_parser | ||||||
|  |  | ||||||
| @ -55,11 +54,6 @@ class UserAgentDict(TypedDict): | |||||||
| class AuthenticatedSessionSerializer(ModelSerializer): | class AuthenticatedSessionSerializer(ModelSerializer): | ||||||
|     """AuthenticatedSession Serializer""" |     """AuthenticatedSession Serializer""" | ||||||
|  |  | ||||||
|     expires = DateTimeField(source="session.expires", read_only=True) |  | ||||||
|     last_ip = IPAddressField(source="session.last_ip", read_only=True) |  | ||||||
|     last_user_agent = CharField(source="session.last_user_agent", read_only=True) |  | ||||||
|     last_used = DateTimeField(source="session.last_used", read_only=True) |  | ||||||
|  |  | ||||||
|     current = SerializerMethodField() |     current = SerializerMethodField() | ||||||
|     user_agent = SerializerMethodField() |     user_agent = SerializerMethodField() | ||||||
|     geo_ip = SerializerMethodField() |     geo_ip = SerializerMethodField() | ||||||
| @ -68,19 +62,19 @@ class AuthenticatedSessionSerializer(ModelSerializer): | |||||||
|     def get_current(self, instance: AuthenticatedSession) -> bool: |     def get_current(self, instance: AuthenticatedSession) -> bool: | ||||||
|         """Check if session is currently active session""" |         """Check if session is currently active session""" | ||||||
|         request: Request = self.context["request"] |         request: Request = self.context["request"] | ||||||
|         return request._request.session.session_key == instance.session.session_key |         return request._request.session.session_key == instance.session_key | ||||||
|  |  | ||||||
|     def get_user_agent(self, instance: AuthenticatedSession) -> UserAgentDict: |     def get_user_agent(self, instance: AuthenticatedSession) -> UserAgentDict: | ||||||
|         """Get parsed user agent""" |         """Get parsed user agent""" | ||||||
|         return user_agent_parser.Parse(instance.session.last_user_agent) |         return user_agent_parser.Parse(instance.last_user_agent) | ||||||
|  |  | ||||||
|     def get_geo_ip(self, instance: AuthenticatedSession) -> GeoIPDict | None:  # pragma: no cover |     def get_geo_ip(self, instance: AuthenticatedSession) -> GeoIPDict | None:  # pragma: no cover | ||||||
|         """Get GeoIP Data""" |         """Get GeoIP Data""" | ||||||
|         return GEOIP_CONTEXT_PROCESSOR.city_dict(instance.session.last_ip) |         return GEOIP_CONTEXT_PROCESSOR.city_dict(instance.last_ip) | ||||||
|  |  | ||||||
|     def get_asn(self, instance: AuthenticatedSession) -> ASNDict | None:  # pragma: no cover |     def get_asn(self, instance: AuthenticatedSession) -> ASNDict | None:  # pragma: no cover | ||||||
|         """Get ASN Data""" |         """Get ASN Data""" | ||||||
|         return ASN_CONTEXT_PROCESSOR.asn_dict(instance.session.last_ip) |         return ASN_CONTEXT_PROCESSOR.asn_dict(instance.last_ip) | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|         model = AuthenticatedSession |         model = AuthenticatedSession | ||||||
| @ -96,7 +90,6 @@ class AuthenticatedSessionSerializer(ModelSerializer): | |||||||
|             "last_used", |             "last_used", | ||||||
|             "expires", |             "expires", | ||||||
|         ] |         ] | ||||||
|         extra_args = {"uuid": {"read_only": True}} |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthenticatedSessionViewSet( | class AuthenticatedSessionViewSet( | ||||||
| @ -108,10 +101,9 @@ class AuthenticatedSessionViewSet( | |||||||
| ): | ): | ||||||
|     """AuthenticatedSession Viewset""" |     """AuthenticatedSession Viewset""" | ||||||
|  |  | ||||||
|     lookup_field = "uuid" |     queryset = AuthenticatedSession.objects.all() | ||||||
|     queryset = AuthenticatedSession.objects.select_related("session").all() |  | ||||||
|     serializer_class = AuthenticatedSessionSerializer |     serializer_class = AuthenticatedSessionSerializer | ||||||
|     search_fields = ["user__username", "session__last_ip", "session__last_user_agent"] |     search_fields = ["user__username", "last_ip", "last_user_agent"] | ||||||
|     filterset_fields = ["user__username", "session__last_ip", "session__last_user_agent"] |     filterset_fields = ["user__username", "last_ip", "last_user_agent"] | ||||||
|     ordering = ["user__username"] |     ordering = ["user__username"] | ||||||
|     owner_field = "user" |     owner_field = "user" | ||||||
|  | |||||||
| @ -1,6 +1,8 @@ | |||||||
| """Authenticator Devices API Views""" | """Authenticator Devices API Views""" | ||||||
|  |  | ||||||
| from drf_spectacular.utils import extend_schema | from django.utils.translation import gettext_lazy as _ | ||||||
|  | from drf_spectacular.types import OpenApiTypes | ||||||
|  | from drf_spectacular.utils import OpenApiParameter, extend_schema | ||||||
| from guardian.shortcuts import get_objects_for_user | from guardian.shortcuts import get_objects_for_user | ||||||
| from rest_framework.fields import ( | from rest_framework.fields import ( | ||||||
|     BooleanField, |     BooleanField, | ||||||
| @ -13,7 +15,6 @@ from rest_framework.request import Request | |||||||
| from rest_framework.response import Response | from rest_framework.response import Response | ||||||
| from rest_framework.viewsets import ViewSet | from rest_framework.viewsets import ViewSet | ||||||
|  |  | ||||||
| from authentik.core.api.users import ParamUserSerializer |  | ||||||
| from authentik.core.api.utils import MetaNameSerializer | from authentik.core.api.utils import MetaNameSerializer | ||||||
| from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice | from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice | ||||||
| from authentik.stages.authenticator import device_classes, devices_for_user | from authentik.stages.authenticator import device_classes, devices_for_user | ||||||
| @ -22,7 +23,7 @@ from authentik.stages.authenticator_webauthn.models import WebAuthnDevice | |||||||
|  |  | ||||||
|  |  | ||||||
| class DeviceSerializer(MetaNameSerializer): | class DeviceSerializer(MetaNameSerializer): | ||||||
|     """Serializer for authenticator devices""" |     """Serializer for Duo authenticator devices""" | ||||||
|  |  | ||||||
|     pk = CharField() |     pk = CharField() | ||||||
|     name = CharField() |     name = CharField() | ||||||
| @ -32,27 +33,22 @@ class DeviceSerializer(MetaNameSerializer): | |||||||
|     last_updated = DateTimeField(read_only=True) |     last_updated = DateTimeField(read_only=True) | ||||||
|     last_used = DateTimeField(read_only=True, allow_null=True) |     last_used = DateTimeField(read_only=True, allow_null=True) | ||||||
|     extra_description = SerializerMethodField() |     extra_description = SerializerMethodField() | ||||||
|     external_id = SerializerMethodField() |  | ||||||
|  |  | ||||||
|     def get_type(self, instance: Device) -> str: |     def get_type(self, instance: Device) -> str: | ||||||
|         """Get type of device""" |         """Get type of device""" | ||||||
|         return instance._meta.label |         return instance._meta.label | ||||||
|  |  | ||||||
|     def get_extra_description(self, instance: Device) -> str | None: |     def get_extra_description(self, instance: Device) -> str: | ||||||
|         """Get extra description""" |         """Get extra description""" | ||||||
|         if isinstance(instance, WebAuthnDevice): |         if isinstance(instance, WebAuthnDevice): | ||||||
|             return instance.device_type.description if instance.device_type else None |             return ( | ||||||
|  |                 instance.device_type.description | ||||||
|  |                 if instance.device_type | ||||||
|  |                 else _("Extra description not available") | ||||||
|  |             ) | ||||||
|         if isinstance(instance, EndpointDevice): |         if isinstance(instance, EndpointDevice): | ||||||
|             return instance.data.get("deviceSignals", {}).get("deviceModel") |             return instance.data.get("deviceSignals", {}).get("deviceModel") | ||||||
|         return None |         return "" | ||||||
|  |  | ||||||
|     def get_external_id(self, instance: Device) -> str | None: |  | ||||||
|         """Get external Device ID""" |  | ||||||
|         if isinstance(instance, WebAuthnDevice): |  | ||||||
|             return instance.device_type.aaguid if instance.device_type else None |  | ||||||
|         if isinstance(instance, EndpointDevice): |  | ||||||
|             return instance.data.get("deviceSignals", {}).get("deviceModel") |  | ||||||
|         return None |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class DeviceViewSet(ViewSet): | class DeviceViewSet(ViewSet): | ||||||
| @ -61,6 +57,7 @@ class DeviceViewSet(ViewSet): | |||||||
|     serializer_class = DeviceSerializer |     serializer_class = DeviceSerializer | ||||||
|     permission_classes = [IsAuthenticated] |     permission_classes = [IsAuthenticated] | ||||||
|  |  | ||||||
|  |     @extend_schema(responses={200: DeviceSerializer(many=True)}) | ||||||
|     def list(self, request: Request) -> Response: |     def list(self, request: Request) -> Response: | ||||||
|         """Get all devices for current user""" |         """Get all devices for current user""" | ||||||
|         devices = devices_for_user(request.user) |         devices = devices_for_user(request.user) | ||||||
| @ -82,11 +79,18 @@ class AdminDeviceViewSet(ViewSet): | |||||||
|             yield from device_set |             yield from device_set | ||||||
|  |  | ||||||
|     @extend_schema( |     @extend_schema( | ||||||
|         parameters=[ParamUserSerializer], |         parameters=[ | ||||||
|  |             OpenApiParameter( | ||||||
|  |                 name="user", | ||||||
|  |                 location=OpenApiParameter.QUERY, | ||||||
|  |                 type=OpenApiTypes.INT, | ||||||
|  |             ) | ||||||
|  |         ], | ||||||
|         responses={200: DeviceSerializer(many=True)}, |         responses={200: DeviceSerializer(many=True)}, | ||||||
|     ) |     ) | ||||||
|     def list(self, request: Request) -> Response: |     def list(self, request: Request) -> Response: | ||||||
|         """Get all devices for current user""" |         """Get all devices for current user""" | ||||||
|         args = ParamUserSerializer(data=request.query_params) |         kwargs = {} | ||||||
|         args.is_valid(raise_exception=True) |         if "user" in request.query_params: | ||||||
|         return Response(DeviceSerializer(self.get_devices(**args.validated_data), many=True).data) |             kwargs = {"user": request.query_params["user"]} | ||||||
|  |         return Response(DeviceSerializer(self.get_devices(**kwargs), many=True).data) | ||||||
|  | |||||||
| @ -99,17 +99,18 @@ class GroupSerializer(ModelSerializer): | |||||||
|             if superuser |             if superuser | ||||||
|             else "authentik_core.disable_group_superuser" |             else "authentik_core.disable_group_superuser" | ||||||
|         ) |         ) | ||||||
|         if self.instance or superuser: |         has_perm = user.has_perm(perm) | ||||||
|             has_perm = user.has_perm(perm) or user.has_perm(perm, self.instance) |         if self.instance and not has_perm: | ||||||
|             if not has_perm: |             has_perm = user.has_perm(perm, self.instance) | ||||||
|                 raise ValidationError( |         if not has_perm: | ||||||
|                     _( |             raise ValidationError( | ||||||
|                         ( |                 _( | ||||||
|                             "User does not have permission to set " |                     ( | ||||||
|                             "superuser status to {superuser_status}." |                         "User does not have permission to set " | ||||||
|                         ).format_map({"superuser_status": superuser}) |                         "superuser status to {superuser_status}." | ||||||
|                     ) |                     ).format_map({"superuser_status": superuser}) | ||||||
|                 ) |                 ) | ||||||
|  |             ) | ||||||
|         return superuser |         return superuser | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|  | |||||||
| @ -179,13 +179,10 @@ class UserSourceConnectionSerializer(SourceSerializer): | |||||||
|             "user", |             "user", | ||||||
|             "source", |             "source", | ||||||
|             "source_obj", |             "source_obj", | ||||||
|             "identifier", |  | ||||||
|             "created", |             "created", | ||||||
|             "last_updated", |  | ||||||
|         ] |         ] | ||||||
|         extra_kwargs = { |         extra_kwargs = { | ||||||
|             "created": {"read_only": True}, |             "created": {"read_only": True}, | ||||||
|             "last_updated": {"read_only": True}, |  | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -202,7 +199,7 @@ class UserSourceConnectionViewSet( | |||||||
|     queryset = UserSourceConnection.objects.all() |     queryset = UserSourceConnection.objects.all() | ||||||
|     serializer_class = UserSourceConnectionSerializer |     serializer_class = UserSourceConnectionSerializer | ||||||
|     filterset_fields = ["user", "source__slug"] |     filterset_fields = ["user", "source__slug"] | ||||||
|     search_fields = ["user__username", "source__slug", "identifier"] |     search_fields = ["source__slug"] | ||||||
|     ordering = ["source__slug", "pk"] |     ordering = ["source__slug", "pk"] | ||||||
|     owner_field = "user" |     owner_field = "user" | ||||||
|  |  | ||||||
| @ -221,11 +218,9 @@ class GroupSourceConnectionSerializer(SourceSerializer): | |||||||
|             "source_obj", |             "source_obj", | ||||||
|             "identifier", |             "identifier", | ||||||
|             "created", |             "created", | ||||||
|             "last_updated", |  | ||||||
|         ] |         ] | ||||||
|         extra_kwargs = { |         extra_kwargs = { | ||||||
|             "created": {"read_only": True}, |             "created": {"read_only": True}, | ||||||
|             "last_updated": {"read_only": True}, |  | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -242,5 +237,6 @@ class GroupSourceConnectionViewSet( | |||||||
|     queryset = GroupSourceConnection.objects.all() |     queryset = GroupSourceConnection.objects.all() | ||||||
|     serializer_class = GroupSourceConnectionSerializer |     serializer_class = GroupSourceConnectionSerializer | ||||||
|     filterset_fields = ["group", "source__slug"] |     filterset_fields = ["group", "source__slug"] | ||||||
|     search_fields = ["group__name", "source__slug", "identifier"] |     search_fields = ["source__slug"] | ||||||
|     ordering = ["source__slug", "pk"] |     ordering = ["source__slug", "pk"] | ||||||
|  |     owner_field = "user" | ||||||
|  | |||||||
| @ -6,6 +6,9 @@ from typing import Any | |||||||
|  |  | ||||||
| from django.contrib.auth import update_session_auth_hash | from django.contrib.auth import update_session_auth_hash | ||||||
| from django.contrib.auth.models import Permission | from django.contrib.auth.models import Permission | ||||||
|  | from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||||
|  | from django.core.cache import cache | ||||||
|  | from django.db.models.functions import ExtractHour | ||||||
| from django.db.transaction import atomic | from django.db.transaction import atomic | ||||||
| from django.db.utils import IntegrityError | from django.db.utils import IntegrityError | ||||||
| from django.urls import reverse_lazy | from django.urls import reverse_lazy | ||||||
| @ -51,6 +54,7 @@ from rest_framework.validators import UniqueValidator | |||||||
| from rest_framework.viewsets import ModelViewSet | from rest_framework.viewsets import ModelViewSet | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
|  | from authentik.admin.api.metrics import CoordinateSerializer | ||||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | ||||||
| from authentik.brands.models import Brand | from authentik.brands.models import Brand | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| @ -67,8 +71,8 @@ from authentik.core.middleware import ( | |||||||
| from authentik.core.models import ( | from authentik.core.models import ( | ||||||
|     USER_ATTRIBUTE_TOKEN_EXPIRING, |     USER_ATTRIBUTE_TOKEN_EXPIRING, | ||||||
|     USER_PATH_SERVICE_ACCOUNT, |     USER_PATH_SERVICE_ACCOUNT, | ||||||
|  |     AuthenticatedSession, | ||||||
|     Group, |     Group, | ||||||
|     Session, |  | ||||||
|     Token, |     Token, | ||||||
|     TokenIntents, |     TokenIntents, | ||||||
|     User, |     User, | ||||||
| @ -82,7 +86,6 @@ from authentik.flows.views.executor import QS_KEY_TOKEN | |||||||
| from authentik.lib.avatars import get_avatar | from authentik.lib.avatars import get_avatar | ||||||
| from authentik.rbac.decorators import permission_required | from authentik.rbac.decorators import permission_required | ||||||
| from authentik.rbac.models import get_permission_choices | from authentik.rbac.models import get_permission_choices | ||||||
| from authentik.stages.email.flow import pickle_flow_token_for_email |  | ||||||
| from authentik.stages.email.models import EmailStage | from authentik.stages.email.models import EmailStage | ||||||
| from authentik.stages.email.tasks import send_mails | from authentik.stages.email.tasks import send_mails | ||||||
| from authentik.stages.email.utils import TemplateEmailMessage | from authentik.stages.email.utils import TemplateEmailMessage | ||||||
| @ -90,12 +93,6 @@ from authentik.stages.email.utils import TemplateEmailMessage | |||||||
| LOGGER = get_logger() | LOGGER = get_logger() | ||||||
|  |  | ||||||
|  |  | ||||||
| class ParamUserSerializer(PassiveSerializer): |  | ||||||
|     """Partial serializer for query parameters to select a user""" |  | ||||||
|  |  | ||||||
|     user = PrimaryKeyRelatedField(queryset=User.objects.all().exclude_anonymous(), required=False) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class UserGroupSerializer(ModelSerializer): | class UserGroupSerializer(ModelSerializer): | ||||||
|     """Simplified Group Serializer for user's groups""" |     """Simplified Group Serializer for user's groups""" | ||||||
|  |  | ||||||
| @ -229,7 +226,6 @@ class UserSerializer(ModelSerializer): | |||||||
|             "name", |             "name", | ||||||
|             "is_active", |             "is_active", | ||||||
|             "last_login", |             "last_login", | ||||||
|             "date_joined", |  | ||||||
|             "is_superuser", |             "is_superuser", | ||||||
|             "groups", |             "groups", | ||||||
|             "groups_obj", |             "groups_obj", | ||||||
| @ -244,7 +240,6 @@ class UserSerializer(ModelSerializer): | |||||||
|         ] |         ] | ||||||
|         extra_kwargs = { |         extra_kwargs = { | ||||||
|             "name": {"allow_blank": True}, |             "name": {"allow_blank": True}, | ||||||
|             "date_joined": {"read_only": True}, |  | ||||||
|             "password_change_date": {"read_only": True}, |             "password_change_date": {"read_only": True}, | ||||||
|         } |         } | ||||||
|  |  | ||||||
| @ -321,6 +316,53 @@ class SessionUserSerializer(PassiveSerializer): | |||||||
|     original = UserSelfSerializer(required=False) |     original = UserSelfSerializer(required=False) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class UserMetricsSerializer(PassiveSerializer): | ||||||
|  |     """User Metrics""" | ||||||
|  |  | ||||||
|  |     logins = SerializerMethodField() | ||||||
|  |     logins_failed = SerializerMethodField() | ||||||
|  |     authorizations = SerializerMethodField() | ||||||
|  |  | ||||||
|  |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|  |     def get_logins(self, _): | ||||||
|  |         """Get successful logins per 8 hours for the last 7 days""" | ||||||
|  |         user = self.context["user"] | ||||||
|  |         request = self.context["request"] | ||||||
|  |         return ( | ||||||
|  |             get_objects_for_user(request.user, "authentik_events.view_event").filter( | ||||||
|  |                 action=EventAction.LOGIN, user__pk=user.pk | ||||||
|  |             ) | ||||||
|  |             # 3 data points per day, so 8 hour spans | ||||||
|  |             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|  |     def get_logins_failed(self, _): | ||||||
|  |         """Get failed logins per 8 hours for the last 7 days""" | ||||||
|  |         user = self.context["user"] | ||||||
|  |         request = self.context["request"] | ||||||
|  |         return ( | ||||||
|  |             get_objects_for_user(request.user, "authentik_events.view_event").filter( | ||||||
|  |                 action=EventAction.LOGIN_FAILED, context__username=user.username | ||||||
|  |             ) | ||||||
|  |             # 3 data points per day, so 8 hour spans | ||||||
|  |             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|  |     def get_authorizations(self, _): | ||||||
|  |         """Get failed logins per 8 hours for the last 7 days""" | ||||||
|  |         user = self.context["user"] | ||||||
|  |         request = self.context["request"] | ||||||
|  |         return ( | ||||||
|  |             get_objects_for_user(request.user, "authentik_events.view_event").filter( | ||||||
|  |                 action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk | ||||||
|  |             ) | ||||||
|  |             # 3 data points per day, so 8 hour spans | ||||||
|  |             .get_events_per(timedelta(days=7), ExtractHour, 7 * 3) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |  | ||||||
| class UsersFilter(FilterSet): | class UsersFilter(FilterSet): | ||||||
|     """Filter for users""" |     """Filter for users""" | ||||||
|  |  | ||||||
| @ -331,7 +373,7 @@ class UsersFilter(FilterSet): | |||||||
|         method="filter_attributes", |         method="filter_attributes", | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|     is_superuser = BooleanFilter(field_name="ak_groups", method="filter_is_superuser") |     is_superuser = BooleanFilter(field_name="ak_groups", lookup_expr="is_superuser") | ||||||
|     uuid = UUIDFilter(field_name="uuid") |     uuid = UUIDFilter(field_name="uuid") | ||||||
|  |  | ||||||
|     path = CharFilter(field_name="path") |     path = CharFilter(field_name="path") | ||||||
| @ -349,11 +391,6 @@ class UsersFilter(FilterSet): | |||||||
|         queryset=Group.objects.all().order_by("name"), |         queryset=Group.objects.all().order_by("name"), | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|     def filter_is_superuser(self, queryset, name, value): |  | ||||||
|         if value: |  | ||||||
|             return queryset.filter(ak_groups__is_superuser=True).distinct() |  | ||||||
|         return queryset.exclude(ak_groups__is_superuser=True).distinct() |  | ||||||
|  |  | ||||||
|     def filter_attributes(self, queryset, name, value): |     def filter_attributes(self, queryset, name, value): | ||||||
|         """Filter attributes by query args""" |         """Filter attributes by query args""" | ||||||
|         try: |         try: | ||||||
| @ -392,23 +429,8 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|     queryset = User.objects.none() |     queryset = User.objects.none() | ||||||
|     ordering = ["username"] |     ordering = ["username"] | ||||||
|     serializer_class = UserSerializer |     serializer_class = UserSerializer | ||||||
|     filterset_class = UsersFilter |  | ||||||
|     search_fields = ["username", "name", "is_active", "email", "uuid", "attributes"] |     search_fields = ["username", "name", "is_active", "email", "uuid", "attributes"] | ||||||
|  |     filterset_class = UsersFilter | ||||||
|     def get_ql_fields(self): |  | ||||||
|         from djangoql.schema import BoolField, StrField |  | ||||||
|  |  | ||||||
|         from authentik.enterprise.search.fields import ChoiceSearchField, JSONSearchField |  | ||||||
|  |  | ||||||
|         return [ |  | ||||||
|             StrField(User, "username"), |  | ||||||
|             StrField(User, "name"), |  | ||||||
|             StrField(User, "email"), |  | ||||||
|             StrField(User, "path"), |  | ||||||
|             BoolField(User, "is_active", nullable=True), |  | ||||||
|             ChoiceSearchField(User, "type"), |  | ||||||
|             JSONSearchField(User, "attributes", suggest_nested=False), |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|     def get_queryset(self): |     def get_queryset(self): | ||||||
|         base_qs = User.objects.all().exclude_anonymous() |         base_qs = User.objects.all().exclude_anonymous() | ||||||
| @ -424,7 +446,7 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|     def list(self, request, *args, **kwargs): |     def list(self, request, *args, **kwargs): | ||||||
|         return super().list(request, *args, **kwargs) |         return super().list(request, *args, **kwargs) | ||||||
|  |  | ||||||
|     def _create_recovery_link(self, for_email=False) -> tuple[str, Token]: |     def _create_recovery_link(self) -> tuple[str, Token]: | ||||||
|         """Create a recovery link (when the current brand has a recovery flow set), |         """Create a recovery link (when the current brand has a recovery flow set), | ||||||
|         that can either be shown to an admin or sent to the user directly""" |         that can either be shown to an admin or sent to the user directly""" | ||||||
|         brand: Brand = self.request._request.brand |         brand: Brand = self.request._request.brand | ||||||
| @ -446,16 +468,12 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|             raise ValidationError( |             raise ValidationError( | ||||||
|                 {"non_field_errors": "Recovery flow not applicable to user"} |                 {"non_field_errors": "Recovery flow not applicable to user"} | ||||||
|             ) from None |             ) from None | ||||||
|         _plan = FlowToken.pickle(plan) |  | ||||||
|         if for_email: |  | ||||||
|             _plan = pickle_flow_token_for_email(plan) |  | ||||||
|         token, __ = FlowToken.objects.update_or_create( |         token, __ = FlowToken.objects.update_or_create( | ||||||
|             identifier=f"{user.uid}-password-reset", |             identifier=f"{user.uid}-password-reset", | ||||||
|             defaults={ |             defaults={ | ||||||
|                 "user": user, |                 "user": user, | ||||||
|                 "flow": flow, |                 "flow": flow, | ||||||
|                 "_plan": _plan, |                 "_plan": FlowToken.pickle(plan), | ||||||
|                 "revoke_on_execution": not for_email, |  | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|         querystring = urlencode({QS_KEY_TOKEN: token.key}) |         querystring = urlencode({QS_KEY_TOKEN: token.key}) | ||||||
| @ -579,6 +597,17 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|             update_session_auth_hash(self.request, user) |             update_session_auth_hash(self.request, user) | ||||||
|         return Response(status=204) |         return Response(status=204) | ||||||
|  |  | ||||||
|  |     @permission_required("authentik_core.view_user", ["authentik_events.view_event"]) | ||||||
|  |     @extend_schema(responses={200: UserMetricsSerializer(many=False)}) | ||||||
|  |     @action(detail=True, pagination_class=None, filter_backends=[]) | ||||||
|  |     def metrics(self, request: Request, pk: int) -> Response: | ||||||
|  |         """User metrics per 1h""" | ||||||
|  |         user: User = self.get_object() | ||||||
|  |         serializer = UserMetricsSerializer(instance={}) | ||||||
|  |         serializer.context["user"] = user | ||||||
|  |         serializer.context["request"] = request | ||||||
|  |         return Response(serializer.data) | ||||||
|  |  | ||||||
|     @permission_required("authentik_core.reset_user_password") |     @permission_required("authentik_core.reset_user_password") | ||||||
|     @extend_schema( |     @extend_schema( | ||||||
|         responses={ |         responses={ | ||||||
| @ -614,7 +643,7 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|         if for_user.email == "": |         if for_user.email == "": | ||||||
|             LOGGER.debug("User doesn't have an email address") |             LOGGER.debug("User doesn't have an email address") | ||||||
|             raise ValidationError({"non_field_errors": "User does not have an email address set."}) |             raise ValidationError({"non_field_errors": "User does not have an email address set."}) | ||||||
|         link, token = self._create_recovery_link(for_email=True) |         link, token = self._create_recovery_link() | ||||||
|         # Lookup the email stage to assure the current user can access it |         # Lookup the email stage to assure the current user can access it | ||||||
|         stages = get_objects_for_user( |         stages = get_objects_for_user( | ||||||
|             request.user, "authentik_stages_email.view_emailstage" |             request.user, "authentik_stages_email.view_emailstage" | ||||||
| @ -738,6 +767,9 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|         response = super().partial_update(request, *args, **kwargs) |         response = super().partial_update(request, *args, **kwargs) | ||||||
|         instance: User = self.get_object() |         instance: User = self.get_object() | ||||||
|         if not instance.is_active: |         if not instance.is_active: | ||||||
|             Session.objects.filter(authenticatedsession__user=instance).delete() |             sessions = AuthenticatedSession.objects.filter(user=instance) | ||||||
|  |             session_ids = sessions.values_list("session_key", flat=True) | ||||||
|  |             cache.delete_many(f"{KEY_PREFIX}{session}" for session in session_ids) | ||||||
|  |             sessions.delete() | ||||||
|             LOGGER.debug("Deleted user's sessions", user=instance.username) |             LOGGER.debug("Deleted user's sessions", user=instance.username) | ||||||
|         return response |         return response | ||||||
|  | |||||||
| @ -2,7 +2,6 @@ | |||||||
|  |  | ||||||
| from typing import Any | from typing import Any | ||||||
|  |  | ||||||
| from django.db import models |  | ||||||
| from django.db.models import Model | from django.db.models import Model | ||||||
| from drf_spectacular.extensions import OpenApiSerializerFieldExtension | from drf_spectacular.extensions import OpenApiSerializerFieldExtension | ||||||
| from drf_spectacular.plumbing import build_basic_type | from drf_spectacular.plumbing import build_basic_type | ||||||
| @ -21,8 +20,6 @@ from rest_framework.serializers import ( | |||||||
|     raise_errors_on_nested_writes, |     raise_errors_on_nested_writes, | ||||||
| ) | ) | ||||||
|  |  | ||||||
| from authentik.rbac.permissions import assign_initial_permissions |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def is_dict(value: Any): | def is_dict(value: Any): | ||||||
|     """Ensure a value is a dictionary, useful for JSONFields""" |     """Ensure a value is a dictionary, useful for JSONFields""" | ||||||
| @ -31,36 +28,8 @@ def is_dict(value: Any): | |||||||
|     raise ValidationError("Value must be a dictionary, and not have any duplicate keys.") |     raise ValidationError("Value must be a dictionary, and not have any duplicate keys.") | ||||||
|  |  | ||||||
|  |  | ||||||
| class JSONDictField(JSONField): |  | ||||||
|     """JSON Field which only allows dictionaries""" |  | ||||||
|  |  | ||||||
|     default_validators = [is_dict] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class JSONExtension(OpenApiSerializerFieldExtension): |  | ||||||
|     """Generate API Schema for JSON fields as""" |  | ||||||
|  |  | ||||||
|     target_class = "authentik.core.api.utils.JSONDictField" |  | ||||||
|  |  | ||||||
|     def map_serializer_field(self, auto_schema, direction): |  | ||||||
|         return build_basic_type(OpenApiTypes.OBJECT) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ModelSerializer(BaseModelSerializer): | class ModelSerializer(BaseModelSerializer): | ||||||
|  |  | ||||||
|     # By default, JSON fields we have are used to store dictionaries |  | ||||||
|     serializer_field_mapping = BaseModelSerializer.serializer_field_mapping.copy() |  | ||||||
|     serializer_field_mapping[models.JSONField] = JSONDictField |  | ||||||
|  |  | ||||||
|     def create(self, validated_data): |  | ||||||
|         instance = super().create(validated_data) |  | ||||||
|  |  | ||||||
|         request = self.context.get("request") |  | ||||||
|         if request and hasattr(request, "user") and not request.user.is_anonymous: |  | ||||||
|             assign_initial_permissions(request.user, instance) |  | ||||||
|  |  | ||||||
|         return instance |  | ||||||
|  |  | ||||||
|     def update(self, instance: Model, validated_data): |     def update(self, instance: Model, validated_data): | ||||||
|         raise_errors_on_nested_writes("update", self, validated_data) |         raise_errors_on_nested_writes("update", self, validated_data) | ||||||
|         info = model_meta.get_field_info(instance) |         info = model_meta.get_field_info(instance) | ||||||
| @ -92,6 +61,21 @@ class ModelSerializer(BaseModelSerializer): | |||||||
|         return instance |         return instance | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class JSONDictField(JSONField): | ||||||
|  |     """JSON Field which only allows dictionaries""" | ||||||
|  |  | ||||||
|  |     default_validators = [is_dict] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class JSONExtension(OpenApiSerializerFieldExtension): | ||||||
|  |     """Generate API Schema for JSON fields as""" | ||||||
|  |  | ||||||
|  |     target_class = "authentik.core.api.utils.JSONDictField" | ||||||
|  |  | ||||||
|  |     def map_serializer_field(self, auto_schema, direction): | ||||||
|  |         return build_basic_type(OpenApiTypes.OBJECT) | ||||||
|  |  | ||||||
|  |  | ||||||
| class PassiveSerializer(Serializer): | class PassiveSerializer(Serializer): | ||||||
|     """Base serializer class which doesn't implement create/update methods""" |     """Base serializer class which doesn't implement create/update methods""" | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,7 +1,8 @@ | |||||||
| """authentik core app config""" | """authentik core app config""" | ||||||
|  |  | ||||||
|  | from django.conf import settings | ||||||
|  |  | ||||||
| from authentik.blueprints.apps import ManagedAppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
| from authentik.tasks.schedules.lib import ScheduleSpec |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikCoreConfig(ManagedAppConfig): | class AuthentikCoreConfig(ManagedAppConfig): | ||||||
| @ -13,6 +14,14 @@ class AuthentikCoreConfig(ManagedAppConfig): | |||||||
|     mountpoint = "" |     mountpoint = "" | ||||||
|     default = True |     default = True | ||||||
|  |  | ||||||
|  |     @ManagedAppConfig.reconcile_global | ||||||
|  |     def debug_worker_hook(self): | ||||||
|  |         """Dispatch startup tasks inline when debugging""" | ||||||
|  |         if settings.DEBUG: | ||||||
|  |             from authentik.root.celery import worker_ready_hook | ||||||
|  |  | ||||||
|  |             worker_ready_hook() | ||||||
|  |  | ||||||
|     @ManagedAppConfig.reconcile_tenant |     @ManagedAppConfig.reconcile_tenant | ||||||
|     def source_inbuilt(self): |     def source_inbuilt(self): | ||||||
|         """Reconcile inbuilt source""" |         """Reconcile inbuilt source""" | ||||||
| @ -25,18 +34,3 @@ class AuthentikCoreConfig(ManagedAppConfig): | |||||||
|             }, |             }, | ||||||
|             managed=Source.MANAGED_INBUILT, |             managed=Source.MANAGED_INBUILT, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def tenant_schedule_specs(self) -> list[ScheduleSpec]: |  | ||||||
|         from authentik.core.tasks import clean_expired_models, clean_temporary_users |  | ||||||
|  |  | ||||||
|         return [ |  | ||||||
|             ScheduleSpec( |  | ||||||
|                 actor=clean_expired_models, |  | ||||||
|                 crontab="2-59/5 * * * *", |  | ||||||
|             ), |  | ||||||
|             ScheduleSpec( |  | ||||||
|                 actor=clean_temporary_users, |  | ||||||
|                 crontab="9-59/5 * * * *", |  | ||||||
|             ), |  | ||||||
|         ] |  | ||||||
|  | |||||||
| @ -24,15 +24,6 @@ class InbuiltBackend(ModelBackend): | |||||||
|         self.set_method("password", request) |         self.set_method("password", request) | ||||||
|         return user |         return user | ||||||
|  |  | ||||||
|     async def aauthenticate( |  | ||||||
|         self, request: HttpRequest, username: str | None, password: str | None, **kwargs: Any |  | ||||||
|     ) -> User | None: |  | ||||||
|         user = await super().aauthenticate(request, username=username, password=password, **kwargs) |  | ||||||
|         if not user: |  | ||||||
|             return None |  | ||||||
|         self.set_method("password", request) |  | ||||||
|         return user |  | ||||||
|  |  | ||||||
|     def set_method(self, method: str, request: HttpRequest | None, **kwargs): |     def set_method(self, method: str, request: HttpRequest | None, **kwargs): | ||||||
|         """Set method data on current flow, if possbiel""" |         """Set method data on current flow, if possbiel""" | ||||||
|         if not request: |         if not request: | ||||||
|  | |||||||
							
								
								
									
										21
									
								
								authentik/core/management/commands/bootstrap_tasks.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										21
									
								
								authentik/core/management/commands/bootstrap_tasks.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,21 @@ | |||||||
|  | """Run bootstrap tasks""" | ||||||
|  |  | ||||||
|  | from django.core.management.base import BaseCommand | ||||||
|  | from django_tenants.utils import get_public_schema_name | ||||||
|  |  | ||||||
|  | from authentik.root.celery import _get_startup_tasks_all_tenants, _get_startup_tasks_default_tenant | ||||||
|  | from authentik.tenants.models import Tenant | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class Command(BaseCommand): | ||||||
|  |     """Run bootstrap tasks to ensure certain objects are created""" | ||||||
|  |  | ||||||
|  |     def handle(self, **options): | ||||||
|  |         for task in _get_startup_tasks_default_tenant(): | ||||||
|  |             with Tenant.objects.get(schema_name=get_public_schema_name()): | ||||||
|  |                 task() | ||||||
|  |  | ||||||
|  |         for task in _get_startup_tasks_all_tenants(): | ||||||
|  |             for tenant in Tenant.objects.filter(ready=True): | ||||||
|  |                 with tenant: | ||||||
|  |                     task() | ||||||
| @ -13,6 +13,7 @@ class Command(TenantCommand): | |||||||
|         parser.add_argument("usernames", nargs="*", type=str) |         parser.add_argument("usernames", nargs="*", type=str) | ||||||
|  |  | ||||||
|     def handle_per_tenant(self, **options): |     def handle_per_tenant(self, **options): | ||||||
|  |         print(options) | ||||||
|         new_type = UserTypes(options["type"]) |         new_type = UserTypes(options["type"]) | ||||||
|         qs = ( |         qs = ( | ||||||
|             User.objects.exclude_anonymous() |             User.objects.exclude_anonymous() | ||||||
|  | |||||||
| @ -1,15 +0,0 @@ | |||||||
| """Change user type""" |  | ||||||
|  |  | ||||||
| from importlib import import_module |  | ||||||
|  |  | ||||||
| from django.conf import settings |  | ||||||
|  |  | ||||||
| from authentik.tenants.management import TenantCommand |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Command(TenantCommand): |  | ||||||
|     """Delete all sessions""" |  | ||||||
|  |  | ||||||
|     def handle_per_tenant(self, **options): |  | ||||||
|         engine = import_module(settings.SESSION_ENGINE) |  | ||||||
|         engine.SessionStore.clear_expired() |  | ||||||
| @ -2,7 +2,6 @@ | |||||||
|  |  | ||||||
| from django.apps import apps | from django.apps import apps | ||||||
| from django.contrib.auth.management import create_permissions | from django.contrib.auth.management import create_permissions | ||||||
| from django.core.management import call_command |  | ||||||
| from django.core.management.base import BaseCommand, no_translations | from django.core.management.base import BaseCommand, no_translations | ||||||
| from guardian.management import create_anonymous_user | from guardian.management import create_anonymous_user | ||||||
|  |  | ||||||
| @ -17,10 +16,6 @@ class Command(BaseCommand): | |||||||
|         """Check permissions for all apps""" |         """Check permissions for all apps""" | ||||||
|         for tenant in Tenant.objects.filter(ready=True): |         for tenant in Tenant.objects.filter(ready=True): | ||||||
|             with tenant: |             with tenant: | ||||||
|                 # See https://code.djangoproject.com/ticket/28417 |  | ||||||
|                 # Remove potential lingering old permissions |  | ||||||
|                 call_command("remove_stale_contenttypes", "--no-input") |  | ||||||
|  |  | ||||||
|                 for app in apps.get_app_configs(): |                 for app in apps.get_app_configs(): | ||||||
|                     self.stdout.write(f"Checking app {app.name} ({app.label})\n") |                     self.stdout.write(f"Checking app {app.name} ({app.label})\n") | ||||||
|                     create_permissions(app, verbosity=0) |                     create_permissions(app, verbosity=0) | ||||||
|  | |||||||
							
								
								
									
										47
									
								
								authentik/core/management/commands/worker.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										47
									
								
								authentik/core/management/commands/worker.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,47 @@ | |||||||
|  | """Run worker""" | ||||||
|  |  | ||||||
|  | from sys import exit as sysexit | ||||||
|  | from tempfile import tempdir | ||||||
|  |  | ||||||
|  | from celery.apps.worker import Worker | ||||||
|  | from django.core.management.base import BaseCommand | ||||||
|  | from django.db import close_old_connections | ||||||
|  | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
|  | from authentik.lib.config import CONFIG | ||||||
|  | from authentik.lib.debug import start_debug_server | ||||||
|  | from authentik.root.celery import CELERY_APP | ||||||
|  |  | ||||||
|  | LOGGER = get_logger() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class Command(BaseCommand): | ||||||
|  |     """Run worker""" | ||||||
|  |  | ||||||
|  |     def add_arguments(self, parser): | ||||||
|  |         parser.add_argument( | ||||||
|  |             "-b", | ||||||
|  |             "--beat", | ||||||
|  |             action="store_false", | ||||||
|  |             help="When set, this worker will _not_ run Beat (scheduled) tasks", | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def handle(self, **options): | ||||||
|  |         LOGGER.debug("Celery options", **options) | ||||||
|  |         close_old_connections() | ||||||
|  |         start_debug_server() | ||||||
|  |         worker: Worker = CELERY_APP.Worker( | ||||||
|  |             no_color=False, | ||||||
|  |             quiet=True, | ||||||
|  |             optimization="fair", | ||||||
|  |             autoscale=(CONFIG.get_int("worker.concurrency"), 1), | ||||||
|  |             task_events=True, | ||||||
|  |             beat=options.get("beat", True), | ||||||
|  |             schedule_filename=f"{tempdir}/celerybeat-schedule", | ||||||
|  |             queues=["authentik", "authentik_scheduled", "authentik_events"], | ||||||
|  |         ) | ||||||
|  |         for task in CELERY_APP.tasks: | ||||||
|  |             LOGGER.debug("Registered task", task=task) | ||||||
|  |  | ||||||
|  |         worker.start() | ||||||
|  |         sysexit(worker.exitcode) | ||||||
| @ -2,14 +2,9 @@ | |||||||
|  |  | ||||||
| from collections.abc import Callable | from collections.abc import Callable | ||||||
| from contextvars import ContextVar | from contextvars import ContextVar | ||||||
| from functools import partial |  | ||||||
| from uuid import uuid4 | from uuid import uuid4 | ||||||
|  |  | ||||||
| from django.contrib.auth.models import AnonymousUser |  | ||||||
| from django.core.exceptions import ImproperlyConfigured |  | ||||||
| from django.http import HttpRequest, HttpResponse | from django.http import HttpRequest, HttpResponse | ||||||
| from django.utils.deprecation import MiddlewareMixin |  | ||||||
| from django.utils.functional import SimpleLazyObject |  | ||||||
| from django.utils.translation import override | from django.utils.translation import override | ||||||
| from sentry_sdk.api import set_tag | from sentry_sdk.api import set_tag | ||||||
| from structlog.contextvars import STRUCTLOG_KEY_PREFIX | from structlog.contextvars import STRUCTLOG_KEY_PREFIX | ||||||
| @ -25,40 +20,6 @@ CTX_HOST = ContextVar[str | None](STRUCTLOG_KEY_PREFIX + "host", default=None) | |||||||
| CTX_AUTH_VIA = ContextVar[str | None](STRUCTLOG_KEY_PREFIX + KEY_AUTH_VIA, default=None) | CTX_AUTH_VIA = ContextVar[str | None](STRUCTLOG_KEY_PREFIX + KEY_AUTH_VIA, default=None) | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_user(request): |  | ||||||
|     if not hasattr(request, "_cached_user"): |  | ||||||
|         user = None |  | ||||||
|         if (authenticated_session := request.session.get("authenticatedsession", None)) is not None: |  | ||||||
|             user = authenticated_session.user |  | ||||||
|         request._cached_user = user or AnonymousUser() |  | ||||||
|     return request._cached_user |  | ||||||
|  |  | ||||||
|  |  | ||||||
| async def aget_user(request): |  | ||||||
|     if not hasattr(request, "_cached_user"): |  | ||||||
|         user = None |  | ||||||
|         if ( |  | ||||||
|             authenticated_session := await request.session.aget("authenticatedsession", None) |  | ||||||
|         ) is not None: |  | ||||||
|             user = authenticated_session.user |  | ||||||
|         request._cached_user = user or AnonymousUser() |  | ||||||
|     return request._cached_user |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthenticationMiddleware(MiddlewareMixin): |  | ||||||
|     def process_request(self, request): |  | ||||||
|         if not hasattr(request, "session"): |  | ||||||
|             raise ImproperlyConfigured( |  | ||||||
|                 "The Django authentication middleware requires session " |  | ||||||
|                 "middleware to be installed. Edit your MIDDLEWARE setting to " |  | ||||||
|                 "insert " |  | ||||||
|                 "'authentik.root.middleware.SessionMiddleware' before " |  | ||||||
|                 "'authentik.core.middleware.AuthenticationMiddleware'." |  | ||||||
|             ) |  | ||||||
|         request.user = SimpleLazyObject(lambda: get_user(request)) |  | ||||||
|         request.auser = partial(aget_user, request) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ImpersonateMiddleware: | class ImpersonateMiddleware: | ||||||
|     """Middleware to impersonate users""" |     """Middleware to impersonate users""" | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,19 +0,0 @@ | |||||||
| # Generated by Django 5.0.13 on 2025-04-07 14:04 |  | ||||||
|  |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_core", "0043_alter_group_options"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="usersourceconnection", |  | ||||||
|             name="new_identifier", |  | ||||||
|             field=models.TextField(default=""), |  | ||||||
|             preserve_default=False, |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -1,30 +0,0 @@ | |||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_core", "0044_usersourceconnection_new_identifier"), |  | ||||||
|         ("authentik_sources_kerberos", "0003_migrate_userkerberossourceconnection_identifier"), |  | ||||||
|         ("authentik_sources_oauth", "0009_migrate_useroauthsourceconnection_identifier"), |  | ||||||
|         ("authentik_sources_plex", "0005_migrate_userplexsourceconnection_identifier"), |  | ||||||
|         ("authentik_sources_saml", "0019_migrate_usersamlsourceconnection_identifier"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.RenameField( |  | ||||||
|             model_name="usersourceconnection", |  | ||||||
|             old_name="new_identifier", |  | ||||||
|             new_name="identifier", |  | ||||||
|         ), |  | ||||||
|         migrations.AddIndex( |  | ||||||
|             model_name="usersourceconnection", |  | ||||||
|             index=models.Index(fields=["identifier"], name="authentik_c_identif_59226f_idx"), |  | ||||||
|         ), |  | ||||||
|         migrations.AddIndex( |  | ||||||
|             model_name="usersourceconnection", |  | ||||||
|             index=models.Index( |  | ||||||
|                 fields=["source", "identifier"], name="authentik_c_source__649e04_idx" |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -1,242 +0,0 @@ | |||||||
| # Generated by Django 5.0.11 on 2025-01-27 12:58 |  | ||||||
|  |  | ||||||
| import uuid |  | ||||||
| import pickle  # nosec |  | ||||||
| from django.core import signing |  | ||||||
| from django.contrib.auth import BACKEND_SESSION_KEY, HASH_SESSION_KEY, SESSION_KEY |  | ||||||
| from django.db import migrations, models |  | ||||||
| import django.db.models.deletion |  | ||||||
| from django.conf import settings |  | ||||||
| from django.contrib.sessions.backends.cache import KEY_PREFIX |  | ||||||
| from django.utils.timezone import now, timedelta |  | ||||||
| from authentik.lib.migrations import progress_bar |  | ||||||
| from authentik.root.middleware import ClientIPMiddleware |  | ||||||
|  |  | ||||||
|  |  | ||||||
| SESSION_CACHE_ALIAS = "default" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class PickleSerializer: |  | ||||||
|     """ |  | ||||||
|     Simple wrapper around pickle to be used in signing.dumps()/loads() and |  | ||||||
|     cache backends. |  | ||||||
|     """ |  | ||||||
|  |  | ||||||
|     def __init__(self, protocol=None): |  | ||||||
|         self.protocol = pickle.HIGHEST_PROTOCOL if protocol is None else protocol |  | ||||||
|  |  | ||||||
|     def dumps(self, obj): |  | ||||||
|         """Pickle data to be stored in redis""" |  | ||||||
|         return pickle.dumps(obj, self.protocol) |  | ||||||
|  |  | ||||||
|     def loads(self, data): |  | ||||||
|         """Unpickle data to be loaded from redis""" |  | ||||||
|         try: |  | ||||||
|             return pickle.loads(data)  # nosec |  | ||||||
|         except Exception: |  | ||||||
|             return {} |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def _migrate_session( |  | ||||||
|     apps, |  | ||||||
|     db_alias, |  | ||||||
|     session_key, |  | ||||||
|     session_data, |  | ||||||
|     expires, |  | ||||||
| ): |  | ||||||
|     Session = apps.get_model("authentik_core", "Session") |  | ||||||
|     OldAuthenticatedSession = apps.get_model("authentik_core", "OldAuthenticatedSession") |  | ||||||
|     AuthenticatedSession = apps.get_model("authentik_core", "AuthenticatedSession") |  | ||||||
|  |  | ||||||
|     old_auth_session = ( |  | ||||||
|         OldAuthenticatedSession.objects.using(db_alias).filter(session_key=session_key).first() |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     args = { |  | ||||||
|         "session_key": session_key, |  | ||||||
|         "expires": expires, |  | ||||||
|         "last_ip": ClientIPMiddleware.default_ip, |  | ||||||
|         "last_user_agent": "", |  | ||||||
|         "session_data": {}, |  | ||||||
|     } |  | ||||||
|     for k, v in session_data.items(): |  | ||||||
|         if k == "authentik/stages/user_login/last_ip": |  | ||||||
|             args["last_ip"] = v |  | ||||||
|         elif k in ["last_user_agent", "last_used"]: |  | ||||||
|             args[k] = v |  | ||||||
|         elif args in [SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY]: |  | ||||||
|             pass |  | ||||||
|         else: |  | ||||||
|             args["session_data"][k] = v |  | ||||||
|     if old_auth_session: |  | ||||||
|         args["last_user_agent"] = old_auth_session.last_user_agent |  | ||||||
|         args["last_used"] = old_auth_session.last_used |  | ||||||
|  |  | ||||||
|     args["session_data"] = pickle.dumps(args["session_data"]) |  | ||||||
|     session = Session.objects.using(db_alias).create(**args) |  | ||||||
|  |  | ||||||
|     if old_auth_session: |  | ||||||
|         AuthenticatedSession.objects.using(db_alias).create( |  | ||||||
|             session=session, |  | ||||||
|             user=old_auth_session.user, |  | ||||||
|             uuid=old_auth_session.uuid, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
def migrate_redis_sessions(apps, schema_editor):
    """Copy every session stored in the Redis session cache into the database.

    Silently does nothing when the configured session cache is not
    Redis-backed — detection relies on the non-standard ``keys()`` method
    that only the Redis cache backend exposes.
    """
    from django.core.cache import caches

    db_alias = schema_editor.connection.alias
    cache = caches[SESSION_CACHE_ALIAS]

    # Not a redis cache, skipping
    if not hasattr(cache, "keys"):
        return

    print("\nMigrating Redis sessions to database, this might take a couple of minutes...")
    for key, session_data in progress_bar(cache.get_many(cache.keys(f"{KEY_PREFIX}*")).items()):
        # `ttl()` can return None for a key without an expiry (django-redis
        # semantics); treat that as immediately expiring instead of crashing
        # on `timedelta(seconds=None)`.
        ttl = cache.ttl(key) or 0
        _migrate_session(
            apps=apps,
            db_alias=db_alias,
            session_key=key.removeprefix(KEY_PREFIX),
            session_data=session_data,
            expires=now() + timedelta(seconds=ttl),
        )
|  |  | ||||||
|  |  | ||||||
def migrate_database_sessions(apps, schema_editor):
    """Re-serialize all rows of django.contrib.sessions into the new Session model."""
    DjangoSession = apps.get_model("sessions", "Session")
    db_alias = schema_editor.connection.alias

    # Fixed typo: was "Migration database sessions"
    print("\nMigrating database sessions, this might take a couple of minutes...")
    for django_session in progress_bar(DjangoSession.objects.using(db_alias).all()):
        # The salt matches what django.contrib.sessions used when signing;
        # PickleSerializer is needed because the stored payload is pickled,
        # not JSON — TODO confirm this matches the project's SESSION_SERIALIZER
        # history.
        session_data = signing.loads(
            django_session.session_data,
            salt="django.contrib.sessions.SessionStore",
            serializer=PickleSerializer,
        )
        _migrate_session(
            apps=apps,
            db_alias=db_alias,
            session_key=django_session.session_key,
            session_data=session_data,
            expires=django_session.expire_date,
        )
|  |  | ||||||
|  |  | ||||||
class Migration(migrations.Migration):
    """Replace the Django-backed AuthenticatedSession with authentik's own
    Session + AuthenticatedSession models, migrating existing session data."""

    dependencies = [
        ("sessions", "0001_initial"),
        ("authentik_core", "0045_rename_new_identifier_usersourceconnection_identifier_and_more"),
        ("authentik_providers_oauth2", "0027_accesstoken_authentik_p_expires_9f24a5_idx_and_more"),
        ("authentik_providers_rac", "0006_connectiontoken_authentik_p_expires_91f148_idx_and_more"),
    ]

    operations = [
        # Rename AuthenticatedSession to OldAuthenticatedSession
        migrations.RenameModel(
            old_name="AuthenticatedSession",
            new_name="OldAuthenticatedSession",
        ),
        # Rename the model's indexes to the auto-generated names for the
        # renamed model (order preserved from the original migration).
        *[
            migrations.RenameIndex(
                model_name="oldauthenticatedsession",
                old_name=old_name,
                new_name=new_name,
            )
            for old_name, new_name in [
                ("authentik_c_expires_08251d_idx", "authentik_c_expires_cf4f72_idx"),
                ("authentik_c_expirin_9cd839_idx", "authentik_c_expirin_c1f17f_idx"),
                ("authentik_c_expirin_195a84_idx", "authentik_c_expirin_e04f5d_idx"),
                ("authentik_c_session_d0f005_idx", "authentik_c_session_a44819_idx"),
            ]
        ],
        # The FK column index is not covered by RenameIndex; rename via raw SQL
        migrations.RunSQL(
            sql=(
                "ALTER INDEX authentik_core_authenticatedsession_user_id_5055b6cf "
                "RENAME TO authentik_core_oldauthenticatedsession_user_id_5055b6cf"
            ),
            reverse_sql=(
                "ALTER INDEX authentik_core_oldauthenticatedsession_user_id_5055b6cf "
                "RENAME TO authentik_core_authenticatedsession_user_id_5055b6cf"
            ),
        ),
        # Create new Session and AuthenticatedSession models
        migrations.CreateModel(
            name="Session",
            fields=[
                (
                    "session_key",
                    models.CharField(
                        max_length=40, primary_key=True, serialize=False, verbose_name="session key"
                    ),
                ),
                ("expires", models.DateTimeField(default=None, null=True)),
                ("expiring", models.BooleanField(default=True)),
                ("session_data", models.BinaryField(verbose_name="session data")),
                ("last_ip", models.GenericIPAddressField()),
                ("last_user_agent", models.TextField(blank=True)),
                ("last_used", models.DateTimeField(auto_now=True)),
            ],
            options={
                "default_permissions": [],
                "verbose_name": "Session",
                "verbose_name_plural": "Sessions",
            },
        ),
        migrations.AddIndex(
            model_name="session",
            index=models.Index(fields=["expires"], name="authentik_c_expires_d2f607_idx"),
        ),
        migrations.AddIndex(
            model_name="session",
            index=models.Index(fields=["expiring"], name="authentik_c_expirin_7c2cfb_idx"),
        ),
        migrations.AddIndex(
            model_name="session",
            index=models.Index(
                fields=["expiring", "expires"], name="authentik_c_expirin_1ab2e4_idx"
            ),
        ),
        migrations.AddIndex(
            model_name="session",
            index=models.Index(
                fields=["expires", "session_key"], name="authentik_c_expires_c49143_idx"
            ),
        ),
        migrations.CreateModel(
            name="AuthenticatedSession",
            fields=[
                (
                    "session",
                    models.OneToOneField(
                        on_delete=django.db.models.deletion.CASCADE,
                        primary_key=True,
                        serialize=False,
                        to="authentik_core.session",
                    ),
                ),
                ("uuid", models.UUIDField(default=uuid.uuid4, unique=True)),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
            ],
            options={
                "verbose_name": "Authenticated Session",
                "verbose_name_plural": "Authenticated Sessions",
            },
        ),
        # Data migrations: pull existing sessions out of Redis and out of the
        # legacy database table into the new Session model.
        migrations.RunPython(
            code=migrate_redis_sessions,
            reverse_code=migrations.RunPython.noop,
        ),
        migrations.RunPython(
            code=migrate_database_sessions,
            reverse_code=migrations.RunPython.noop,
        ),
    ]
| @ -1,18 +0,0 @@ | |||||||
| # Generated by Django 5.0.11 on 2025-01-27 13:02 |  | ||||||
|  |  | ||||||
| from django.db import migrations |  | ||||||
|  |  | ||||||
|  |  | ||||||
class Migration(migrations.Migration):
    """Drop the interim OldAuthenticatedSession model; the dependency
    migrations guarantee all of its data has already been moved."""

    dependencies = [
        ("authentik_core", "0046_session_and_more"),
        ("authentik_providers_rac", "0007_migrate_session"),
        ("authentik_providers_oauth2", "0028_migrate_session"),
    ]

    operations = [migrations.DeleteModel(name="OldAuthenticatedSession")]
| @ -1,103 +0,0 @@ | |||||||
| # Generated by Django 5.1.9 on 2025-05-14 11:15 |  | ||||||
|  |  | ||||||
| from django.apps.registry import Apps, apps as global_apps |  | ||||||
| from django.db import migrations |  | ||||||
| from django.contrib.contenttypes.management import create_contenttypes |  | ||||||
| from django.contrib.auth.management import create_permissions |  | ||||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor |  | ||||||
|  |  | ||||||
|  |  | ||||||
def migrate_authenticated_session_permissions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
    """Migrate permissions from OldAuthenticatedSession to AuthenticatedSession.

    Re-points global user/group permission assignments and guardian
    object-level permissions from the old model's permissions onto the
    equivalent permissions of the new content type.
    """
    db_alias = schema_editor.connection.alias

    # `apps` here is just an instance of `django.db.migrations.state.AppConfigStub`, we need the
    # real config for creating permissions and content types
    authentik_core_config = global_apps.get_app_config("authentik_core")
    # These are only ran by django after all migrations, but we need them right now.
    # `global_apps` is needed,
    create_permissions(authentik_core_config, using=db_alias, verbosity=1)
    create_contenttypes(authentik_core_config, using=db_alias, verbosity=1)

    # But from now on, this is just a regular migration, so use `apps`
    Permission = apps.get_model("auth", "Permission")
    ContentType = apps.get_model("contenttypes", "ContentType")
    # Hoisted out of the loop below — these lookups are loop-invariant
    User = apps.get_model("authentik_core", "User")
    DjangoGroup = apps.get_model("auth", "Group")
    UserObjectPermission = apps.get_model("guardian", "UserObjectPermission")
    GroupObjectPermission = apps.get_model("guardian", "GroupObjectPermission")

    try:
        old_ct = ContentType.objects.using(db_alias).get(
            app_label="authentik_core", model="oldauthenticatedsession"
        )
        new_ct = ContentType.objects.using(db_alias).get(
            app_label="authentik_core", model="authenticatedsession"
        )
    except ContentType.DoesNotExist:
        # This should exist at this point, but if not, let's cut our losses
        return

    # Get all permissions for the old content type
    old_perms = Permission.objects.using(db_alias).filter(content_type=old_ct)

    # Re-point every assignment of each old permission to its new equivalent
    for old_perm in old_perms:
        new_perm = (
            Permission.objects.using(db_alias)
            .filter(
                content_type=new_ct,
                codename=old_perm.codename,
            )
            .first()
        )
        if not new_perm:
            # This should exist at this point, but if not, let's cut our losses
            continue

        # Global user permissions
        User.user_permissions.through.objects.using(db_alias).filter(
            permission=old_perm
        ).update(permission=new_perm)

        # Global role permissions
        DjangoGroup.permissions.through.objects.using(db_alias).filter(
            permission=old_perm
        ).update(permission=new_perm)

        # Object user permissions
        UserObjectPermission.objects.using(db_alias).filter(permission=old_perm).update(
            permission=new_perm, content_type=new_ct
        )

        # Object role permissions
        GroupObjectPermission.objects.using(db_alias).filter(permission=old_perm).update(
            permission=new_perm, content_type=new_ct
        )
|  |  | ||||||
|  |  | ||||||
def remove_old_authenticated_session_content_type(
    apps: Apps, schema_editor: BaseDatabaseSchemaEditor
):
    """Delete the stale ContentType row left behind by OldAuthenticatedSession."""
    db_alias = schema_editor.connection.alias
    ContentType = apps.get_model("contenttypes", "ContentType")

    # Scope the delete by app_label as well as model name so a same-named
    # model in another app can never be affected.
    ContentType.objects.using(db_alias).filter(
        app_label="authentik_core", model="oldauthenticatedsession"
    ).delete()
|  |  | ||||||
|  |  | ||||||
class Migration(migrations.Migration):
    """Run the permission migration and content-type cleanup for the
    OldAuthenticatedSession -> AuthenticatedSession rename."""

    dependencies = [
        ("authentik_core", "0047_delete_oldauthenticatedsession"),
    ]

    # Both steps are forward-only data fixes; reversing is a no-op.
    operations = [
        migrations.RunPython(code=step, reverse_code=migrations.RunPython.noop)
        for step in (
            migrate_authenticated_session_permissions,
            remove_old_authenticated_session_content_type,
        )
    ]
| @ -1,7 +1,6 @@ | |||||||
| """authentik core models""" | """authentik core models""" | ||||||
|  |  | ||||||
| from datetime import datetime | from datetime import datetime | ||||||
| from enum import StrEnum |  | ||||||
| from hashlib import sha256 | from hashlib import sha256 | ||||||
| from typing import Any, Optional, Self | from typing import Any, Optional, Self | ||||||
| from uuid import uuid4 | from uuid import uuid4 | ||||||
| @ -10,7 +9,6 @@ from deepmerge import always_merger | |||||||
| from django.contrib.auth.hashers import check_password | from django.contrib.auth.hashers import check_password | ||||||
| from django.contrib.auth.models import AbstractUser | from django.contrib.auth.models import AbstractUser | ||||||
| from django.contrib.auth.models import UserManager as DjangoUserManager | from django.contrib.auth.models import UserManager as DjangoUserManager | ||||||
| from django.contrib.sessions.base_session import AbstractBaseSession |  | ||||||
| from django.db import models | from django.db import models | ||||||
| from django.db.models import Q, QuerySet, options | from django.db.models import Q, QuerySet, options | ||||||
| from django.db.models.constants import LOOKUP_SEP | from django.db.models.constants import LOOKUP_SEP | ||||||
| @ -18,7 +16,7 @@ from django.http import HttpRequest | |||||||
| from django.utils.functional import SimpleLazyObject, cached_property | from django.utils.functional import SimpleLazyObject, cached_property | ||||||
| from django.utils.timezone import now | from django.utils.timezone import now | ||||||
| from django.utils.translation import gettext_lazy as _ | from django.utils.translation import gettext_lazy as _ | ||||||
| from django_cte import CTE, with_cte | from django_cte import CTEQuerySet, With | ||||||
| from guardian.conf import settings | from guardian.conf import settings | ||||||
| from guardian.mixins import GuardianUserMixin | from guardian.mixins import GuardianUserMixin | ||||||
| from model_utils.managers import InheritanceManager | from model_utils.managers import InheritanceManager | ||||||
| @ -136,7 +134,7 @@ class AttributesMixin(models.Model): | |||||||
|         return instance, False |         return instance, False | ||||||
|  |  | ||||||
|  |  | ||||||
| class GroupQuerySet(QuerySet): | class GroupQuerySet(CTEQuerySet): | ||||||
|     def with_children_recursive(self): |     def with_children_recursive(self): | ||||||
|         """Recursively get all groups that have the current queryset as parents |         """Recursively get all groups that have the current queryset as parents | ||||||
|         or are indirectly related.""" |         or are indirectly related.""" | ||||||
| @ -165,9 +163,9 @@ class GroupQuerySet(QuerySet): | |||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         # Build the recursive query, see above |         # Build the recursive query, see above | ||||||
|         cte = CTE.recursive(make_cte) |         cte = With.recursive(make_cte) | ||||||
|         # Return the result, as a usable queryset for Group. |         # Return the result, as a usable queryset for Group. | ||||||
|         return with_cte(cte, select=cte.join(Group, group_uuid=cte.col.group_uuid)) |         return cte.join(Group, group_uuid=cte.col.group_uuid).with_cte(cte) | ||||||
|  |  | ||||||
|  |  | ||||||
| class Group(SerializerModel, AttributesMixin): | class Group(SerializerModel, AttributesMixin): | ||||||
| @ -648,30 +646,19 @@ class SourceUserMatchingModes(models.TextChoices): | |||||||
|     """Different modes a source can handle new/returning users""" |     """Different modes a source can handle new/returning users""" | ||||||
|  |  | ||||||
|     IDENTIFIER = "identifier", _("Use the source-specific identifier") |     IDENTIFIER = "identifier", _("Use the source-specific identifier") | ||||||
|     EMAIL_LINK = ( |     EMAIL_LINK = "email_link", _( | ||||||
|         "email_link", |         "Link to a user with identical email address. Can have security implications " | ||||||
|         _( |         "when a source doesn't validate email addresses." | ||||||
|             "Link to a user with identical email address. Can have security implications " |  | ||||||
|             "when a source doesn't validate email addresses." |  | ||||||
|         ), |  | ||||||
|     ) |     ) | ||||||
|     EMAIL_DENY = ( |     EMAIL_DENY = "email_deny", _( | ||||||
|         "email_deny", |         "Use the user's email address, but deny enrollment when the email address already exists." | ||||||
|         _( |  | ||||||
|             "Use the user's email address, but deny enrollment when the email address already " |  | ||||||
|             "exists." |  | ||||||
|         ), |  | ||||||
|     ) |     ) | ||||||
|     USERNAME_LINK = ( |     USERNAME_LINK = "username_link", _( | ||||||
|         "username_link", |         "Link to a user with identical username. Can have security implications " | ||||||
|         _( |         "when a username is used with another source." | ||||||
|             "Link to a user with identical username. Can have security implications " |  | ||||||
|             "when a username is used with another source." |  | ||||||
|         ), |  | ||||||
|     ) |     ) | ||||||
|     USERNAME_DENY = ( |     USERNAME_DENY = "username_deny", _( | ||||||
|         "username_deny", |         "Use the user's username, but deny enrollment when the username already exists." | ||||||
|         _("Use the user's username, but deny enrollment when the username already exists."), |  | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -679,16 +666,12 @@ class SourceGroupMatchingModes(models.TextChoices): | |||||||
|     """Different modes a source can handle new/returning groups""" |     """Different modes a source can handle new/returning groups""" | ||||||
|  |  | ||||||
|     IDENTIFIER = "identifier", _("Use the source-specific identifier") |     IDENTIFIER = "identifier", _("Use the source-specific identifier") | ||||||
|     NAME_LINK = ( |     NAME_LINK = "name_link", _( | ||||||
|         "name_link", |         "Link to a group with identical name. Can have security implications " | ||||||
|         _( |         "when a group name is used with another source." | ||||||
|             "Link to a group with identical name. Can have security implications " |  | ||||||
|             "when a group name is used with another source." |  | ||||||
|         ), |  | ||||||
|     ) |     ) | ||||||
|     NAME_DENY = ( |     NAME_DENY = "name_deny", _( | ||||||
|         "name_deny", |         "Use the group name, but deny enrollment when the name already exists." | ||||||
|         _("Use the group name, but deny enrollment when the name already exists."), |  | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -747,7 +730,8 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | |||||||
|         choices=SourceGroupMatchingModes.choices, |         choices=SourceGroupMatchingModes.choices, | ||||||
|         default=SourceGroupMatchingModes.IDENTIFIER, |         default=SourceGroupMatchingModes.IDENTIFIER, | ||||||
|         help_text=_( |         help_text=_( | ||||||
|             "How the source determines if an existing group should be used or a new group created." |             "How the source determines if an existing group should be used or " | ||||||
|  |             "a new group created." | ||||||
|         ), |         ), | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
| @ -777,17 +761,11 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | |||||||
|     @property |     @property | ||||||
|     def component(self) -> str: |     def component(self) -> str: | ||||||
|         """Return component used to edit this object""" |         """Return component used to edit this object""" | ||||||
|         if self.managed == self.MANAGED_INBUILT: |  | ||||||
|             return "" |  | ||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def property_mapping_type(self) -> "type[PropertyMapping]": |     def property_mapping_type(self) -> "type[PropertyMapping]": | ||||||
|         """Return property mapping type used by this object""" |         """Return property mapping type used by this object""" | ||||||
|         if self.managed == self.MANAGED_INBUILT: |  | ||||||
|             from authentik.core.models import PropertyMapping |  | ||||||
|  |  | ||||||
|             return PropertyMapping |  | ||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
|  |  | ||||||
|     def ui_login_button(self, request: HttpRequest) -> UILoginButton | None: |     def ui_login_button(self, request: HttpRequest) -> UILoginButton | None: | ||||||
| @ -802,14 +780,10 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | |||||||
|  |  | ||||||
|     def get_base_user_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]: |     def get_base_user_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]: | ||||||
|         """Get base properties for a user to build final properties upon.""" |         """Get base properties for a user to build final properties upon.""" | ||||||
|         if self.managed == self.MANAGED_INBUILT: |  | ||||||
|             return {} |  | ||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
|  |  | ||||||
|     def get_base_group_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]: |     def get_base_group_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]: | ||||||
|         """Get base properties for a group to build final properties upon.""" |         """Get base properties for a group to build final properties upon.""" | ||||||
|         if self.managed == self.MANAGED_INBUILT: |  | ||||||
|             return {} |  | ||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
|  |  | ||||||
|     def __str__(self): |     def __str__(self): | ||||||
| @ -840,7 +814,6 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel): | |||||||
|  |  | ||||||
|     user = models.ForeignKey(User, on_delete=models.CASCADE) |     user = models.ForeignKey(User, on_delete=models.CASCADE) | ||||||
|     source = models.ForeignKey(Source, on_delete=models.CASCADE) |     source = models.ForeignKey(Source, on_delete=models.CASCADE) | ||||||
|     identifier = models.TextField() |  | ||||||
|  |  | ||||||
|     objects = InheritanceManager() |     objects = InheritanceManager() | ||||||
|  |  | ||||||
| @ -854,10 +827,6 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel): | |||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|         unique_together = (("user", "source"),) |         unique_together = (("user", "source"),) | ||||||
|         indexes = ( |  | ||||||
|             models.Index(fields=("identifier",)), |  | ||||||
|             models.Index(fields=("source", "identifier")), |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class GroupSourceConnection(SerializerModel, CreatedUpdatedModel): | class GroupSourceConnection(SerializerModel, CreatedUpdatedModel): | ||||||
| @ -1028,81 +997,45 @@ class PropertyMapping(SerializerModel, ManagedModel): | |||||||
|         verbose_name_plural = _("Property Mappings") |         verbose_name_plural = _("Property Mappings") | ||||||
|  |  | ||||||
|  |  | ||||||
| class Session(ExpiringModel, AbstractBaseSession): | class AuthenticatedSession(ExpiringModel): | ||||||
|     """User session with extra fields for fast access""" |     """Additional session class for authenticated users. Augments the standard django session | ||||||
|  |     to achieve the following: | ||||||
|  |         - Make it queryable by user | ||||||
|  |         - Have a direct connection to user objects | ||||||
|  |         - Allow users to view their own sessions and terminate them | ||||||
|  |         - Save structured and well-defined information. | ||||||
|  |     """ | ||||||
|  |  | ||||||
|     # Remove upstream field because we're using our own ExpiringModel |     uuid = models.UUIDField(default=uuid4, primary_key=True) | ||||||
|     expire_date = None |  | ||||||
|     session_data = models.BinaryField(_("session data")) |  | ||||||
|  |  | ||||||
|     # Keep in sync with Session.Keys |     session_key = models.CharField(max_length=40) | ||||||
|     last_ip = models.GenericIPAddressField() |     user = models.ForeignKey(User, on_delete=models.CASCADE) | ||||||
|  |  | ||||||
|  |     last_ip = models.TextField() | ||||||
|     last_user_agent = models.TextField(blank=True) |     last_user_agent = models.TextField(blank=True) | ||||||
|     last_used = models.DateTimeField(auto_now=True) |     last_used = models.DateTimeField(auto_now=True) | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         verbose_name = _("Session") |  | ||||||
|         verbose_name_plural = _("Sessions") |  | ||||||
|         indexes = ExpiringModel.Meta.indexes + [ |  | ||||||
|             models.Index(fields=["expires", "session_key"]), |  | ||||||
|         ] |  | ||||||
|         default_permissions = [] |  | ||||||
|  |  | ||||||
|     def __str__(self): |  | ||||||
|         return self.session_key |  | ||||||
|  |  | ||||||
|     class Keys(StrEnum): |  | ||||||
|         """ |  | ||||||
|         Keys to be set with the session interface for the fields above to be updated. |  | ||||||
|  |  | ||||||
|         If a field is added here that needs to be initialized when the session is initialized, |  | ||||||
|         it must also be reflected in authentik.root.middleware.SessionMiddleware.process_request |  | ||||||
|         and in authentik.core.sessions.SessionStore.__init__ |  | ||||||
|         """ |  | ||||||
|  |  | ||||||
|         LAST_IP = "last_ip" |  | ||||||
|         LAST_USER_AGENT = "last_user_agent" |  | ||||||
|         LAST_USED = "last_used" |  | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def get_session_store_class(cls): |  | ||||||
|         from authentik.core.sessions import SessionStore |  | ||||||
|  |  | ||||||
|         return SessionStore |  | ||||||
|  |  | ||||||
|     def get_decoded(self): |  | ||||||
|         raise NotImplementedError |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthenticatedSession(SerializerModel): |  | ||||||
|     session = models.OneToOneField(Session, on_delete=models.CASCADE, primary_key=True) |  | ||||||
|     # We use the session as primary key, but we need the API to be able to reference |  | ||||||
|     # this object uniquely without exposing the session key |  | ||||||
|     uuid = models.UUIDField(default=uuid4, unique=True) |  | ||||||
|  |  | ||||||
|     user = models.ForeignKey(User, on_delete=models.CASCADE) |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def serializer(self) -> type[Serializer]: |  | ||||||
|         from authentik.core.api.authenticated_sessions import AuthenticatedSessionSerializer |  | ||||||
|  |  | ||||||
|         return AuthenticatedSessionSerializer |  | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|         verbose_name = _("Authenticated Session") |         verbose_name = _("Authenticated Session") | ||||||
|         verbose_name_plural = _("Authenticated Sessions") |         verbose_name_plural = _("Authenticated Sessions") | ||||||
|  |         indexes = ExpiringModel.Meta.indexes + [ | ||||||
|  |             models.Index(fields=["session_key"]), | ||||||
|  |         ] | ||||||
|  |  | ||||||
|     def __str__(self) -> str: |     def __str__(self) -> str: | ||||||
|         return f"Authenticated Session {str(self.pk)[:10]}" |         return f"Authenticated Session {self.session_key[:10]}" | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     def from_request(request: HttpRequest, user: User) -> Optional["AuthenticatedSession"]: |     def from_request(request: HttpRequest, user: User) -> Optional["AuthenticatedSession"]: | ||||||
|         """Create a new session from a http request""" |         """Create a new session from a http request""" | ||||||
|         if not hasattr(request, "session") or not request.session.exists( |         from authentik.root.middleware import ClientIPMiddleware | ||||||
|             request.session.session_key |  | ||||||
|         ): |         if not hasattr(request, "session") or not request.session.session_key: | ||||||
|             return None |             return None | ||||||
|         return AuthenticatedSession( |         return AuthenticatedSession( | ||||||
|             session=Session.objects.filter(session_key=request.session.session_key).first(), |             session_key=request.session.session_key, | ||||||
|             user=user, |             user=user, | ||||||
|  |             last_ip=ClientIPMiddleware.get_client_ip(request), | ||||||
|  |             last_user_agent=request.META.get("HTTP_USER_AGENT", ""), | ||||||
|  |             expires=request.session.get_expiry_date(), | ||||||
|         ) |         ) | ||||||
|  | |||||||
| @ -1,168 +0,0 @@ | |||||||
| """authentik sessions engine""" |  | ||||||
|  |  | ||||||
| import pickle  # nosec |  | ||||||
|  |  | ||||||
| from django.contrib.auth import BACKEND_SESSION_KEY, HASH_SESSION_KEY, SESSION_KEY |  | ||||||
| from django.contrib.sessions.backends.db import SessionStore as SessionBase |  | ||||||
| from django.core.exceptions import SuspiciousOperation |  | ||||||
| from django.utils import timezone |  | ||||||
| from django.utils.functional import cached_property |  | ||||||
| from structlog.stdlib import get_logger |  | ||||||
|  |  | ||||||
| from authentik.root.middleware import ClientIPMiddleware |  | ||||||
|  |  | ||||||
| LOGGER = get_logger() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class SessionStore(SessionBase): |  | ||||||
|     def __init__(self, session_key=None, last_ip=None, last_user_agent=""): |  | ||||||
|         super().__init__(session_key) |  | ||||||
|         self._create_kwargs = { |  | ||||||
|             "last_ip": last_ip or ClientIPMiddleware.default_ip, |  | ||||||
|             "last_user_agent": last_user_agent, |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def get_model_class(cls): |  | ||||||
|         from authentik.core.models import Session |  | ||||||
|  |  | ||||||
|         return Session |  | ||||||
|  |  | ||||||
|     @cached_property |  | ||||||
|     def model_fields(self): |  | ||||||
|         return [k.value for k in self.model.Keys] |  | ||||||
|  |  | ||||||
|     def _get_session_from_db(self): |  | ||||||
|         try: |  | ||||||
|             return ( |  | ||||||
|                 self.model.objects.select_related( |  | ||||||
|                     "authenticatedsession", |  | ||||||
|                     "authenticatedsession__user", |  | ||||||
|                 ) |  | ||||||
|                 .prefetch_related( |  | ||||||
|                     "authenticatedsession__user__groups", |  | ||||||
|                     "authenticatedsession__user__user_permissions", |  | ||||||
|                 ) |  | ||||||
|                 .get( |  | ||||||
|                     session_key=self.session_key, |  | ||||||
|                     expires__gt=timezone.now(), |  | ||||||
|                 ) |  | ||||||
|             ) |  | ||||||
|         except (self.model.DoesNotExist, SuspiciousOperation) as exc: |  | ||||||
|             if isinstance(exc, SuspiciousOperation): |  | ||||||
|                 LOGGER.warning(str(exc)) |  | ||||||
|             self._session_key = None |  | ||||||
|  |  | ||||||
|     async def _aget_session_from_db(self): |  | ||||||
|         try: |  | ||||||
|             return ( |  | ||||||
|                 await self.model.objects.select_related( |  | ||||||
|                     "authenticatedsession", |  | ||||||
|                     "authenticatedsession__user", |  | ||||||
|                 ) |  | ||||||
|                 .prefetch_related( |  | ||||||
|                     "authenticatedsession__user__groups", |  | ||||||
|                     "authenticatedsession__user__user_permissions", |  | ||||||
|                 ) |  | ||||||
|                 .aget( |  | ||||||
|                     session_key=self.session_key, |  | ||||||
|                     expires__gt=timezone.now(), |  | ||||||
|                 ) |  | ||||||
|             ) |  | ||||||
|         except (self.model.DoesNotExist, SuspiciousOperation) as exc: |  | ||||||
|             if isinstance(exc, SuspiciousOperation): |  | ||||||
|                 LOGGER.warning(str(exc)) |  | ||||||
|             self._session_key = None |  | ||||||
|  |  | ||||||
|     def encode(self, session_dict): |  | ||||||
|         return pickle.dumps(session_dict, protocol=pickle.HIGHEST_PROTOCOL) |  | ||||||
|  |  | ||||||
|     def decode(self, session_data): |  | ||||||
|         try: |  | ||||||
|             return pickle.loads(session_data)  # nosec |  | ||||||
|         except pickle.PickleError: |  | ||||||
|             # ValueError, unpickling exceptions. If any of these happen, just return an empty |  | ||||||
|             # dictionary (an empty session) |  | ||||||
|             pass |  | ||||||
|         return {} |  | ||||||
|  |  | ||||||
|     def load(self): |  | ||||||
|         s = self._get_session_from_db() |  | ||||||
|         if s: |  | ||||||
|             return { |  | ||||||
|                 "authenticatedsession": getattr(s, "authenticatedsession", None), |  | ||||||
|                 **{k: getattr(s, k) for k in self.model_fields}, |  | ||||||
|                 **self.decode(s.session_data), |  | ||||||
|             } |  | ||||||
|         else: |  | ||||||
|             return {} |  | ||||||
|  |  | ||||||
|     async def aload(self): |  | ||||||
|         s = await self._aget_session_from_db() |  | ||||||
|         if s: |  | ||||||
|             return { |  | ||||||
|                 "authenticatedsession": getattr(s, "authenticatedsession", None), |  | ||||||
|                 **{k: getattr(s, k) for k in self.model_fields}, |  | ||||||
|                 **self.decode(s.session_data), |  | ||||||
|             } |  | ||||||
|         else: |  | ||||||
|             return {} |  | ||||||
|  |  | ||||||
|     def create_model_instance(self, data): |  | ||||||
|         args = { |  | ||||||
|             "session_key": self._get_or_create_session_key(), |  | ||||||
|             "expires": self.get_expiry_date(), |  | ||||||
|             "session_data": {}, |  | ||||||
|             **self._create_kwargs, |  | ||||||
|         } |  | ||||||
|         for k, v in data.items(): |  | ||||||
|             # Don't save: |  | ||||||
|             # - unused auth data |  | ||||||
|             # - related models |  | ||||||
|             if k in [SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY, "authenticatedsession"]: |  | ||||||
|                 pass |  | ||||||
|             elif k in self.model_fields: |  | ||||||
|                 args[k] = v |  | ||||||
|             else: |  | ||||||
|                 args["session_data"][k] = v |  | ||||||
|         args["session_data"] = self.encode(args["session_data"]) |  | ||||||
|         return self.model(**args) |  | ||||||
|  |  | ||||||
|     async def acreate_model_instance(self, data): |  | ||||||
|         args = { |  | ||||||
|             "session_key": await self._aget_or_create_session_key(), |  | ||||||
|             "expires": await self.aget_expiry_date(), |  | ||||||
|             "session_data": {}, |  | ||||||
|             **self._create_kwargs, |  | ||||||
|         } |  | ||||||
|         for k, v in data.items(): |  | ||||||
|             # Don't save: |  | ||||||
|             # - unused auth data |  | ||||||
|             # - related models |  | ||||||
|             if k in [SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY, "authenticatedsession"]: |  | ||||||
|                 pass |  | ||||||
|             elif k in self.model_fields: |  | ||||||
|                 args[k] = v |  | ||||||
|             else: |  | ||||||
|                 args["session_data"][k] = v |  | ||||||
|         args["session_data"] = self.encode(args["session_data"]) |  | ||||||
|         return self.model(**args) |  | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def clear_expired(cls): |  | ||||||
|         cls.get_model_class().objects.filter(expires__lt=timezone.now()).delete() |  | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     async def aclear_expired(cls): |  | ||||||
|         await cls.get_model_class().objects.filter(expires__lt=timezone.now()).adelete() |  | ||||||
|  |  | ||||||
|     def cycle_key(self): |  | ||||||
|         data = self._session |  | ||||||
|         key = self.session_key |  | ||||||
|         self.create() |  | ||||||
|         self._session_cache = data |  | ||||||
|         if key: |  | ||||||
|             self.delete(key) |  | ||||||
|         if (authenticated_session := data.get("authenticatedsession")) is not None: |  | ||||||
|             authenticated_session.session_id = self.session_key |  | ||||||
|             authenticated_session.save(force_insert=True) |  | ||||||
| @ -1,10 +1,11 @@ | |||||||
| """authentik core signals""" | """authentik core signals""" | ||||||
|  |  | ||||||
| from django.contrib.auth.signals import user_logged_in | from django.contrib.auth.signals import user_logged_in, user_logged_out | ||||||
|  | from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||||
| from django.core.cache import cache | from django.core.cache import cache | ||||||
| from django.core.signals import Signal | from django.core.signals import Signal | ||||||
| from django.db.models import Model | from django.db.models import Model | ||||||
| from django.db.models.signals import post_delete, post_save, pre_save | from django.db.models.signals import post_save, pre_delete, pre_save | ||||||
| from django.dispatch import receiver | from django.dispatch import receiver | ||||||
| from django.http.request import HttpRequest | from django.http.request import HttpRequest | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
| @ -14,7 +15,6 @@ from authentik.core.models import ( | |||||||
|     AuthenticatedSession, |     AuthenticatedSession, | ||||||
|     BackchannelProvider, |     BackchannelProvider, | ||||||
|     ExpiringModel, |     ExpiringModel, | ||||||
|     Session, |  | ||||||
|     User, |     User, | ||||||
|     default_token_duration, |     default_token_duration, | ||||||
| ) | ) | ||||||
| @ -49,10 +49,19 @@ def user_logged_in_session(sender, request: HttpRequest, user: User, **_): | |||||||
|         session.save() |         session.save() | ||||||
|  |  | ||||||
|  |  | ||||||
| @receiver(post_delete, sender=AuthenticatedSession) | @receiver(user_logged_out) | ||||||
|  | def user_logged_out_session(sender, request: HttpRequest, user: User, **_): | ||||||
|  |     """Delete AuthenticatedSession if it exists""" | ||||||
|  |     if not request.session or not request.session.session_key: | ||||||
|  |         return | ||||||
|  |     AuthenticatedSession.objects.filter(session_key=request.session.session_key).delete() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @receiver(pre_delete, sender=AuthenticatedSession) | ||||||
| def authenticated_session_delete(sender: type[Model], instance: "AuthenticatedSession", **_): | def authenticated_session_delete(sender: type[Model], instance: "AuthenticatedSession", **_): | ||||||
|     """Delete session when authenticated session is deleted""" |     """Delete session when authenticated session is deleted""" | ||||||
|     Session.objects.filter(session_key=instance.pk).delete() |     cache_key = f"{KEY_PREFIX}{instance.session_key}" | ||||||
|  |     cache.delete(cache_key) | ||||||
|  |  | ||||||
|  |  | ||||||
| @receiver(pre_save) | @receiver(pre_save) | ||||||
|  | |||||||
| @ -48,7 +48,6 @@ LOGGER = get_logger() | |||||||
|  |  | ||||||
| PLAN_CONTEXT_SOURCE_GROUPS = "source_groups" | PLAN_CONTEXT_SOURCE_GROUPS = "source_groups" | ||||||
| SESSION_KEY_SOURCE_FLOW_STAGES = "authentik/flows/source_flow_stages" | SESSION_KEY_SOURCE_FLOW_STAGES = "authentik/flows/source_flow_stages" | ||||||
| SESSION_KEY_SOURCE_FLOW_CONTEXT = "authentik/flows/source_flow_context" |  | ||||||
| SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token"  # nosec | SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token"  # nosec | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -262,7 +261,6 @@ class SourceFlowManager: | |||||||
|                 plan.append_stage(stage) |                 plan.append_stage(stage) | ||||||
|         for stage in self.request.session.get(SESSION_KEY_SOURCE_FLOW_STAGES, []): |         for stage in self.request.session.get(SESSION_KEY_SOURCE_FLOW_STAGES, []): | ||||||
|             plan.append_stage(stage) |             plan.append_stage(stage) | ||||||
|         plan.context.update(self.request.session.get(SESSION_KEY_SOURCE_FLOW_CONTEXT, {})) |  | ||||||
|         return plan.to_redirect(self.request, flow) |         return plan.to_redirect(self.request, flow) | ||||||
|  |  | ||||||
|     def handle_auth( |     def handle_auth( | ||||||
|  | |||||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user
	