Compare commits

29 Commits

website/do ... web/legibi
| SHA1 |
|---|
| 1bc8fa0a9d |
| 3ec0d30965 |
| 50d2f69332 |
| 7d972ec711 |
| 854427e463 |
| be349e2e14 |
| bd0e81b8ad |
| f6afb59515 |
| dddde09be5 |
| 6d7fc94698 |
| 1dcf9108ad |
| 7bb6a3dfe6 |
| 9cc440eee1 |
| fe9e4526ac |
| 20b66f850c |
| 67b327414b |
| 5b8d86b5a9 |
| 67aed3e318 |
| 9809b94030 |
| e7527c551b |
| 36b10b434a |
| 831797b871 |
| 5cc2c0f45f |
| 32442766f4 |
| 75790909a8 |
| e0d5df89ca |
| f25a9c624e |
| 914993a788 |
| 89dad07a66 |
| @ -1,5 +1,5 @@ | |||||||
| [bumpversion] | [bumpversion] | ||||||
| current_version = 2025.6.0 | current_version = 2025.2.0 | ||||||
| tag = True | tag = True | ||||||
| commit = True | commit = True | ||||||
| parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? | parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? | ||||||
| @ -17,8 +17,6 @@ optional_value = final | |||||||
|  |  | ||||||
| [bumpversion:file:pyproject.toml] | [bumpversion:file:pyproject.toml] | ||||||
|  |  | ||||||
| [bumpversion:file:uv.lock] |  | ||||||
|  |  | ||||||
| [bumpversion:file:package.json] | [bumpversion:file:package.json] | ||||||
|  |  | ||||||
| [bumpversion:file:docker-compose.yml] | [bumpversion:file:docker-compose.yml] | ||||||
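The hunk above compares two revisions of the bumpversion configuration: the left-hand side is on 2025.6.0 and also lists uv.lock as a bump target, while the right-hand side is on 2025.2.0 without it. For context, a rough sketch of how such a config is exercised (assuming a bumpversion-compatible CLI such as bump2version is installed; the command name is an assumption, not something this diff specifies):

```shell
# Sketch only: bump the minor part as defined by the `parse` regex above.
# With commit = True and tag = True, the tool rewrites every
# [bumpversion:file:...] target, then creates a release commit and tag.
bump2version minor            # e.g. 2025.2.0 -> 2025.3.0 (illustrative versions)
git push --follow-tags        # publish the release commit and its tag
```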
.github/ISSUE_TEMPLATE/docs_issue.md (22 changes, vendored)
							| @ -1,22 +0,0 @@ | |||||||
| --- |  | ||||||
| name: Documentation issue |  | ||||||
| about: Suggest an improvement or report a problem |  | ||||||
| title: "" |  | ||||||
| labels: documentation |  | ||||||
| assignees: "" |  | ||||||
| --- |  | ||||||
|  |  | ||||||
| **Do you see an area that can be clarified or expanded, a technical inaccuracy, or a broken link? Please describe.** |  | ||||||
| A clear and concise description of what the problem is, or where the document can be improved. Ex. I believe we need more details about [...] |  | ||||||
|  |  | ||||||
| **Provide the URL or link to the exact page in the documentation to which you are referring.** |  | ||||||
| If there are multiple pages, list them all, and be sure to state the header or section where the content is. |  | ||||||
|  |  | ||||||
| **Describe the solution you'd like** |  | ||||||
| A clear and concise description of what you want to happen. |  | ||||||
|  |  | ||||||
| **Additional context** |  | ||||||
| Add any other context or screenshots about the documentation issue here. |  | ||||||
|  |  | ||||||
| **Consider opening a PR!** |  | ||||||
| If the issue is one that you can fix, or even make a good pass at, we'd appreciate a PR. For more information about making a contribution to the docs, and using our Style Guide and our templates, refer to ["Writing documentation"](https://docs.goauthentik.io/docs/developer-docs/docs/writing-documentation). |  | ||||||
| @ -44,6 +44,7 @@ if is_release: | |||||||
|         ] |         ] | ||||||
|         if not prerelease: |         if not prerelease: | ||||||
|             image_tags += [ |             image_tags += [ | ||||||
|  |                 f"{name}:latest", | ||||||
|                 f"{name}:{version_family}", |                 f"{name}:{version_family}", | ||||||
|             ] |             ] | ||||||
| else: | else: | ||||||
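The only difference in this hunk is that the left-hand side also appends an `f"{name}:latest"` tag when the build is not a prerelease. With hypothetical values substituted for `name` and `version_family` (placeholders, not values taken from this diff), the resulting tag set for a stable build would be pushed roughly as:

```shell
# Hypothetical expansion of image_tags for a stable release build;
# the registry, image name, and versions below are placeholders.
docker buildx build \
  -t example.registry/authentik/server:2025.6.1 \
  -t example.registry/authentik/server:2025.6 \
  -t example.registry/authentik/server:latest \
  .
```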
.github/actions/setup/action.yml (20 changes, vendored)
							| @ -9,22 +9,17 @@ inputs: | |||||||
| runs: | runs: | ||||||
|   using: "composite" |   using: "composite" | ||||||
|   steps: |   steps: | ||||||
|     - name: Install apt deps |     - name: Install poetry & deps | ||||||
|       shell: bash |       shell: bash | ||||||
|       run: | |       run: | | ||||||
|  |         pipx install poetry || true | ||||||
|         sudo apt-get update |         sudo apt-get update | ||||||
|         sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext libkrb5-dev krb5-kdc krb5-user krb5-admin-server |         sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext libkrb5-dev krb5-kdc krb5-user krb5-admin-server | ||||||
|     - name: Install uv |     - name: Setup python and restore poetry | ||||||
|       uses: astral-sh/setup-uv@v5 |  | ||||||
|       with: |  | ||||||
|         enable-cache: true |  | ||||||
|     - name: Setup python |  | ||||||
|       uses: actions/setup-python@v5 |       uses: actions/setup-python@v5 | ||||||
|       with: |       with: | ||||||
|         python-version-file: "pyproject.toml" |         python-version-file: "pyproject.toml" | ||||||
|     - name: Install Python deps |         cache: "poetry" | ||||||
|       shell: bash |  | ||||||
|       run: uv sync --all-extras --dev --frozen |  | ||||||
|     - name: Setup node |     - name: Setup node | ||||||
|       uses: actions/setup-node@v4 |       uses: actions/setup-node@v4 | ||||||
|       with: |       with: | ||||||
| @ -35,18 +30,15 @@ runs: | |||||||
|       uses: actions/setup-go@v5 |       uses: actions/setup-go@v5 | ||||||
|       with: |       with: | ||||||
|         go-version-file: "go.mod" |         go-version-file: "go.mod" | ||||||
|     - name: Setup docker cache |  | ||||||
|       uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7 |  | ||||||
|       with: |  | ||||||
|         key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }} |  | ||||||
|     - name: Setup dependencies |     - name: Setup dependencies | ||||||
|       shell: bash |       shell: bash | ||||||
|       run: | |       run: | | ||||||
|         export PSQL_TAG=${{ inputs.postgresql_version }} |         export PSQL_TAG=${{ inputs.postgresql_version }} | ||||||
|         docker compose -f .github/actions/setup/docker-compose.yml up -d |         docker compose -f .github/actions/setup/docker-compose.yml up -d | ||||||
|  |         poetry sync | ||||||
|         cd web && npm ci |         cd web && npm ci | ||||||
|     - name: Generate config |     - name: Generate config | ||||||
|       shell: uv run python {0} |       shell: poetry run python {0} | ||||||
|       run: | |       run: | | ||||||
|         from authentik.lib.generators import generate_id |         from authentik.lib.generators import generate_id | ||||||
|         from yaml import safe_dump |         from yaml import safe_dump | ||||||
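This composite action is where the two branches diverge most: the left-hand side installs uv (with its own cache) and syncs strictly from uv.lock, while the right-hand side installs Poetry via pipx and relies on actions/setup-python's Poetry cache. Reduced to plain shell, the two dependency flows are roughly equivalent (a sketch, not a verbatim copy of either job):

```shell
# Right-hand side (Poetry): install the tool, then sync the environment from poetry.lock.
pipx install poetry || true
poetry sync

# Left-hand side (uv): a single resolver/installer, pinned to uv.lock.
uv sync --all-extras --dev --frozen
```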
.github/actions/setup/docker-compose.yml (2 changes, vendored)
							| @ -11,7 +11,7 @@ services: | |||||||
|       - 5432:5432 |       - 5432:5432 | ||||||
|     restart: always |     restart: always | ||||||
|   redis: |   redis: | ||||||
|     image: docker.io/library/redis:7 |     image: docker.io/library/redis | ||||||
|     ports: |     ports: | ||||||
|       - 6379:6379 |       - 6379:6379 | ||||||
|     restart: always |     restart: always | ||||||
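The only change in this compose file is the Redis image reference. An untagged image name implicitly resolves to `:latest` at pull time, so the pinned tag on the left makes the major version explicit and reproducible across CI runs:

```shell
# Untagged pull (right-hand side): whatever "latest" points at today.
docker pull docker.io/library/redis
# Pinned pull (left-hand side): always a Redis 7.x image.
docker pull docker.io/library/redis:7
```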
.github/codespell-words.txt (33 changes, vendored)
							| @ -1,32 +1,7 @@ | |||||||
| akadmin |  | ||||||
| asgi |  | ||||||
| assertIn |  | ||||||
| authentik |  | ||||||
| authn |  | ||||||
| crate |  | ||||||
| docstrings |  | ||||||
| entra |  | ||||||
| goauthentik |  | ||||||
| gunicorn |  | ||||||
| hass |  | ||||||
| jwe |  | ||||||
| jwks |  | ||||||
| keypair | keypair | ||||||
| keypairs | keypairs | ||||||
| kubernetes | hass | ||||||
| oidc |  | ||||||
| ontext |  | ||||||
| openid |  | ||||||
| passwordless |  | ||||||
| plex |  | ||||||
| saml |  | ||||||
| scim |  | ||||||
| singed |  | ||||||
| slo |  | ||||||
| sso |  | ||||||
| totp |  | ||||||
| traefik |  | ||||||
| # https://github.com/codespell-project/codespell/issues/1224 |  | ||||||
| upToDate |  | ||||||
| warmup | warmup | ||||||
| webauthn | ontext | ||||||
|  | singed | ||||||
|  | assertIn | ||||||
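The left-hand side expands the ignore list considerably (it also keeps deliberate misspellings such as `ontext` and `singed` that appear in the codebase). A typical way such a word list is consumed, sketched with codespell's standard flag (the invocation itself is not part of this diff):

```shell
# Run codespell while ignoring every word listed in the file changed above.
codespell --ignore-words=.github/codespell-words.txt
```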
.github/dependabot.yml (34 changes, vendored)
							| @ -23,13 +23,7 @@ updates: | |||||||
|   - package-ecosystem: npm |   - package-ecosystem: npm | ||||||
|     directories: |     directories: | ||||||
|       - "/web" |       - "/web" | ||||||
|       - "/web/packages/sfe" |       - "/web/sfe" | ||||||
|       - "/web/packages/core" |  | ||||||
|       - "/web/packages/esbuild-plugin-live-reload" |  | ||||||
|       - "/packages/prettier-config" |  | ||||||
|       - "/packages/tsconfig" |  | ||||||
|       - "/packages/docusaurus-config" |  | ||||||
|       - "/packages/eslint-config" |  | ||||||
|     schedule: |     schedule: | ||||||
|       interval: daily |       interval: daily | ||||||
|       time: "04:00" |       time: "04:00" | ||||||
| @ -74,9 +68,6 @@ updates: | |||||||
|       wdio: |       wdio: | ||||||
|         patterns: |         patterns: | ||||||
|           - "@wdio/*" |           - "@wdio/*" | ||||||
|       goauthentik: |  | ||||||
|         patterns: |  | ||||||
|           - "@goauthentik/*" |  | ||||||
|   - package-ecosystem: npm |   - package-ecosystem: npm | ||||||
|     directory: "/website" |     directory: "/website" | ||||||
|     schedule: |     schedule: | ||||||
| @ -91,15 +82,6 @@ updates: | |||||||
|       docusaurus: |       docusaurus: | ||||||
|         patterns: |         patterns: | ||||||
|           - "@docusaurus/*" |           - "@docusaurus/*" | ||||||
|       build: |  | ||||||
|         patterns: |  | ||||||
|           - "@swc/*" |  | ||||||
|           - "swc-*" |  | ||||||
|           - "lightningcss*" |  | ||||||
|           - "@rspack/binding*" |  | ||||||
|       goauthentik: |  | ||||||
|         patterns: |  | ||||||
|           - "@goauthentik/*" |  | ||||||
|   - package-ecosystem: npm |   - package-ecosystem: npm | ||||||
|     directory: "/lifecycle/aws" |     directory: "/lifecycle/aws" | ||||||
|     schedule: |     schedule: | ||||||
| @ -110,7 +92,7 @@ updates: | |||||||
|       prefix: "lifecycle/aws:" |       prefix: "lifecycle/aws:" | ||||||
|     labels: |     labels: | ||||||
|       - dependencies |       - dependencies | ||||||
|   - package-ecosystem: uv |   - package-ecosystem: pip | ||||||
|     directory: "/" |     directory: "/" | ||||||
|     schedule: |     schedule: | ||||||
|       interval: daily |       interval: daily | ||||||
| @ -130,15 +112,3 @@ updates: | |||||||
|       prefix: "core:" |       prefix: "core:" | ||||||
|     labels: |     labels: | ||||||
|       - dependencies |       - dependencies | ||||||
|   - package-ecosystem: docker-compose |  | ||||||
|     directories: |  | ||||||
|       # - /scripts # Maybe |  | ||||||
|       - /tests/e2e |  | ||||||
|     schedule: |  | ||||||
|       interval: daily |  | ||||||
|       time: "04:00" |  | ||||||
|     open-pull-requests-limit: 10 |  | ||||||
|     commit-message: |  | ||||||
|       prefix: "core:" |  | ||||||
|     labels: |  | ||||||
|       - dependencies |  | ||||||
| @ -40,7 +40,7 @@ jobs: | |||||||
|       attestations: write |       attestations: write | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|       - uses: docker/setup-qemu-action@v3.6.0 |       - uses: docker/setup-qemu-action@v3.4.0 | ||||||
|       - uses: docker/setup-buildx-action@v3 |       - uses: docker/setup-buildx-action@v3 | ||||||
|       - name: prepare variables |       - name: prepare variables | ||||||
|         uses: ./.github/actions/docker-push-variables |         uses: ./.github/actions/docker-push-variables | ||||||
.github/workflows/api-py-publish.yml (1 change, vendored)
							| @ -30,6 +30,7 @@ jobs: | |||||||
|         uses: actions/setup-python@v5 |         uses: actions/setup-python@v5 | ||||||
|         with: |         with: | ||||||
|           python-version-file: "pyproject.toml" |           python-version-file: "pyproject.toml" | ||||||
|  |           cache: "poetry" | ||||||
|       - name: Generate API Client |       - name: Generate API Client | ||||||
|         run: make gen-client-py |         run: make gen-client-py | ||||||
|       - name: Publish package |       - name: Publish package | ||||||
.github/workflows/api-ts-publish.yml (1 change, vendored)
							| @ -53,7 +53,6 @@ jobs: | |||||||
|           signoff: true |           signoff: true | ||||||
|           # ID from https://api.github.com/users/authentik-automation[bot] |           # ID from https://api.github.com/users/authentik-automation[bot] | ||||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> |           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> | ||||||
|           labels: dependencies |  | ||||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 |       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||||
|         with: |         with: | ||||||
|           token: ${{ steps.generate_token.outputs.token }} |           token: ${{ steps.generate_token.outputs.token }} | ||||||
.github/workflows/ci-aws-cfn.yml (2 changes, vendored)
							| @ -33,7 +33,7 @@ jobs: | |||||||
|           npm ci |           npm ci | ||||||
|       - name: Check changes have been applied |       - name: Check changes have been applied | ||||||
|         run: | |         run: | | ||||||
|           uv run make aws-cfn |           poetry run make aws-cfn | ||||||
|           git diff --exit-code |           git diff --exit-code | ||||||
|   ci-aws-cfn-mark: |   ci-aws-cfn-mark: | ||||||
|     if: always() |     if: always() | ||||||
.github/workflows/ci-main-daily.yml (2 changes, vendored)
							| @ -15,8 +15,8 @@ jobs: | |||||||
|       matrix: |       matrix: | ||||||
|         version: |         version: | ||||||
|           - docs |           - docs | ||||||
|           - version-2025-2 |  | ||||||
|           - version-2024-12 |           - version-2024-12 | ||||||
|  |           - version-2024-10 | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|       - run: | |       - run: | | ||||||
.github/workflows/ci-main.yml (29 changes, vendored)
							| @ -34,7 +34,7 @@ jobs: | |||||||
|       - name: Setup authentik env |       - name: Setup authentik env | ||||||
|         uses: ./.github/actions/setup |         uses: ./.github/actions/setup | ||||||
|       - name: run job |       - name: run job | ||||||
|         run: uv run make ci-${{ matrix.job }} |         run: poetry run make ci-${{ matrix.job }} | ||||||
|   test-migrations: |   test-migrations: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
| @ -42,7 +42,7 @@ jobs: | |||||||
|       - name: Setup authentik env |       - name: Setup authentik env | ||||||
|         uses: ./.github/actions/setup |         uses: ./.github/actions/setup | ||||||
|       - name: run migrations |       - name: run migrations | ||||||
|         run: uv run python -m lifecycle.migrate |         run: poetry run python -m lifecycle.migrate | ||||||
|   test-make-seed: |   test-make-seed: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
| @ -62,7 +62,6 @@ jobs: | |||||||
|         psql: |         psql: | ||||||
|           - 15-alpine |           - 15-alpine | ||||||
|           - 16-alpine |           - 16-alpine | ||||||
|           - 17-alpine |  | ||||||
|         run_id: [1, 2, 3, 4, 5] |         run_id: [1, 2, 3, 4, 5] | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
| @ -70,6 +69,8 @@ jobs: | |||||||
|           fetch-depth: 0 |           fetch-depth: 0 | ||||||
|       - name: checkout stable |       - name: checkout stable | ||||||
|         run: | |         run: | | ||||||
|  |           # Delete all poetry envs | ||||||
|  |           rm -rf /home/runner/.cache/pypoetry | ||||||
|           # Copy current, latest config to local |           # Copy current, latest config to local | ||||||
|           cp authentik/lib/default.yml local.env.yml |           cp authentik/lib/default.yml local.env.yml | ||||||
|           cp -R .github .. |           cp -R .github .. | ||||||
| @ -82,7 +83,7 @@ jobs: | |||||||
|         with: |         with: | ||||||
|           postgresql_version: ${{ matrix.psql }} |           postgresql_version: ${{ matrix.psql }} | ||||||
|       - name: run migrations to stable |       - name: run migrations to stable | ||||||
|         run: uv run python -m lifecycle.migrate |         run: poetry run python -m lifecycle.migrate | ||||||
|       - name: checkout current code |       - name: checkout current code | ||||||
|         run: | |         run: | | ||||||
|           set -x |           set -x | ||||||
| @ -90,13 +91,15 @@ jobs: | |||||||
|           git reset --hard HEAD |           git reset --hard HEAD | ||||||
|           git clean -d -fx . |           git clean -d -fx . | ||||||
|           git checkout $GITHUB_SHA |           git checkout $GITHUB_SHA | ||||||
|  |           # Delete previous poetry env | ||||||
|  |           rm -rf /home/runner/.cache/pypoetry/virtualenvs/* | ||||||
|       - name: Setup authentik env (ensure latest deps are installed) |       - name: Setup authentik env (ensure latest deps are installed) | ||||||
|         uses: ./.github/actions/setup |         uses: ./.github/actions/setup | ||||||
|         with: |         with: | ||||||
|           postgresql_version: ${{ matrix.psql }} |           postgresql_version: ${{ matrix.psql }} | ||||||
|       - name: migrate to latest |       - name: migrate to latest | ||||||
|         run: | |         run: | | ||||||
|           uv run python -m lifecycle.migrate |           poetry run python -m lifecycle.migrate | ||||||
|       - name: run tests |       - name: run tests | ||||||
|         env: |         env: | ||||||
|           # Test in the main database that we just migrated from the previous stable version |           # Test in the main database that we just migrated from the previous stable version | ||||||
| @ -105,7 +108,7 @@ jobs: | |||||||
|           CI_RUN_ID: ${{ matrix.run_id }} |           CI_RUN_ID: ${{ matrix.run_id }} | ||||||
|           CI_TOTAL_RUNS: "5" |           CI_TOTAL_RUNS: "5" | ||||||
|         run: | |         run: | | ||||||
|           uv run make ci-test |           poetry run make ci-test | ||||||
|   test-unittest: |   test-unittest: | ||||||
|     name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5 |     name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5 | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
| @ -117,7 +120,6 @@ jobs: | |||||||
|         psql: |         psql: | ||||||
|           - 15-alpine |           - 15-alpine | ||||||
|           - 16-alpine |           - 16-alpine | ||||||
|           - 17-alpine |  | ||||||
|         run_id: [1, 2, 3, 4, 5] |         run_id: [1, 2, 3, 4, 5] | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
| @ -131,7 +133,7 @@ jobs: | |||||||
|           CI_RUN_ID: ${{ matrix.run_id }} |           CI_RUN_ID: ${{ matrix.run_id }} | ||||||
|           CI_TOTAL_RUNS: "5" |           CI_TOTAL_RUNS: "5" | ||||||
|         run: | |         run: | | ||||||
|           uv run make ci-test |           poetry run make ci-test | ||||||
|       - if: ${{ always() }} |       - if: ${{ always() }} | ||||||
|         uses: codecov/codecov-action@v5 |         uses: codecov/codecov-action@v5 | ||||||
|         with: |         with: | ||||||
| @ -154,8 +156,8 @@ jobs: | |||||||
|         uses: helm/kind-action@v1.12.0 |         uses: helm/kind-action@v1.12.0 | ||||||
|       - name: run integration |       - name: run integration | ||||||
|         run: | |         run: | | ||||||
|           uv run coverage run manage.py test tests/integration |           poetry run coverage run manage.py test tests/integration | ||||||
|           uv run coverage xml |           poetry run coverage xml | ||||||
|       - if: ${{ always() }} |       - if: ${{ always() }} | ||||||
|         uses: codecov/codecov-action@v5 |         uses: codecov/codecov-action@v5 | ||||||
|         with: |         with: | ||||||
| @ -202,7 +204,7 @@ jobs: | |||||||
|         uses: actions/cache@v4 |         uses: actions/cache@v4 | ||||||
|         with: |         with: | ||||||
|           path: web/dist |           path: web/dist | ||||||
|           key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b |           key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }} | ||||||
|       - name: prepare web ui |       - name: prepare web ui | ||||||
|         if: steps.cache-web.outputs.cache-hit != 'true' |         if: steps.cache-web.outputs.cache-hit != 'true' | ||||||
|         working-directory: web |         working-directory: web | ||||||
| @ -210,11 +212,10 @@ jobs: | |||||||
|           npm ci |           npm ci | ||||||
|           make -C .. gen-client-ts |           make -C .. gen-client-ts | ||||||
|           npm run build |           npm run build | ||||||
|           npm run build:sfe |  | ||||||
|       - name: run e2e |       - name: run e2e | ||||||
|         run: | |         run: | | ||||||
|           uv run coverage run manage.py test ${{ matrix.job.glob }} |           poetry run coverage run manage.py test ${{ matrix.job.glob }} | ||||||
|           uv run coverage xml |           poetry run coverage xml | ||||||
|       - if: ${{ always() }} |       - if: ${{ always() }} | ||||||
|         uses: codecov/codecov-action@v5 |         uses: codecov/codecov-action@v5 | ||||||
|         with: |         with: | ||||||
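Throughout ci-main.yml the test jobs are sharded: the matrix runs five jobs per PostgreSQL version, and each job passes its shard index and the shard count to the same make target. Reproducing a single shard locally would look roughly like this (sketch; assumes the repository's Makefile targets):

```shell
# Run shard 1 of 5 of the test suite, mirroring the CI_RUN_ID / CI_TOTAL_RUNS
# environment used in the workflow above.
CI_RUN_ID=1 CI_TOTAL_RUNS=5 uv run make ci-test      # left-hand side
CI_RUN_ID=1 CI_TOTAL_RUNS=5 poetry run make ci-test  # right-hand side
```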
.github/workflows/ci-outpost.yml (4 changes, vendored)
							| @ -29,7 +29,7 @@ jobs: | |||||||
|       - name: Generate API |       - name: Generate API | ||||||
|         run: make gen-client-go |         run: make gen-client-go | ||||||
|       - name: golangci-lint |       - name: golangci-lint | ||||||
|         uses: golangci/golangci-lint-action@v8 |         uses: golangci/golangci-lint-action@v6 | ||||||
|         with: |         with: | ||||||
|           version: latest |           version: latest | ||||||
|           args: --timeout 5000s --verbose |           args: --timeout 5000s --verbose | ||||||
| @ -82,7 +82,7 @@ jobs: | |||||||
|         with: |         with: | ||||||
|           ref: ${{ github.event.pull_request.head.sha }} |           ref: ${{ github.event.pull_request.head.sha }} | ||||||
|       - name: Set up QEMU |       - name: Set up QEMU | ||||||
|         uses: docker/setup-qemu-action@v3.6.0 |         uses: docker/setup-qemu-action@v3.4.0 | ||||||
|       - name: Set up Docker Buildx |       - name: Set up Docker Buildx | ||||||
|         uses: docker/setup-buildx-action@v3 |         uses: docker/setup-buildx-action@v3 | ||||||
|       - name: prepare variables |       - name: prepare variables | ||||||
| @ -2,7 +2,7 @@ name: authentik-gen-update-webauthn-mds | |||||||
| on: | on: | ||||||
|   workflow_dispatch: |   workflow_dispatch: | ||||||
|   schedule: |   schedule: | ||||||
|     - cron: "30 1 1,15 * *" |     - cron: '30 1 1,15 * *' | ||||||
|  |  | ||||||
| env: | env: | ||||||
|   POSTGRES_DB: authentik |   POSTGRES_DB: authentik | ||||||
| @ -24,7 +24,7 @@ jobs: | |||||||
|           token: ${{ steps.generate_token.outputs.token }} |           token: ${{ steps.generate_token.outputs.token }} | ||||||
|       - name: Setup authentik env |       - name: Setup authentik env | ||||||
|         uses: ./.github/actions/setup |         uses: ./.github/actions/setup | ||||||
|       - run: uv run ak update_webauthn_mds |       - run: poetry run ak update_webauthn_mds | ||||||
|       - uses: peter-evans/create-pull-request@v7 |       - uses: peter-evans/create-pull-request@v7 | ||||||
|         id: cpr |         id: cpr | ||||||
|         with: |         with: | ||||||
| @ -37,7 +37,6 @@ jobs: | |||||||
|           signoff: true |           signoff: true | ||||||
|           # ID from https://api.github.com/users/authentik-automation[bot] |           # ID from https://api.github.com/users/authentik-automation[bot] | ||||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> |           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> | ||||||
|           labels: dependencies |  | ||||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 |       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||||
|         with: |         with: | ||||||
|           token: ${{ steps.generate_token.outputs.token }} |           token: ${{ steps.generate_token.outputs.token }} | ||||||
.github/workflows/image-compress.yml (1 change, vendored)
							| @ -53,7 +53,6 @@ jobs: | |||||||
|           body: ${{ steps.compress.outputs.markdown }} |           body: ${{ steps.compress.outputs.markdown }} | ||||||
|           delete-branch: true |           delete-branch: true | ||||||
|           signoff: true |           signoff: true | ||||||
|           labels: dependencies |  | ||||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 |       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||||
|         if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}" |         if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}" | ||||||
|         with: |         with: | ||||||
.github/workflows/packages-npm-publish.yml (47 changes, vendored)
							| @ -1,47 +0,0 @@ | |||||||
| name: authentik-packages-npm-publish |  | ||||||
| on: |  | ||||||
|   push: |  | ||||||
|     branches: [main] |  | ||||||
|     paths: |  | ||||||
|       - packages/docusaurus-config/** |  | ||||||
|       - packages/eslint-config/** |  | ||||||
|       - packages/prettier-config/** |  | ||||||
|       - packages/tsconfig/** |  | ||||||
|       - web/packages/esbuild-plugin-live-reload/** |  | ||||||
|   workflow_dispatch: |  | ||||||
| jobs: |  | ||||||
|   publish: |  | ||||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|     strategy: |  | ||||||
|       fail-fast: false |  | ||||||
|       matrix: |  | ||||||
|         package: |  | ||||||
|           - packages/docusaurus-config |  | ||||||
|           - packages/eslint-config |  | ||||||
|           - packages/prettier-config |  | ||||||
|           - packages/tsconfig |  | ||||||
|           - web/packages/esbuild-plugin-live-reload |  | ||||||
|     steps: |  | ||||||
|       - uses: actions/checkout@v4 |  | ||||||
|         with: |  | ||||||
|           fetch-depth: 2 |  | ||||||
|       - uses: actions/setup-node@v4 |  | ||||||
|         with: |  | ||||||
|           node-version-file: ${{ matrix.package }}/package.json |  | ||||||
|           registry-url: "https://registry.npmjs.org" |  | ||||||
|       - name: Get changed files |  | ||||||
|         id: changed-files |  | ||||||
|         uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c |  | ||||||
|         with: |  | ||||||
|           files: | |  | ||||||
|             ${{ matrix.package }}/package.json |  | ||||||
|       - name: Publish package |  | ||||||
|         if: steps.changed-files.outputs.any_changed == 'true' |  | ||||||
|         working-directory: ${{ matrix.package }} |  | ||||||
|         run: | |  | ||||||
|           npm ci |  | ||||||
|           npm run build |  | ||||||
|           npm publish |  | ||||||
|         env: |  | ||||||
|           NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }} |  | ||||||
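The workflow removed above published each listed package directory to npm whenever its package.json changed on main. Its per-package core reduces to three npm commands (sketch; the directory below is one matrix entry, and NODE_AUTH_TOKEN must hold a valid npm token):

```shell
# Per-package publish step from the removed workflow, run inside the package directory.
cd packages/docusaurus-config
npm ci          # install exactly what package-lock.json specifies
npm run build   # build the publishable artifact
npm publish     # push to registry.npmjs.org using NODE_AUTH_TOKEN
```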
							
								
								
									
.github/workflows/publish-source-docs.yml (4 changes, vendored)
							| @ -21,8 +21,8 @@ jobs: | |||||||
|         uses: ./.github/actions/setup |         uses: ./.github/actions/setup | ||||||
|       - name: generate docs |       - name: generate docs | ||||||
|         run: | |         run: | | ||||||
|           uv run make migrate |           poetry run make migrate | ||||||
|           uv run ak build_source_docs |           poetry run ak build_source_docs | ||||||
|       - name: Publish |       - name: Publish | ||||||
|         uses: netlify/actions/cli@master |         uses: netlify/actions/cli@master | ||||||
|         with: |         with: | ||||||
.github/workflows/release-publish.yml (4 changes, vendored)
							| @ -42,7 +42,7 @@ jobs: | |||||||
|         with: |         with: | ||||||
|           go-version-file: "go.mod" |           go-version-file: "go.mod" | ||||||
|       - name: Set up QEMU |       - name: Set up QEMU | ||||||
|         uses: docker/setup-qemu-action@v3.6.0 |         uses: docker/setup-qemu-action@v3.4.0 | ||||||
|       - name: Set up Docker Buildx |       - name: Set up Docker Buildx | ||||||
|         uses: docker/setup-buildx-action@v3 |         uses: docker/setup-buildx-action@v3 | ||||||
|       - name: prepare variables |       - name: prepare variables | ||||||
| @ -186,7 +186,7 @@ jobs: | |||||||
|           container=$(docker container create ${{ steps.ev.outputs.imageMainName }}) |           container=$(docker container create ${{ steps.ev.outputs.imageMainName }}) | ||||||
|           docker cp ${container}:web/ . |           docker cp ${container}:web/ . | ||||||
|       - name: Create a Sentry.io release |       - name: Create a Sentry.io release | ||||||
|         uses: getsentry/action-release@v3 |         uses: getsentry/action-release@v1 | ||||||
|         continue-on-error: true |         continue-on-error: true | ||||||
|         env: |         env: | ||||||
|           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} |           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} | ||||||
.github/workflows/semgrep.yml (27 changes, vendored)
							| @ -1,27 +0,0 @@ | |||||||
| name: authentik-semgrep |  | ||||||
| on: |  | ||||||
|   workflow_dispatch: {} |  | ||||||
|   pull_request: {} |  | ||||||
|   push: |  | ||||||
|     branches: |  | ||||||
|       - main |  | ||||||
|       - master |  | ||||||
|     paths: |  | ||||||
|       - .github/workflows/semgrep.yml |  | ||||||
|   schedule: |  | ||||||
|     # random HH:MM to avoid a load spike on GitHub Actions at 00:00 |  | ||||||
|     - cron: '12 15 * * *' |  | ||||||
| jobs: |  | ||||||
|   semgrep: |  | ||||||
|     name: semgrep/ci |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|     permissions: |  | ||||||
|       contents: read |  | ||||||
|     env: |  | ||||||
|       SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }} |  | ||||||
|     container: |  | ||||||
|       image: semgrep/semgrep |  | ||||||
|     if: (github.actor != 'dependabot[bot]') |  | ||||||
|     steps: |  | ||||||
|       - uses: actions/checkout@v4 |  | ||||||
|       - run: semgrep ci |  | ||||||
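The removed workflow ran Semgrep's hosted CI scan inside the official container on pushes, pull requests, and a daily schedule. Outside of GitHub Actions the same scan can be reproduced roughly as follows (sketch; requires SEMGREP_APP_TOKEN in the environment):

```shell
# Equivalent of the removed job's "semgrep ci" step, using the same container image.
docker run --rm -e SEMGREP_APP_TOKEN -v "$PWD:/src" semgrep/semgrep semgrep ci
```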
| @ -19,27 +19,21 @@ jobs: | |||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - id: generate_token |       - id: generate_token | ||||||
|         if: ${{ github.event_name != 'pull_request' }} |  | ||||||
|         uses: tibdex/github-app-token@v2 |         uses: tibdex/github-app-token@v2 | ||||||
|         with: |         with: | ||||||
|           app_id: ${{ secrets.GH_APP_ID }} |           app_id: ${{ secrets.GH_APP_ID }} | ||||||
|           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }} |           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }} | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|         if: ${{ github.event_name != 'pull_request' }} |  | ||||||
|         with: |         with: | ||||||
|           token: ${{ steps.generate_token.outputs.token }} |           token: ${{ steps.generate_token.outputs.token }} | ||||||
|       - uses: actions/checkout@v4 |  | ||||||
|         if: ${{ github.event_name == 'pull_request' }} |  | ||||||
|       - name: Setup authentik env |       - name: Setup authentik env | ||||||
|         uses: ./.github/actions/setup |         uses: ./.github/actions/setup | ||||||
|       - name: Generate API |  | ||||||
|         run: make gen-client-ts |  | ||||||
|       - name: run extract |       - name: run extract | ||||||
|         run: | |         run: | | ||||||
|           uv run make i18n-extract |           poetry run make i18n-extract | ||||||
|       - name: run compile |       - name: run compile | ||||||
|         run: | |         run: | | ||||||
|           uv run ak compilemessages |           poetry run ak compilemessages | ||||||
|           make web-check-compile |           make web-check-compile | ||||||
|       - name: Create Pull Request |       - name: Create Pull Request | ||||||
|         if: ${{ github.event_name != 'pull_request' }} |         if: ${{ github.event_name != 'pull_request' }} | ||||||
| @ -52,6 +46,3 @@ jobs: | |||||||
|           body: "core, web: update translations" |           body: "core, web: update translations" | ||||||
|           delete-branch: true |           delete-branch: true | ||||||
|           signoff: true |           signoff: true | ||||||
|           labels: dependencies |  | ||||||
|           # ID from https://api.github.com/users/authentik-automation[bot] |  | ||||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> |  | ||||||
.github/workflows/translation-rename.yml (15 changes, vendored)
							| @ -15,7 +15,6 @@ jobs: | |||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}} |     if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}} | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |  | ||||||
|       - id: generate_token |       - id: generate_token | ||||||
|         uses: tibdex/github-app-token@v2 |         uses: tibdex/github-app-token@v2 | ||||||
|         with: |         with: | ||||||
| @ -26,13 +25,23 @@ jobs: | |||||||
|         env: |         env: | ||||||
|           GH_TOKEN: ${{ steps.generate_token.outputs.token }} |           GH_TOKEN: ${{ steps.generate_token.outputs.token }} | ||||||
|         run: | |         run: | | ||||||
|           title=$(gh pr view ${{ github.event.pull_request.number }} --json  "title" -q ".title") |           title=$(curl -q -L \ | ||||||
|  |             -H "Accept: application/vnd.github+json" \ | ||||||
|  |             -H "Authorization: Bearer ${GH_TOKEN}" \ | ||||||
|  |             -H "X-GitHub-Api-Version: 2022-11-28" \ | ||||||
|  |             https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} | jq -r .title) | ||||||
|           echo "title=${title}" >> "$GITHUB_OUTPUT" |           echo "title=${title}" >> "$GITHUB_OUTPUT" | ||||||
|       - name: Rename |       - name: Rename | ||||||
|         env: |         env: | ||||||
|           GH_TOKEN: ${{ steps.generate_token.outputs.token }} |           GH_TOKEN: ${{ steps.generate_token.outputs.token }} | ||||||
|         run: | |         run: | | ||||||
|           gh pr edit ${{ github.event.pull_request.number }} -t "translate: ${{ steps.title.outputs.title }}" --add-label dependencies |           curl -L \ | ||||||
|  |             -X PATCH \ | ||||||
|  |             -H "Accept: application/vnd.github+json" \ | ||||||
|  |             -H "Authorization: Bearer ${GH_TOKEN}" \ | ||||||
|  |             -H "X-GitHub-Api-Version: 2022-11-28" \ | ||||||
|  |             https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} \ | ||||||
|  |             -d "{\"title\":\"translate: ${{ steps.title.outputs.title }}\"}" | ||||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 |       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||||
|         with: |         with: | ||||||
|           token: ${{ steps.generate_token.outputs.token }} |           token: ${{ steps.generate_token.outputs.token }} | ||||||
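Both sides of this hunk retitle the Transifex PR; the left-hand side uses the gh CLI, the right-hand side calls the same REST endpoints with curl. The two are interchangeable, e.g. for a hypothetical PR number 1234 (the number is a placeholder):

```shell
# gh CLI form (left-hand side), wrapping GET/PATCH /repos/{owner}/{repo}/pulls/1234:
title=$(gh pr view 1234 --json title -q .title)
gh pr edit 1234 -t "translate: ${title}" --add-label dependencies
```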
.gitignore (5 changes, vendored)
							| @ -11,10 +11,6 @@ local_settings.py | |||||||
| db.sqlite3 | db.sqlite3 | ||||||
| media | media | ||||||
|  |  | ||||||
| # Node |  | ||||||
|  |  | ||||||
| node_modules |  | ||||||
|  |  | ||||||
| # If your build process includes running collectstatic, then you probably don't need or want to include staticfiles/ | # If your build process includes running collectstatic, then you probably don't need or want to include staticfiles/ | ||||||
| # in your Git repository. Update and uncomment the following line accordingly. | # in your Git repository. Update and uncomment the following line accordingly. | ||||||
| # <django-project-name>/staticfiles/ | # <django-project-name>/staticfiles/ | ||||||
| @ -37,7 +33,6 @@ eggs/ | |||||||
| lib64/ | lib64/ | ||||||
| parts/ | parts/ | ||||||
| dist/ | dist/ | ||||||
| out/ |  | ||||||
| sdist/ | sdist/ | ||||||
| var/ | var/ | ||||||
| wheels/ | wheels/ | ||||||
| @ -1,47 +0,0 @@ | |||||||
| # Prettier Ignorefile |  | ||||||
|  |  | ||||||
| ## Static Files |  | ||||||
| **/LICENSE |  | ||||||
|  |  | ||||||
| authentik/stages/**/* |  | ||||||
|  |  | ||||||
| ## Build asset directories |  | ||||||
| coverage |  | ||||||
| dist |  | ||||||
| out |  | ||||||
| .docusaurus |  | ||||||
| website/docs/developer-docs/api/**/* |  | ||||||
|  |  | ||||||
| ## Environment |  | ||||||
| *.env |  | ||||||
|  |  | ||||||
| ## Secrets |  | ||||||
| *.secrets |  | ||||||
|  |  | ||||||
| ## Yarn |  | ||||||
| .yarn/**/* |  | ||||||
|  |  | ||||||
| ## Node |  | ||||||
| node_modules |  | ||||||
| coverage |  | ||||||
|  |  | ||||||
| ## Configs |  | ||||||
| *.log |  | ||||||
| *.yaml |  | ||||||
| *.yml |  | ||||||
|  |  | ||||||
| # Templates |  | ||||||
| # TODO: Rename affected files to *.template.* or similar. |  | ||||||
| *.html |  | ||||||
| *.mdx |  | ||||||
| *.md |  | ||||||
|  |  | ||||||
| ## Import order matters |  | ||||||
| poly.ts |  | ||||||
| src/locale-codes.ts |  | ||||||
| src/locales/ |  | ||||||
|  |  | ||||||
| # Storybook |  | ||||||
| storybook-static/ |  | ||||||
| .storybook/css-import-maps* |  | ||||||
|  |  | ||||||
							
								
								
									
.vscode/settings.json (28 changes, vendored)
							| @ -1,4 +1,26 @@ | |||||||
| { | { | ||||||
|  |     "cSpell.words": [ | ||||||
|  |         "akadmin", | ||||||
|  |         "asgi", | ||||||
|  |         "authentik", | ||||||
|  |         "authn", | ||||||
|  |         "entra", | ||||||
|  |         "goauthentik", | ||||||
|  |         "jwe", | ||||||
|  |         "jwks", | ||||||
|  |         "kubernetes", | ||||||
|  |         "oidc", | ||||||
|  |         "openid", | ||||||
|  |         "passwordless", | ||||||
|  |         "plex", | ||||||
|  |         "saml", | ||||||
|  |         "scim", | ||||||
|  |         "slo", | ||||||
|  |         "sso", | ||||||
|  |         "totp", | ||||||
|  |         "traefik", | ||||||
|  |         "webauthn" | ||||||
|  |     ], | ||||||
|     "todo-tree.tree.showCountsInTree": true, |     "todo-tree.tree.showCountsInTree": true, | ||||||
|     "todo-tree.tree.showBadges": true, |     "todo-tree.tree.showBadges": true, | ||||||
|     "yaml.customTags": [ |     "yaml.customTags": [ | ||||||
| @ -16,7 +38,7 @@ | |||||||
|     ], |     ], | ||||||
|     "typescript.preferences.importModuleSpecifier": "non-relative", |     "typescript.preferences.importModuleSpecifier": "non-relative", | ||||||
|     "typescript.preferences.importModuleSpecifierEnding": "index", |     "typescript.preferences.importModuleSpecifierEnding": "index", | ||||||
|     "typescript.tsdk": "./node_modules/typescript/lib", |     "typescript.tsdk": "./web/node_modules/typescript/lib", | ||||||
|     "typescript.enablePromptUseWorkspaceTsdk": true, |     "typescript.enablePromptUseWorkspaceTsdk": true, | ||||||
|     "yaml.schemas": { |     "yaml.schemas": { | ||||||
|         "./blueprints/schema.json": "blueprints/**/*.yaml" |         "./blueprints/schema.json": "blueprints/**/*.yaml" | ||||||
| @ -30,5 +52,7 @@ | |||||||
|         } |         } | ||||||
|     ], |     ], | ||||||
|     "go.testFlags": ["-count=1"], |     "go.testFlags": ["-count=1"], | ||||||
|     "github-actions.workflows.pinned.workflows": [".github/workflows/ci-main.yml"] |     "github-actions.workflows.pinned.workflows": [ | ||||||
|  |         ".github/workflows/ci-main.yml" | ||||||
|  |     ] | ||||||
| } | } | ||||||
.vscode/tasks.json (46 changes, vendored)
							| @ -3,13 +3,8 @@ | |||||||
|     "tasks": [ |     "tasks": [ | ||||||
|         { |         { | ||||||
|             "label": "authentik/core: make", |             "label": "authentik/core: make", | ||||||
|             "command": "uv", |             "command": "poetry", | ||||||
|             "args": [ |             "args": ["run", "make", "lint-fix", "lint"], | ||||||
|                 "run", |  | ||||||
|                 "make", |  | ||||||
|                 "lint-fix", |  | ||||||
|                 "lint" |  | ||||||
|             ], |  | ||||||
|             "presentation": { |             "presentation": { | ||||||
|                 "panel": "new" |                 "panel": "new" | ||||||
|             }, |             }, | ||||||
| @ -17,12 +12,8 @@ | |||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/core: run", |             "label": "authentik/core: run", | ||||||
|             "command": "uv", |             "command": "poetry", | ||||||
|             "args": [ |             "args": ["run", "ak", "server"], | ||||||
|                 "run", |  | ||||||
|                 "ak", |  | ||||||
|                 "server" |  | ||||||
|             ], |  | ||||||
|             "group": "build", |             "group": "build", | ||||||
|             "presentation": { |             "presentation": { | ||||||
|                 "panel": "dedicated", |                 "panel": "dedicated", | ||||||
| @ -32,17 +23,13 @@ | |||||||
|         { |         { | ||||||
|             "label": "authentik/web: make", |             "label": "authentik/web: make", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": [ |             "args": ["web"], | ||||||
|                 "web" |  | ||||||
|             ], |  | ||||||
|             "group": "build" |             "group": "build" | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/web: watch", |             "label": "authentik/web: watch", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": [ |             "args": ["web-watch"], | ||||||
|                 "web-watch" |  | ||||||
|             ], |  | ||||||
|             "group": "build", |             "group": "build", | ||||||
|             "presentation": { |             "presentation": { | ||||||
|                 "panel": "dedicated", |                 "panel": "dedicated", | ||||||
| @ -52,26 +39,19 @@ | |||||||
|         { |         { | ||||||
|             "label": "authentik: install", |             "label": "authentik: install", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": [ |             "args": ["install", "-j4"], | ||||||
|                 "install", |  | ||||||
|                 "-j4" |  | ||||||
|             ], |  | ||||||
|             "group": "build" |             "group": "build" | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/website: make", |             "label": "authentik/website: make", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": [ |             "args": ["website"], | ||||||
|                 "website" |  | ||||||
|             ], |  | ||||||
|             "group": "build" |             "group": "build" | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/website: watch", |             "label": "authentik/website: watch", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": [ |             "args": ["website-watch"], | ||||||
|                 "website-watch" |  | ||||||
|             ], |  | ||||||
|             "group": "build", |             "group": "build", | ||||||
|             "presentation": { |             "presentation": { | ||||||
|                 "panel": "dedicated", |                 "panel": "dedicated", | ||||||
| @ -80,12 +60,8 @@ | |||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/api: generate", |             "label": "authentik/api: generate", | ||||||
|             "command": "uv", |             "command": "poetry", | ||||||
|             "args": [ |             "args": ["run", "make", "gen"], | ||||||
|                 "run", |  | ||||||
|                 "make", |  | ||||||
|                 "gen" |  | ||||||
|             ], |  | ||||||
|             "group": "build" |             "group": "build" | ||||||
|         } |         } | ||||||
|     ] |     ] | ||||||
| @ -10,7 +10,7 @@ schemas/                        @goauthentik/backend | |||||||
| scripts/                        @goauthentik/backend | scripts/                        @goauthentik/backend | ||||||
| tests/                          @goauthentik/backend | tests/                          @goauthentik/backend | ||||||
| pyproject.toml                  @goauthentik/backend | pyproject.toml                  @goauthentik/backend | ||||||
| uv.lock                         @goauthentik/backend | poetry.lock                     @goauthentik/backend | ||||||
| go.mod                          @goauthentik/backend | go.mod                          @goauthentik/backend | ||||||
| go.sum                          @goauthentik/backend | go.sum                          @goauthentik/backend | ||||||
| # Infrastructure | # Infrastructure | ||||||
| @ -23,8 +23,6 @@ docker-compose.yml              @goauthentik/infrastructure | |||||||
| Makefile                        @goauthentik/infrastructure | Makefile                        @goauthentik/infrastructure | ||||||
| .editorconfig                   @goauthentik/infrastructure | .editorconfig                   @goauthentik/infrastructure | ||||||
| CODEOWNERS                      @goauthentik/infrastructure | CODEOWNERS                      @goauthentik/infrastructure | ||||||
| # Web packages |  | ||||||
| packages/                       @goauthentik/frontend |  | ||||||
| # Web | # Web | ||||||
| web/                            @goauthentik/frontend | web/                            @goauthentik/frontend | ||||||
| tests/wdio/                     @goauthentik/frontend | tests/wdio/                     @goauthentik/frontend | ||||||
| @ -5,7 +5,7 @@ | |||||||
| We as members, contributors, and leaders pledge to make participation in our | We as members, contributors, and leaders pledge to make participation in our | ||||||
| community a harassment-free experience for everyone, regardless of age, body | community a harassment-free experience for everyone, regardless of age, body | ||||||
| size, visible or invisible disability, ethnicity, sex characteristics, gender | size, visible or invisible disability, ethnicity, sex characteristics, gender | ||||||
| identity and expression, level of experience, education, socioeconomic status, | identity and expression, level of experience, education, socio-economic status, | ||||||
| nationality, personal appearance, race, religion, or sexual identity | nationality, personal appearance, race, religion, or sexual identity | ||||||
| and orientation. | and orientation. | ||||||
|  |  | ||||||
Dockerfile (97 changes)
| # syntax=docker/dockerfile:1 | # syntax=docker/dockerfile:1 | ||||||
|  |  | ||||||
| # Stage 1: Build website | # Stage 1: Build website | ||||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:24 AS website-builder | FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS website-builder | ||||||
|  |  | ||||||
| ENV NODE_ENV=production | ENV NODE_ENV=production | ||||||
|  |  | ||||||
| @ -20,7 +20,7 @@ COPY ./SECURITY.md /work/ | |||||||
| RUN npm run build-bundled | RUN npm run build-bundled | ||||||
|  |  | ||||||
| # Stage 2: Build webui | # Stage 2: Build webui | ||||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:24 AS web-builder | FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS web-builder | ||||||
|  |  | ||||||
| ARG GIT_BUILD_HASH | ARG GIT_BUILD_HASH | ||||||
| ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ||||||
| @ -40,11 +40,10 @@ COPY ./web /work/web/ | |||||||
| COPY ./website /work/website/ | COPY ./website /work/website/ | ||||||
| COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api | COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api | ||||||
|  |  | ||||||
| RUN npm run build && \ | RUN npm run build | ||||||
|     npm run build:sfe |  | ||||||
|  |  | ||||||
| # Stage 3: Build go proxy | # Stage 3: Build go proxy | ||||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.24-bookworm AS go-builder | FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/oss/go/microsoft/golang:1.23-fips-bookworm AS go-builder | ||||||
|  |  | ||||||
| ARG TARGETOS | ARG TARGETOS | ||||||
| ARG TARGETARCH | ARG TARGETARCH | ||||||
| @ -77,7 +76,7 @@ COPY ./go.sum /go/src/goauthentik.io/go.sum | |||||||
| RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \ | RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \ | ||||||
|     --mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \ |     --mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \ | ||||||
|     if [ "$TARGETARCH" = "arm64" ]; then export CC=aarch64-linux-gnu-gcc && export CC_FOR_TARGET=gcc-aarch64-linux-gnu; fi && \ |     if [ "$TARGETARCH" = "arm64" ]; then export CC=aarch64-linux-gnu-gcc && export CC_FOR_TARGET=gcc-aarch64-linux-gnu; fi && \ | ||||||
|     CGO_ENABLED=1 GOFIPS140=latest GOARM="${TARGETVARIANT#v}" \ |     CGO_ENABLED=1 GOEXPERIMENT="systemcrypto" GOFLAGS="-tags=requirefips" GOARM="${TARGETVARIANT#v}" \ | ||||||
|     go build -o /go/authentik ./cmd/server |     go build -o /go/authentik ./cmd/server | ||||||
|  |  | ||||||
| # Stage 4: MaxMind GeoIP | # Stage 4: MaxMind GeoIP | ||||||
| @ -86,66 +85,61 @@ FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip | |||||||
| ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN" | ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN" | ||||||
| ENV GEOIPUPDATE_VERBOSE="1" | ENV GEOIPUPDATE_VERBOSE="1" | ||||||
| ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID" | ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID" | ||||||
|  | ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY" | ||||||
|  |  | ||||||
| USER root | USER root | ||||||
| RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ | RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ | ||||||
|     --mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \ |     --mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \ | ||||||
|     mkdir -p /usr/share/GeoIP && \ |     mkdir -p /usr/share/GeoIP && \ | ||||||
|     /bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" |     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" | ||||||
|  |  | ||||||
| # Stage 5: Download uv | # Stage 5: Python dependencies | ||||||
| FROM ghcr.io/astral-sh/uv:0.7.10 AS uv | FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS python-deps | ||||||
| # Stage 6: Base python image |  | ||||||
| FROM ghcr.io/goauthentik/fips-python:3.13.3-slim-bookworm-fips AS python-base |  | ||||||
|  |  | ||||||
| ENV VENV_PATH="/ak-root/.venv" \ |  | ||||||
|     PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \ |  | ||||||
|     UV_COMPILE_BYTECODE=1 \ |  | ||||||
|     UV_LINK_MODE=copy \ |  | ||||||
|     UV_NATIVE_TLS=1 \ |  | ||||||
|     UV_PYTHON_DOWNLOADS=0 |  | ||||||
|  |  | ||||||
| WORKDIR /ak-root/ |  | ||||||
|  |  | ||||||
| COPY --from=uv /uv /uvx /bin/ |  | ||||||
|  |  | ||||||
| # Stage 7: Python dependencies |  | ||||||
| FROM python-base AS python-deps |  | ||||||
|  |  | ||||||
| ARG TARGETARCH | ARG TARGETARCH | ||||||
| ARG TARGETVARIANT | ARG TARGETVARIANT | ||||||
|  |  | ||||||
| RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache | WORKDIR /ak-root/poetry | ||||||
|  |  | ||||||
| ENV PATH="/root/.cargo/bin:$PATH" | ENV VENV_PATH="/ak-root/venv" \ | ||||||
|  |     POETRY_VIRTUALENVS_CREATE=false \ | ||||||
|  |     PATH="/ak-root/venv/bin:$PATH" | ||||||
|  |  | ||||||
|  | RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache | ||||||
|  |  | ||||||
| RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \ | RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \ | ||||||
|     apt-get update && \ |     apt-get update && \ | ||||||
|     # Required for installing pip packages |     # Required for installing pip packages | ||||||
|  |     apt-get install -y --no-install-recommends build-essential pkg-config libpq-dev libkrb5-dev | ||||||
|  |  | ||||||
|  | RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \ | ||||||
|  |     --mount=type=bind,target=./poetry.lock,src=./poetry.lock \ | ||||||
|  |     --mount=type=cache,target=/root/.cache/pip \ | ||||||
|  |     --mount=type=cache,target=/root/.cache/pypoetry \ | ||||||
|  |     pip install --no-cache cffi && \ | ||||||
|  |     apt-get update && \ | ||||||
|     apt-get install -y --no-install-recommends \ |     apt-get install -y --no-install-recommends \ | ||||||
|     # Build essentials |         build-essential libffi-dev \ | ||||||
|     build-essential pkg-config libffi-dev git \ |         # Required for cryptography | ||||||
|     # cryptography |         curl pkg-config \ | ||||||
|     curl \ |         # Required for lxml | ||||||
|     # libxml |  | ||||||
|         libxslt-dev zlib1g-dev \ |         libxslt-dev zlib1g-dev \ | ||||||
|     # postgresql |         # Required for xmlsec | ||||||
|     libpq-dev \ |         libltdl-dev \ | ||||||
|     # python-kadmin-rs |         # Required for kadmin | ||||||
|     clang libkrb5-dev sccache \ |         sccache clang && \ | ||||||
|     # xmlsec |     curl https://sh.rustup.rs -sSf | sh -s -- -y && \ | ||||||
|     libltdl-dev && \ |     . "$HOME/.cargo/env" && \ | ||||||
|     curl https://sh.rustup.rs -sSf | sh -s -- -y |     python -m venv /ak-root/venv/ && \ | ||||||
|  |     bash -c "source ${VENV_PATH}/bin/activate && \ | ||||||
|  |     pip3 install --upgrade pip poetry && \ | ||||||
|  |     poetry config --local installer.no-binary cryptography,xmlsec,lxml,python-kadmin-rs && \ | ||||||
|  |     poetry install --only=main --no-ansi --no-interaction --no-root && \ | ||||||
|  |     pip uninstall cryptography -y && \ | ||||||
|  |     poetry install --only=main --no-ansi --no-interaction --no-root" | ||||||
|  |  | ||||||
| ENV UV_NO_BINARY_PACKAGE="cryptography lxml python-kadmin-rs xmlsec" | # Stage 6: Run | ||||||
|  | FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS final-image | ||||||
| RUN --mount=type=bind,target=pyproject.toml,src=pyproject.toml \ |  | ||||||
|     --mount=type=bind,target=uv.lock,src=uv.lock \ |  | ||||||
|     --mount=type=cache,target=/root/.cache/uv \ |  | ||||||
|     uv sync --frozen --no-install-project --no-dev |  | ||||||
|  |  | ||||||
| # Stage 8: Run |  | ||||||
| FROM python-base AS final-image |  | ||||||
|  |  | ||||||
| ARG VERSION | ARG VERSION | ||||||
| ARG GIT_BUILD_HASH | ARG GIT_BUILD_HASH | ||||||
| @ -177,7 +171,7 @@ RUN apt-get update && \ | |||||||
|  |  | ||||||
| COPY ./authentik/ /authentik | COPY ./authentik/ /authentik | ||||||
| COPY ./pyproject.toml / | COPY ./pyproject.toml / | ||||||
| COPY ./uv.lock / | COPY ./poetry.lock / | ||||||
| COPY ./schemas /schemas | COPY ./schemas /schemas | ||||||
| COPY ./locale /locale | COPY ./locale /locale | ||||||
| COPY ./tests /tests | COPY ./tests /tests | ||||||
| @ -186,7 +180,7 @@ COPY ./blueprints /blueprints | |||||||
| COPY ./lifecycle/ /lifecycle | COPY ./lifecycle/ /lifecycle | ||||||
| COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf | COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf | ||||||
| COPY --from=go-builder /go/authentik /bin/authentik | COPY --from=go-builder /go/authentik /bin/authentik | ||||||
| COPY --from=python-deps /ak-root/.venv /ak-root/.venv | COPY --from=python-deps /ak-root/venv /ak-root/venv | ||||||
| COPY --from=web-builder /work/web/dist/ /web/dist/ | COPY --from=web-builder /work/web/dist/ /web/dist/ | ||||||
| COPY --from=web-builder /work/web/authentik/ /web/authentik/ | COPY --from=web-builder /work/web/authentik/ /web/authentik/ | ||||||
| COPY --from=website-builder /work/website/build/ /website/help/ | COPY --from=website-builder /work/website/build/ /website/help/ | ||||||
| @ -197,6 +191,9 @@ USER 1000 | |||||||
| ENV TMPDIR=/dev/shm/ \ | ENV TMPDIR=/dev/shm/ \ | ||||||
|     PYTHONDONTWRITEBYTECODE=1 \ |     PYTHONDONTWRITEBYTECODE=1 \ | ||||||
|     PYTHONUNBUFFERED=1 \ |     PYTHONUNBUFFERED=1 \ | ||||||
|  |     PATH="/ak-root/venv/bin:/lifecycle:$PATH" \ | ||||||
|  |     VENV_PATH="/ak-root/venv" \ | ||||||
|  |     POETRY_VIRTUALENVS_CREATE=false \ | ||||||
|     GOFIPS=1 |     GOFIPS=1 | ||||||
|  |  | ||||||
| HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ] | HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ] | ||||||
 | |||||||

123 Makefile

							| @ -1,21 +1,37 @@ | |||||||
| .PHONY: gen dev-reset all clean test web website | .PHONY: gen dev-reset all clean test web website | ||||||
|  |  | ||||||
| SHELL := /usr/bin/env bash | .SHELLFLAGS += ${SHELLFLAGS} -e | ||||||
| .SHELLFLAGS += ${SHELLFLAGS} -e -o pipefail |  | ||||||
| PWD = $(shell pwd) | PWD = $(shell pwd) | ||||||
| UID = $(shell id -u) | UID = $(shell id -u) | ||||||
| GID = $(shell id -g) | GID = $(shell id -g) | ||||||
| NPM_VERSION = $(shell python -m scripts.generate_semver) | NPM_VERSION = $(shell python -m scripts.npm_version) | ||||||
| PY_SOURCES = authentik tests scripts lifecycle .github | PY_SOURCES = authentik tests scripts lifecycle .github | ||||||
|  | GO_SOURCES = cmd internal | ||||||
|  | WEB_SOURCES = web/src web/packages | ||||||
| DOCKER_IMAGE ?= "authentik:test" | DOCKER_IMAGE ?= "authentik:test" | ||||||
|  |  | ||||||
| GEN_API_TS = gen-ts-api | GEN_API_TS = "gen-ts-api" | ||||||
| GEN_API_PY = gen-py-api | GEN_API_PY = "gen-py-api" | ||||||
| GEN_API_GO = gen-go-api | GEN_API_GO = "gen-go-api" | ||||||
|  |  | ||||||
| pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null) | pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null) | ||||||
| pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null) | pg_host := $(shell python -m authentik.lib.config postgresql.host 2>/dev/null) | ||||||
| pg_name := $(shell uv run python -m authentik.lib.config postgresql.name 2>/dev/null) | pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null) | ||||||
|  |  | ||||||
|  | CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \ | ||||||
|  | 		-I .github/codespell-words.txt \ | ||||||
|  | 		-S 'web/src/locales/**' \ | ||||||
|  | 		-S 'website/docs/developer-docs/api/reference/**' \ | ||||||
|  | 		-S '**/node_modules/**' \ | ||||||
|  | 		-S '**/dist/**' \ | ||||||
|  | 		$(PY_SOURCES) \ | ||||||
|  | 		$(GO_SOURCES) \ | ||||||
|  | 		$(WEB_SOURCES) \ | ||||||
|  | 		website/src \ | ||||||
|  | 		website/blog \ | ||||||
|  | 		website/docs \ | ||||||
|  | 		website/integrations \ | ||||||
|  | 		website/src | ||||||
|  |  | ||||||
| all: lint-fix lint test gen web  ## Lint, build, and test everything | all: lint-fix lint test gen web  ## Lint, build, and test everything | ||||||
|  |  | ||||||
| @ -33,37 +49,34 @@ go-test: | |||||||
| 	go test -timeout 0 -v -race -cover ./... | 	go test -timeout 0 -v -race -cover ./... | ||||||
|  |  | ||||||
| test: ## Run the server tests and produce a coverage report (locally) | test: ## Run the server tests and produce a coverage report (locally) | ||||||
| 	uv run coverage run manage.py test --keepdb authentik | 	coverage run manage.py test --keepdb authentik | ||||||
| 	uv run coverage html | 	coverage html | ||||||
| 	uv run coverage report | 	coverage report | ||||||
|  |  | ||||||
| lint-fix: lint-codespell  ## Lint and automatically fix errors in the python source code. Reports spelling errors. | lint-fix: lint-codespell  ## Lint and automatically fix errors in the python source code. Reports spelling errors. | ||||||
| 	uv run black $(PY_SOURCES) | 	black $(PY_SOURCES) | ||||||
| 	uv run ruff check --fix $(PY_SOURCES) | 	ruff check --fix $(PY_SOURCES) | ||||||
|  |  | ||||||
| lint-codespell:  ## Reports spelling errors. | lint-codespell:  ## Reports spelling errors. | ||||||
| 	uv run codespell -w | 	codespell -w $(CODESPELL_ARGS) | ||||||
|  |  | ||||||
| lint: ## Lint the python and golang sources | lint: ## Lint the python and golang sources | ||||||
| 	uv run bandit -c pyproject.toml -r $(PY_SOURCES) | 	bandit -r $(PY_SOURCES) -x web/node_modules -x tests/wdio/node_modules -x website/node_modules | ||||||
| 	golangci-lint run -v | 	golangci-lint run -v | ||||||
|  |  | ||||||
| core-install: | core-install: | ||||||
| 	uv sync --frozen | 	poetry install | ||||||
|  |  | ||||||
| migrate: ## Run the Authentik Django server's migrations | migrate: ## Run the Authentik Django server's migrations | ||||||
| 	uv run python -m lifecycle.migrate | 	python -m lifecycle.migrate | ||||||
|  |  | ||||||
| i18n-extract: core-i18n-extract web-i18n-extract  ## Extract strings that require translation into files to send to a translation service | i18n-extract: core-i18n-extract web-i18n-extract  ## Extract strings that require translation into files to send to a translation service | ||||||
|  |  | ||||||
| aws-cfn: | aws-cfn: | ||||||
| 	cd lifecycle/aws && npm run aws-cfn | 	cd lifecycle/aws && npm run aws-cfn | ||||||
|  |  | ||||||
| run:  ## Run the main authentik server process |  | ||||||
| 	uv run ak server |  | ||||||
|  |  | ||||||
| core-i18n-extract: | core-i18n-extract: | ||||||
| 	uv run ak makemessages \ | 	ak makemessages \ | ||||||
| 		--add-location file \ | 		--add-location file \ | ||||||
| 		--no-obsolete \ | 		--no-obsolete \ | ||||||
| 		--ignore web \ | 		--ignore web \ | ||||||
| @ -94,11 +107,11 @@ gen-build:  ## Extract the schema from the database | |||||||
| 	AUTHENTIK_DEBUG=true \ | 	AUTHENTIK_DEBUG=true \ | ||||||
| 		AUTHENTIK_TENANTS__ENABLED=true \ | 		AUTHENTIK_TENANTS__ENABLED=true \ | ||||||
| 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | ||||||
| 		uv run ak make_blueprint_schema > blueprints/schema.json | 		ak make_blueprint_schema > blueprints/schema.json | ||||||
| 	AUTHENTIK_DEBUG=true \ | 	AUTHENTIK_DEBUG=true \ | ||||||
| 		AUTHENTIK_TENANTS__ENABLED=true \ | 		AUTHENTIK_TENANTS__ENABLED=true \ | ||||||
| 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | ||||||
| 		uv run ak spectacular --file schema.yml | 		ak spectacular --file schema.yml | ||||||
|  |  | ||||||
| gen-changelog:  ## (Release) generate the changelog based from the commits since the last tag | gen-changelog:  ## (Release) generate the changelog based from the commits since the last tag | ||||||
| 	git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md | 	git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md | ||||||
| @ -118,19 +131,14 @@ gen-diff:  ## (Release) generate the changelog diff between the current schema a | |||||||
| 	npx prettier --write diff.md | 	npx prettier --write diff.md | ||||||
|  |  | ||||||
| gen-clean-ts:  ## Remove generated API client for Typescript | gen-clean-ts:  ## Remove generated API client for Typescript | ||||||
| 	rm -rf ${PWD}/${GEN_API_TS}/ | 	rm -rf ./${GEN_API_TS}/ | ||||||
| 	rm -rf ${PWD}/web/node_modules/@goauthentik/api/ | 	rm -rf ./web/node_modules/@goauthentik/api/ | ||||||
|  |  | ||||||
| gen-clean-go:  ## Remove generated API client for Go | gen-clean-go:  ## Remove generated API client for Go | ||||||
| 	mkdir -p ${PWD}/${GEN_API_GO} | 	rm -rf ./${GEN_API_GO}/ | ||||||
| ifneq ($(wildcard ${PWD}/${GEN_API_GO}/.*),) |  | ||||||
| 	make -C ${PWD}/${GEN_API_GO} clean |  | ||||||
| else |  | ||||||
| 	rm -rf ${PWD}/${GEN_API_GO} |  | ||||||
| endif |  | ||||||
|  |  | ||||||
| gen-clean-py:  ## Remove generated API client for Python | gen-clean-py:  ## Remove generated API client for Python | ||||||
| 	rm -rf ${PWD}/${GEN_API_PY}/ | 	rm -rf ./${GEN_API_PY}/ | ||||||
|  |  | ||||||
| gen-clean: gen-clean-ts gen-clean-go gen-clean-py  ## Remove generated API clients | gen-clean: gen-clean-ts gen-clean-go gen-clean-py  ## Remove generated API clients | ||||||
|  |  | ||||||
| @ -147,14 +155,14 @@ gen-client-ts: gen-clean-ts  ## Build and install the authentik API for Typescri | |||||||
| 		--git-repo-id authentik \ | 		--git-repo-id authentik \ | ||||||
| 		--git-user-id goauthentik | 		--git-user-id goauthentik | ||||||
| 	mkdir -p web/node_modules/@goauthentik/api | 	mkdir -p web/node_modules/@goauthentik/api | ||||||
| 	cd ${PWD}/${GEN_API_TS} && npm i | 	cd ./${GEN_API_TS} && npm i | ||||||
| 	\cp -rf ${PWD}/${GEN_API_TS}/* web/node_modules/@goauthentik/api | 	\cp -rf ./${GEN_API_TS}/* web/node_modules/@goauthentik/api | ||||||
|  |  | ||||||
| gen-client-py: gen-clean-py ## Build and install the authentik API for Python | gen-client-py: gen-clean-py ## Build and install the authentik API for Python | ||||||
| 	docker run \ | 	docker run \ | ||||||
| 		--rm -v ${PWD}:/local \ | 		--rm -v ${PWD}:/local \ | ||||||
| 		--user ${UID}:${GID} \ | 		--user ${UID}:${GID} \ | ||||||
| 		docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \ | 		docker.io/openapitools/openapi-generator-cli:v7.4.0 generate \ | ||||||
| 		-i /local/schema.yml \ | 		-i /local/schema.yml \ | ||||||
| 		-g python \ | 		-g python \ | ||||||
| 		-o /local/${GEN_API_PY} \ | 		-o /local/${GEN_API_PY} \ | ||||||
| @ -162,20 +170,27 @@ gen-client-py: gen-clean-py ## Build and install the authentik API for Python | |||||||
| 		--additional-properties=packageVersion=${NPM_VERSION} \ | 		--additional-properties=packageVersion=${NPM_VERSION} \ | ||||||
| 		--git-repo-id authentik \ | 		--git-repo-id authentik \ | ||||||
| 		--git-user-id goauthentik | 		--git-user-id goauthentik | ||||||
|  | 	pip install ./${GEN_API_PY} | ||||||
|  |  | ||||||
| gen-client-go: gen-clean-go  ## Build and install the authentik API for Golang | gen-client-go: gen-clean-go  ## Build and install the authentik API for Golang | ||||||
| 	mkdir -p ${PWD}/${GEN_API_GO} | 	mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates | ||||||
| ifeq ($(wildcard ${PWD}/${GEN_API_GO}/.*),) | 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./${GEN_API_GO}/config.yaml | ||||||
| 	git clone --depth 1 https://github.com/goauthentik/client-go.git ${PWD}/${GEN_API_GO} | 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./${GEN_API_GO}/templates/README.mustache | ||||||
| else | 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./${GEN_API_GO}/templates/go.mod.mustache | ||||||
| 	cd ${PWD}/${GEN_API_GO} && git pull | 	cp schema.yml ./${GEN_API_GO}/ | ||||||
| endif | 	docker run \ | ||||||
| 	cp ${PWD}/schema.yml ${PWD}/${GEN_API_GO} | 		--rm -v ${PWD}/${GEN_API_GO}:/local \ | ||||||
| 	make -C ${PWD}/${GEN_API_GO} build | 		--user ${UID}:${GID} \ | ||||||
|  | 		docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \ | ||||||
|  | 		-i /local/schema.yml \ | ||||||
|  | 		-g go \ | ||||||
|  | 		-o /local/ \ | ||||||
|  | 		-c /local/config.yaml | ||||||
| 	go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO} | 	go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO} | ||||||
|  | 	rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/ | ||||||
|  |  | ||||||
| gen-dev-config:  ## Generate a local development config file | gen-dev-config:  ## Generate a local development config file | ||||||
| 	uv run scripts/generate_config.py | 	python -m scripts.generate_config | ||||||
|  |  | ||||||
| gen: gen-build gen-client-ts | gen: gen-build gen-client-ts | ||||||
|  |  | ||||||
| @ -243,7 +258,7 @@ docker:  ## Build a docker image of the current source tree | |||||||
| 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} | 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} | ||||||
|  |  | ||||||
| test-docker: | test-docker: | ||||||
| 	BUILD=true ${PWD}/scripts/test_docker.sh | 	BUILD=true ./scripts/test_docker.sh | ||||||
|  |  | ||||||
| ######################### | ######################### | ||||||
| ## CI | ## CI | ||||||
| @ -256,21 +271,21 @@ ci--meta-debug: | |||||||
| 	node --version | 	node --version | ||||||
|  |  | ||||||
| ci-black: ci--meta-debug | ci-black: ci--meta-debug | ||||||
| 	uv run black --check $(PY_SOURCES) | 	black --check $(PY_SOURCES) | ||||||
|  |  | ||||||
| ci-ruff: ci--meta-debug | ci-ruff: ci--meta-debug | ||||||
| 	uv run ruff check $(PY_SOURCES) | 	ruff check $(PY_SOURCES) | ||||||
|  |  | ||||||
| ci-codespell: ci--meta-debug | ci-codespell: ci--meta-debug | ||||||
| 	uv run codespell -s | 	codespell $(CODESPELL_ARGS) -s | ||||||
|  |  | ||||||
| ci-bandit: ci--meta-debug | ci-bandit: ci--meta-debug | ||||||
| 	uv run bandit -r $(PY_SOURCES) | 	bandit -r $(PY_SOURCES) | ||||||
|  |  | ||||||
| ci-pending-migrations: ci--meta-debug | ci-pending-migrations: ci--meta-debug | ||||||
| 	uv run ak makemigrations --check | 	ak makemigrations --check | ||||||
|  |  | ||||||
| ci-test: ci--meta-debug | ci-test: ci--meta-debug | ||||||
| 	uv run coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik | 	coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik | ||||||
| 	uv run coverage report | 	coverage report | ||||||
| 	uv run coverage xml | 	coverage xml | ||||||
|  | |||||||
| @ -42,4 +42,4 @@ See [SECURITY.md](SECURITY.md) | |||||||
|  |  | ||||||
| ## Adoption and Contributions | ## Adoption and Contributions | ||||||
|  |  | ||||||
| Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [contribution guide](https://docs.goauthentik.io/docs/developer-docs?utm_source=github). | Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md). | ||||||
|  | |||||||
| @ -2,7 +2,7 @@ authentik takes security very seriously. We follow the rules of [responsible di | |||||||
|  |  | ||||||
| ## Independent audits and pentests | ## Independent audits and pentests | ||||||
|  |  | ||||||
| We are committed to engaging in regular pentesting and security audits of authentik. Defining and adhering to a cadence of external testing ensures a stronger probability that our code base, our features, and our architecture is as secure and non-exploitable as possible. For more details about specific audits and pentests, refer to "Audits and Certificates" in our [Security documentation](https://docs.goauthentik.io/docs/security). | We are committed to engaging in regular pentesting and security audits of authentik. Defining and adhering to a cadence of external testing ensures a stronger probability that our code base, our features, and our architecture is as secure and non-exploitable as possible. For more details about specfic audits and pentests, refer to "Audits and Certificates" in our [Security documentation](https://docs.goauthentik.io/docs/security). | ||||||
|  |  | ||||||
| ## What authentik classifies as a CVE | ## What authentik classifies as a CVE | ||||||
|  |  | ||||||
| @ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni | |||||||
|  |  | ||||||
| | Version   | Supported | | | Version   | Supported | | ||||||
| | --------- | --------- | | | --------- | --------- | | ||||||
| | 2025.4.x  | ✅        | | | 2024.12.x | ✅        | | ||||||
| | 2025.6.x  | ✅        | | | 2025.2.x  | ✅        | | ||||||
|  |  | ||||||
| ## Reporting a Vulnerability | ## Reporting a Vulnerability | ||||||
|  |  | ||||||
|  | |||||||
| @ -2,7 +2,7 @@ | |||||||
|  |  | ||||||
| from os import environ | from os import environ | ||||||
|  |  | ||||||
| __version__ = "2025.6.0" | __version__ = "2025.2.0" | ||||||
| ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
| @ -59,7 +59,7 @@ class SystemInfoSerializer(PassiveSerializer): | |||||||
|             if not isinstance(value, str): |             if not isinstance(value, str): | ||||||
|                 continue |                 continue | ||||||
|             actual_value = value |             actual_value = value | ||||||
|             if raw_session is not None and raw_session in actual_value: |             if raw_session in actual_value: | ||||||
|                 actual_value = actual_value.replace( |                 actual_value = actual_value.replace( | ||||||
|                     raw_session, SafeExceptionReporterFilter.cleansed_substitute |                     raw_session, SafeExceptionReporterFilter.cleansed_substitute | ||||||
|                 ) |                 ) | ||||||
|  | |||||||
| @ -1,12 +1,9 @@ | |||||||
| """API Authentication""" | """API Authentication""" | ||||||
|  |  | ||||||
| from hmac import compare_digest | from hmac import compare_digest | ||||||
| from pathlib import Path |  | ||||||
| from tempfile import gettempdir |  | ||||||
| from typing import Any | from typing import Any | ||||||
|  |  | ||||||
| from django.conf import settings | from django.conf import settings | ||||||
| from django.contrib.auth.models import AnonymousUser |  | ||||||
| from drf_spectacular.extensions import OpenApiAuthenticationExtension | from drf_spectacular.extensions import OpenApiAuthenticationExtension | ||||||
| from rest_framework.authentication import BaseAuthentication, get_authorization_header | from rest_framework.authentication import BaseAuthentication, get_authorization_header | ||||||
| from rest_framework.exceptions import AuthenticationFailed | from rest_framework.exceptions import AuthenticationFailed | ||||||
| @ -14,17 +11,11 @@ from rest_framework.request import Request | |||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| from authentik.core.middleware import CTX_AUTH_VIA | from authentik.core.middleware import CTX_AUTH_VIA | ||||||
| from authentik.core.models import Token, TokenIntents, User, UserTypes | from authentik.core.models import Token, TokenIntents, User | ||||||
| from authentik.outposts.models import Outpost | from authentik.outposts.models import Outpost | ||||||
| from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API | from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API | ||||||
|  |  | ||||||
| LOGGER = get_logger() | LOGGER = get_logger() | ||||||
| _tmp = Path(gettempdir()) |  | ||||||
| try: |  | ||||||
|     with open(_tmp / "authentik-core-ipc.key") as _f: |  | ||||||
|         ipc_key = _f.read() |  | ||||||
| except OSError: |  | ||||||
|     ipc_key = None |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def validate_auth(header: bytes) -> str | None: | def validate_auth(header: bytes) -> str | None: | ||||||
| @ -82,11 +73,6 @@ def auth_user_lookup(raw_header: bytes) -> User | None: | |||||||
|     if user: |     if user: | ||||||
|         CTX_AUTH_VIA.set("secret_key") |         CTX_AUTH_VIA.set("secret_key") | ||||||
|         return user |         return user | ||||||
|     # then try to auth via secret key (for embedded outpost/etc) |  | ||||||
|     user = token_ipc(auth_credentials) |  | ||||||
|     if user: |  | ||||||
|         CTX_AUTH_VIA.set("ipc") |  | ||||||
|         return user |  | ||||||
|     raise AuthenticationFailed("Token invalid/expired") |     raise AuthenticationFailed("Token invalid/expired") | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -104,43 +90,6 @@ def token_secret_key(value: str) -> User | None: | |||||||
|     return outpost.user |     return outpost.user | ||||||
|  |  | ||||||
|  |  | ||||||
| class IPCUser(AnonymousUser): |  | ||||||
|     """'Virtual' user for IPC communication between authentik core and the authentik router""" |  | ||||||
|  |  | ||||||
|     username = "authentik:system" |  | ||||||
|     is_active = True |  | ||||||
|     is_superuser = True |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def type(self): |  | ||||||
|         return UserTypes.INTERNAL_SERVICE_ACCOUNT |  | ||||||
|  |  | ||||||
|     def has_perm(self, perm, obj=None): |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
|     def has_perms(self, perm_list, obj=None): |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
|     def has_module_perms(self, module): |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def is_anonymous(self): |  | ||||||
|         return False |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def is_authenticated(self): |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def token_ipc(value: str) -> User | None: |  | ||||||
|     """Check if the token is the secret key |  | ||||||
|     and return the service account for the managed outpost""" |  | ||||||
|     if not ipc_key or not compare_digest(value, ipc_key): |  | ||||||
|         return None |  | ||||||
|     return IPCUser() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TokenAuthentication(BaseAuthentication): | class TokenAuthentication(BaseAuthentication): | ||||||
|     """Token-based authentication using HTTP Bearer authentication""" |     """Token-based authentication using HTTP Bearer authentication""" | ||||||
|  |  | ||||||
|  | |||||||
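The hunk above leaves `TokenAuthentication` as plain HTTP Bearer authentication, per its docstring. For reference, a minimal sketch of calling the API with such a token; the base URL, endpoint path, and token value are illustrative placeholders, not taken from this diff:

```python
# Minimal sketch: authenticate to the authentik API with a Bearer token.
# AUTHENTIK_URL, the endpoint path, and API_TOKEN are illustrative placeholders.
import requests

AUTHENTIK_URL = "https://authentik.example.com"
API_TOKEN = "replace-with-an-api-token"

response = requests.get(
    f"{AUTHENTIK_URL}/api/v3/core/users/me/",
    headers={"Authorization": f"Bearer {API_TOKEN}"},
    timeout=10,
)
response.raise_for_status()
print(response.json())
```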
| @ -54,7 +54,7 @@ def create_component(generator: SchemaGenerator, name, schema, type_=ResolvedCom | |||||||
|     return component |     return component | ||||||
|  |  | ||||||
|  |  | ||||||
| def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs): | def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):  # noqa: W0613 | ||||||
|     """Workaround to set a default response for endpoints. |     """Workaround to set a default response for endpoints. | ||||||
|     Workaround suggested at |     Workaround suggested at | ||||||
|     <https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357> |     <https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357> | ||||||
|  | |||||||
| @ -7,7 +7,7 @@ from rest_framework.exceptions import ValidationError | |||||||
| from rest_framework.fields import CharField, DateTimeField | from rest_framework.fields import CharField, DateTimeField | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| from rest_framework.response import Response | from rest_framework.response import Response | ||||||
| from rest_framework.serializers import ListSerializer | from rest_framework.serializers import ListSerializer, ModelSerializer | ||||||
| from rest_framework.viewsets import ModelViewSet | from rest_framework.viewsets import ModelViewSet | ||||||
|  |  | ||||||
| from authentik.blueprints.models import BlueprintInstance | from authentik.blueprints.models import BlueprintInstance | ||||||
| @ -15,7 +15,7 @@ from authentik.blueprints.v1.importer import Importer | |||||||
| from authentik.blueprints.v1.oci import OCI_PREFIX | from authentik.blueprints.v1.oci import OCI_PREFIX | ||||||
| from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict | from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer | from authentik.core.api.utils import JSONDictField, PassiveSerializer | ||||||
| from authentik.rbac.decorators import permission_required | from authentik.rbac.decorators import permission_required | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
| @ -164,7 +164,9 @@ class BlueprintEntry: | |||||||
|         """Get the blueprint model, with yaml tags resolved if present""" |         """Get the blueprint model, with yaml tags resolved if present""" | ||||||
|         return str(self.tag_resolver(self.model, blueprint)) |         return str(self.tag_resolver(self.model, blueprint)) | ||||||
|  |  | ||||||
|     def get_permissions(self, blueprint: "Blueprint") -> Generator[BlueprintEntryPermission]: |     def get_permissions( | ||||||
|  |         self, blueprint: "Blueprint" | ||||||
|  |     ) -> Generator[BlueprintEntryPermission, None, None]: | ||||||
|         """Get permissions of this entry, with all yaml tags resolved""" |         """Get permissions of this entry, with all yaml tags resolved""" | ||||||
|         for perm in self.permissions: |         for perm in self.permissions: | ||||||
|             yield BlueprintEntryPermission( |             yield BlueprintEntryPermission( | ||||||
|  | |||||||
| @ -36,7 +36,6 @@ from authentik.core.models import ( | |||||||
|     GroupSourceConnection, |     GroupSourceConnection, | ||||||
|     PropertyMapping, |     PropertyMapping, | ||||||
|     Provider, |     Provider, | ||||||
|     Session, |  | ||||||
|     Source, |     Source, | ||||||
|     User, |     User, | ||||||
|     UserSourceConnection, |     UserSourceConnection, | ||||||
| @ -109,7 +108,6 @@ def excluded_models() -> list[type[Model]]: | |||||||
|         Policy, |         Policy, | ||||||
|         PolicyBindingModel, |         PolicyBindingModel, | ||||||
|         # Classes that have other dependencies |         # Classes that have other dependencies | ||||||
|         Session, |  | ||||||
|         AuthenticatedSession, |         AuthenticatedSession, | ||||||
|         # Classes which are only internally managed |         # Classes which are only internally managed | ||||||
|         # FIXME: these shouldn't need to be explicitly listed, but rather based off of a mixin |         # FIXME: these shouldn't need to be explicitly listed, but rather based off of a mixin | ||||||
|  | |||||||
| @ -49,8 +49,6 @@ class BrandSerializer(ModelSerializer): | |||||||
|             "branding_title", |             "branding_title", | ||||||
|             "branding_logo", |             "branding_logo", | ||||||
|             "branding_favicon", |             "branding_favicon", | ||||||
|             "branding_custom_css", |  | ||||||
|             "branding_default_flow_background", |  | ||||||
|             "flow_authentication", |             "flow_authentication", | ||||||
|             "flow_invalidation", |             "flow_invalidation", | ||||||
|             "flow_recovery", |             "flow_recovery", | ||||||
| @ -59,7 +57,6 @@ class BrandSerializer(ModelSerializer): | |||||||
|             "flow_device_code", |             "flow_device_code", | ||||||
|             "default_application", |             "default_application", | ||||||
|             "web_certificate", |             "web_certificate", | ||||||
|             "client_certificates", |  | ||||||
|             "attributes", |             "attributes", | ||||||
|         ] |         ] | ||||||
|         extra_kwargs = { |         extra_kwargs = { | ||||||
| @ -89,7 +86,6 @@ class CurrentBrandSerializer(PassiveSerializer): | |||||||
|     branding_title = CharField() |     branding_title = CharField() | ||||||
|     branding_logo = CharField(source="branding_logo_url") |     branding_logo = CharField(source="branding_logo_url") | ||||||
|     branding_favicon = CharField(source="branding_favicon_url") |     branding_favicon = CharField(source="branding_favicon_url") | ||||||
|     branding_custom_css = CharField() |  | ||||||
|     ui_footer_links = ListField( |     ui_footer_links = ListField( | ||||||
|         child=FooterLinkSerializer(), |         child=FooterLinkSerializer(), | ||||||
|         read_only=True, |         read_only=True, | ||||||
| @ -121,7 +117,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet): | |||||||
|         "domain", |         "domain", | ||||||
|         "branding_title", |         "branding_title", | ||||||
|         "web_certificate__name", |         "web_certificate__name", | ||||||
|         "client_certificates__name", |  | ||||||
|     ] |     ] | ||||||
|     filterset_fields = [ |     filterset_fields = [ | ||||||
|         "brand_uuid", |         "brand_uuid", | ||||||
| @ -130,7 +125,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet): | |||||||
|         "branding_title", |         "branding_title", | ||||||
|         "branding_logo", |         "branding_logo", | ||||||
|         "branding_favicon", |         "branding_favicon", | ||||||
|         "branding_default_flow_background", |  | ||||||
|         "flow_authentication", |         "flow_authentication", | ||||||
|         "flow_invalidation", |         "flow_invalidation", | ||||||
|         "flow_recovery", |         "flow_recovery", | ||||||
| @ -138,7 +132,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet): | |||||||
|         "flow_user_settings", |         "flow_user_settings", | ||||||
|         "flow_device_code", |         "flow_device_code", | ||||||
|         "web_certificate", |         "web_certificate", | ||||||
|         "client_certificates", |  | ||||||
|     ] |     ] | ||||||
|     ordering = ["domain"] |     ordering = ["domain"] | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,35 +0,0 @@ | |||||||
| # Generated by Django 5.0.12 on 2025-02-22 01:51 |  | ||||||
|  |  | ||||||
| from pathlib import Path |  | ||||||
| from django.db import migrations, models |  | ||||||
| from django.apps.registry import Apps |  | ||||||
|  |  | ||||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def migrate_custom_css(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): |  | ||||||
|     Brand = apps.get_model("authentik_brands", "brand") |  | ||||||
|  |  | ||||||
|     db_alias = schema_editor.connection.alias |  | ||||||
|  |  | ||||||
|     path = Path("/web/dist/custom.css") |  | ||||||
|     if not path.exists(): |  | ||||||
|         return |  | ||||||
|     css = path.read_text() |  | ||||||
|     Brand.objects.using(db_alias).all().update(branding_custom_css=css) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_brands", "0007_brand_default_application"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="brand", |  | ||||||
|             name="branding_custom_css", |  | ||||||
|             field=models.TextField(blank=True, default=""), |  | ||||||
|         ), |  | ||||||
|         migrations.RunPython(migrate_custom_css), |  | ||||||
|     ] |  | ||||||
| @ -1,18 +0,0 @@ | |||||||
| # Generated by Django 5.0.13 on 2025-03-19 22:54 |  | ||||||
|  |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_brands", "0008_brand_branding_custom_css"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="brand", |  | ||||||
|             name="branding_default_flow_background", |  | ||||||
|             field=models.TextField(default="/static/dist/assets/images/flow_background.jpg"), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -1,37 +0,0 @@ | |||||||
| # Generated by Django 5.1.9 on 2025-05-19 15:09 |  | ||||||
|  |  | ||||||
| import django.db.models.deletion |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_brands", "0009_brand_branding_default_flow_background"), |  | ||||||
|         ("authentik_crypto", "0004_alter_certificatekeypair_name"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="brand", |  | ||||||
|             name="client_certificates", |  | ||||||
|             field=models.ManyToManyField( |  | ||||||
|                 blank=True, |  | ||||||
|                 default=None, |  | ||||||
|                 help_text="Certificates used for client authentication.", |  | ||||||
|                 to="authentik_crypto.certificatekeypair", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AlterField( |  | ||||||
|             model_name="brand", |  | ||||||
|             name="web_certificate", |  | ||||||
|             field=models.ForeignKey( |  | ||||||
|                 default=None, |  | ||||||
|                 help_text="Web Certificate used by the authentik Core webserver.", |  | ||||||
|                 null=True, |  | ||||||
|                 on_delete=django.db.models.deletion.SET_DEFAULT, |  | ||||||
|                 related_name="+", |  | ||||||
|                 to="authentik_crypto.certificatekeypair", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -33,10 +33,6 @@ class Brand(SerializerModel): | |||||||
|  |  | ||||||
|     branding_logo = models.TextField(default="/static/dist/assets/icons/icon_left_brand.svg") |     branding_logo = models.TextField(default="/static/dist/assets/icons/icon_left_brand.svg") | ||||||
|     branding_favicon = models.TextField(default="/static/dist/assets/icons/icon.png") |     branding_favicon = models.TextField(default="/static/dist/assets/icons/icon.png") | ||||||
|     branding_custom_css = models.TextField(default="", blank=True) |  | ||||||
|     branding_default_flow_background = models.TextField( |  | ||||||
|         default="/static/dist/assets/images/flow_background.jpg" |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     flow_authentication = models.ForeignKey( |     flow_authentication = models.ForeignKey( | ||||||
|         Flow, null=True, on_delete=models.SET_NULL, related_name="brand_authentication" |         Flow, null=True, on_delete=models.SET_NULL, related_name="brand_authentication" | ||||||
| @ -73,13 +69,6 @@ class Brand(SerializerModel): | |||||||
|         default=None, |         default=None, | ||||||
|         on_delete=models.SET_DEFAULT, |         on_delete=models.SET_DEFAULT, | ||||||
|         help_text=_("Web Certificate used by the authentik Core webserver."), |         help_text=_("Web Certificate used by the authentik Core webserver."), | ||||||
|         related_name="+", |  | ||||||
|     ) |  | ||||||
|     client_certificates = models.ManyToManyField( |  | ||||||
|         CertificateKeyPair, |  | ||||||
|         default=None, |  | ||||||
|         blank=True, |  | ||||||
|         help_text=_("Certificates used for client authentication."), |  | ||||||
|     ) |     ) | ||||||
|     attributes = models.JSONField(default=dict, blank=True) |     attributes = models.JSONField(default=dict, blank=True) | ||||||
|  |  | ||||||
| @ -95,12 +84,6 @@ class Brand(SerializerModel): | |||||||
|             return CONFIG.get("web.path", "/")[:-1] + self.branding_favicon |             return CONFIG.get("web.path", "/")[:-1] + self.branding_favicon | ||||||
|         return self.branding_favicon |         return self.branding_favicon | ||||||
|  |  | ||||||
|     def branding_default_flow_background_url(self) -> str: |  | ||||||
|         """Get branding_default_flow_background with the correct prefix""" |  | ||||||
|         if self.branding_default_flow_background.startswith("/static"): |  | ||||||
|             return CONFIG.get("web.path", "/")[:-1] + self.branding_default_flow_background |  | ||||||
|         return self.branding_default_flow_background |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def serializer(self) -> Serializer: |     def serializer(self) -> Serializer: | ||||||
|         from authentik.brands.api import BrandSerializer |         from authentik.brands.api import BrandSerializer | ||||||
|  | |||||||
| @ -24,7 +24,6 @@ class TestBrands(APITestCase): | |||||||
|                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", |                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", | ||||||
|                 "branding_favicon": "/static/dist/assets/icons/icon.png", |                 "branding_favicon": "/static/dist/assets/icons/icon.png", | ||||||
|                 "branding_title": "authentik", |                 "branding_title": "authentik", | ||||||
|                 "branding_custom_css": "", |  | ||||||
|                 "matched_domain": brand.domain, |                 "matched_domain": brand.domain, | ||||||
|                 "ui_footer_links": [], |                 "ui_footer_links": [], | ||||||
|                 "ui_theme": Themes.AUTOMATIC, |                 "ui_theme": Themes.AUTOMATIC, | ||||||
| @ -44,7 +43,6 @@ class TestBrands(APITestCase): | |||||||
|                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", |                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", | ||||||
|                 "branding_favicon": "/static/dist/assets/icons/icon.png", |                 "branding_favicon": "/static/dist/assets/icons/icon.png", | ||||||
|                 "branding_title": "custom", |                 "branding_title": "custom", | ||||||
|                 "branding_custom_css": "", |  | ||||||
|                 "matched_domain": "bar.baz", |                 "matched_domain": "bar.baz", | ||||||
|                 "ui_footer_links": [], |                 "ui_footer_links": [], | ||||||
|                 "ui_theme": Themes.AUTOMATIC, |                 "ui_theme": Themes.AUTOMATIC, | ||||||
| @ -61,7 +59,6 @@ class TestBrands(APITestCase): | |||||||
|                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", |                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", | ||||||
|                 "branding_favicon": "/static/dist/assets/icons/icon.png", |                 "branding_favicon": "/static/dist/assets/icons/icon.png", | ||||||
|                 "branding_title": "authentik", |                 "branding_title": "authentik", | ||||||
|                 "branding_custom_css": "", |  | ||||||
|                 "matched_domain": "fallback", |                 "matched_domain": "fallback", | ||||||
|                 "ui_footer_links": [], |                 "ui_footer_links": [], | ||||||
|                 "ui_theme": Themes.AUTOMATIC, |                 "ui_theme": Themes.AUTOMATIC, | ||||||
| @ -124,27 +121,3 @@ class TestBrands(APITestCase): | |||||||
|                 "subject": None, |                 "subject": None, | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     def test_branding_url(self): |  | ||||||
|         """Test branding attributes return correct values""" |  | ||||||
|         brand = create_test_brand() |  | ||||||
|         brand.branding_default_flow_background = "https://goauthentik.io/img/icon.png" |  | ||||||
|         brand.branding_favicon = "https://goauthentik.io/img/icon.png" |  | ||||||
|         brand.branding_logo = "https://goauthentik.io/img/icon.png" |  | ||||||
|         brand.save() |  | ||||||
|         self.assertEqual( |  | ||||||
|             brand.branding_default_flow_background_url(), "https://goauthentik.io/img/icon.png" |  | ||||||
|         ) |  | ||||||
|         self.assertJSONEqual( |  | ||||||
|             self.client.get(reverse("authentik_api:brand-current")).content.decode(), |  | ||||||
|             { |  | ||||||
|                 "branding_logo": "https://goauthentik.io/img/icon.png", |  | ||||||
|                 "branding_favicon": "https://goauthentik.io/img/icon.png", |  | ||||||
|                 "branding_title": "authentik", |  | ||||||
|                 "branding_custom_css": "", |  | ||||||
|                 "matched_domain": brand.domain, |  | ||||||
|                 "ui_footer_links": [], |  | ||||||
|                 "ui_theme": Themes.AUTOMATIC, |  | ||||||
|                 "default_locale": "", |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  | |||||||
| @ -5,10 +5,10 @@ from typing import Any | |||||||
| from django.db.models import F, Q | from django.db.models import F, Q | ||||||
| from django.db.models import Value as V | from django.db.models import Value as V | ||||||
| from django.http.request import HttpRequest | from django.http.request import HttpRequest | ||||||
|  | from sentry_sdk import get_current_span | ||||||
|  |  | ||||||
| from authentik import get_full_version | from authentik import get_full_version | ||||||
| from authentik.brands.models import Brand | from authentik.brands.models import Brand | ||||||
| from authentik.lib.sentry import get_http_meta |  | ||||||
| from authentik.tenants.models import Tenant | from authentik.tenants.models import Tenant | ||||||
|  |  | ||||||
| _q_default = Q(default=True) | _q_default = Q(default=True) | ||||||
| @ -32,9 +32,13 @@ def context_processor(request: HttpRequest) -> dict[str, Any]: | |||||||
|     """Context Processor that injects brand object into every template""" |     """Context Processor that injects brand object into every template""" | ||||||
|     brand = getattr(request, "brand", DEFAULT_BRAND) |     brand = getattr(request, "brand", DEFAULT_BRAND) | ||||||
|     tenant = getattr(request, "tenant", Tenant()) |     tenant = getattr(request, "tenant", Tenant()) | ||||||
|  |     trace = "" | ||||||
|  |     span = get_current_span() | ||||||
|  |     if span: | ||||||
|  |         trace = span.to_traceparent() | ||||||
|     return { |     return { | ||||||
|         "brand": brand, |         "brand": brand, | ||||||
|         "footer_links": tenant.footer_links, |         "footer_links": tenant.footer_links, | ||||||
|         "html_meta": {**get_http_meta()}, |         "sentry_trace": trace, | ||||||
|         "version": get_full_version(), |         "version": get_full_version(), | ||||||
|     } |     } | ||||||
|  | |||||||
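For context, a Django context processor such as the one in this hunk is registered through the `TEMPLATES` setting. A minimal sketch follows; the dotted path to the function is an assumption for illustration, not confirmed by this diff:

```python
# Minimal sketch: registering a context processor in Django settings.
# The "authentik.brands.utils.context_processor" path is assumed, not verified.
TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.request",
                "authentik.brands.utils.context_processor",  # assumed path
            ],
        },
    },
]
```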
| @ -46,7 +46,7 @@ LOGGER = get_logger() | |||||||
|  |  | ||||||
| def user_app_cache_key(user_pk: str, page_number: int | None = None) -> str: | def user_app_cache_key(user_pk: str, page_number: int | None = None) -> str: | ||||||
|     """Cache key where application list for user is saved""" |     """Cache key where application list for user is saved""" | ||||||
|     key = f"{CACHE_PREFIX}app_access/{user_pk}" |     key = f"{CACHE_PREFIX}/app_access/{user_pk}" | ||||||
|     if page_number: |     if page_number: | ||||||
|         key += f"/{page_number}" |         key += f"/{page_number}" | ||||||
|     return key |     return key | ||||||
|  | |||||||
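A minimal sketch of the key shape produced by `user_app_cache_key` above, assuming a hypothetical `CACHE_PREFIX` value purely for illustration:

```python
# Minimal sketch: cache key composition for the per-user application list.
# CACHE_PREFIX below is a hypothetical value; the real prefix comes from the module.
CACHE_PREFIX = "goauthentik.io/core/"

def user_app_cache_key(user_pk: str, page_number: int | None = None) -> str:
    key = f"{CACHE_PREFIX}app_access/{user_pk}"
    if page_number:
        key += f"/{page_number}"
    return key

assert user_app_cache_key("1234") == "goauthentik.io/core/app_access/1234"
assert user_app_cache_key("1234", 2) == "goauthentik.io/core/app_access/1234/2"
```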
| @ -5,7 +5,6 @@ from typing import TypedDict | |||||||
| from rest_framework import mixins | from rest_framework import mixins | ||||||
| from rest_framework.fields import SerializerMethodField | from rest_framework.fields import SerializerMethodField | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| from rest_framework.serializers import CharField, DateTimeField, IPAddressField |  | ||||||
| from rest_framework.viewsets import GenericViewSet | from rest_framework.viewsets import GenericViewSet | ||||||
| from ua_parser import user_agent_parser | from ua_parser import user_agent_parser | ||||||
|  |  | ||||||
| @ -55,11 +54,6 @@ class UserAgentDict(TypedDict): | |||||||
| class AuthenticatedSessionSerializer(ModelSerializer): | class AuthenticatedSessionSerializer(ModelSerializer): | ||||||
|     """AuthenticatedSession Serializer""" |     """AuthenticatedSession Serializer""" | ||||||
|  |  | ||||||
|     expires = DateTimeField(source="session.expires", read_only=True) |  | ||||||
|     last_ip = IPAddressField(source="session.last_ip", read_only=True) |  | ||||||
|     last_user_agent = CharField(source="session.last_user_agent", read_only=True) |  | ||||||
|     last_used = DateTimeField(source="session.last_used", read_only=True) |  | ||||||
|  |  | ||||||
|     current = SerializerMethodField() |     current = SerializerMethodField() | ||||||
|     user_agent = SerializerMethodField() |     user_agent = SerializerMethodField() | ||||||
|     geo_ip = SerializerMethodField() |     geo_ip = SerializerMethodField() | ||||||
| @ -68,19 +62,19 @@ class AuthenticatedSessionSerializer(ModelSerializer): | |||||||
|     def get_current(self, instance: AuthenticatedSession) -> bool: |     def get_current(self, instance: AuthenticatedSession) -> bool: | ||||||
|         """Check if session is currently active session""" |         """Check if session is currently active session""" | ||||||
|         request: Request = self.context["request"] |         request: Request = self.context["request"] | ||||||
|         return request._request.session.session_key == instance.session.session_key |         return request._request.session.session_key == instance.session_key | ||||||
|  |  | ||||||
|     def get_user_agent(self, instance: AuthenticatedSession) -> UserAgentDict: |     def get_user_agent(self, instance: AuthenticatedSession) -> UserAgentDict: | ||||||
|         """Get parsed user agent""" |         """Get parsed user agent""" | ||||||
|         return user_agent_parser.Parse(instance.session.last_user_agent) |         return user_agent_parser.Parse(instance.last_user_agent) | ||||||
|  |  | ||||||
|     def get_geo_ip(self, instance: AuthenticatedSession) -> GeoIPDict | None:  # pragma: no cover |     def get_geo_ip(self, instance: AuthenticatedSession) -> GeoIPDict | None:  # pragma: no cover | ||||||
|         """Get GeoIP Data""" |         """Get GeoIP Data""" | ||||||
|         return GEOIP_CONTEXT_PROCESSOR.city_dict(instance.session.last_ip) |         return GEOIP_CONTEXT_PROCESSOR.city_dict(instance.last_ip) | ||||||
|  |  | ||||||
|     def get_asn(self, instance: AuthenticatedSession) -> ASNDict | None:  # pragma: no cover |     def get_asn(self, instance: AuthenticatedSession) -> ASNDict | None:  # pragma: no cover | ||||||
|         """Get ASN Data""" |         """Get ASN Data""" | ||||||
|         return ASN_CONTEXT_PROCESSOR.asn_dict(instance.session.last_ip) |         return ASN_CONTEXT_PROCESSOR.asn_dict(instance.last_ip) | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|         model = AuthenticatedSession |         model = AuthenticatedSession | ||||||
| @ -96,7 +90,6 @@ class AuthenticatedSessionSerializer(ModelSerializer): | |||||||
|             "last_used", |             "last_used", | ||||||
|             "expires", |             "expires", | ||||||
|         ] |         ] | ||||||
|         extra_args = {"uuid": {"read_only": True}} |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthenticatedSessionViewSet( | class AuthenticatedSessionViewSet( | ||||||
| @ -108,10 +101,9 @@ class AuthenticatedSessionViewSet( | |||||||
| ): | ): | ||||||
|     """AuthenticatedSession Viewset""" |     """AuthenticatedSession Viewset""" | ||||||
|  |  | ||||||
|     lookup_field = "uuid" |     queryset = AuthenticatedSession.objects.all() | ||||||
|     queryset = AuthenticatedSession.objects.select_related("session").all() |  | ||||||
|     serializer_class = AuthenticatedSessionSerializer |     serializer_class = AuthenticatedSessionSerializer | ||||||
|     search_fields = ["user__username", "session__last_ip", "session__last_user_agent"] |     search_fields = ["user__username", "last_ip", "last_user_agent"] | ||||||
|     filterset_fields = ["user__username", "session__last_ip", "session__last_user_agent"] |     filterset_fields = ["user__username", "last_ip", "last_user_agent"] | ||||||
|     ordering = ["user__username"] |     ordering = ["user__username"] | ||||||
|     owner_field = "user" |     owner_field = "user" | ||||||
|  | |||||||
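The serializer and viewset on the left read session metadata through a related Session row (instance.session.last_ip, session__last_ip) rather than fields stored directly on AuthenticatedSession. A minimal sketch, not authentik code and assuming models shaped like the left-hand column, of why the queryset adds select_related("session"): without the join each serialized session would trigger an extra query.

# Sketch: fetch authenticated sessions together with their related Session rows in a
# single JOIN, so reading last_ip / last_user_agent per item costs no extra queries.
from authentik.core.models import AuthenticatedSession

def list_session_ips(user):
    sessions = AuthenticatedSession.objects.filter(user=user).select_related("session")
    return [(s.uuid, s.session.last_ip, s.session.last_user_agent) for s in sessions]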
| @ -99,8 +99,9 @@ class GroupSerializer(ModelSerializer): | |||||||
|             if superuser |             if superuser | ||||||
|             else "authentik_core.disable_group_superuser" |             else "authentik_core.disable_group_superuser" | ||||||
|         ) |         ) | ||||||
|         if self.instance or superuser: |         has_perm = user.has_perm(perm) | ||||||
|             has_perm = user.has_perm(perm) or user.has_perm(perm, self.instance) |         if self.instance and not has_perm: | ||||||
|  |             has_perm = user.has_perm(perm, self.instance) | ||||||
|         if not has_perm: |         if not has_perm: | ||||||
|             raise ValidationError( |             raise ValidationError( | ||||||
|                 _( |                 _( | ||||||
|  | |||||||
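Both versions of this GroupSerializer check fall back from a global permission to an object-level one. A hypothetical helper, not taken from authentik, illustrating the two has_perm forms; the two-argument call is the django-guardian style object-level check, and the permission codename is assumed for illustration.

def can_set_superuser(user, group) -> bool:
    """Accept either the global permission or an object-level grant on this group."""
    perm = "authentik_core.enable_group_superuser"  # assumed codename
    return user.has_perm(perm) or user.has_perm(perm, group)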
| @ -5,7 +5,6 @@ from collections.abc import Iterable | |||||||
| from drf_spectacular.utils import OpenApiResponse, extend_schema | from drf_spectacular.utils import OpenApiResponse, extend_schema | ||||||
| from rest_framework import mixins | from rest_framework import mixins | ||||||
| from rest_framework.decorators import action | from rest_framework.decorators import action | ||||||
| from rest_framework.exceptions import ValidationError |  | ||||||
| from rest_framework.fields import CharField, ReadOnlyField, SerializerMethodField | from rest_framework.fields import CharField, ReadOnlyField, SerializerMethodField | ||||||
| from rest_framework.parsers import MultiPartParser | from rest_framework.parsers import MultiPartParser | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| @ -155,17 +154,6 @@ class SourceViewSet( | |||||||
|             matching_sources.append(source_settings.validated_data) |             matching_sources.append(source_settings.validated_data) | ||||||
|         return Response(matching_sources) |         return Response(matching_sources) | ||||||
|  |  | ||||||
|     def destroy(self, request: Request, *args, **kwargs): |  | ||||||
|         """Prevent deletion of built-in sources""" |  | ||||||
|         instance: Source = self.get_object() |  | ||||||
|  |  | ||||||
|         if instance.managed == Source.MANAGED_INBUILT: |  | ||||||
|             raise ValidationError( |  | ||||||
|                 {"detail": "Built-in sources cannot be deleted"}, code="protected" |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         return super().destroy(request, *args, **kwargs) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class UserSourceConnectionSerializer(SourceSerializer): | class UserSourceConnectionSerializer(SourceSerializer): | ||||||
|     """User source connection""" |     """User source connection""" | ||||||
| @ -179,13 +167,10 @@ class UserSourceConnectionSerializer(SourceSerializer): | |||||||
|             "user", |             "user", | ||||||
|             "source", |             "source", | ||||||
|             "source_obj", |             "source_obj", | ||||||
|             "identifier", |  | ||||||
|             "created", |             "created", | ||||||
|             "last_updated", |  | ||||||
|         ] |         ] | ||||||
|         extra_kwargs = { |         extra_kwargs = { | ||||||
|             "created": {"read_only": True}, |             "created": {"read_only": True}, | ||||||
|             "last_updated": {"read_only": True}, |  | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -202,7 +187,7 @@ class UserSourceConnectionViewSet( | |||||||
|     queryset = UserSourceConnection.objects.all() |     queryset = UserSourceConnection.objects.all() | ||||||
|     serializer_class = UserSourceConnectionSerializer |     serializer_class = UserSourceConnectionSerializer | ||||||
|     filterset_fields = ["user", "source__slug"] |     filterset_fields = ["user", "source__slug"] | ||||||
|     search_fields = ["user__username", "source__slug", "identifier"] |     search_fields = ["source__slug"] | ||||||
|     ordering = ["source__slug", "pk"] |     ordering = ["source__slug", "pk"] | ||||||
|     owner_field = "user" |     owner_field = "user" | ||||||
|  |  | ||||||
| @ -221,11 +206,9 @@ class GroupSourceConnectionSerializer(SourceSerializer): | |||||||
|             "source_obj", |             "source_obj", | ||||||
|             "identifier", |             "identifier", | ||||||
|             "created", |             "created", | ||||||
|             "last_updated", |  | ||||||
|         ] |         ] | ||||||
|         extra_kwargs = { |         extra_kwargs = { | ||||||
|             "created": {"read_only": True}, |             "created": {"read_only": True}, | ||||||
|             "last_updated": {"read_only": True}, |  | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -242,5 +225,6 @@ class GroupSourceConnectionViewSet( | |||||||
|     queryset = GroupSourceConnection.objects.all() |     queryset = GroupSourceConnection.objects.all() | ||||||
|     serializer_class = GroupSourceConnectionSerializer |     serializer_class = GroupSourceConnectionSerializer | ||||||
|     filterset_fields = ["group", "source__slug"] |     filterset_fields = ["group", "source__slug"] | ||||||
|     search_fields = ["group__name", "source__slug", "identifier"] |     search_fields = ["source__slug"] | ||||||
|     ordering = ["source__slug", "pk"] |     ordering = ["source__slug", "pk"] | ||||||
|  |     owner_field = "user" | ||||||
|  | |||||||
| @ -6,6 +6,8 @@ from typing import Any | |||||||
|  |  | ||||||
| from django.contrib.auth import update_session_auth_hash | from django.contrib.auth import update_session_auth_hash | ||||||
| from django.contrib.auth.models import Permission | from django.contrib.auth.models import Permission | ||||||
|  | from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||||
|  | from django.core.cache import cache | ||||||
| from django.db.models.functions import ExtractHour | from django.db.models.functions import ExtractHour | ||||||
| from django.db.transaction import atomic | from django.db.transaction import atomic | ||||||
| from django.db.utils import IntegrityError | from django.db.utils import IntegrityError | ||||||
| @ -69,8 +71,8 @@ from authentik.core.middleware import ( | |||||||
| from authentik.core.models import ( | from authentik.core.models import ( | ||||||
|     USER_ATTRIBUTE_TOKEN_EXPIRING, |     USER_ATTRIBUTE_TOKEN_EXPIRING, | ||||||
|     USER_PATH_SERVICE_ACCOUNT, |     USER_PATH_SERVICE_ACCOUNT, | ||||||
|  |     AuthenticatedSession, | ||||||
|     Group, |     Group, | ||||||
|     Session, |  | ||||||
|     Token, |     Token, | ||||||
|     TokenIntents, |     TokenIntents, | ||||||
|     User, |     User, | ||||||
| @ -84,7 +86,6 @@ from authentik.flows.views.executor import QS_KEY_TOKEN | |||||||
| from authentik.lib.avatars import get_avatar | from authentik.lib.avatars import get_avatar | ||||||
| from authentik.rbac.decorators import permission_required | from authentik.rbac.decorators import permission_required | ||||||
| from authentik.rbac.models import get_permission_choices | from authentik.rbac.models import get_permission_choices | ||||||
| from authentik.stages.email.flow import pickle_flow_token_for_email |  | ||||||
| from authentik.stages.email.models import EmailStage | from authentik.stages.email.models import EmailStage | ||||||
| from authentik.stages.email.tasks import send_mails | from authentik.stages.email.tasks import send_mails | ||||||
| from authentik.stages.email.utils import TemplateEmailMessage | from authentik.stages.email.utils import TemplateEmailMessage | ||||||
| @ -225,7 +226,6 @@ class UserSerializer(ModelSerializer): | |||||||
|             "name", |             "name", | ||||||
|             "is_active", |             "is_active", | ||||||
|             "last_login", |             "last_login", | ||||||
|             "date_joined", |  | ||||||
|             "is_superuser", |             "is_superuser", | ||||||
|             "groups", |             "groups", | ||||||
|             "groups_obj", |             "groups_obj", | ||||||
| @ -240,7 +240,6 @@ class UserSerializer(ModelSerializer): | |||||||
|         ] |         ] | ||||||
|         extra_kwargs = { |         extra_kwargs = { | ||||||
|             "name": {"allow_blank": True}, |             "name": {"allow_blank": True}, | ||||||
|             "date_joined": {"read_only": True}, |  | ||||||
|             "password_change_date": {"read_only": True}, |             "password_change_date": {"read_only": True}, | ||||||
|         } |         } | ||||||
|  |  | ||||||
| @ -374,7 +373,7 @@ class UsersFilter(FilterSet): | |||||||
|         method="filter_attributes", |         method="filter_attributes", | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|     is_superuser = BooleanFilter(field_name="ak_groups", method="filter_is_superuser") |     is_superuser = BooleanFilter(field_name="ak_groups", lookup_expr="is_superuser") | ||||||
|     uuid = UUIDFilter(field_name="uuid") |     uuid = UUIDFilter(field_name="uuid") | ||||||
|  |  | ||||||
|     path = CharFilter(field_name="path") |     path = CharFilter(field_name="path") | ||||||
| @ -392,11 +391,6 @@ class UsersFilter(FilterSet): | |||||||
|         queryset=Group.objects.all().order_by("name"), |         queryset=Group.objects.all().order_by("name"), | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|     def filter_is_superuser(self, queryset, name, value): |  | ||||||
|         if value: |  | ||||||
|             return queryset.filter(ak_groups__is_superuser=True).distinct() |  | ||||||
|         return queryset.exclude(ak_groups__is_superuser=True).distinct() |  | ||||||
|  |  | ||||||
|     def filter_attributes(self, queryset, name, value): |     def filter_attributes(self, queryset, name, value): | ||||||
|         """Filter attributes by query args""" |         """Filter attributes by query args""" | ||||||
|         try: |         try: | ||||||
| @ -452,7 +446,7 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|     def list(self, request, *args, **kwargs): |     def list(self, request, *args, **kwargs): | ||||||
|         return super().list(request, *args, **kwargs) |         return super().list(request, *args, **kwargs) | ||||||
|  |  | ||||||
|     def _create_recovery_link(self, for_email=False) -> tuple[str, Token]: |     def _create_recovery_link(self) -> tuple[str, Token]: | ||||||
|         """Create a recovery link (when the current brand has a recovery flow set), |         """Create a recovery link (when the current brand has a recovery flow set), | ||||||
|         that can either be shown to an admin or sent to the user directly""" |         that can either be shown to an admin or sent to the user directly""" | ||||||
|         brand: Brand = self.request._request.brand |         brand: Brand = self.request._request.brand | ||||||
| @ -474,16 +468,12 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|             raise ValidationError( |             raise ValidationError( | ||||||
|                 {"non_field_errors": "Recovery flow not applicable to user"} |                 {"non_field_errors": "Recovery flow not applicable to user"} | ||||||
|             ) from None |             ) from None | ||||||
|         _plan = FlowToken.pickle(plan) |  | ||||||
|         if for_email: |  | ||||||
|             _plan = pickle_flow_token_for_email(plan) |  | ||||||
|         token, __ = FlowToken.objects.update_or_create( |         token, __ = FlowToken.objects.update_or_create( | ||||||
|             identifier=f"{user.uid}-password-reset", |             identifier=f"{user.uid}-password-reset", | ||||||
|             defaults={ |             defaults={ | ||||||
|                 "user": user, |                 "user": user, | ||||||
|                 "flow": flow, |                 "flow": flow, | ||||||
|                 "_plan": _plan, |                 "_plan": FlowToken.pickle(plan), | ||||||
|                 "revoke_on_execution": not for_email, |  | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|         querystring = urlencode({QS_KEY_TOKEN: token.key}) |         querystring = urlencode({QS_KEY_TOKEN: token.key}) | ||||||
| @ -653,7 +643,7 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|         if for_user.email == "": |         if for_user.email == "": | ||||||
|             LOGGER.debug("User doesn't have an email address") |             LOGGER.debug("User doesn't have an email address") | ||||||
|             raise ValidationError({"non_field_errors": "User does not have an email address set."}) |             raise ValidationError({"non_field_errors": "User does not have an email address set."}) | ||||||
|         link, token = self._create_recovery_link(for_email=True) |         link, token = self._create_recovery_link() | ||||||
|         # Lookup the email stage to assure the current user can access it |         # Lookup the email stage to assure the current user can access it | ||||||
|         stages = get_objects_for_user( |         stages = get_objects_for_user( | ||||||
|             request.user, "authentik_stages_email.view_emailstage" |             request.user, "authentik_stages_email.view_emailstage" | ||||||
| @ -777,6 +767,9 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|         response = super().partial_update(request, *args, **kwargs) |         response = super().partial_update(request, *args, **kwargs) | ||||||
|         instance: User = self.get_object() |         instance: User = self.get_object() | ||||||
|         if not instance.is_active: |         if not instance.is_active: | ||||||
|             Session.objects.filter(authenticatedsession__user=instance).delete() |             sessions = AuthenticatedSession.objects.filter(user=instance) | ||||||
|  |             session_ids = sessions.values_list("session_key", flat=True) | ||||||
|  |             cache.delete_many(f"{KEY_PREFIX}{session}" for session in session_ids) | ||||||
|  |             sessions.delete() | ||||||
|             LOGGER.debug("Deleted user's sessions", user=instance.username) |             LOGGER.debug("Deleted user's sessions", user=instance.username) | ||||||
|         return response |         return response | ||||||
|  | |||||||
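When a user is deactivated, both versions terminate that user's sessions; the right-hand (older) version additionally has to clear cache-backed session entries, which Django stores under keys prefixed with KEY_PREFIX. A small sketch of that cache cleanup step in isolation, assuming a cache or cached_db session backend.

from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.core.cache import cache

def purge_cached_sessions(session_keys):
    """Delete the cache entries backing the given session keys."""
    cache.delete_many(f"{KEY_PREFIX}{key}" for key in session_keys)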
| @ -20,8 +20,6 @@ from rest_framework.serializers import ( | |||||||
|     raise_errors_on_nested_writes, |     raise_errors_on_nested_writes, | ||||||
| ) | ) | ||||||
|  |  | ||||||
| from authentik.rbac.permissions import assign_initial_permissions |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def is_dict(value: Any): | def is_dict(value: Any): | ||||||
|     """Ensure a value is a dictionary, useful for JSONFields""" |     """Ensure a value is a dictionary, useful for JSONFields""" | ||||||
| @ -31,14 +29,6 @@ def is_dict(value: Any): | |||||||
|  |  | ||||||
|  |  | ||||||
| class ModelSerializer(BaseModelSerializer): | class ModelSerializer(BaseModelSerializer): | ||||||
|     def create(self, validated_data): |  | ||||||
|         instance = super().create(validated_data) |  | ||||||
|  |  | ||||||
|         request = self.context.get("request") |  | ||||||
|         if request and hasattr(request, "user") and not request.user.is_anonymous: |  | ||||||
|             assign_initial_permissions(request.user, instance) |  | ||||||
|  |  | ||||||
|         return instance |  | ||||||
|  |  | ||||||
|     def update(self, instance: Model, validated_data): |     def update(self, instance: Model, validated_data): | ||||||
|         raise_errors_on_nested_writes("update", self, validated_data) |         raise_errors_on_nested_writes("update", self, validated_data) | ||||||
|  | |||||||
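The removed create() override grants initial permissions to whoever created an object. The helper below illustrates that general pattern with django-guardian's assign_perm; it is not the authentik implementation in authentik.rbac.permissions.

from guardian.shortcuts import assign_perm

def grant_creator_perms(user, instance, codenames):
    """Grant the creating user object-level permissions on the newly created instance."""
    for codename in codenames:
        assign_perm(codename, user, instance)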
| @ -32,5 +32,5 @@ class AuthentikCoreConfig(ManagedAppConfig): | |||||||
|                 "name": "authentik Built-in", |                 "name": "authentik Built-in", | ||||||
|                 "slug": "authentik-built-in", |                 "slug": "authentik-built-in", | ||||||
|             }, |             }, | ||||||
|             managed=Source.MANAGED_INBUILT, |             managed="goauthentik.io/sources/inbuilt", | ||||||
|         ) |         ) | ||||||
|  | |||||||
| @ -24,15 +24,6 @@ class InbuiltBackend(ModelBackend): | |||||||
|         self.set_method("password", request) |         self.set_method("password", request) | ||||||
|         return user |         return user | ||||||
|  |  | ||||||
|     async def aauthenticate( |  | ||||||
|         self, request: HttpRequest, username: str | None, password: str | None, **kwargs: Any |  | ||||||
|     ) -> User | None: |  | ||||||
|         user = await super().aauthenticate(request, username=username, password=password, **kwargs) |  | ||||||
|         if not user: |  | ||||||
|             return None |  | ||||||
|         self.set_method("password", request) |  | ||||||
|         return user |  | ||||||
|  |  | ||||||
|     def set_method(self, method: str, request: HttpRequest | None, **kwargs): |     def set_method(self, method: str, request: HttpRequest | None, **kwargs): | ||||||
|         """Set method data on current flow, if possbiel""" |         """Set method data on current flow, if possbiel""" | ||||||
|         if not request: |         if not request: | ||||||
|  | |||||||
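The aauthenticate override added on the left mirrors set_method() for Django's async authentication path. A hypothetical async login view showing how that path would be exercised (Django 5+ exposes aauthenticate/alogin as async counterparts to authenticate/login); this is an illustration, not authentik code.

from django.contrib.auth import aauthenticate, alogin
from django.http import HttpResponse

async def login_view(request):
    # Resolve the user through the configured backends' async authenticate path.
    user = await aauthenticate(
        request,
        username=request.POST.get("username"),
        password=request.POST.get("password"),
    )
    if user is None:
        return HttpResponse(status=401)
    await alogin(request, user)
    return HttpResponse(status=204)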
| @ -1,15 +0,0 @@ | |||||||
| """Change user type""" |  | ||||||
|  |  | ||||||
| from importlib import import_module |  | ||||||
|  |  | ||||||
| from django.conf import settings |  | ||||||
|  |  | ||||||
| from authentik.tenants.management import TenantCommand |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Command(TenantCommand): |  | ||||||
|     """Delete all sessions""" |  | ||||||
|  |  | ||||||
|     def handle_per_tenant(self, **options): |  | ||||||
|         engine = import_module(settings.SESSION_ENGINE) |  | ||||||
|         engine.SessionStore.clear_expired() |  | ||||||
| @ -2,7 +2,6 @@ | |||||||
|  |  | ||||||
| from django.apps import apps | from django.apps import apps | ||||||
| from django.contrib.auth.management import create_permissions | from django.contrib.auth.management import create_permissions | ||||||
| from django.core.management import call_command |  | ||||||
| from django.core.management.base import BaseCommand, no_translations | from django.core.management.base import BaseCommand, no_translations | ||||||
| from guardian.management import create_anonymous_user | from guardian.management import create_anonymous_user | ||||||
|  |  | ||||||
| @ -17,10 +16,6 @@ class Command(BaseCommand): | |||||||
|         """Check permissions for all apps""" |         """Check permissions for all apps""" | ||||||
|         for tenant in Tenant.objects.filter(ready=True): |         for tenant in Tenant.objects.filter(ready=True): | ||||||
|             with tenant: |             with tenant: | ||||||
|                 # See https://code.djangoproject.com/ticket/28417 |  | ||||||
|                 # Remove potential lingering old permissions |  | ||||||
|                 call_command("remove_stale_contenttypes", "--no-input") |  | ||||||
|  |  | ||||||
|                 for app in apps.get_app_configs(): |                 for app in apps.get_app_configs(): | ||||||
|                     self.stdout.write(f"Checking app {app.name} ({app.label})\n") |                     self.stdout.write(f"Checking app {app.name} ({app.label})\n") | ||||||
|                     create_permissions(app, verbosity=0) |                     create_permissions(app, verbosity=0) | ||||||
|  | |||||||
| @ -2,14 +2,9 @@ | |||||||
|  |  | ||||||
| from collections.abc import Callable | from collections.abc import Callable | ||||||
| from contextvars import ContextVar | from contextvars import ContextVar | ||||||
| from functools import partial |  | ||||||
| from uuid import uuid4 | from uuid import uuid4 | ||||||
|  |  | ||||||
| from django.contrib.auth.models import AnonymousUser |  | ||||||
| from django.core.exceptions import ImproperlyConfigured |  | ||||||
| from django.http import HttpRequest, HttpResponse | from django.http import HttpRequest, HttpResponse | ||||||
| from django.utils.deprecation import MiddlewareMixin |  | ||||||
| from django.utils.functional import SimpleLazyObject |  | ||||||
| from django.utils.translation import override | from django.utils.translation import override | ||||||
| from sentry_sdk.api import set_tag | from sentry_sdk.api import set_tag | ||||||
| from structlog.contextvars import STRUCTLOG_KEY_PREFIX | from structlog.contextvars import STRUCTLOG_KEY_PREFIX | ||||||
| @ -25,40 +20,6 @@ CTX_HOST = ContextVar[str | None](STRUCTLOG_KEY_PREFIX + "host", default=None) | |||||||
| CTX_AUTH_VIA = ContextVar[str | None](STRUCTLOG_KEY_PREFIX + KEY_AUTH_VIA, default=None) | CTX_AUTH_VIA = ContextVar[str | None](STRUCTLOG_KEY_PREFIX + KEY_AUTH_VIA, default=None) | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_user(request): |  | ||||||
|     if not hasattr(request, "_cached_user"): |  | ||||||
|         user = None |  | ||||||
|         if (authenticated_session := request.session.get("authenticatedsession", None)) is not None: |  | ||||||
|             user = authenticated_session.user |  | ||||||
|         request._cached_user = user or AnonymousUser() |  | ||||||
|     return request._cached_user |  | ||||||
|  |  | ||||||
|  |  | ||||||
| async def aget_user(request): |  | ||||||
|     if not hasattr(request, "_cached_user"): |  | ||||||
|         user = None |  | ||||||
|         if ( |  | ||||||
|             authenticated_session := await request.session.aget("authenticatedsession", None) |  | ||||||
|         ) is not None: |  | ||||||
|             user = authenticated_session.user |  | ||||||
|         request._cached_user = user or AnonymousUser() |  | ||||||
|     return request._cached_user |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthenticationMiddleware(MiddlewareMixin): |  | ||||||
|     def process_request(self, request): |  | ||||||
|         if not hasattr(request, "session"): |  | ||||||
|             raise ImproperlyConfigured( |  | ||||||
|                 "The Django authentication middleware requires session " |  | ||||||
|                 "middleware to be installed. Edit your MIDDLEWARE setting to " |  | ||||||
|                 "insert " |  | ||||||
|                 "'authentik.root.middleware.SessionMiddleware' before " |  | ||||||
|                 "'authentik.core.middleware.AuthenticationMiddleware'." |  | ||||||
|             ) |  | ||||||
|         request.user = SimpleLazyObject(lambda: get_user(request)) |  | ||||||
|         request.auser = partial(aget_user, request) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ImpersonateMiddleware: | class ImpersonateMiddleware: | ||||||
|     """Middleware to impersonate users""" |     """Middleware to impersonate users""" | ||||||
|  |  | ||||||
|  | |||||||
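The removed AuthenticationMiddleware resolves request.user from the AuthenticatedSession object stored in the session rather than from Django's SESSION_KEY. A minimal sketch of the lazy-resolution pattern it relies on, with resolve_user standing in for the get_user defined above.

from django.contrib.auth.models import AnonymousUser
from django.utils.functional import SimpleLazyObject

def resolve_user(request):
    """Pull the user off the stored authenticatedsession, falling back to anonymous."""
    authenticated_session = request.session.get("authenticatedsession")
    return authenticated_session.user if authenticated_session else AnonymousUser()

def attach_lazy_user(request):
    # SimpleLazyObject defers the session lookup until request.user is first accessed.
    request.user = SimpleLazyObject(lambda: resolve_user(request))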
| @ -1,19 +0,0 @@ | |||||||
| # Generated by Django 5.0.13 on 2025-04-07 14:04 |  | ||||||
|  |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_core", "0043_alter_group_options"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="usersourceconnection", |  | ||||||
|             name="new_identifier", |  | ||||||
|             field=models.TextField(default=""), |  | ||||||
|             preserve_default=False, |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -1,30 +0,0 @@ | |||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_core", "0044_usersourceconnection_new_identifier"), |  | ||||||
|         ("authentik_sources_kerberos", "0003_migrate_userkerberossourceconnection_identifier"), |  | ||||||
|         ("authentik_sources_oauth", "0009_migrate_useroauthsourceconnection_identifier"), |  | ||||||
|         ("authentik_sources_plex", "0005_migrate_userplexsourceconnection_identifier"), |  | ||||||
|         ("authentik_sources_saml", "0019_migrate_usersamlsourceconnection_identifier"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.RenameField( |  | ||||||
|             model_name="usersourceconnection", |  | ||||||
|             old_name="new_identifier", |  | ||||||
|             new_name="identifier", |  | ||||||
|         ), |  | ||||||
|         migrations.AddIndex( |  | ||||||
|             model_name="usersourceconnection", |  | ||||||
|             index=models.Index(fields=["identifier"], name="authentik_c_identif_59226f_idx"), |  | ||||||
|         ), |  | ||||||
|         migrations.AddIndex( |  | ||||||
|             model_name="usersourceconnection", |  | ||||||
|             index=models.Index( |  | ||||||
|                 fields=["source", "identifier"], name="authentik_c_source__649e04_idx" |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -1,242 +0,0 @@ | |||||||
| # Generated by Django 5.0.11 on 2025-01-27 12:58 |  | ||||||
|  |  | ||||||
| import uuid |  | ||||||
| import pickle  # nosec |  | ||||||
| from django.core import signing |  | ||||||
| from django.contrib.auth import BACKEND_SESSION_KEY, HASH_SESSION_KEY, SESSION_KEY |  | ||||||
| from django.db import migrations, models |  | ||||||
| import django.db.models.deletion |  | ||||||
| from django.conf import settings |  | ||||||
| from django.contrib.sessions.backends.cache import KEY_PREFIX |  | ||||||
| from django.utils.timezone import now, timedelta |  | ||||||
| from authentik.lib.migrations import progress_bar |  | ||||||
| from authentik.root.middleware import ClientIPMiddleware |  | ||||||
|  |  | ||||||
|  |  | ||||||
| SESSION_CACHE_ALIAS = "default" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class PickleSerializer: |  | ||||||
|     """ |  | ||||||
|     Simple wrapper around pickle to be used in signing.dumps()/loads() and |  | ||||||
|     cache backends. |  | ||||||
|     """ |  | ||||||
|  |  | ||||||
|     def __init__(self, protocol=None): |  | ||||||
|         self.protocol = pickle.HIGHEST_PROTOCOL if protocol is None else protocol |  | ||||||
|  |  | ||||||
|     def dumps(self, obj): |  | ||||||
|         """Pickle data to be stored in redis""" |  | ||||||
|         return pickle.dumps(obj, self.protocol) |  | ||||||
|  |  | ||||||
|     def loads(self, data): |  | ||||||
|         """Unpickle data to be loaded from redis""" |  | ||||||
|         try: |  | ||||||
|             return pickle.loads(data)  # nosec |  | ||||||
|         except Exception: |  | ||||||
|             return {} |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def _migrate_session( |  | ||||||
|     apps, |  | ||||||
|     db_alias, |  | ||||||
|     session_key, |  | ||||||
|     session_data, |  | ||||||
|     expires, |  | ||||||
| ): |  | ||||||
|     Session = apps.get_model("authentik_core", "Session") |  | ||||||
|     OldAuthenticatedSession = apps.get_model("authentik_core", "OldAuthenticatedSession") |  | ||||||
|     AuthenticatedSession = apps.get_model("authentik_core", "AuthenticatedSession") |  | ||||||
|  |  | ||||||
|     old_auth_session = ( |  | ||||||
|         OldAuthenticatedSession.objects.using(db_alias).filter(session_key=session_key).first() |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     args = { |  | ||||||
|         "session_key": session_key, |  | ||||||
|         "expires": expires, |  | ||||||
|         "last_ip": ClientIPMiddleware.default_ip, |  | ||||||
|         "last_user_agent": "", |  | ||||||
|         "session_data": {}, |  | ||||||
|     } |  | ||||||
|     for k, v in session_data.items(): |  | ||||||
|         if k == "authentik/stages/user_login/last_ip": |  | ||||||
|             args["last_ip"] = v |  | ||||||
|         elif k in ["last_user_agent", "last_used"]: |  | ||||||
|             args[k] = v |  | ||||||
|         elif k in [SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY]: |  | ||||||
|             pass |  | ||||||
|         else: |  | ||||||
|             args["session_data"][k] = v |  | ||||||
|     if old_auth_session: |  | ||||||
|         args["last_user_agent"] = old_auth_session.last_user_agent |  | ||||||
|         args["last_used"] = old_auth_session.last_used |  | ||||||
|  |  | ||||||
|     args["session_data"] = pickle.dumps(args["session_data"]) |  | ||||||
|     session = Session.objects.using(db_alias).create(**args) |  | ||||||
|  |  | ||||||
|     if old_auth_session: |  | ||||||
|         AuthenticatedSession.objects.using(db_alias).create( |  | ||||||
|             session=session, |  | ||||||
|             user=old_auth_session.user, |  | ||||||
|             uuid=old_auth_session.uuid, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def migrate_redis_sessions(apps, schema_editor): |  | ||||||
|     from django.core.cache import caches |  | ||||||
|  |  | ||||||
|     db_alias = schema_editor.connection.alias |  | ||||||
|     cache = caches[SESSION_CACHE_ALIAS] |  | ||||||
|  |  | ||||||
|     # Not a redis cache, skipping |  | ||||||
|     if not hasattr(cache, "keys"): |  | ||||||
|         return |  | ||||||
|  |  | ||||||
|     print("\nMigrating Redis sessions to database, this might take a couple of minutes...") |  | ||||||
|     for key, session_data in progress_bar(cache.get_many(cache.keys(f"{KEY_PREFIX}*")).items()): |  | ||||||
|         _migrate_session( |  | ||||||
|             apps=apps, |  | ||||||
|             db_alias=db_alias, |  | ||||||
|             session_key=key.removeprefix(KEY_PREFIX), |  | ||||||
|             session_data=session_data, |  | ||||||
|             expires=now() + timedelta(seconds=cache.ttl(key)), |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def migrate_database_sessions(apps, schema_editor): |  | ||||||
|     DjangoSession = apps.get_model("sessions", "Session") |  | ||||||
|     db_alias = schema_editor.connection.alias |  | ||||||
|  |  | ||||||
|     print("\nMigration database sessions, this might take a couple of minutes...") |  | ||||||
|     for django_session in progress_bar(DjangoSession.objects.using(db_alias).all()): |  | ||||||
|         session_data = signing.loads( |  | ||||||
|             django_session.session_data, |  | ||||||
|             salt="django.contrib.sessions.SessionStore", |  | ||||||
|             serializer=PickleSerializer, |  | ||||||
|         ) |  | ||||||
|         _migrate_session( |  | ||||||
|             apps=apps, |  | ||||||
|             db_alias=db_alias, |  | ||||||
|             session_key=django_session.session_key, |  | ||||||
|             session_data=session_data, |  | ||||||
|             expires=django_session.expire_date, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("sessions", "0001_initial"), |  | ||||||
|         ("authentik_core", "0045_rename_new_identifier_usersourceconnection_identifier_and_more"), |  | ||||||
|         ("authentik_providers_oauth2", "0027_accesstoken_authentik_p_expires_9f24a5_idx_and_more"), |  | ||||||
|         ("authentik_providers_rac", "0006_connectiontoken_authentik_p_expires_91f148_idx_and_more"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         # Rename AuthenticatedSession to OldAuthenticatedSession |  | ||||||
|         migrations.RenameModel( |  | ||||||
|             old_name="AuthenticatedSession", |  | ||||||
|             new_name="OldAuthenticatedSession", |  | ||||||
|         ), |  | ||||||
|         migrations.RenameIndex( |  | ||||||
|             model_name="oldauthenticatedsession", |  | ||||||
|             new_name="authentik_c_expires_cf4f72_idx", |  | ||||||
|             old_name="authentik_c_expires_08251d_idx", |  | ||||||
|         ), |  | ||||||
|         migrations.RenameIndex( |  | ||||||
|             model_name="oldauthenticatedsession", |  | ||||||
|             new_name="authentik_c_expirin_c1f17f_idx", |  | ||||||
|             old_name="authentik_c_expirin_9cd839_idx", |  | ||||||
|         ), |  | ||||||
|         migrations.RenameIndex( |  | ||||||
|             model_name="oldauthenticatedsession", |  | ||||||
|             new_name="authentik_c_expirin_e04f5d_idx", |  | ||||||
|             old_name="authentik_c_expirin_195a84_idx", |  | ||||||
|         ), |  | ||||||
|         migrations.RenameIndex( |  | ||||||
|             model_name="oldauthenticatedsession", |  | ||||||
|             new_name="authentik_c_session_a44819_idx", |  | ||||||
|             old_name="authentik_c_session_d0f005_idx", |  | ||||||
|         ), |  | ||||||
|         migrations.RunSQL( |  | ||||||
|             sql="ALTER INDEX authentik_core_authenticatedsession_user_id_5055b6cf RENAME TO authentik_core_oldauthenticatedsession_user_id_5055b6cf", |  | ||||||
|             reverse_sql="ALTER INDEX authentik_core_oldauthenticatedsession_user_id_5055b6cf RENAME TO authentik_core_authenticatedsession_user_id_5055b6cf", |  | ||||||
|         ), |  | ||||||
|         # Create new Session and AuthenticatedSession models |  | ||||||
|         migrations.CreateModel( |  | ||||||
|             name="Session", |  | ||||||
|             fields=[ |  | ||||||
|                 ( |  | ||||||
|                     "session_key", |  | ||||||
|                     models.CharField( |  | ||||||
|                         max_length=40, primary_key=True, serialize=False, verbose_name="session key" |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ("expires", models.DateTimeField(default=None, null=True)), |  | ||||||
|                 ("expiring", models.BooleanField(default=True)), |  | ||||||
|                 ("session_data", models.BinaryField(verbose_name="session data")), |  | ||||||
|                 ("last_ip", models.GenericIPAddressField()), |  | ||||||
|                 ("last_user_agent", models.TextField(blank=True)), |  | ||||||
|                 ("last_used", models.DateTimeField(auto_now=True)), |  | ||||||
|             ], |  | ||||||
|             options={ |  | ||||||
|                 "default_permissions": [], |  | ||||||
|                 "verbose_name": "Session", |  | ||||||
|                 "verbose_name_plural": "Sessions", |  | ||||||
|             }, |  | ||||||
|         ), |  | ||||||
|         migrations.AddIndex( |  | ||||||
|             model_name="session", |  | ||||||
|             index=models.Index(fields=["expires"], name="authentik_c_expires_d2f607_idx"), |  | ||||||
|         ), |  | ||||||
|         migrations.AddIndex( |  | ||||||
|             model_name="session", |  | ||||||
|             index=models.Index(fields=["expiring"], name="authentik_c_expirin_7c2cfb_idx"), |  | ||||||
|         ), |  | ||||||
|         migrations.AddIndex( |  | ||||||
|             model_name="session", |  | ||||||
|             index=models.Index( |  | ||||||
|                 fields=["expiring", "expires"], name="authentik_c_expirin_1ab2e4_idx" |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AddIndex( |  | ||||||
|             model_name="session", |  | ||||||
|             index=models.Index( |  | ||||||
|                 fields=["expires", "session_key"], name="authentik_c_expires_c49143_idx" |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.CreateModel( |  | ||||||
|             name="AuthenticatedSession", |  | ||||||
|             fields=[ |  | ||||||
|                 ( |  | ||||||
|                     "session", |  | ||||||
|                     models.OneToOneField( |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, |  | ||||||
|                         primary_key=True, |  | ||||||
|                         serialize=False, |  | ||||||
|                         to="authentik_core.session", |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ("uuid", models.UUIDField(default=uuid.uuid4, unique=True)), |  | ||||||
|                 ( |  | ||||||
|                     "user", |  | ||||||
|                     models.ForeignKey( |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|             ], |  | ||||||
|             options={ |  | ||||||
|                 "verbose_name": "Authenticated Session", |  | ||||||
|                 "verbose_name_plural": "Authenticated Sessions", |  | ||||||
|             }, |  | ||||||
|         ), |  | ||||||
|         migrations.RunPython( |  | ||||||
|             code=migrate_redis_sessions, |  | ||||||
|             reverse_code=migrations.RunPython.noop, |  | ||||||
|         ), |  | ||||||
|         migrations.RunPython( |  | ||||||
|             code=migrate_database_sessions, |  | ||||||
|             reverse_code=migrations.RunPython.noop, |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
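For database-backed sessions the migration above decodes Django's legacy payload with signing.loads and the pickle-compatible serializer it defines. A small sketch of that decode step in isolation; PickleSerializer refers to the class defined in the migration, and the salt matches the one used there.

from django.core import signing

def decode_legacy_session(encoded: str) -> dict:
    """Decode a pre-migration session payload into a plain dict."""
    return signing.loads(
        encoded,
        salt="django.contrib.sessions.SessionStore",  # salt used by Django's SessionStore
        serializer=PickleSerializer,  # defined in the migration above
    )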
| @ -1,18 +0,0 @@ | |||||||
| # Generated by Django 5.0.11 on 2025-01-27 13:02 |  | ||||||
|  |  | ||||||
| from django.db import migrations |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_core", "0046_session_and_more"), |  | ||||||
|         ("authentik_providers_rac", "0007_migrate_session"), |  | ||||||
|         ("authentik_providers_oauth2", "0028_migrate_session"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.DeleteModel( |  | ||||||
|             name="OldAuthenticatedSession", |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -1,103 +0,0 @@ | |||||||
| # Generated by Django 5.1.9 on 2025-05-14 11:15 |  | ||||||
|  |  | ||||||
| from django.apps.registry import Apps, apps as global_apps |  | ||||||
| from django.db import migrations |  | ||||||
| from django.contrib.contenttypes.management import create_contenttypes |  | ||||||
| from django.contrib.auth.management import create_permissions |  | ||||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def migrate_authenticated_session_permissions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): |  | ||||||
|     """Migrate permissions from OldAuthenticatedSession to AuthenticatedSession""" |  | ||||||
|     db_alias = schema_editor.connection.alias |  | ||||||
|  |  | ||||||
|     # `apps` here is just an instance of `django.db.migrations.state.AppConfigStub`, we need the |  | ||||||
|     # real config for creating permissions and content types |  | ||||||
|     authentik_core_config = global_apps.get_app_config("authentik_core") |  | ||||||
|     # These are only run by django after all migrations, but we need them right now. |  | ||||||
|     # `global_apps` is needed for that, since the `apps` registry here only contains stubs. |  | ||||||
|     create_permissions(authentik_core_config, using=db_alias, verbosity=1) |  | ||||||
|     create_contenttypes(authentik_core_config, using=db_alias, verbosity=1) |  | ||||||
|  |  | ||||||
|     # But from now on, this is just a regular migration, so use `apps` |  | ||||||
|     Permission = apps.get_model("auth", "Permission") |  | ||||||
|     ContentType = apps.get_model("contenttypes", "ContentType") |  | ||||||
|  |  | ||||||
|     try: |  | ||||||
|         old_ct = ContentType.objects.using(db_alias).get( |  | ||||||
|             app_label="authentik_core", model="oldauthenticatedsession" |  | ||||||
|         ) |  | ||||||
|         new_ct = ContentType.objects.using(db_alias).get( |  | ||||||
|             app_label="authentik_core", model="authenticatedsession" |  | ||||||
|         ) |  | ||||||
|     except ContentType.DoesNotExist: |  | ||||||
|         # This should exist at this point, but if not, let's cut our losses |  | ||||||
|         return |  | ||||||
|  |  | ||||||
|     # Get all permissions for the old content type |  | ||||||
|     old_perms = Permission.objects.using(db_alias).filter(content_type=old_ct) |  | ||||||
|  |  | ||||||
|     # Create equivalent permissions for the new content type |  | ||||||
|     for old_perm in old_perms: |  | ||||||
|         new_perm = ( |  | ||||||
|             Permission.objects.using(db_alias) |  | ||||||
|             .filter( |  | ||||||
|                 content_type=new_ct, |  | ||||||
|                 codename=old_perm.codename, |  | ||||||
|             ) |  | ||||||
|             .first() |  | ||||||
|         ) |  | ||||||
|         if not new_perm: |  | ||||||
|             # This should exist at this point, but if not, let's cut our losses |  | ||||||
|             continue |  | ||||||
|  |  | ||||||
|         # Global user permissions |  | ||||||
|         User = apps.get_model("authentik_core", "User") |  | ||||||
|         User.user_permissions.through.objects.using(db_alias).filter( |  | ||||||
|             permission=old_perm |  | ||||||
|         ).all().update(permission=new_perm) |  | ||||||
|  |  | ||||||
|         # Global role permissions |  | ||||||
|         DjangoGroup = apps.get_model("auth", "Group") |  | ||||||
|         DjangoGroup.permissions.through.objects.using(db_alias).filter( |  | ||||||
|             permission=old_perm |  | ||||||
|         ).all().update(permission=new_perm) |  | ||||||
|  |  | ||||||
|         # Object user permissions |  | ||||||
|         UserObjectPermission = apps.get_model("guardian", "UserObjectPermission") |  | ||||||
|         UserObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update( |  | ||||||
|             permission=new_perm, content_type=new_ct |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         # Object role permissions |  | ||||||
|         GroupObjectPermission = apps.get_model("guardian", "GroupObjectPermission") |  | ||||||
|         GroupObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update( |  | ||||||
|             permission=new_perm, content_type=new_ct |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def remove_old_authenticated_session_content_type( |  | ||||||
|     apps: Apps, schema_editor: BaseDatabaseSchemaEditor |  | ||||||
| ): |  | ||||||
|     db_alias = schema_editor.connection.alias |  | ||||||
|     ContentType = apps.get_model("contenttypes", "ContentType") |  | ||||||
|  |  | ||||||
|     ContentType.objects.using(db_alias).filter(model="oldauthenticatedsession").delete() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_core", "0047_delete_oldauthenticatedsession"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.RunPython( |  | ||||||
|             code=migrate_authenticated_session_permissions, |  | ||||||
|             reverse_code=migrations.RunPython.noop, |  | ||||||
|         ), |  | ||||||
|         migrations.RunPython( |  | ||||||
|             code=remove_old_authenticated_session_content_type, |  | ||||||
|             reverse_code=migrations.RunPython.noop, |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -1,7 +1,6 @@ | |||||||
| """authentik core models""" | """authentik core models""" | ||||||
|  |  | ||||||
| from datetime import datetime | from datetime import datetime | ||||||
| from enum import StrEnum |  | ||||||
| from hashlib import sha256 | from hashlib import sha256 | ||||||
| from typing import Any, Optional, Self | from typing import Any, Optional, Self | ||||||
| from uuid import uuid4 | from uuid import uuid4 | ||||||
| @ -10,7 +9,6 @@ from deepmerge import always_merger | |||||||
| from django.contrib.auth.hashers import check_password | from django.contrib.auth.hashers import check_password | ||||||
| from django.contrib.auth.models import AbstractUser | from django.contrib.auth.models import AbstractUser | ||||||
| from django.contrib.auth.models import UserManager as DjangoUserManager | from django.contrib.auth.models import UserManager as DjangoUserManager | ||||||
| from django.contrib.sessions.base_session import AbstractBaseSession |  | ||||||
| from django.db import models | from django.db import models | ||||||
| from django.db.models import Q, QuerySet, options | from django.db.models import Q, QuerySet, options | ||||||
| from django.db.models.constants import LOOKUP_SEP | from django.db.models.constants import LOOKUP_SEP | ||||||
| @ -648,30 +646,19 @@ class SourceUserMatchingModes(models.TextChoices): | |||||||
|     """Different modes a source can handle new/returning users""" |     """Different modes a source can handle new/returning users""" | ||||||
|  |  | ||||||
|     IDENTIFIER = "identifier", _("Use the source-specific identifier") |     IDENTIFIER = "identifier", _("Use the source-specific identifier") | ||||||
|     EMAIL_LINK = ( |     EMAIL_LINK = "email_link", _( | ||||||
|         "email_link", |  | ||||||
|         _( |  | ||||||
|         "Link to a user with identical email address. Can have security implications " |         "Link to a user with identical email address. Can have security implications " | ||||||
|         "when a source doesn't validate email addresses." |         "when a source doesn't validate email addresses." | ||||||
|         ), |  | ||||||
|     ) |     ) | ||||||
|     EMAIL_DENY = ( |     EMAIL_DENY = "email_deny", _( | ||||||
|         "email_deny", |         "Use the user's email address, but deny enrollment when the email address already exists." | ||||||
|         _( |  | ||||||
|             "Use the user's email address, but deny enrollment when the email address already " |  | ||||||
|             "exists." |  | ||||||
|         ), |  | ||||||
|     ) |     ) | ||||||
|     USERNAME_LINK = ( |     USERNAME_LINK = "username_link", _( | ||||||
|         "username_link", |  | ||||||
|         _( |  | ||||||
|         "Link to a user with identical username. Can have security implications " |         "Link to a user with identical username. Can have security implications " | ||||||
|         "when a username is used with another source." |         "when a username is used with another source." | ||||||
|         ), |  | ||||||
|     ) |     ) | ||||||
|     USERNAME_DENY = ( |     USERNAME_DENY = "username_deny", _( | ||||||
|         "username_deny", |         "Use the user's username, but deny enrollment when the username already exists." | ||||||
|         _("Use the user's username, but deny enrollment when the username already exists."), |  | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -679,24 +666,18 @@ class SourceGroupMatchingModes(models.TextChoices): | |||||||
|     """Different modes a source can handle new/returning groups""" |     """Different modes a source can handle new/returning groups""" | ||||||
|  |  | ||||||
|     IDENTIFIER = "identifier", _("Use the source-specific identifier") |     IDENTIFIER = "identifier", _("Use the source-specific identifier") | ||||||
|     NAME_LINK = ( |     NAME_LINK = "name_link", _( | ||||||
|         "name_link", |  | ||||||
|         _( |  | ||||||
|         "Link to a group with identical name. Can have security implications " |         "Link to a group with identical name. Can have security implications " | ||||||
|         "when a group name is used with another source." |         "when a group name is used with another source." | ||||||
|         ), |  | ||||||
|     ) |     ) | ||||||
|     NAME_DENY = ( |     NAME_DENY = "name_deny", _( | ||||||
|         "name_deny", |         "Use the group name, but deny enrollment when the name already exists." | ||||||
|         _("Use the group name, but deny enrollment when the name already exists."), |  | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
| class Source(ManagedModel, SerializerModel, PolicyBindingModel): | class Source(ManagedModel, SerializerModel, PolicyBindingModel): | ||||||
|     """Base Authentication source, i.e. an OAuth Provider, SAML Remote or LDAP Server""" |     """Base Authentication source, i.e. an OAuth Provider, SAML Remote or LDAP Server""" | ||||||
|  |  | ||||||
|     MANAGED_INBUILT = "goauthentik.io/sources/inbuilt" |  | ||||||
|  |  | ||||||
|     name = models.TextField(help_text=_("Source's display Name.")) |     name = models.TextField(help_text=_("Source's display Name.")) | ||||||
|     slug = models.SlugField(help_text=_("Internal source name, used in URLs."), unique=True) |     slug = models.SlugField(help_text=_("Internal source name, used in URLs."), unique=True) | ||||||
|  |  | ||||||
| @ -747,7 +728,8 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | |||||||
|         choices=SourceGroupMatchingModes.choices, |         choices=SourceGroupMatchingModes.choices, | ||||||
|         default=SourceGroupMatchingModes.IDENTIFIER, |         default=SourceGroupMatchingModes.IDENTIFIER, | ||||||
|         help_text=_( |         help_text=_( | ||||||
|             "How the source determines if an existing group should be used or a new group created." |             "How the source determines if an existing group should be used or " | ||||||
|  |             "a new group created." | ||||||
|         ), |         ), | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
| @ -777,17 +759,11 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | |||||||
|     @property |     @property | ||||||
|     def component(self) -> str: |     def component(self) -> str: | ||||||
|         """Return component used to edit this object""" |         """Return component used to edit this object""" | ||||||
|         if self.managed == self.MANAGED_INBUILT: |  | ||||||
|             return "" |  | ||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def property_mapping_type(self) -> "type[PropertyMapping]": |     def property_mapping_type(self) -> "type[PropertyMapping]": | ||||||
|         """Return property mapping type used by this object""" |         """Return property mapping type used by this object""" | ||||||
|         if self.managed == self.MANAGED_INBUILT: |  | ||||||
|             from authentik.core.models import PropertyMapping |  | ||||||
|  |  | ||||||
|             return PropertyMapping |  | ||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
|  |  | ||||||
|     def ui_login_button(self, request: HttpRequest) -> UILoginButton | None: |     def ui_login_button(self, request: HttpRequest) -> UILoginButton | None: | ||||||
| @ -802,14 +778,10 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | |||||||
|  |  | ||||||
|     def get_base_user_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]: |     def get_base_user_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]: | ||||||
|         """Get base properties for a user to build final properties upon.""" |         """Get base properties for a user to build final properties upon.""" | ||||||
|         if self.managed == self.MANAGED_INBUILT: |  | ||||||
|             return {} |  | ||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
|  |  | ||||||
|     def get_base_group_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]: |     def get_base_group_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]: | ||||||
|         """Get base properties for a group to build final properties upon.""" |         """Get base properties for a group to build final properties upon.""" | ||||||
|         if self.managed == self.MANAGED_INBUILT: |  | ||||||
|             return {} |  | ||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
|  |  | ||||||
|     def __str__(self): |     def __str__(self): | ||||||
| @ -840,7 +812,6 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel): | |||||||
|  |  | ||||||
|     user = models.ForeignKey(User, on_delete=models.CASCADE) |     user = models.ForeignKey(User, on_delete=models.CASCADE) | ||||||
|     source = models.ForeignKey(Source, on_delete=models.CASCADE) |     source = models.ForeignKey(Source, on_delete=models.CASCADE) | ||||||
|     identifier = models.TextField() |  | ||||||
|  |  | ||||||
|     objects = InheritanceManager() |     objects = InheritanceManager() | ||||||
|  |  | ||||||
| @ -854,10 +825,6 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel): | |||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|         unique_together = (("user", "source"),) |         unique_together = (("user", "source"),) | ||||||
|         indexes = ( |  | ||||||
|             models.Index(fields=("identifier",)), |  | ||||||
|             models.Index(fields=("source", "identifier")), |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class GroupSourceConnection(SerializerModel, CreatedUpdatedModel): | class GroupSourceConnection(SerializerModel, CreatedUpdatedModel): | ||||||
| @ -1028,75 +995,45 @@ class PropertyMapping(SerializerModel, ManagedModel): | |||||||
|         verbose_name_plural = _("Property Mappings") |         verbose_name_plural = _("Property Mappings") | ||||||
|  |  | ||||||
|  |  | ||||||
| class Session(ExpiringModel, AbstractBaseSession): | class AuthenticatedSession(ExpiringModel): | ||||||
|     """User session with extra fields for fast access""" |     """Additional session class for authenticated users. Augments the standard django session | ||||||
|  |     to achieve the following: | ||||||
|  |         - Make it queryable by user | ||||||
|  |         - Have a direct connection to user objects | ||||||
|  |         - Allow users to view their own sessions and terminate them | ||||||
|  |         - Save structured and well-defined information. | ||||||
|  |     """ | ||||||
|  |  | ||||||
|     # Remove upstream field because we're using our own ExpiringModel |     uuid = models.UUIDField(default=uuid4, primary_key=True) | ||||||
|     expire_date = None |  | ||||||
|     session_data = models.BinaryField(_("session data")) |  | ||||||
|  |  | ||||||
|     # Keep in sync with Session.Keys |     session_key = models.CharField(max_length=40) | ||||||
|     last_ip = models.GenericIPAddressField() |     user = models.ForeignKey(User, on_delete=models.CASCADE) | ||||||
|  |  | ||||||
|  |     last_ip = models.TextField() | ||||||
|     last_user_agent = models.TextField(blank=True) |     last_user_agent = models.TextField(blank=True) | ||||||
|     last_used = models.DateTimeField(auto_now=True) |     last_used = models.DateTimeField(auto_now=True) | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         verbose_name = _("Session") |  | ||||||
|         verbose_name_plural = _("Sessions") |  | ||||||
|         indexes = ExpiringModel.Meta.indexes + [ |  | ||||||
|             models.Index(fields=["expires", "session_key"]), |  | ||||||
|         ] |  | ||||||
|         default_permissions = [] |  | ||||||
|  |  | ||||||
|     def __str__(self): |  | ||||||
|         return self.session_key |  | ||||||
|  |  | ||||||
|     class Keys(StrEnum): |  | ||||||
|         """ |  | ||||||
|         Keys to be set with the session interface for the fields above to be updated. |  | ||||||
|  |  | ||||||
|         If a field is added here that needs to be initialized when the session is initialized, |  | ||||||
|         it must also be reflected in authentik.root.middleware.SessionMiddleware.process_request |  | ||||||
|         and in authentik.core.sessions.SessionStore.__init__ |  | ||||||
|         """ |  | ||||||
|  |  | ||||||
|         LAST_IP = "last_ip" |  | ||||||
|         LAST_USER_AGENT = "last_user_agent" |  | ||||||
|         LAST_USED = "last_used" |  | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def get_session_store_class(cls): |  | ||||||
|         from authentik.core.sessions import SessionStore |  | ||||||
|  |  | ||||||
|         return SessionStore |  | ||||||
|  |  | ||||||
|     def get_decoded(self): |  | ||||||
|         raise NotImplementedError |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthenticatedSession(SerializerModel): |  | ||||||
|     session = models.OneToOneField(Session, on_delete=models.CASCADE, primary_key=True) |  | ||||||
|     # We use the session as primary key, but we need the API to be able to reference |  | ||||||
|     # this object uniquely without exposing the session key |  | ||||||
|     uuid = models.UUIDField(default=uuid4, unique=True) |  | ||||||
|  |  | ||||||
|     user = models.ForeignKey(User, on_delete=models.CASCADE) |  | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|         verbose_name = _("Authenticated Session") |         verbose_name = _("Authenticated Session") | ||||||
|         verbose_name_plural = _("Authenticated Sessions") |         verbose_name_plural = _("Authenticated Sessions") | ||||||
|  |         indexes = ExpiringModel.Meta.indexes + [ | ||||||
|  |             models.Index(fields=["session_key"]), | ||||||
|  |         ] | ||||||
|  |  | ||||||
|     def __str__(self) -> str: |     def __str__(self) -> str: | ||||||
|         return f"Authenticated Session {str(self.pk)[:10]}" |         return f"Authenticated Session {self.session_key[:10]}" | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     def from_request(request: HttpRequest, user: User) -> Optional["AuthenticatedSession"]: |     def from_request(request: HttpRequest, user: User) -> Optional["AuthenticatedSession"]: | ||||||
|         """Create a new session from a http request""" |         """Create a new session from a http request""" | ||||||
|         if not hasattr(request, "session") or not request.session.exists( |         from authentik.root.middleware import ClientIPMiddleware | ||||||
|             request.session.session_key |  | ||||||
|         ): |         if not hasattr(request, "session") or not request.session.session_key: | ||||||
|             return None |             return None | ||||||
|         return AuthenticatedSession( |         return AuthenticatedSession( | ||||||
|             session=Session.objects.filter(session_key=request.session.session_key).first(), |             session_key=request.session.session_key, | ||||||
|             user=user, |             user=user, | ||||||
|  |             last_ip=ClientIPMiddleware.get_client_ip(request), | ||||||
|  |             last_user_agent=request.META.get("HTTP_USER_AGENT", ""), | ||||||
|  |             expires=request.session.get_expiry_date(), | ||||||
|         ) |         ) | ||||||
|  | |||||||
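Both variants of the model in this hunk keep AuthenticatedSession queryable by user, which is what the docstring's "allow users to view their own sessions and terminate them" relies on. A minimal sketch of that usage, assuming a configured authentik/Django environment (terminate_sessions is an illustrative helper, not repository code):

from authentik.core.models import AuthenticatedSession, User

def terminate_sessions(user: User) -> int:
    # Illustrative only: revoke every session belonging to a user. Both model
    # variants in this diff keep a ForeignKey to User, so the same query works
    # on either branch; the delete signal in signals.py below cleans up the
    # backing session store (Session row or cache entry respectively).
    deleted, _ = AuthenticatedSession.objects.filter(user=user).delete()
    return deleted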
| @ -1,168 +0,0 @@ | |||||||
| """authentik sessions engine""" |  | ||||||
|  |  | ||||||
| import pickle  # nosec |  | ||||||
|  |  | ||||||
| from django.contrib.auth import BACKEND_SESSION_KEY, HASH_SESSION_KEY, SESSION_KEY |  | ||||||
| from django.contrib.sessions.backends.db import SessionStore as SessionBase |  | ||||||
| from django.core.exceptions import SuspiciousOperation |  | ||||||
| from django.utils import timezone |  | ||||||
| from django.utils.functional import cached_property |  | ||||||
| from structlog.stdlib import get_logger |  | ||||||
|  |  | ||||||
| from authentik.root.middleware import ClientIPMiddleware |  | ||||||
|  |  | ||||||
| LOGGER = get_logger() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class SessionStore(SessionBase): |  | ||||||
|     def __init__(self, session_key=None, last_ip=None, last_user_agent=""): |  | ||||||
|         super().__init__(session_key) |  | ||||||
|         self._create_kwargs = { |  | ||||||
|             "last_ip": last_ip or ClientIPMiddleware.default_ip, |  | ||||||
|             "last_user_agent": last_user_agent, |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def get_model_class(cls): |  | ||||||
|         from authentik.core.models import Session |  | ||||||
|  |  | ||||||
|         return Session |  | ||||||
|  |  | ||||||
|     @cached_property |  | ||||||
|     def model_fields(self): |  | ||||||
|         return [k.value for k in self.model.Keys] |  | ||||||
|  |  | ||||||
|     def _get_session_from_db(self): |  | ||||||
|         try: |  | ||||||
|             return ( |  | ||||||
|                 self.model.objects.select_related( |  | ||||||
|                     "authenticatedsession", |  | ||||||
|                     "authenticatedsession__user", |  | ||||||
|                 ) |  | ||||||
|                 .prefetch_related( |  | ||||||
|                     "authenticatedsession__user__groups", |  | ||||||
|                     "authenticatedsession__user__user_permissions", |  | ||||||
|                 ) |  | ||||||
|                 .get( |  | ||||||
|                     session_key=self.session_key, |  | ||||||
|                     expires__gt=timezone.now(), |  | ||||||
|                 ) |  | ||||||
|             ) |  | ||||||
|         except (self.model.DoesNotExist, SuspiciousOperation) as exc: |  | ||||||
|             if isinstance(exc, SuspiciousOperation): |  | ||||||
|                 LOGGER.warning(str(exc)) |  | ||||||
|             self._session_key = None |  | ||||||
|  |  | ||||||
|     async def _aget_session_from_db(self): |  | ||||||
|         try: |  | ||||||
|             return ( |  | ||||||
|                 await self.model.objects.select_related( |  | ||||||
|                     "authenticatedsession", |  | ||||||
|                     "authenticatedsession__user", |  | ||||||
|                 ) |  | ||||||
|                 .prefetch_related( |  | ||||||
|                     "authenticatedsession__user__groups", |  | ||||||
|                     "authenticatedsession__user__user_permissions", |  | ||||||
|                 ) |  | ||||||
|                 .aget( |  | ||||||
|                     session_key=self.session_key, |  | ||||||
|                     expires__gt=timezone.now(), |  | ||||||
|                 ) |  | ||||||
|             ) |  | ||||||
|         except (self.model.DoesNotExist, SuspiciousOperation) as exc: |  | ||||||
|             if isinstance(exc, SuspiciousOperation): |  | ||||||
|                 LOGGER.warning(str(exc)) |  | ||||||
|             self._session_key = None |  | ||||||
|  |  | ||||||
|     def encode(self, session_dict): |  | ||||||
|         return pickle.dumps(session_dict, protocol=pickle.HIGHEST_PROTOCOL) |  | ||||||
|  |  | ||||||
|     def decode(self, session_data): |  | ||||||
|         try: |  | ||||||
|             return pickle.loads(session_data)  # nosec |  | ||||||
|         except pickle.PickleError: |  | ||||||
|             # ValueError, unpickling exceptions. If any of these happen, just return an empty |  | ||||||
|             # dictionary (an empty session) |  | ||||||
|             pass |  | ||||||
|         return {} |  | ||||||
|  |  | ||||||
|     def load(self): |  | ||||||
|         s = self._get_session_from_db() |  | ||||||
|         if s: |  | ||||||
|             return { |  | ||||||
|                 "authenticatedsession": getattr(s, "authenticatedsession", None), |  | ||||||
|                 **{k: getattr(s, k) for k in self.model_fields}, |  | ||||||
|                 **self.decode(s.session_data), |  | ||||||
|             } |  | ||||||
|         else: |  | ||||||
|             return {} |  | ||||||
|  |  | ||||||
|     async def aload(self): |  | ||||||
|         s = await self._aget_session_from_db() |  | ||||||
|         if s: |  | ||||||
|             return { |  | ||||||
|                 "authenticatedsession": getattr(s, "authenticatedsession", None), |  | ||||||
|                 **{k: getattr(s, k) for k in self.model_fields}, |  | ||||||
|                 **self.decode(s.session_data), |  | ||||||
|             } |  | ||||||
|         else: |  | ||||||
|             return {} |  | ||||||
|  |  | ||||||
|     def create_model_instance(self, data): |  | ||||||
|         args = { |  | ||||||
|             "session_key": self._get_or_create_session_key(), |  | ||||||
|             "expires": self.get_expiry_date(), |  | ||||||
|             "session_data": {}, |  | ||||||
|             **self._create_kwargs, |  | ||||||
|         } |  | ||||||
|         for k, v in data.items(): |  | ||||||
|             # Don't save: |  | ||||||
|             # - unused auth data |  | ||||||
|             # - related models |  | ||||||
|             if k in [SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY, "authenticatedsession"]: |  | ||||||
|                 pass |  | ||||||
|             elif k in self.model_fields: |  | ||||||
|                 args[k] = v |  | ||||||
|             else: |  | ||||||
|                 args["session_data"][k] = v |  | ||||||
|         args["session_data"] = self.encode(args["session_data"]) |  | ||||||
|         return self.model(**args) |  | ||||||
|  |  | ||||||
|     async def acreate_model_instance(self, data): |  | ||||||
|         args = { |  | ||||||
|             "session_key": await self._aget_or_create_session_key(), |  | ||||||
|             "expires": await self.aget_expiry_date(), |  | ||||||
|             "session_data": {}, |  | ||||||
|             **self._create_kwargs, |  | ||||||
|         } |  | ||||||
|         for k, v in data.items(): |  | ||||||
|             # Don't save: |  | ||||||
|             # - unused auth data |  | ||||||
|             # - related models |  | ||||||
|             if k in [SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY, "authenticatedsession"]: |  | ||||||
|                 pass |  | ||||||
|             elif k in self.model_fields: |  | ||||||
|                 args[k] = v |  | ||||||
|             else: |  | ||||||
|                 args["session_data"][k] = v |  | ||||||
|         args["session_data"] = self.encode(args["session_data"]) |  | ||||||
|         return self.model(**args) |  | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def clear_expired(cls): |  | ||||||
|         cls.get_model_class().objects.filter(expires__lt=timezone.now()).delete() |  | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     async def aclear_expired(cls): |  | ||||||
|         await cls.get_model_class().objects.filter(expires__lt=timezone.now()).adelete() |  | ||||||
|  |  | ||||||
|     def cycle_key(self): |  | ||||||
|         data = self._session |  | ||||||
|         key = self.session_key |  | ||||||
|         self.create() |  | ||||||
|         self._session_cache = data |  | ||||||
|         if key: |  | ||||||
|             self.delete(key) |  | ||||||
|         if (authenticated_session := data.get("authenticatedsession")) is not None: |  | ||||||
|             authenticated_session.session_id = self.session_key |  | ||||||
|             authenticated_session.save(force_insert=True) |  | ||||||
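The deleted engine above keeps the well-known Session.Keys fields as real database columns and pickles everything else into session_data. A standalone round trip of that encode/decode pair, mirroring the two methods above (plain Python, illustration only):

import pickle  # nosec - session payloads are written and read server-side only

def encode(session_dict: dict) -> bytes:
    # Same as SessionStore.encode(): plain pickle at the highest protocol.
    return pickle.dumps(session_dict, protocol=pickle.HIGHEST_PROTOCOL)

def decode(session_data: bytes) -> dict:
    # Same degrade-to-empty behaviour as SessionStore.decode().
    try:
        return pickle.loads(session_data)  # nosec
    except pickle.PickleError:
        return {}

blob = encode({"theme": "dark", "redirect": "/app"})
assert decode(blob) == {"theme": "dark", "redirect": "/app"}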
| @ -1,10 +1,11 @@ | |||||||
| """authentik core signals""" | """authentik core signals""" | ||||||
|  |  | ||||||
| from django.contrib.auth.signals import user_logged_in | from django.contrib.auth.signals import user_logged_in, user_logged_out | ||||||
|  | from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||||
| from django.core.cache import cache | from django.core.cache import cache | ||||||
| from django.core.signals import Signal | from django.core.signals import Signal | ||||||
| from django.db.models import Model | from django.db.models import Model | ||||||
| from django.db.models.signals import post_delete, post_save, pre_save | from django.db.models.signals import post_save, pre_delete, pre_save | ||||||
| from django.dispatch import receiver | from django.dispatch import receiver | ||||||
| from django.http.request import HttpRequest | from django.http.request import HttpRequest | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
| @ -14,7 +15,6 @@ from authentik.core.models import ( | |||||||
|     AuthenticatedSession, |     AuthenticatedSession, | ||||||
|     BackchannelProvider, |     BackchannelProvider, | ||||||
|     ExpiringModel, |     ExpiringModel, | ||||||
|     Session, |  | ||||||
|     User, |     User, | ||||||
|     default_token_duration, |     default_token_duration, | ||||||
| ) | ) | ||||||
| @ -49,10 +49,19 @@ def user_logged_in_session(sender, request: HttpRequest, user: User, **_): | |||||||
|         session.save() |         session.save() | ||||||
|  |  | ||||||
|  |  | ||||||
| @receiver(post_delete, sender=AuthenticatedSession) | @receiver(user_logged_out) | ||||||
|  | def user_logged_out_session(sender, request: HttpRequest, user: User, **_): | ||||||
|  |     """Delete AuthenticatedSession if it exists""" | ||||||
|  |     if not request.session or not request.session.session_key: | ||||||
|  |         return | ||||||
|  |     AuthenticatedSession.objects.filter(session_key=request.session.session_key).delete() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @receiver(pre_delete, sender=AuthenticatedSession) | ||||||
| def authenticated_session_delete(sender: type[Model], instance: "AuthenticatedSession", **_): | def authenticated_session_delete(sender: type[Model], instance: "AuthenticatedSession", **_): | ||||||
|     """Delete session when authenticated session is deleted""" |     """Delete session when authenticated session is deleted""" | ||||||
|     Session.objects.filter(session_key=instance.pk).delete() |     cache_key = f"{KEY_PREFIX}{instance.session_key}" | ||||||
|  |     cache.delete(cache_key) | ||||||
|  |  | ||||||
|  |  | ||||||
| @receiver(pre_save) | @receiver(pre_save) | ||||||
|  | |||||||
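On the branch that keeps cache-backed sessions, removing an AuthenticatedSession also has to evict the entry Django stored under the session cache KEY_PREFIX, which is what the pre_delete receiver above does. A hedged sketch of that convention, assuming a configured Django cache (the helper name is illustrative):

from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.core.cache import cache

def purge_cached_session(session_key: str) -> None:
    # Mirror of the pre_delete receiver: drop the backing cache entry so the
    # browser session ends together with the AuthenticatedSession row.
    cache.delete(f"{KEY_PREFIX}{session_key}")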
| @ -48,7 +48,6 @@ LOGGER = get_logger() | |||||||
|  |  | ||||||
| PLAN_CONTEXT_SOURCE_GROUPS = "source_groups" | PLAN_CONTEXT_SOURCE_GROUPS = "source_groups" | ||||||
| SESSION_KEY_SOURCE_FLOW_STAGES = "authentik/flows/source_flow_stages" | SESSION_KEY_SOURCE_FLOW_STAGES = "authentik/flows/source_flow_stages" | ||||||
| SESSION_KEY_SOURCE_FLOW_CONTEXT = "authentik/flows/source_flow_context" |  | ||||||
| SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token"  # nosec | SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token"  # nosec | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -262,7 +261,6 @@ class SourceFlowManager: | |||||||
|                 plan.append_stage(stage) |                 plan.append_stage(stage) | ||||||
|         for stage in self.request.session.get(SESSION_KEY_SOURCE_FLOW_STAGES, []): |         for stage in self.request.session.get(SESSION_KEY_SOURCE_FLOW_STAGES, []): | ||||||
|             plan.append_stage(stage) |             plan.append_stage(stage) | ||||||
|         plan.context.update(self.request.session.get(SESSION_KEY_SOURCE_FLOW_CONTEXT, {})) |  | ||||||
|         return plan.to_redirect(self.request, flow) |         return plan.to_redirect(self.request, flow) | ||||||
|  |  | ||||||
|     def handle_auth( |     def handle_auth( | ||||||
|  | |||||||
| @ -2,16 +2,22 @@ | |||||||
|  |  | ||||||
| from datetime import datetime, timedelta | from datetime import datetime, timedelta | ||||||
|  |  | ||||||
|  | from django.conf import ImproperlyConfigured | ||||||
|  | from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||||
|  | from django.contrib.sessions.backends.db import SessionStore as DBSessionStore | ||||||
|  | from django.core.cache import cache | ||||||
| from django.utils.timezone import now | from django.utils.timezone import now | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| from authentik.core.models import ( | from authentik.core.models import ( | ||||||
|     USER_ATTRIBUTE_EXPIRES, |     USER_ATTRIBUTE_EXPIRES, | ||||||
|     USER_ATTRIBUTE_GENERATED, |     USER_ATTRIBUTE_GENERATED, | ||||||
|  |     AuthenticatedSession, | ||||||
|     ExpiringModel, |     ExpiringModel, | ||||||
|     User, |     User, | ||||||
| ) | ) | ||||||
| from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task | from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task | ||||||
|  | from authentik.lib.config import CONFIG | ||||||
| from authentik.root.celery import CELERY_APP | from authentik.root.celery import CELERY_APP | ||||||
|  |  | ||||||
| LOGGER = get_logger() | LOGGER = get_logger() | ||||||
| @ -32,6 +38,40 @@ def clean_expired_models(self: SystemTask): | |||||||
|             obj.expire_action() |             obj.expire_action() | ||||||
|         LOGGER.debug("Expired models", model=cls, amount=amount) |         LOGGER.debug("Expired models", model=cls, amount=amount) | ||||||
|         messages.append(f"Expired {amount} {cls._meta.verbose_name_plural}") |         messages.append(f"Expired {amount} {cls._meta.verbose_name_plural}") | ||||||
|  |     # Special case | ||||||
|  |     amount = 0 | ||||||
|  |  | ||||||
|  |     for session in AuthenticatedSession.objects.all(): | ||||||
|  |         match CONFIG.get("session_storage", "cache"): | ||||||
|  |             case "cache": | ||||||
|  |                 cache_key = f"{KEY_PREFIX}{session.session_key}" | ||||||
|  |                 value = None | ||||||
|  |                 try: | ||||||
|  |                     value = cache.get(cache_key) | ||||||
|  |  | ||||||
|  |                 except Exception as exc: | ||||||
|  |                     LOGGER.debug("Failed to get session from cache", exc=exc) | ||||||
|  |                 if not value: | ||||||
|  |                     session.delete() | ||||||
|  |                     amount += 1 | ||||||
|  |             case "db": | ||||||
|  |                 if not ( | ||||||
|  |                     DBSessionStore.get_model_class() | ||||||
|  |                     .objects.filter(session_key=session.session_key, expire_date__gt=now()) | ||||||
|  |                     .exists() | ||||||
|  |                 ): | ||||||
|  |                     session.delete() | ||||||
|  |                     amount += 1 | ||||||
|  |             case _: | ||||||
|  |                 # Should never happen, as we check for other values in authentik/root/settings.py | ||||||
|  |                 raise ImproperlyConfigured( | ||||||
|  |                     "Invalid session_storage setting, allowed values are db and cache" | ||||||
|  |                 ) | ||||||
|  |     if CONFIG.get("session_storage", "cache") == "db": | ||||||
|  |         DBSessionStore.clear_expired() | ||||||
|  |     LOGGER.debug("Expired sessions", model=AuthenticatedSession, amount=amount) | ||||||
|  |  | ||||||
|  |     messages.append(f"Expired {amount} {AuthenticatedSession._meta.verbose_name_plural}") | ||||||
|     self.set_status(TaskStatus.SUCCESSFUL, *messages) |     self.set_status(TaskStatus.SUCCESSFUL, *messages) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
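clean_expired_models treats sessions as a special case because the source of truth depends on the session_storage setting. A compact sketch of the per-session liveness check the task performs, using Django's stock cache and db session backends (the helper is illustrative, not repository code):

from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.contrib.sessions.backends.db import SessionStore as DBSessionStore
from django.core.cache import cache
from django.utils.timezone import now

def session_is_alive(session_key: str, storage: str = "cache") -> bool:
    # "cache": the session lives exactly as long as its cache entry.
    if storage == "cache":
        return cache.get(f"{KEY_PREFIX}{session_key}") is not None
    # "db": the session lives while its row exists and has not expired.
    if storage == "db":
        return (
            DBSessionStore.get_model_class()
            .objects.filter(session_key=session_key, expire_date__gt=now())
            .exists()
        )
    raise ValueError("session_storage must be 'cache' or 'db'")

AuthenticatedSession rows that fail this check are deleted and counted, which is the amount reported in the task's status message.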
| @ -11,7 +11,6 @@ | |||||||
|         build: "{{ build }}", |         build: "{{ build }}", | ||||||
|         api: { |         api: { | ||||||
|             base: "{{ base_url }}", |             base: "{{ base_url }}", | ||||||
|             relBase: "{{ base_url_rel }}", |  | ||||||
|         }, |         }, | ||||||
|     }; |     }; | ||||||
|     window.addEventListener("DOMContentLoaded", function () { |     window.addEventListener("DOMContentLoaded", function () { | ||||||
|  | |||||||
| @ -16,14 +16,12 @@ | |||||||
|         {% block head_before %} |         {% block head_before %} | ||||||
|         {% endblock %} |         {% endblock %} | ||||||
|         <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}"> |         <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}"> | ||||||
|         <style>{{ brand.branding_custom_css }}</style> |         <link rel="stylesheet" type="text/css" href="{% static 'dist/custom.css' %}" data-inject> | ||||||
|         <script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script> |         <script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script> | ||||||
|         <script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script> |         <script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script> | ||||||
|         {% block head %} |         {% block head %} | ||||||
|         {% endblock %} |         {% endblock %} | ||||||
|         {% for key, value in html_meta.items %} |         <meta name="sentry-trace" content="{{ sentry_trace }}" /> | ||||||
|         <meta name="{{key}}" content="{{ value }}" /> |  | ||||||
|         {% endfor %} |  | ||||||
|     </head> |     </head> | ||||||
|     <body> |     <body> | ||||||
|         {% block body %} |         {% block body %} | ||||||
|  | |||||||
| @ -4,7 +4,7 @@ | |||||||
| {% load i18n %} | {% load i18n %} | ||||||
|  |  | ||||||
| {% block head_before %} | {% block head_before %} | ||||||
| <link rel="prefetch" href="{{ request.brand.branding_default_flow_background_url }}" /> | <link rel="prefetch" href="{% static 'dist/assets/images/flow_background.jpg' %}" /> | ||||||
| <link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}"> | <link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}"> | ||||||
| <link rel="stylesheet" type="text/css" href="{% static 'dist/theme-dark.css' %}" media="(prefers-color-scheme: dark)"> | <link rel="stylesheet" type="text/css" href="{% static 'dist/theme-dark.css' %}" media="(prefers-color-scheme: dark)"> | ||||||
| {% include "base/header_js.html" %} | {% include "base/header_js.html" %} | ||||||
| @ -13,7 +13,7 @@ | |||||||
| {% block head %} | {% block head %} | ||||||
| <style> | <style> | ||||||
| :root { | :root { | ||||||
|     --ak-flow-background: url("{{ request.brand.branding_default_flow_background_url }}"); |     --ak-flow-background: url("{% static 'dist/assets/images/flow_background.jpg' %}"); | ||||||
|     --pf-c-background-image--BackgroundImage: var(--ak-flow-background); |     --pf-c-background-image--BackgroundImage: var(--ak-flow-background); | ||||||
|     --pf-c-background-image--BackgroundImage-2x: var(--ak-flow-background); |     --pf-c-background-image--BackgroundImage-2x: var(--ak-flow-background); | ||||||
|     --pf-c-background-image--BackgroundImage--sm: var(--ak-flow-background); |     --pf-c-background-image--BackgroundImage--sm: var(--ak-flow-background); | ||||||
|  | |||||||
| @ -1,17 +1,9 @@ | |||||||
| """Test API Utils""" | """Test API Utils""" | ||||||
|  |  | ||||||
| from rest_framework.exceptions import ValidationError | from rest_framework.exceptions import ValidationError | ||||||
| from rest_framework.serializers import ( |  | ||||||
|     HyperlinkedModelSerializer, |  | ||||||
| ) |  | ||||||
| from rest_framework.serializers import ( |  | ||||||
|     ModelSerializer as BaseModelSerializer, |  | ||||||
| ) |  | ||||||
| from rest_framework.test import APITestCase | from rest_framework.test import APITestCase | ||||||
|  |  | ||||||
| from authentik.core.api.utils import ModelSerializer as CustomModelSerializer |  | ||||||
| from authentik.core.api.utils import is_dict | from authentik.core.api.utils import is_dict | ||||||
| from authentik.lib.utils.reflection import all_subclasses |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestAPIUtils(APITestCase): | class TestAPIUtils(APITestCase): | ||||||
| @ -22,14 +14,3 @@ class TestAPIUtils(APITestCase): | |||||||
|         self.assertIsNone(is_dict({})) |         self.assertIsNone(is_dict({})) | ||||||
|         with self.assertRaises(ValidationError): |         with self.assertRaises(ValidationError): | ||||||
|             is_dict("foo") |             is_dict("foo") | ||||||
|  |  | ||||||
|     def test_all_serializers_descend_from_custom(self): |  | ||||||
|         """Test that every serializer we define descends from our own ModelSerializer""" |  | ||||||
|         # Weirdly, there's only one serializer in `rest_framework` which descends from |  | ||||||
|         # ModelSerializer: HyperlinkedModelSerializer |  | ||||||
|         expected = {CustomModelSerializer, HyperlinkedModelSerializer} |  | ||||||
|         actual = set(all_subclasses(BaseModelSerializer)) - set( |  | ||||||
|             all_subclasses(CustomModelSerializer) |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         self.assertEqual(expected, actual) |  | ||||||
|  | |||||||
| @ -5,7 +5,7 @@ from json import loads | |||||||
| from django.urls.base import reverse | from django.urls.base import reverse | ||||||
| from rest_framework.test import APITestCase | from rest_framework.test import APITestCase | ||||||
|  |  | ||||||
| from authentik.core.models import AuthenticatedSession, Session, User | from authentik.core.models import User | ||||||
| from authentik.core.tests.utils import create_test_admin_user | from authentik.core.tests.utils import create_test_admin_user | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -30,18 +30,3 @@ class TestAuthenticatedSessionsAPI(APITestCase): | |||||||
|         self.assertEqual(response.status_code, 200) |         self.assertEqual(response.status_code, 200) | ||||||
|         body = loads(response.content.decode()) |         body = loads(response.content.decode()) | ||||||
|         self.assertEqual(body["pagination"]["count"], 1) |         self.assertEqual(body["pagination"]["count"], 1) | ||||||
|  |  | ||||||
|     def test_delete(self): |  | ||||||
|         """Test deletion""" |  | ||||||
|         self.client.force_login(self.user) |  | ||||||
|         self.assertEqual(AuthenticatedSession.objects.all().count(), 1) |  | ||||||
|         self.assertEqual(Session.objects.all().count(), 1) |  | ||||||
|         response = self.client.delete( |  | ||||||
|             reverse( |  | ||||||
|                 "authentik_api:authenticatedsession-detail", |  | ||||||
|                 kwargs={"uuid": AuthenticatedSession.objects.first().uuid}, |  | ||||||
|             ) |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(response.status_code, 204) |  | ||||||
|         self.assertEqual(AuthenticatedSession.objects.all().count(), 0) |  | ||||||
|         self.assertEqual(Session.objects.all().count(), 0) |  | ||||||
|  | |||||||
| @ -124,16 +124,6 @@ class TestGroupsAPI(APITestCase): | |||||||
|             {"is_superuser": ["User does not have permission to set superuser status to True."]}, |             {"is_superuser": ["User does not have permission to set superuser status to True."]}, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     def test_superuser_no_perm_no_superuser(self): |  | ||||||
|         """Test creating a group without permission and without superuser flag""" |  | ||||||
|         assign_perm("authentik_core.add_group", self.login_user) |  | ||||||
|         self.client.force_login(self.login_user) |  | ||||||
|         res = self.client.post( |  | ||||||
|             reverse("authentik_api:group-list"), |  | ||||||
|             data={"name": generate_id(), "is_superuser": False}, |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(res.status_code, 201) |  | ||||||
|  |  | ||||||
|     def test_superuser_update_no_perm(self): |     def test_superuser_update_no_perm(self): | ||||||
|         """Test updating a superuser group without permission""" |         """Test updating a superuser group without permission""" | ||||||
|         group = Group.objects.create(name=generate_id(), is_superuser=True) |         group = Group.objects.create(name=generate_id(), is_superuser=True) | ||||||
|  | |||||||
| @ -1,19 +0,0 @@ | |||||||
| from django.apps import apps |  | ||||||
| from django.urls import reverse |  | ||||||
| from rest_framework.test import APITestCase |  | ||||||
|  |  | ||||||
| from authentik.core.tests.utils import create_test_admin_user |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestSourceAPI(APITestCase): |  | ||||||
|     def setUp(self) -> None: |  | ||||||
|         self.user = create_test_admin_user() |  | ||||||
|         self.client.force_login(self.user) |  | ||||||
|  |  | ||||||
|     def test_builtin_source_used_by(self): |  | ||||||
|         """Test Providers's types endpoint""" |  | ||||||
|         apps.get_app_config("authentik_core").source_inbuilt() |  | ||||||
|         response = self.client.get( |  | ||||||
|             reverse("authentik_api:source-used-by", kwargs={"slug": "authentik-built-in"}), |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(response.status_code, 200) |  | ||||||
| @ -13,10 +13,7 @@ from authentik.core.models import ( | |||||||
|     TokenIntents, |     TokenIntents, | ||||||
|     User, |     User, | ||||||
| ) | ) | ||||||
| from authentik.core.tasks import ( | from authentik.core.tasks import clean_expired_models, clean_temporary_users | ||||||
|     clean_expired_models, |  | ||||||
|     clean_temporary_users, |  | ||||||
| ) |  | ||||||
| from authentik.core.tests.utils import create_test_admin_user | from authentik.core.tests.utils import create_test_admin_user | ||||||
| from authentik.lib.generators import generate_id | from authentik.lib.generators import generate_id | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,8 +1,9 @@ | |||||||
| """Test Users API""" | """Test Users API""" | ||||||
|  |  | ||||||
| from datetime import datetime | from datetime import datetime | ||||||
| from json import loads |  | ||||||
|  |  | ||||||
|  | from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||||
|  | from django.core.cache import cache | ||||||
| from django.urls.base import reverse | from django.urls.base import reverse | ||||||
| from rest_framework.test import APITestCase | from rest_framework.test import APITestCase | ||||||
|  |  | ||||||
| @ -10,17 +11,11 @@ from authentik.brands.models import Brand | |||||||
| from authentik.core.models import ( | from authentik.core.models import ( | ||||||
|     USER_ATTRIBUTE_TOKEN_EXPIRING, |     USER_ATTRIBUTE_TOKEN_EXPIRING, | ||||||
|     AuthenticatedSession, |     AuthenticatedSession, | ||||||
|     Session, |  | ||||||
|     Token, |     Token, | ||||||
|     User, |     User, | ||||||
|     UserTypes, |     UserTypes, | ||||||
| ) | ) | ||||||
| from authentik.core.tests.utils import ( | from authentik.core.tests.utils import create_test_admin_user, create_test_brand, create_test_flow | ||||||
|     create_test_admin_user, |  | ||||||
|     create_test_brand, |  | ||||||
|     create_test_flow, |  | ||||||
|     create_test_user, |  | ||||||
| ) |  | ||||||
| from authentik.flows.models import FlowDesignation | from authentik.flows.models import FlowDesignation | ||||||
| from authentik.lib.generators import generate_id, generate_key | from authentik.lib.generators import generate_id, generate_key | ||||||
| from authentik.stages.email.models import EmailStage | from authentik.stages.email.models import EmailStage | ||||||
| @ -31,7 +26,7 @@ class TestUsersAPI(APITestCase): | |||||||
|  |  | ||||||
|     def setUp(self) -> None: |     def setUp(self) -> None: | ||||||
|         self.admin = create_test_admin_user() |         self.admin = create_test_admin_user() | ||||||
|         self.user = create_test_user() |         self.user = User.objects.create(username="test-user") | ||||||
|  |  | ||||||
|     def test_filter_type(self): |     def test_filter_type(self): | ||||||
|         """Test API filtering by type""" |         """Test API filtering by type""" | ||||||
| @ -46,35 +41,6 @@ class TestUsersAPI(APITestCase): | |||||||
|         ) |         ) | ||||||
|         self.assertEqual(response.status_code, 200) |         self.assertEqual(response.status_code, 200) | ||||||
|  |  | ||||||
|     def test_filter_is_superuser(self): |  | ||||||
|         """Test API filtering by superuser status""" |  | ||||||
|         User.objects.all().delete() |  | ||||||
|         admin = create_test_admin_user() |  | ||||||
|         self.client.force_login(admin) |  | ||||||
|         # Test superuser |  | ||||||
|         response = self.client.get( |  | ||||||
|             reverse("authentik_api:user-list"), |  | ||||||
|             data={ |  | ||||||
|                 "is_superuser": True, |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(response.status_code, 200) |  | ||||||
|         body = loads(response.content) |  | ||||||
|         self.assertEqual(len(body["results"]), 1) |  | ||||||
|         self.assertEqual(body["results"][0]["username"], admin.username) |  | ||||||
|         # Test non-superuser |  | ||||||
|         user = create_test_user() |  | ||||||
|         response = self.client.get( |  | ||||||
|             reverse("authentik_api:user-list"), |  | ||||||
|             data={ |  | ||||||
|                 "is_superuser": False, |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(response.status_code, 200) |  | ||||||
|         body = loads(response.content) |  | ||||||
|         self.assertEqual(len(body["results"]), 1, body) |  | ||||||
|         self.assertEqual(body["results"][0]["username"], user.username) |  | ||||||
|  |  | ||||||
|     def test_list_with_groups(self): |     def test_list_with_groups(self): | ||||||
|         """Test listing with groups""" |         """Test listing with groups""" | ||||||
|         self.client.force_login(self.admin) |         self.client.force_login(self.admin) | ||||||
| @ -133,8 +99,6 @@ class TestUsersAPI(APITestCase): | |||||||
|     def test_recovery_email_no_flow(self): |     def test_recovery_email_no_flow(self): | ||||||
|         """Test user recovery link (no recovery flow set)""" |         """Test user recovery link (no recovery flow set)""" | ||||||
|         self.client.force_login(self.admin) |         self.client.force_login(self.admin) | ||||||
|         self.user.email = "" |  | ||||||
|         self.user.save() |  | ||||||
|         response = self.client.post( |         response = self.client.post( | ||||||
|             reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk}) |             reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk}) | ||||||
|         ) |         ) | ||||||
| @ -380,15 +344,12 @@ class TestUsersAPI(APITestCase): | |||||||
|         """Ensure sessions are deleted when a user is deactivated""" |         """Ensure sessions are deleted when a user is deactivated""" | ||||||
|         user = create_test_admin_user() |         user = create_test_admin_user() | ||||||
|         session_id = generate_id() |         session_id = generate_id() | ||||||
|         session = Session.objects.create( |  | ||||||
|             session_key=session_id, |  | ||||||
|             last_ip="255.255.255.255", |  | ||||||
|             last_user_agent="", |  | ||||||
|         ) |  | ||||||
|         AuthenticatedSession.objects.create( |         AuthenticatedSession.objects.create( | ||||||
|             session=session, |  | ||||||
|             user=user, |             user=user, | ||||||
|  |             session_key=session_id, | ||||||
|  |             last_ip="", | ||||||
|         ) |         ) | ||||||
|  |         cache.set(KEY_PREFIX + session_id, "foo") | ||||||
|  |  | ||||||
|         self.client.force_login(self.admin) |         self.client.force_login(self.admin) | ||||||
|         response = self.client.patch( |         response = self.client.patch( | ||||||
| @ -399,7 +360,5 @@ class TestUsersAPI(APITestCase): | |||||||
|         ) |         ) | ||||||
|         self.assertEqual(response.status_code, 200) |         self.assertEqual(response.status_code, 200) | ||||||
|  |  | ||||||
|         self.assertFalse(Session.objects.filter(session_key=session_id).exists()) |         self.assertIsNone(cache.get(KEY_PREFIX + session_id)) | ||||||
|         self.assertFalse( |         self.assertFalse(AuthenticatedSession.objects.filter(session_key=session_id).exists()) | ||||||
|             AuthenticatedSession.objects.filter(session__session_key=session_id).exists() |  | ||||||
|         ) |  | ||||||
|  | |||||||
| @ -1,5 +1,7 @@ | |||||||
| """authentik URL Configuration""" | """authentik URL Configuration""" | ||||||
|  |  | ||||||
|  | from channels.auth import AuthMiddleware | ||||||
|  | from channels.sessions import CookieMiddleware | ||||||
| from django.conf import settings | from django.conf import settings | ||||||
| from django.contrib.auth.decorators import login_required | from django.contrib.auth.decorators import login_required | ||||||
| from django.urls import path | from django.urls import path | ||||||
| @ -11,11 +13,7 @@ from authentik.core.api.devices import AdminDeviceViewSet, DeviceViewSet | |||||||
| from authentik.core.api.groups import GroupViewSet | from authentik.core.api.groups import GroupViewSet | ||||||
| from authentik.core.api.property_mappings import PropertyMappingViewSet | from authentik.core.api.property_mappings import PropertyMappingViewSet | ||||||
| from authentik.core.api.providers import ProviderViewSet | from authentik.core.api.providers import ProviderViewSet | ||||||
| from authentik.core.api.sources import ( | from authentik.core.api.sources import SourceViewSet, UserSourceConnectionViewSet | ||||||
|     GroupSourceConnectionViewSet, |  | ||||||
|     SourceViewSet, |  | ||||||
|     UserSourceConnectionViewSet, |  | ||||||
| ) |  | ||||||
| from authentik.core.api.tokens import TokenViewSet | from authentik.core.api.tokens import TokenViewSet | ||||||
| from authentik.core.api.transactional_applications import TransactionalApplicationView | from authentik.core.api.transactional_applications import TransactionalApplicationView | ||||||
| from authentik.core.api.users import UserViewSet | from authentik.core.api.users import UserViewSet | ||||||
| @ -27,7 +25,7 @@ from authentik.core.views.interface import ( | |||||||
|     RootRedirectView, |     RootRedirectView, | ||||||
| ) | ) | ||||||
| from authentik.flows.views.interface import FlowInterfaceView | from authentik.flows.views.interface import FlowInterfaceView | ||||||
| from authentik.root.asgi_middleware import AuthMiddlewareStack | from authentik.root.asgi_middleware import SessionMiddleware | ||||||
| from authentik.root.messages.consumer import MessageConsumer | from authentik.root.messages.consumer import MessageConsumer | ||||||
| from authentik.root.middleware import ChannelsLoggingMiddleware | from authentik.root.middleware import ChannelsLoggingMiddleware | ||||||
|  |  | ||||||
| @ -83,7 +81,6 @@ api_urlpatterns = [ | |||||||
|     ("core/tokens", TokenViewSet), |     ("core/tokens", TokenViewSet), | ||||||
|     ("sources/all", SourceViewSet), |     ("sources/all", SourceViewSet), | ||||||
|     ("sources/user_connections/all", UserSourceConnectionViewSet), |     ("sources/user_connections/all", UserSourceConnectionViewSet), | ||||||
|     ("sources/group_connections/all", GroupSourceConnectionViewSet), |  | ||||||
|     ("providers/all", ProviderViewSet), |     ("providers/all", ProviderViewSet), | ||||||
|     ("propertymappings/all", PropertyMappingViewSet), |     ("propertymappings/all", PropertyMappingViewSet), | ||||||
|     ("authenticators/all", DeviceViewSet, "device"), |     ("authenticators/all", DeviceViewSet, "device"), | ||||||
| @ -97,7 +94,9 @@ api_urlpatterns = [ | |||||||
| websocket_urlpatterns = [ | websocket_urlpatterns = [ | ||||||
|     path( |     path( | ||||||
|         "ws/client/", |         "ws/client/", | ||||||
|         ChannelsLoggingMiddleware(AuthMiddlewareStack(MessageConsumer.as_asgi())), |         ChannelsLoggingMiddleware( | ||||||
|  |             CookieMiddleware(SessionMiddleware(AuthMiddleware(MessageConsumer.as_asgi()))) | ||||||
|  |         ), | ||||||
|     ), |     ), | ||||||
| ] | ] | ||||||
|  |  | ||||||
|  | |||||||
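The websocket route replaces AuthMiddlewareStack with an explicit onion so that authentik's own SessionMiddleware sits between cookie parsing and user resolution. For reference, stock Channels builds the same stack; a sketch under that assumption:

from channels.auth import AuthMiddleware
from channels.sessions import CookieMiddleware, SessionMiddleware

def auth_middleware_stack(inner):
    # Roughly what channels.auth.AuthMiddlewareStack(inner) expands to; the diff
    # swaps Channels' SessionMiddleware for authentik.root.asgi_middleware.SessionMiddleware
    # so websocket connections go through the custom session store as well.
    return CookieMiddleware(SessionMiddleware(AuthMiddleware(inner)))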
| @ -55,7 +55,7 @@ class RedirectToAppLaunch(View): | |||||||
|             ) |             ) | ||||||
|         except FlowNonApplicableException: |         except FlowNonApplicableException: | ||||||
|             raise Http404 from None |             raise Http404 from None | ||||||
|         plan.append_stage(in_memory_stage(RedirectToAppStage)) |         plan.insert_stage(in_memory_stage(RedirectToAppStage)) | ||||||
|         return plan.to_redirect(request, flow) |         return plan.to_redirect(request, flow) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
| @ -53,7 +53,6 @@ class InterfaceView(TemplateView): | |||||||
|         kwargs["build"] = get_build_hash() |         kwargs["build"] = get_build_hash() | ||||||
|         kwargs["url_kwargs"] = self.kwargs |         kwargs["url_kwargs"] = self.kwargs | ||||||
|         kwargs["base_url"] = self.request.build_absolute_uri(CONFIG.get("web.path", "/")) |         kwargs["base_url"] = self.request.build_absolute_uri(CONFIG.get("web.path", "/")) | ||||||
|         kwargs["base_url_rel"] = CONFIG.get("web.path", "/") |  | ||||||
|         return super().get_context_data(**kwargs) |         return super().get_context_data(**kwargs) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
| @ -30,7 +30,6 @@ from structlog.stdlib import get_logger | |||||||
|  |  | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| from authentik.core.api.utils import ModelSerializer, PassiveSerializer | from authentik.core.api.utils import ModelSerializer, PassiveSerializer | ||||||
| from authentik.core.models import UserTypes |  | ||||||
| from authentik.crypto.apps import MANAGED_KEY | from authentik.crypto.apps import MANAGED_KEY | ||||||
| from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg | from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg | ||||||
| from authentik.crypto.models import CertificateKeyPair | from authentik.crypto.models import CertificateKeyPair | ||||||
| @ -273,7 +272,6 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet): | |||||||
|     def view_certificate(self, request: Request, pk: str) -> Response: |     def view_certificate(self, request: Request, pk: str) -> Response: | ||||||
|         """Return certificate-key pairs certificate and log access""" |         """Return certificate-key pairs certificate and log access""" | ||||||
|         certificate: CertificateKeyPair = self.get_object() |         certificate: CertificateKeyPair = self.get_object() | ||||||
|         if request.user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT: |  | ||||||
|         Event.new(  # noqa # nosec |         Event.new(  # noqa # nosec | ||||||
|             EventAction.SECRET_VIEW, |             EventAction.SECRET_VIEW, | ||||||
|             secret=certificate, |             secret=certificate, | ||||||
| @ -304,7 +302,6 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet): | |||||||
|     def view_private_key(self, request: Request, pk: str) -> Response: |     def view_private_key(self, request: Request, pk: str) -> Response: | ||||||
|         """Return certificate-key pairs private key and log access""" |         """Return certificate-key pairs private key and log access""" | ||||||
|         certificate: CertificateKeyPair = self.get_object() |         certificate: CertificateKeyPair = self.get_object() | ||||||
|         if request.user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT: |  | ||||||
|         Event.new(  # noqa # nosec |         Event.new(  # noqa # nosec | ||||||
|             EventAction.SECRET_VIEW, |             EventAction.SECRET_VIEW, | ||||||
|             secret=certificate, |             secret=certificate, | ||||||
|  | |||||||
| @ -132,14 +132,13 @@ class LicenseKey: | |||||||
|         """Get a summarized version of all (not expired) licenses""" |         """Get a summarized version of all (not expired) licenses""" | ||||||
|         total = LicenseKey(get_license_aud(), 0, "Summarized license", 0, 0) |         total = LicenseKey(get_license_aud(), 0, "Summarized license", 0, 0) | ||||||
|         for lic in License.objects.all(): |         for lic in License.objects.all(): | ||||||
|             if lic.is_valid: |  | ||||||
|             total.internal_users += lic.internal_users |             total.internal_users += lic.internal_users | ||||||
|             total.external_users += lic.external_users |             total.external_users += lic.external_users | ||||||
|                 total.license_flags.extend(lic.status.license_flags) |  | ||||||
|             exp_ts = int(mktime(lic.expiry.timetuple())) |             exp_ts = int(mktime(lic.expiry.timetuple())) | ||||||
|             if total.exp == 0: |             if total.exp == 0: | ||||||
|                 total.exp = exp_ts |                 total.exp = exp_ts | ||||||
|             total.exp = max(total.exp, exp_ts) |             total.exp = max(total.exp, exp_ts) | ||||||
|  |             total.license_flags.extend(lic.status.license_flags) | ||||||
|         return total |         return total | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|  | |||||||
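get_total folds every installed license into one summarized key: user counts are added, license flags are concatenated, and the expiry becomes the latest expiry seen (the left-hand branch additionally skips licenses whose is_valid property is false). A tiny self-contained model of that fold (illustrative names, not the LicenseKey API):

from dataclasses import dataclass, field

@dataclass
class LicenseTotal:
    internal_users: int = 0
    external_users: int = 0
    exp: int = 0
    license_flags: list[str] = field(default_factory=list)

def summarize(licenses: list) -> LicenseTotal:
    total = LicenseTotal()
    for lic in licenses:
        total.internal_users += lic.internal_users
        total.external_users += lic.external_users
        total.license_flags.extend(lic.license_flags)
        total.exp = max(total.exp, lic.exp)  # keep the latest expiry
    return total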
| @ -39,10 +39,6 @@ class License(SerializerModel): | |||||||
|     internal_users = models.BigIntegerField() |     internal_users = models.BigIntegerField() | ||||||
|     external_users = models.BigIntegerField() |     external_users = models.BigIntegerField() | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def is_valid(self) -> bool: |  | ||||||
|         return self.expiry >= now() |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def serializer(self) -> type[BaseSerializer]: |     def serializer(self) -> type[BaseSerializer]: | ||||||
|         from authentik.enterprise.api import LicenseSerializer |         from authentik.enterprise.api import LicenseSerializer | ||||||
|  | |||||||
| @ -1,27 +0,0 @@ | |||||||
| from rest_framework.viewsets import ModelViewSet |  | ||||||
|  |  | ||||||
| from authentik.core.api.used_by import UsedByMixin |  | ||||||
| from authentik.enterprise.api import EnterpriseRequiredMixin |  | ||||||
| from authentik.enterprise.policies.unique_password.models import UniquePasswordPolicy |  | ||||||
| from authentik.policies.api.policies import PolicySerializer |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class UniquePasswordPolicySerializer(EnterpriseRequiredMixin, PolicySerializer): |  | ||||||
|     """Password Uniqueness Policy Serializer""" |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         model = UniquePasswordPolicy |  | ||||||
|         fields = PolicySerializer.Meta.fields + [ |  | ||||||
|             "password_field", |  | ||||||
|             "num_historical_passwords", |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class UniquePasswordPolicyViewSet(UsedByMixin, ModelViewSet): |  | ||||||
|     """Password Uniqueness Policy Viewset""" |  | ||||||
|  |  | ||||||
|     queryset = UniquePasswordPolicy.objects.all() |  | ||||||
|     serializer_class = UniquePasswordPolicySerializer |  | ||||||
|     filterset_fields = "__all__" |  | ||||||
|     ordering = ["name"] |  | ||||||
|     search_fields = ["name"] |  | ||||||
| @ -1,10 +0,0 @@ | |||||||
| """authentik Unique Password policy app config""" |  | ||||||
|  |  | ||||||
| from authentik.enterprise.apps import EnterpriseConfig |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikEnterprisePoliciesUniquePasswordConfig(EnterpriseConfig): |  | ||||||
|     name = "authentik.enterprise.policies.unique_password" |  | ||||||
|     label = "authentik_policies_unique_password" |  | ||||||
|     verbose_name = "authentik Enterprise.Policies.Unique Password" |  | ||||||
|     default = True |  | ||||||
| @ -1,81 +0,0 @@ | |||||||
| # Generated by Django 5.0.13 on 2025-03-26 23:02 |  | ||||||
|  |  | ||||||
| import django.db.models.deletion |  | ||||||
| from django.conf import settings |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     initial = True |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_policies", "0011_policybinding_failure_result_and_more"), |  | ||||||
|         migrations.swappable_dependency(settings.AUTH_USER_MODEL), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.CreateModel( |  | ||||||
|             name="UniquePasswordPolicy", |  | ||||||
|             fields=[ |  | ||||||
|                 ( |  | ||||||
|                     "policy_ptr", |  | ||||||
|                     models.OneToOneField( |  | ||||||
|                         auto_created=True, |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, |  | ||||||
|                         parent_link=True, |  | ||||||
|                         primary_key=True, |  | ||||||
|                         serialize=False, |  | ||||||
|                         to="authentik_policies.policy", |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ( |  | ||||||
|                     "password_field", |  | ||||||
|                     models.TextField( |  | ||||||
|                         default="password", |  | ||||||
|                         help_text="Field key to check, field keys defined in Prompt stages are available.", |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ( |  | ||||||
|                     "num_historical_passwords", |  | ||||||
|                     models.PositiveIntegerField( |  | ||||||
|                         default=1, help_text="Number of passwords to check against." |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|             ], |  | ||||||
|             options={ |  | ||||||
|                 "verbose_name": "Password Uniqueness Policy", |  | ||||||
|                 "verbose_name_plural": "Password Uniqueness Policies", |  | ||||||
|                 "indexes": [ |  | ||||||
|                     models.Index(fields=["policy_ptr_id"], name="authentik_p_policy__f559aa_idx") |  | ||||||
|                 ], |  | ||||||
|             }, |  | ||||||
|             bases=("authentik_policies.policy",), |  | ||||||
|         ), |  | ||||||
|         migrations.CreateModel( |  | ||||||
|             name="UserPasswordHistory", |  | ||||||
|             fields=[ |  | ||||||
|                 ( |  | ||||||
|                     "id", |  | ||||||
|                     models.AutoField( |  | ||||||
|                         auto_created=True, primary_key=True, serialize=False, verbose_name="ID" |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ("old_password", models.CharField(max_length=128)), |  | ||||||
|                 ("created_at", models.DateTimeField(auto_now_add=True)), |  | ||||||
|                 ("hibp_prefix_sha1", models.CharField(max_length=5)), |  | ||||||
|                 ("hibp_pw_hash", models.TextField()), |  | ||||||
|                 ( |  | ||||||
|                     "user", |  | ||||||
|                     models.ForeignKey( |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, |  | ||||||
|                         related_name="old_passwords", |  | ||||||
|                         to=settings.AUTH_USER_MODEL, |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|             ], |  | ||||||
|             options={ |  | ||||||
|                 "verbose_name": "User Password History", |  | ||||||
|             }, |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -1,151 +0,0 @@ | |||||||
| from hashlib import sha1 |  | ||||||
|  |  | ||||||
| from django.contrib.auth.hashers import identify_hasher, make_password |  | ||||||
| from django.db import models |  | ||||||
| from django.utils.translation import gettext as _ |  | ||||||
| from rest_framework.serializers import BaseSerializer |  | ||||||
| from structlog.stdlib import get_logger |  | ||||||
|  |  | ||||||
| from authentik.core.models import User |  | ||||||
| from authentik.policies.models import Policy |  | ||||||
| from authentik.policies.types import PolicyRequest, PolicyResult |  | ||||||
| from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT |  | ||||||
|  |  | ||||||
| LOGGER = get_logger() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class UniquePasswordPolicy(Policy): |  | ||||||
|     """This policy prevents users from reusing old passwords.""" |  | ||||||
|  |  | ||||||
|     password_field = models.TextField( |  | ||||||
|         default="password", |  | ||||||
|         help_text=_("Field key to check, field keys defined in Prompt stages are available."), |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     # Limit on the number of previous passwords the policy evaluates |  | ||||||
|     # Also controls number of old passwords the system stores. |  | ||||||
|     num_historical_passwords = models.PositiveIntegerField( |  | ||||||
|         default=1, |  | ||||||
|         help_text=_("Number of passwords to check against."), |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def serializer(self) -> type[BaseSerializer]: |  | ||||||
|         from authentik.enterprise.policies.unique_password.api import UniquePasswordPolicySerializer |  | ||||||
|  |  | ||||||
|         return UniquePasswordPolicySerializer |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def component(self) -> str: |  | ||||||
|         return "ak-policy-password-uniqueness-form" |  | ||||||
|  |  | ||||||
|     def passes(self, request: PolicyRequest) -> PolicyResult: |  | ||||||
|         from authentik.enterprise.policies.unique_password.models import UserPasswordHistory |  | ||||||
|  |  | ||||||
|         password = request.context.get(PLAN_CONTEXT_PROMPT, {}).get( |  | ||||||
|             self.password_field, request.context.get(self.password_field) |  | ||||||
|         ) |  | ||||||
|         if not password: |  | ||||||
|             LOGGER.warning( |  | ||||||
|                 "Password field not found in request when checking UniquePasswordPolicy", |  | ||||||
|                 field=self.password_field, |  | ||||||
|                 fields=request.context.keys(), |  | ||||||
|             ) |  | ||||||
|             return PolicyResult(False, _("Password not set in context")) |  | ||||||
|         password = str(password) |  | ||||||
|  |  | ||||||
|         if not self.num_historical_passwords: |  | ||||||
|             # Policy not configured to check against any passwords |  | ||||||
|             return PolicyResult(True) |  | ||||||
|  |  | ||||||
|         num_to_check = self.num_historical_passwords |  | ||||||
|         password_history = UserPasswordHistory.objects.filter(user=request.user).order_by( |  | ||||||
|             "-created_at" |  | ||||||
|         )[:num_to_check] |  | ||||||
|  |  | ||||||
|         if not password_history: |  | ||||||
|             return PolicyResult(True) |  | ||||||
|  |  | ||||||
|         for record in password_history: |  | ||||||
|             if not record.old_password: |  | ||||||
|                 continue |  | ||||||
|  |  | ||||||
|             if self._passwords_match(new_password=password, old_password=record.old_password): |  | ||||||
|                 # Return on first match. Authentik does not consider timing attacks |  | ||||||
|                 # on old passwords to be an attack surface. |  | ||||||
|                 return PolicyResult( |  | ||||||
|                     False, |  | ||||||
|                     _("This password has been used previously. Please choose a different one."), |  | ||||||
|                 ) |  | ||||||
|  |  | ||||||
|         return PolicyResult(True) |  | ||||||
|  |  | ||||||
|     def _passwords_match(self, *, new_password: str, old_password: str) -> bool: |  | ||||||
|         try: |  | ||||||
|             hasher = identify_hasher(old_password) |  | ||||||
|         except ValueError: |  | ||||||
|             LOGGER.warning( |  | ||||||
|                 "Skipping password; could not load hash algorithm", |  | ||||||
|             ) |  | ||||||
|             return False |  | ||||||
|  |  | ||||||
|         return hasher.verify(new_password, old_password) |  | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def is_in_use(cls): |  | ||||||
|         """Check if any UniquePasswordPolicy is in use, either through policy bindings |  | ||||||
|         or direct attachment to a PromptStage. |  | ||||||
|  |  | ||||||
|         Returns: |  | ||||||
|             bool: True if any policy is in use, False otherwise |  | ||||||
|         """ |  | ||||||
|         from authentik.policies.models import PolicyBinding |  | ||||||
|  |  | ||||||
|         # Check if any policy is in use through bindings |  | ||||||
|         if PolicyBinding.in_use.for_policy(cls).exists(): |  | ||||||
|             return True |  | ||||||
|  |  | ||||||
|         # Check if any policy is attached to a PromptStage |  | ||||||
|         if cls.objects.filter(promptstage__isnull=False).exists(): |  | ||||||
|             return True |  | ||||||
|  |  | ||||||
|         return False |  | ||||||
|  |  | ||||||
|     class Meta(Policy.PolicyMeta): |  | ||||||
|         verbose_name = _("Password Uniqueness Policy") |  | ||||||
|         verbose_name_plural = _("Password Uniqueness Policies") |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class UserPasswordHistory(models.Model): |  | ||||||
|     user = models.ForeignKey(User, on_delete=models.CASCADE, related_name="old_passwords") |  | ||||||
|     # Mimics the column type of AbstractBaseUser.password |  | ||||||
|     old_password = models.CharField(max_length=128) |  | ||||||
|     created_at = models.DateTimeField(auto_now_add=True) |  | ||||||
|  |  | ||||||
|     hibp_prefix_sha1 = models.CharField(max_length=5) |  | ||||||
|     hibp_pw_hash = models.TextField() |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         verbose_name = _("User Password History") |  | ||||||
|  |  | ||||||
|     def __str__(self) -> str: |  | ||||||
|         timestamp = f"{self.created_at:%Y/%m/%d %X}" if self.created_at else "N/A" |  | ||||||
|         return f"Previous Password (user: {self.user_id}, recorded: {timestamp})" |  | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def create_for_user(cls, user: User, password: str): |  | ||||||
|         # To check users' passwords against Have I Been Pwned, we need the first 5 chars |  | ||||||
|         # of the unsalted SHA1 hash of the password... |  | ||||||
|         pw_hash_sha1 = sha1(password.encode("utf-8")).hexdigest()  # nosec |  | ||||||
|         # ...however that'll give us a list of hashes from HIBP, and to compare that we still |  | ||||||
|         # need a full unsalted SHA1 of the password. We don't want to save that directly in |  | ||||||
|         # the database, so we hash that SHA1 again with a modern hashing alg, |  | ||||||
|         # and then when we check users' passwords against HIBP we can use `check_password` |  | ||||||
|         # which will take care of this. |  | ||||||
|         hibp_hash_hash = make_password(pw_hash_sha1) |  | ||||||
|         return cls.objects.create( |  | ||||||
|             user=user, |  | ||||||
|             old_password=password, |  | ||||||
|             hibp_prefix_sha1=pw_hash_sha1[:5], |  | ||||||
|             hibp_pw_hash=hibp_hash_hash, |  | ||||||
|         ) |  | ||||||
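The comment in `create_for_user` only pays off with the eventual lookup: a later HIBP check can query the public range API with the stored 5-character prefix and verify each returned candidate against `hibp_pw_hash` via `check_password`, so the raw SHA1 never has to live in the database. A minimal sketch of that lookup, assuming the `requests` library and the `https://api.pwnedpasswords.com/range/` endpoint; the helper name is hypothetical and not part of this changeset:

import requests
from django.contrib.auth.hashers import check_password


def password_found_in_hibp(entry: "UserPasswordHistory") -> bool:
    """Return True if the recorded password appears in the HIBP corpus (sketch only)."""
    resp = requests.get(
        f"https://api.pwnedpasswords.com/range/{entry.hibp_prefix_sha1}", timeout=10
    )
    resp.raise_for_status()
    for line in resp.text.splitlines():
        suffix, _count = line.split(":")
        # Reassemble the full SHA1; hibp_pw_hash stores make_password(sha1(password)),
        # so check_password can compare without a raw SHA1 ever being persisted.
        if check_password(f"{entry.hibp_prefix_sha1}{suffix}".lower(), entry.hibp_pw_hash):
            return True
    return False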
| @ -1,20 +0,0 @@ | |||||||
| """Unique Password Policy settings""" |  | ||||||
|  |  | ||||||
| from celery.schedules import crontab |  | ||||||
|  |  | ||||||
| from authentik.lib.utils.time import fqdn_rand |  | ||||||
|  |  | ||||||
| CELERY_BEAT_SCHEDULE = { |  | ||||||
|     "policies_unique_password_trim_history": { |  | ||||||
|         "task": "authentik.enterprise.policies.unique_password.tasks.trim_password_histories", |  | ||||||
|         "schedule": crontab(minute=fqdn_rand("policies_unique_password_trim"), hour="*/12"), |  | ||||||
|         "options": {"queue": "authentik_scheduled"}, |  | ||||||
|     }, |  | ||||||
|     "policies_unique_password_check_purge": { |  | ||||||
|         "task": ( |  | ||||||
|             "authentik.enterprise.policies.unique_password.tasks.check_and_purge_password_history" |  | ||||||
|         ), |  | ||||||
|         "schedule": crontab(minute=fqdn_rand("policies_unique_password_purge"), hour="*/24"), |  | ||||||
|         "options": {"queue": "authentik_scheduled"}, |  | ||||||
|     }, |  | ||||||
| } |  | ||||||
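A note on the schedule above: `fqdn_rand` exists so that every install fires these tasks at a different, but stable, minute instead of all hitting the database at once. A rough self-contained illustration of that idea (an assumption about intent, not authentik's actual implementation):

from hashlib import sha256
from socket import getfqdn


def stable_minute(seed: str) -> int:
    """Map this host's FQDN plus a per-task seed to a fixed minute between 0 and 59."""
    digest = sha256(f"{getfqdn()}:{seed}".encode()).hexdigest()
    return int(digest, 16) % 60


# e.g. crontab(minute=stable_minute("policies_unique_password_trim"), hour="*/12")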
| @ -1,23 +0,0 @@ | |||||||
| """authentik policy signals""" |  | ||||||
|  |  | ||||||
| from django.dispatch import receiver |  | ||||||
|  |  | ||||||
| from authentik.core.models import User |  | ||||||
| from authentik.core.signals import password_changed |  | ||||||
| from authentik.enterprise.policies.unique_password.models import ( |  | ||||||
|     UniquePasswordPolicy, |  | ||||||
|     UserPasswordHistory, |  | ||||||
| ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @receiver(password_changed) |  | ||||||
| def copy_password_to_password_history(sender, user: User, *args, **kwargs): |  | ||||||
|     """Preserve the user's old password if UniquePasswordPolicy is enabled anywhere""" |  | ||||||
|     # Check if any UniquePasswordPolicy is in use |  | ||||||
|     unique_pwd_policy_in_use = UniquePasswordPolicy.is_in_use() |  | ||||||
|  |  | ||||||
|     if unique_pwd_policy_in_use: |  | ||||||
|         """NOTE: Because we run this in a signal after saving the user, |  | ||||||
|         we are not atomically guaranteed to save password history. |  | ||||||
|         """ |  | ||||||
|         UserPasswordHistory.create_for_user(user, user.password) |  | ||||||
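The NOTE in the receiver concerns atomicity: the history row is written in a signal after the user has already been saved, so a failure in between can lose it. If that guarantee were needed, the caller changing the password could write both rows in one transaction instead; a sketch only, reusing the models imported above:

from django.db import transaction


def change_password_with_history(user: User, new_password: str) -> None:
    # Same end state as the signal handler above, but the user save and the
    # history row either both commit or both roll back.
    with transaction.atomic():
        user.set_password(new_password)
        user.save(update_fields=["password"])
        UserPasswordHistory.create_for_user(user, user.password)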
| @ -1,66 +0,0 @@ | |||||||
| from django.db.models.aggregates import Count |  | ||||||
| from structlog import get_logger |  | ||||||
|  |  | ||||||
| from authentik.enterprise.policies.unique_password.models import ( |  | ||||||
|     UniquePasswordPolicy, |  | ||||||
|     UserPasswordHistory, |  | ||||||
| ) |  | ||||||
| from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task |  | ||||||
| from authentik.root.celery import CELERY_APP |  | ||||||
|  |  | ||||||
| LOGGER = get_logger() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @CELERY_APP.task(bind=True, base=SystemTask) |  | ||||||
| @prefill_task |  | ||||||
| def check_and_purge_password_history(self: SystemTask): |  | ||||||
|     """Check if any UniquePasswordPolicy exists, and if not, purge the password history table. |  | ||||||
|     This is run on a schedule instead of being triggered by policy binding deletion. |  | ||||||
|     """ |  | ||||||
|     if not UniquePasswordPolicy.objects.exists(): |  | ||||||
|         UserPasswordHistory.objects.all().delete() |  | ||||||
|         LOGGER.debug("Purged UserPasswordHistory table as no policies are in use") |  | ||||||
|         self.set_status(TaskStatus.SUCCESSFUL, "Successfully purged UserPasswordHistory") |  | ||||||
|         return |  | ||||||
|  |  | ||||||
|     self.set_status( |  | ||||||
|         TaskStatus.SUCCESSFUL, "Not purging password histories, a unique password policy exists" |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @CELERY_APP.task(bind=True, base=SystemTask) |  | ||||||
| def trim_password_histories(self: SystemTask): |  | ||||||
|     """Removes rows from UserPasswordHistory older than |  | ||||||
|     the `n` most recent entries. |  | ||||||
|  |  | ||||||
|     The `n` is defined by the largest configured value for all bound |  | ||||||
|     UniquePasswordPolicy policies. |  | ||||||
|     """ |  | ||||||
|  |  | ||||||
|     # No policy configured; the scheduled purge task above handles cleanup |  | ||||||
|     if not UniquePasswordPolicy.objects.exists(): |  | ||||||
|         return |  | ||||||
|  |  | ||||||
|     num_rows_to_preserve = 0 |  | ||||||
|     for policy in UniquePasswordPolicy.objects.all(): |  | ||||||
|         num_rows_to_preserve = max(num_rows_to_preserve, policy.num_historical_passwords) |  | ||||||
|  |  | ||||||
|     all_pks_to_keep = [] |  | ||||||
|  |  | ||||||
|     # Get all users who have password history entries |  | ||||||
|     users_with_history = ( |  | ||||||
|         UserPasswordHistory.objects.values("user") |  | ||||||
|         .annotate(count=Count("user")) |  | ||||||
|         .filter(count__gt=0) |  | ||||||
|         .values_list("user", flat=True) |  | ||||||
|     ) |  | ||||||
|     for user_pk in users_with_history: |  | ||||||
|         entries = UserPasswordHistory.objects.filter(user__pk=user_pk) |  | ||||||
|         pks_to_keep = entries.order_by("-created_at")[:num_rows_to_preserve].values_list( |  | ||||||
|             "pk", flat=True |  | ||||||
|         ) |  | ||||||
|         all_pks_to_keep.extend(pks_to_keep) |  | ||||||
|  |  | ||||||
|     num_deleted, _ = UserPasswordHistory.objects.exclude(pk__in=all_pks_to_keep).delete() |  | ||||||
|     LOGGER.debug("Deleted stale password history records", count=num_deleted) |  | ||||||
|     self.set_status(TaskStatus.SUCCESSFUL, f"Delete {num_deleted} stale password history records") |  | ||||||
| @ -1,108 +0,0 @@ | |||||||
| """Unique Password Policy flow tests""" |  | ||||||
|  |  | ||||||
| from django.contrib.auth.hashers import make_password |  | ||||||
| from django.urls.base import reverse |  | ||||||
|  |  | ||||||
| from authentik.core.tests.utils import create_test_flow, create_test_user |  | ||||||
| from authentik.enterprise.policies.unique_password.models import ( |  | ||||||
|     UniquePasswordPolicy, |  | ||||||
|     UserPasswordHistory, |  | ||||||
| ) |  | ||||||
| from authentik.flows.models import FlowDesignation, FlowStageBinding |  | ||||||
| from authentik.flows.tests import FlowTestCase |  | ||||||
| from authentik.lib.generators import generate_id |  | ||||||
| from authentik.stages.prompt.models import FieldTypes, Prompt, PromptStage |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestUniquePasswordPolicyFlow(FlowTestCase): |  | ||||||
|     """Test Unique Password Policy in a flow""" |  | ||||||
|  |  | ||||||
|     REUSED_PASSWORD = "hunter1"  # nosec B105 |  | ||||||
|  |  | ||||||
|     def setUp(self) -> None: |  | ||||||
|         self.user = create_test_user() |  | ||||||
|         self.flow = create_test_flow(FlowDesignation.AUTHENTICATION) |  | ||||||
|  |  | ||||||
|         password_prompt = Prompt.objects.create( |  | ||||||
|             name=generate_id(), |  | ||||||
|             field_key="password", |  | ||||||
|             label="PASSWORD_LABEL", |  | ||||||
|             type=FieldTypes.PASSWORD, |  | ||||||
|             required=True, |  | ||||||
|             placeholder="PASSWORD_PLACEHOLDER", |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         self.policy = UniquePasswordPolicy.objects.create( |  | ||||||
|             name="password_must_unique", |  | ||||||
|             password_field=password_prompt.field_key, |  | ||||||
|             num_historical_passwords=1, |  | ||||||
|         ) |  | ||||||
|         stage = PromptStage.objects.create(name="prompt-stage") |  | ||||||
|         stage.validation_policies.set([self.policy]) |  | ||||||
|         stage.fields.set( |  | ||||||
|             [ |  | ||||||
|                 password_prompt, |  | ||||||
|             ] |  | ||||||
|         ) |  | ||||||
|         FlowStageBinding.objects.create(target=self.flow, stage=stage, order=2) |  | ||||||
|  |  | ||||||
|         # Seed the user's password history |  | ||||||
|         UserPasswordHistory.create_for_user(self.user, make_password(self.REUSED_PASSWORD)) |  | ||||||
|  |  | ||||||
|     def test_prompt_data(self): |  | ||||||
|         """Test policy attached to a prompt stage""" |  | ||||||
|         # Test the policy directly |  | ||||||
|         from authentik.policies.types import PolicyRequest |  | ||||||
|         from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT |  | ||||||
|  |  | ||||||
|         # Create a policy request with the reused password |  | ||||||
|         request = PolicyRequest(user=self.user) |  | ||||||
|         request.context[PLAN_CONTEXT_PROMPT] = {"password": self.REUSED_PASSWORD} |  | ||||||
|  |  | ||||||
|         # Test the policy directly |  | ||||||
|         result = self.policy.passes(request) |  | ||||||
|  |  | ||||||
|         # Verify that the policy fails (returns False) with the expected error message |  | ||||||
|         self.assertFalse(result.passing, "Policy should fail for reused password") |  | ||||||
|         self.assertEqual( |  | ||||||
|             result.messages[0], |  | ||||||
|             "This password has been used previously. Please choose a different one.", |  | ||||||
|             "Incorrect error message", |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         # API-based testing approach: |  | ||||||
|  |  | ||||||
|         self.client.force_login(self.user) |  | ||||||
|  |  | ||||||
|         # Send a POST request to the flow executor with the reused password |  | ||||||
|         response = self.client.post( |  | ||||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), |  | ||||||
|             {"password": self.REUSED_PASSWORD}, |  | ||||||
|         ) |  | ||||||
|         self.assertStageResponse( |  | ||||||
|             response, |  | ||||||
|             self.flow, |  | ||||||
|             component="ak-stage-prompt", |  | ||||||
|             fields=[ |  | ||||||
|                 { |  | ||||||
|                     "choices": None, |  | ||||||
|                     "field_key": "password", |  | ||||||
|                     "label": "PASSWORD_LABEL", |  | ||||||
|                     "order": 0, |  | ||||||
|                     "placeholder": "PASSWORD_PLACEHOLDER", |  | ||||||
|                     "initial_value": "", |  | ||||||
|                     "required": True, |  | ||||||
|                     "type": "password", |  | ||||||
|                     "sub_text": "", |  | ||||||
|                 } |  | ||||||
|             ], |  | ||||||
|             response_errors={ |  | ||||||
|                 "non_field_errors": [ |  | ||||||
|                     { |  | ||||||
|                         "code": "invalid", |  | ||||||
|                         "string": "This password has been used previously. " |  | ||||||
|                         "Please choose a different one.", |  | ||||||
|                     } |  | ||||||
|                 ] |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
| @ -1,77 +0,0 @@ | |||||||
| """Unique Password Policy tests""" |  | ||||||
|  |  | ||||||
| from django.contrib.auth.hashers import make_password |  | ||||||
| from django.test import TestCase |  | ||||||
| from guardian.shortcuts import get_anonymous_user |  | ||||||
|  |  | ||||||
| from authentik.core.models import User |  | ||||||
| from authentik.enterprise.policies.unique_password.models import ( |  | ||||||
|     UniquePasswordPolicy, |  | ||||||
|     UserPasswordHistory, |  | ||||||
| ) |  | ||||||
| from authentik.policies.types import PolicyRequest, PolicyResult |  | ||||||
| from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestUniquePasswordPolicy(TestCase): |  | ||||||
|     """Test Password Uniqueness Policy""" |  | ||||||
|  |  | ||||||
|     def setUp(self) -> None: |  | ||||||
|         self.policy = UniquePasswordPolicy.objects.create( |  | ||||||
|             name="test_unique_password", num_historical_passwords=1 |  | ||||||
|         ) |  | ||||||
|         self.user = User.objects.create(username="test-user") |  | ||||||
|  |  | ||||||
|     def test_invalid(self): |  | ||||||
|         """Test without password present in request""" |  | ||||||
|         request = PolicyRequest(get_anonymous_user()) |  | ||||||
|         result: PolicyResult = self.policy.passes(request) |  | ||||||
|         self.assertFalse(result.passing) |  | ||||||
|         self.assertEqual(result.messages[0], "Password not set in context") |  | ||||||
|  |  | ||||||
|     def test_passes_no_previous_passwords(self): |  | ||||||
|         request = PolicyRequest(get_anonymous_user()) |  | ||||||
|         request.context = {PLAN_CONTEXT_PROMPT: {"password": "hunter2"}} |  | ||||||
|         result: PolicyResult = self.policy.passes(request) |  | ||||||
|         self.assertTrue(result.passing) |  | ||||||
|  |  | ||||||
|     def test_passes_passwords_are_different(self): |  | ||||||
|         # Seed database with an old password |  | ||||||
|         UserPasswordHistory.create_for_user(self.user, make_password("hunter1")) |  | ||||||
|  |  | ||||||
|         request = PolicyRequest(self.user) |  | ||||||
|         request.context = {PLAN_CONTEXT_PROMPT: {"password": "hunter2"}} |  | ||||||
|         result: PolicyResult = self.policy.passes(request) |  | ||||||
|         self.assertTrue(result.passing) |  | ||||||
|  |  | ||||||
|     def test_passes_multiple_old_passwords(self): |  | ||||||
|         # Seed with multiple old passwords |  | ||||||
|         UserPasswordHistory.objects.bulk_create( |  | ||||||
|             [ |  | ||||||
|                 UserPasswordHistory(user=self.user, old_password=make_password("hunter1")), |  | ||||||
|                 UserPasswordHistory(user=self.user, old_password=make_password("hunter2")), |  | ||||||
|             ] |  | ||||||
|         ) |  | ||||||
|         request = PolicyRequest(self.user) |  | ||||||
|         request.context = {PLAN_CONTEXT_PROMPT: {"password": "hunter3"}} |  | ||||||
|         result: PolicyResult = self.policy.passes(request) |  | ||||||
|         self.assertTrue(result.passing) |  | ||||||
|  |  | ||||||
|     def test_fails_password_matches_old_password(self): |  | ||||||
|         # Seed database with an old password |  | ||||||
|  |  | ||||||
|         UserPasswordHistory.create_for_user(self.user, make_password("hunter1")) |  | ||||||
|  |  | ||||||
|         request = PolicyRequest(self.user) |  | ||||||
|         request.context = {PLAN_CONTEXT_PROMPT: {"password": "hunter1"}} |  | ||||||
|         result: PolicyResult = self.policy.passes(request) |  | ||||||
|         self.assertFalse(result.passing) |  | ||||||
|  |  | ||||||
|     def test_fails_if_identical_password_with_different_hash_algos(self): |  | ||||||
|         UserPasswordHistory.create_for_user( |  | ||||||
|             self.user, make_password("hunter2", "somesalt", "scrypt") |  | ||||||
|         ) |  | ||||||
|         request = PolicyRequest(self.user) |  | ||||||
|         request.context = {PLAN_CONTEXT_PROMPT: {"password": "hunter2"}} |  | ||||||
|         result: PolicyResult = self.policy.passes(request) |  | ||||||
|         self.assertFalse(result.passing) |  | ||||||
| @ -1,90 +0,0 @@ | |||||||
| from django.urls import reverse |  | ||||||
|  |  | ||||||
| from authentik.core.models import Group, Source, User |  | ||||||
| from authentik.core.tests.utils import create_test_flow, create_test_user |  | ||||||
| from authentik.enterprise.policies.unique_password.models import ( |  | ||||||
|     UniquePasswordPolicy, |  | ||||||
|     UserPasswordHistory, |  | ||||||
| ) |  | ||||||
| from authentik.flows.markers import StageMarker |  | ||||||
| from authentik.flows.models import FlowStageBinding |  | ||||||
| from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlan |  | ||||||
| from authentik.flows.tests import FlowTestCase |  | ||||||
| from authentik.flows.views.executor import SESSION_KEY_PLAN |  | ||||||
| from authentik.lib.generators import generate_key |  | ||||||
| from authentik.policies.models import PolicyBinding, PolicyBindingModel |  | ||||||
| from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT |  | ||||||
| from authentik.stages.user_write.models import UserWriteStage |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestUserWriteStage(FlowTestCase): |  | ||||||
|     """Write tests""" |  | ||||||
|  |  | ||||||
|     def setUp(self): |  | ||||||
|         super().setUp() |  | ||||||
|         self.flow = create_test_flow() |  | ||||||
|         self.group = Group.objects.create(name="test-group") |  | ||||||
|         self.other_group = Group.objects.create(name="other-group") |  | ||||||
|         self.stage: UserWriteStage = UserWriteStage.objects.create( |  | ||||||
|             name="write", create_users_as_inactive=True, create_users_group=self.group |  | ||||||
|         ) |  | ||||||
|         self.binding = FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=2) |  | ||||||
|         self.source = Source.objects.create(name="fake_source") |  | ||||||
|  |  | ||||||
|     def test_save_password_history_if_policy_binding_enforced(self): |  | ||||||
|         """Test user's new password is recorded when ANY enabled UniquePasswordPolicy exists""" |  | ||||||
|         unique_password_policy = UniquePasswordPolicy.objects.create(num_historical_passwords=5) |  | ||||||
|         pbm = PolicyBindingModel.objects.create() |  | ||||||
|         PolicyBinding.objects.create( |  | ||||||
|             target=pbm, policy=unique_password_policy, order=0, enabled=True |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         test_user = create_test_user() |  | ||||||
|         # Store original password for verification |  | ||||||
|         original_password = test_user.password |  | ||||||
|  |  | ||||||
|         # We're changing our own password |  | ||||||
|         self.client.force_login(test_user) |  | ||||||
|  |  | ||||||
|         new_password = generate_key() |  | ||||||
|         plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]) |  | ||||||
|         plan.context[PLAN_CONTEXT_PENDING_USER] = test_user |  | ||||||
|         plan.context[PLAN_CONTEXT_PROMPT] = { |  | ||||||
|             "username": test_user.username, |  | ||||||
|             "password": new_password, |  | ||||||
|         } |  | ||||||
|         session = self.client.session |  | ||||||
|         session[SESSION_KEY_PLAN] = plan |  | ||||||
|         session.save() |  | ||||||
|         # Password history should be recorded |  | ||||||
|         user_password_history_qs = UserPasswordHistory.objects.filter(user=test_user) |  | ||||||
|         self.assertTrue(user_password_history_qs.exists(), "Password history should be recorded") |  | ||||||
|         self.assertEqual(len(user_password_history_qs), 1, "expected 1 recorded password") |  | ||||||
|  |  | ||||||
|         # Create a password history entry manually to simulate the signal behavior |  | ||||||
|         # This is what would happen if the signal worked correctly |  | ||||||
|         UserPasswordHistory.objects.create(user=test_user, old_password=original_password) |  | ||||||
|         user_password_history_qs = UserPasswordHistory.objects.filter(user=test_user) |  | ||||||
|         self.assertTrue(user_password_history_qs.exists(), "Password history should be recorded") |  | ||||||
|         self.assertEqual(len(user_password_history_qs), 2, "expected 2 recorded passwords") |  | ||||||
|  |  | ||||||
|         # Execute the flow by sending a POST request to the flow executor endpoint |  | ||||||
|         response = self.client.post( |  | ||||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}) |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         # Verify that the request was successful |  | ||||||
|         self.assertEqual(response.status_code, 200) |  | ||||||
|         user_qs = User.objects.filter(username=plan.context[PLAN_CONTEXT_PROMPT]["username"]) |  | ||||||
|         self.assertTrue(user_qs.exists()) |  | ||||||
|  |  | ||||||
|         # Verify the password history entry exists |  | ||||||
|         user_password_history_qs = UserPasswordHistory.objects.filter(user=test_user) |  | ||||||
|         self.assertTrue(user_password_history_qs.exists(), "Password history should be recorded") |  | ||||||
|  |  | ||||||
|         self.assertEqual(len(user_password_history_qs), 3, "expected 3 recorded passwords") |  | ||||||
|         # Verify that one of the entries contains the original password |  | ||||||
|         self.assertTrue( |  | ||||||
|             any(entry.old_password == original_password for entry in user_password_history_qs), |  | ||||||
|             "original password should be in password history table", |  | ||||||
|         ) |  | ||||||
| @ -1,178 +0,0 @@ | |||||||
| from datetime import datetime, timedelta |  | ||||||
|  |  | ||||||
| from django.test import TestCase |  | ||||||
|  |  | ||||||
| from authentik.core.tests.utils import create_test_user |  | ||||||
| from authentik.enterprise.policies.unique_password.models import ( |  | ||||||
|     UniquePasswordPolicy, |  | ||||||
|     UserPasswordHistory, |  | ||||||
| ) |  | ||||||
| from authentik.enterprise.policies.unique_password.tasks import ( |  | ||||||
|     check_and_purge_password_history, |  | ||||||
|     trim_password_histories, |  | ||||||
| ) |  | ||||||
| from authentik.policies.models import PolicyBinding, PolicyBindingModel |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestUniquePasswordPolicyModel(TestCase): |  | ||||||
|     """Test the UniquePasswordPolicy model methods""" |  | ||||||
|  |  | ||||||
|     def test_is_in_use_with_binding(self): |  | ||||||
|         """Test is_in_use returns True when a policy binding exists""" |  | ||||||
|         # Create a UniquePasswordPolicy and a PolicyBinding for it |  | ||||||
|         policy = UniquePasswordPolicy.objects.create(num_historical_passwords=5) |  | ||||||
|         pbm = PolicyBindingModel.objects.create() |  | ||||||
|         PolicyBinding.objects.create(target=pbm, policy=policy, order=0, enabled=True) |  | ||||||
|  |  | ||||||
|         # Verify is_in_use returns True |  | ||||||
|         self.assertTrue(UniquePasswordPolicy.is_in_use()) |  | ||||||
|  |  | ||||||
|     def test_is_in_use_with_promptstage(self): |  | ||||||
|         """Test is_in_use returns True when attached to a PromptStage""" |  | ||||||
|         from authentik.stages.prompt.models import PromptStage |  | ||||||
|  |  | ||||||
|         # Create a UniquePasswordPolicy and attach it to a PromptStage |  | ||||||
|         policy = UniquePasswordPolicy.objects.create(num_historical_passwords=5) |  | ||||||
|         prompt_stage = PromptStage.objects.create( |  | ||||||
|             name="Test Prompt Stage", |  | ||||||
|         ) |  | ||||||
|         # Use the set() method for many-to-many relationships |  | ||||||
|         prompt_stage.validation_policies.set([policy]) |  | ||||||
|  |  | ||||||
|         # Verify is_in_use returns True |  | ||||||
|         self.assertTrue(UniquePasswordPolicy.is_in_use()) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestTrimAllPasswordHistories(TestCase): |  | ||||||
|     """Test the task that trims password history for all users""" |  | ||||||
|  |  | ||||||
|     def setUp(self): |  | ||||||
|         self.user1 = create_test_user("test-user1") |  | ||||||
|         self.user2 = create_test_user("test-user2") |  | ||||||
|         self.pbm = PolicyBindingModel.objects.create() |  | ||||||
|         # Create a policy with a limit of 1 password |  | ||||||
|         self.policy = UniquePasswordPolicy.objects.create(num_historical_passwords=1) |  | ||||||
|         PolicyBinding.objects.create( |  | ||||||
|             target=self.pbm, |  | ||||||
|             policy=self.policy, |  | ||||||
|             enabled=True, |  | ||||||
|             order=0, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestCheckAndPurgePasswordHistory(TestCase): |  | ||||||
|     """Test the scheduled task that checks if any policy is in use and purges if not""" |  | ||||||
|  |  | ||||||
|     def setUp(self): |  | ||||||
|         self.user = create_test_user("test-user") |  | ||||||
|         self.pbm = PolicyBindingModel.objects.create() |  | ||||||
|  |  | ||||||
|     def test_purge_when_no_policy_in_use(self): |  | ||||||
|         """Test that the task purges the table when no policy is in use""" |  | ||||||
|         # Create some password history entries |  | ||||||
|         UserPasswordHistory.create_for_user(self.user, "hunter2") |  | ||||||
|  |  | ||||||
|         # Verify we have entries |  | ||||||
|         self.assertTrue(UserPasswordHistory.objects.exists()) |  | ||||||
|  |  | ||||||
|         # Run the task - should purge since no policy is in use |  | ||||||
|         check_and_purge_password_history() |  | ||||||
|  |  | ||||||
|         # Verify the table is empty |  | ||||||
|         self.assertFalse(UserPasswordHistory.objects.exists()) |  | ||||||
|  |  | ||||||
|     def test_no_purge_when_policy_in_use(self): |  | ||||||
|         """Test that the task doesn't purge when a policy is in use""" |  | ||||||
|         # Create a policy and binding |  | ||||||
|         policy = UniquePasswordPolicy.objects.create(num_historical_passwords=5) |  | ||||||
|         PolicyBinding.objects.create( |  | ||||||
|             target=self.pbm, |  | ||||||
|             policy=policy, |  | ||||||
|             enabled=True, |  | ||||||
|             order=0, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         # Create some password history entries |  | ||||||
|         UserPasswordHistory.create_for_user(self.user, "hunter2") |  | ||||||
|  |  | ||||||
|         # Verify we have entries |  | ||||||
|         self.assertTrue(UserPasswordHistory.objects.exists()) |  | ||||||
|  |  | ||||||
|         # Run the task - should NOT purge since a policy is in use |  | ||||||
|         check_and_purge_password_history() |  | ||||||
|  |  | ||||||
|         # Verify the entries still exist |  | ||||||
|         self.assertTrue(UserPasswordHistory.objects.exists()) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestTrimPasswordHistory(TestCase): |  | ||||||
|     """Test password history cleanup task""" |  | ||||||
|  |  | ||||||
|     def setUp(self): |  | ||||||
|         self.user = create_test_user("test-user") |  | ||||||
|         self.pbm = PolicyBindingModel.objects.create() |  | ||||||
|  |  | ||||||
|     def test_trim_password_history_ok(self): |  | ||||||
|         """Test passwords over the define limit are deleted""" |  | ||||||
|         _now = datetime.now() |  | ||||||
|         UserPasswordHistory.objects.bulk_create( |  | ||||||
|             [ |  | ||||||
|                 UserPasswordHistory( |  | ||||||
|                     user=self.user, |  | ||||||
|                     old_password="hunter1",  # nosec B106 |  | ||||||
|                     created_at=_now - timedelta(days=3), |  | ||||||
|                 ), |  | ||||||
|                 UserPasswordHistory( |  | ||||||
|                     user=self.user, |  | ||||||
|                     old_password="hunter2",  # nosec B106 |  | ||||||
|                     created_at=_now - timedelta(days=2), |  | ||||||
|                 ), |  | ||||||
|                 UserPasswordHistory( |  | ||||||
|                     user=self.user, |  | ||||||
|                     old_password="hunter3",  # nosec B106 |  | ||||||
|                     created_at=_now, |  | ||||||
|                 ), |  | ||||||
|             ] |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         policy = UniquePasswordPolicy.objects.create(num_historical_passwords=1) |  | ||||||
|         PolicyBinding.objects.create( |  | ||||||
|             target=self.pbm, |  | ||||||
|             policy=policy, |  | ||||||
|             enabled=True, |  | ||||||
|             order=0, |  | ||||||
|         ) |  | ||||||
|         trim_password_histories.delay() |  | ||||||
|         user_pwd_history_qs = UserPasswordHistory.objects.filter(user=self.user) |  | ||||||
|         self.assertEqual(len(user_pwd_history_qs), 1) |  | ||||||
|  |  | ||||||
|     def test_trim_password_history_policy_disabled_no_op(self): |  | ||||||
|         """Test no passwords removed if policy binding is disabled""" |  | ||||||
|  |  | ||||||
|         # Insert a record to ensure it's not deleted after executing task |  | ||||||
|         UserPasswordHistory.create_for_user(self.user, "hunter2") |  | ||||||
|  |  | ||||||
|         policy = UniquePasswordPolicy.objects.create(num_historical_passwords=1) |  | ||||||
|         PolicyBinding.objects.create( |  | ||||||
|             target=self.pbm, |  | ||||||
|             policy=policy, |  | ||||||
|             enabled=False, |  | ||||||
|             order=0, |  | ||||||
|         ) |  | ||||||
|         trim_password_histories.delay() |  | ||||||
|         self.assertTrue(UserPasswordHistory.objects.filter(user=self.user).exists()) |  | ||||||
|  |  | ||||||
|     def test_trim_password_history_fewer_records_than_maximum_is_no_op(self): |  | ||||||
|         """Test no passwords deleted if fewer passwords exist than limit""" |  | ||||||
|  |  | ||||||
|         UserPasswordHistory.create_for_user(self.user, "hunter2") |  | ||||||
|  |  | ||||||
|         policy = UniquePasswordPolicy.objects.create(num_historical_passwords=2) |  | ||||||
|         PolicyBinding.objects.create( |  | ||||||
|             target=self.pbm, |  | ||||||
|             policy=policy, |  | ||||||
|             enabled=True, |  | ||||||
|             order=0, |  | ||||||
|         ) |  | ||||||
|         trim_password_histories.delay() |  | ||||||
|         self.assertTrue(UserPasswordHistory.objects.filter(user=self.user).exists()) |  | ||||||
| @ -1,7 +0,0 @@ | |||||||
| """API URLs""" |  | ||||||
|  |  | ||||||
| from authentik.enterprise.policies.unique_password.api import UniquePasswordPolicyViewSet |  | ||||||
|  |  | ||||||
| api_urlpatterns = [ |  | ||||||
|     ("policies/unique_password", UniquePasswordPolicyViewSet), |  | ||||||
| ] |  | ||||||
| @ -37,7 +37,6 @@ class GoogleWorkspaceProviderSerializer(EnterpriseRequiredMixin, ProviderSeriali | |||||||
|             "user_delete_action", |             "user_delete_action", | ||||||
|             "group_delete_action", |             "group_delete_action", | ||||||
|             "default_group_email_domain", |             "default_group_email_domain", | ||||||
|             "dry_run", |  | ||||||
|         ] |         ] | ||||||
|         extra_kwargs = {} |         extra_kwargs = {} | ||||||
|  |  | ||||||
|  | |||||||
| @ -8,10 +8,9 @@ from httplib2 import HttpLib2Error, HttpLib2ErrorWithResponse | |||||||
|  |  | ||||||
| from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider | from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider | ||||||
| from authentik.lib.sync.outgoing import HTTP_CONFLICT | from authentik.lib.sync.outgoing import HTTP_CONFLICT | ||||||
| from authentik.lib.sync.outgoing.base import SAFE_METHODS, BaseOutgoingSyncClient | from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient | ||||||
| from authentik.lib.sync.outgoing.exceptions import ( | from authentik.lib.sync.outgoing.exceptions import ( | ||||||
|     BadRequestSyncException, |     BadRequestSyncException, | ||||||
|     DryRunRejected, |  | ||||||
|     NotFoundSyncException, |     NotFoundSyncException, | ||||||
|     ObjectExistsSyncException, |     ObjectExistsSyncException, | ||||||
|     StopSync, |     StopSync, | ||||||
| @ -44,8 +43,6 @@ class GoogleWorkspaceSyncClient[TModel: Model, TConnection: Model, TSchema: dict | |||||||
|             self.domains.append(domain_name) |             self.domains.append(domain_name) | ||||||
|  |  | ||||||
|     def _request(self, request: HttpRequest): |     def _request(self, request: HttpRequest): | ||||||
|         if self.provider.dry_run and request.method.upper() not in SAFE_METHODS: |  | ||||||
|             raise DryRunRejected(request.uri, request.method, request.body) |  | ||||||
|         try: |         try: | ||||||
|             response = request.execute() |             response = request.execute() | ||||||
|         except GoogleAuthError as exc: |         except GoogleAuthError as exc: | ||||||
|  | |||||||
| @ -25,7 +25,7 @@ class GoogleWorkspaceGroupClient( | |||||||
|     """Google client for groups""" |     """Google client for groups""" | ||||||
|  |  | ||||||
|     connection_type = GoogleWorkspaceProviderGroup |     connection_type = GoogleWorkspaceProviderGroup | ||||||
|     connection_attr = "googleworkspaceprovidergroup_set" |     connection_type_query = "group" | ||||||
|     can_discover = True |     can_discover = True | ||||||
|  |  | ||||||
|     def __init__(self, provider: GoogleWorkspaceProvider) -> None: |     def __init__(self, provider: GoogleWorkspaceProvider) -> None: | ||||||
|  | |||||||
| @ -20,7 +20,7 @@ class GoogleWorkspaceUserClient(GoogleWorkspaceSyncClient[User, GoogleWorkspaceP | |||||||
|     """Sync authentik users into google workspace""" |     """Sync authentik users into google workspace""" | ||||||
|  |  | ||||||
|     connection_type = GoogleWorkspaceProviderUser |     connection_type = GoogleWorkspaceProviderUser | ||||||
|     connection_attr = "googleworkspaceprovideruser_set" |     connection_type_query = "user" | ||||||
|     can_discover = True |     can_discover = True | ||||||
|  |  | ||||||
|     def __init__(self, provider: GoogleWorkspaceProvider) -> None: |     def __init__(self, provider: GoogleWorkspaceProvider) -> None: | ||||||
|  | |||||||
Some files were not shown because too many files have changed in this diff