Compare commits
	
		
			10 Commits
		
	
	
		
			build-scri
			...
			deny-unaut
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| 7d40e00263 | |||
| 42501f6d1e | |||
| 2759b1c089 | |||
| ce6d76babe | |||
| 5cc2bd5b36 | |||
| bad8a8ead5 | |||
| 1f7a2d5194 | |||
| 5e328403d6 | |||
| f03e56af93 | |||
| 516aa9d9b1 | 
| @ -1,5 +1,5 @@ | ||||
| [bumpversion] | ||||
| current_version = 2025.2.4 | ||||
| current_version = 2025.2.1 | ||||
| tag = True | ||||
| commit = True | ||||
| parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? | ||||
| @ -17,8 +17,6 @@ optional_value = final | ||||
|  | ||||
| [bumpversion:file:pyproject.toml] | ||||
|  | ||||
| [bumpversion:file:uv.lock] | ||||
|  | ||||
| [bumpversion:file:package.json] | ||||
|  | ||||
| [bumpversion:file:docker-compose.yml] | ||||
|  | ||||
							
								
								
									
										22
									
								
								.github/ISSUE_TEMPLATE/docs_issue.md
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										22
									
								
								.github/ISSUE_TEMPLATE/docs_issue.md
									
									
									
									
										vendored
									
									
								
							| @ -1,22 +0,0 @@ | ||||
| --- | ||||
| name: Documentation issue | ||||
| about: Suggest an improvement or report a problem | ||||
| title: "" | ||||
| labels: documentation | ||||
| assignees: "" | ||||
| --- | ||||
|  | ||||
| **Do you see an area that can be clarified or expanded, a technical inaccuracy, or a broken link? Please describe.** | ||||
| A clear and concise description of what the problem is, or where the document can be improved. Ex. I believe we need more details about [...] | ||||
|  | ||||
| **Provide the URL or link to the exact page in the documentation to which you are referring.** | ||||
| If there are multiple pages, list them all, and be sure to state the header or section where the content is. | ||||
|  | ||||
| **Describe the solution you'd like** | ||||
| A clear and concise description of what you want to happen. | ||||
|  | ||||
| **Additional context** | ||||
| Add any other context or screenshots about the documentation issue here. | ||||
|  | ||||
| **Consider opening a PR!** | ||||
| If the issue is one that you can fix, or even make a good pass at, we'd appreciate a PR. For more information about making a contribution to the docs, and using our Style Guide and our templates, refer to ["Writing documentation"](https://docs.goauthentik.io/docs/developer-docs/docs/writing-documentation). | ||||
| @ -1,5 +1,6 @@ | ||||
| --- | ||||
| name: "Prepare docker environment variables" | ||||
| description: "Prepare docker environment variables" | ||||
|  | ||||
| inputs: | ||||
|   image-name: | ||||
|  | ||||
| @ -44,6 +44,7 @@ if is_release: | ||||
|         ] | ||||
|         if not prerelease: | ||||
|             image_tags += [ | ||||
|                 f"{name}:latest", | ||||
|                 f"{name}:{version_family}", | ||||
|             ] | ||||
| else: | ||||
|  | ||||
							
								
								
									
										22
									
								
								.github/actions/setup/action.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										22
									
								
								.github/actions/setup/action.yml
									
									
									
									
										vendored
									
									
								
							| @ -1,4 +1,4 @@ | ||||
| name: Setup authentik testing environment | ||||
| name: "Setup authentik testing environment" | ||||
| description: "Setup authentik testing environment" | ||||
|  | ||||
| inputs: | ||||
| @ -9,22 +9,17 @@ inputs: | ||||
| runs: | ||||
|   using: "composite" | ||||
|   steps: | ||||
|     - name: Install apt deps | ||||
|     - name: Install poetry & deps | ||||
|       shell: bash | ||||
|       run: | | ||||
|         pipx install poetry || true | ||||
|         sudo apt-get update | ||||
|         sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext libkrb5-dev krb5-kdc krb5-user krb5-admin-server | ||||
|     - name: Install uv | ||||
|       uses: astral-sh/setup-uv@v5 | ||||
|       with: | ||||
|         enable-cache: true | ||||
|     - name: Setup python | ||||
|     - name: Setup python and restore poetry | ||||
|       uses: actions/setup-python@v5 | ||||
|       with: | ||||
|         python-version-file: "pyproject.toml" | ||||
|     - name: Install Python deps | ||||
|       shell: bash | ||||
|       run: uv sync --all-extras --dev --frozen | ||||
|         cache: "poetry" | ||||
|     - name: Setup node | ||||
|       uses: actions/setup-node@v4 | ||||
|       with: | ||||
| @ -35,18 +30,15 @@ runs: | ||||
|       uses: actions/setup-go@v5 | ||||
|       with: | ||||
|         go-version-file: "go.mod" | ||||
|     - name: Setup docker cache | ||||
|       uses: ScribeMD/docker-cache@0.5.0 | ||||
|       with: | ||||
|         key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }} | ||||
|     - name: Setup dependencies | ||||
|       shell: bash | ||||
|       run: | | ||||
|         export PSQL_TAG=${{ inputs.postgresql_version }} | ||||
|         docker compose -f .github/actions/setup/docker-compose.yml up -d | ||||
|         poetry sync | ||||
|         cd web && npm ci | ||||
|     - name: Generate config | ||||
|       shell: uv run python {0} | ||||
|       shell: poetry run python {0} | ||||
|       run: | | ||||
|         from authentik.lib.generators import generate_id | ||||
|         from yaml import safe_dump | ||||
|  | ||||
							
								
								
									
										2
									
								
								.github/actions/setup/docker-compose.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/actions/setup/docker-compose.yml
									
									
									
									
										vendored
									
									
								
							| @ -11,7 +11,7 @@ services: | ||||
|       - 5432:5432 | ||||
|     restart: always | ||||
|   redis: | ||||
|     image: docker.io/library/redis:7 | ||||
|     image: docker.io/library/redis | ||||
|     ports: | ||||
|       - 6379:6379 | ||||
|     restart: always | ||||
|  | ||||
							
								
								
									
										33
									
								
								.github/codespell-words.txt
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										33
									
								
								.github/codespell-words.txt
									
									
									
									
										vendored
									
									
								
							| @ -1,32 +1,7 @@ | ||||
| akadmin | ||||
| asgi | ||||
| assertIn | ||||
| authentik | ||||
| authn | ||||
| crate | ||||
| docstrings | ||||
| entra | ||||
| goauthentik | ||||
| gunicorn | ||||
| hass | ||||
| jwe | ||||
| jwks | ||||
| keypair | ||||
| keypairs | ||||
| kubernetes | ||||
| oidc | ||||
| ontext | ||||
| openid | ||||
| passwordless | ||||
| plex | ||||
| saml | ||||
| scim | ||||
| singed | ||||
| slo | ||||
| sso | ||||
| totp | ||||
| traefik | ||||
| # https://github.com/codespell-project/codespell/issues/1224 | ||||
| upToDate | ||||
| hass | ||||
| warmup | ||||
| webauthn | ||||
| ontext | ||||
| singed | ||||
| assertIn | ||||
|  | ||||
							
								
								
									
										8
									
								
								.github/dependabot.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										8
									
								
								.github/dependabot.yml
									
									
									
									
										vendored
									
									
								
							| @ -82,12 +82,6 @@ updates: | ||||
|       docusaurus: | ||||
|         patterns: | ||||
|           - "@docusaurus/*" | ||||
|       build: | ||||
|         patterns: | ||||
|           - "@swc/*" | ||||
|           - "swc-*" | ||||
|           - "lightningcss*" | ||||
|           - "@rspack/binding*" | ||||
|   - package-ecosystem: npm | ||||
|     directory: "/lifecycle/aws" | ||||
|     schedule: | ||||
| @ -98,7 +92,7 @@ updates: | ||||
|       prefix: "lifecycle/aws:" | ||||
|     labels: | ||||
|       - dependencies | ||||
|   - package-ecosystem: uv | ||||
|   - package-ecosystem: pip | ||||
|     directory: "/" | ||||
|     schedule: | ||||
|       interval: daily | ||||
|  | ||||
| @ -40,9 +40,9 @@ jobs: | ||||
|       attestations: write | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: docker/setup-qemu-action@v3.6.0 | ||||
|       - uses: docker/setup-qemu-action@v3.5.0 | ||||
|       - uses: docker/setup-buildx-action@v3 | ||||
|       - name: Prepare variables | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
| @ -64,12 +64,12 @@ jobs: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
|           password: ${{ secrets.GITHUB_TOKEN }} | ||||
|       - name: Make empty clients | ||||
|       - name: make empty clients | ||||
|         if: ${{ inputs.release }} | ||||
|         run: | | ||||
|           mkdir -p ./gen-ts-api | ||||
|           mkdir -p ./gen-go-api | ||||
|       - name: Generate TypeScript API Client | ||||
|       - name: generate ts client | ||||
|         if: ${{ !inputs.release }} | ||||
|         run: make gen-client-ts | ||||
|       - name: Build Docker Image | ||||
|  | ||||
| @ -49,7 +49,7 @@ jobs: | ||||
|       shouldPush: ${{ steps.ev.outputs.shouldPush }} | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Prepare variables | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
| @ -69,7 +69,7 @@ jobs: | ||||
|         tag: ${{ fromJson(needs.get-tags.outputs.tags) }} | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Prepare variables | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|  | ||||
							
								
								
									
										5
									
								
								.github/workflows/api-py-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										5
									
								
								.github/workflows/api-py-publish.yml
									
									
									
									
										vendored
									
									
								
							| @ -7,7 +7,6 @@ on: | ||||
|   workflow_dispatch: | ||||
| jobs: | ||||
|   build: | ||||
|     name: Build and Publish | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
| @ -15,7 +14,6 @@ jobs: | ||||
|     steps: | ||||
|       - id: generate_token | ||||
|         uses: tibdex/github-app-token@v2 | ||||
|         name: Generate token | ||||
|         with: | ||||
|           app_id: ${{ secrets.GH_APP_ID }} | ||||
|           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }} | ||||
| @ -32,7 +30,8 @@ jobs: | ||||
|         uses: actions/setup-python@v5 | ||||
|         with: | ||||
|           python-version-file: "pyproject.toml" | ||||
|       - name: Generate Python API Client | ||||
|           cache: "poetry" | ||||
|       - name: Generate API Client | ||||
|         run: make gen-client-py | ||||
|       - name: Publish package | ||||
|         working-directory: gen-py-api/ | ||||
|  | ||||
							
								
								
									
										3
									
								
								.github/workflows/api-ts-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										3
									
								
								.github/workflows/api-ts-publish.yml
									
									
									
									
										vendored
									
									
								
							| @ -7,7 +7,6 @@ on: | ||||
|   workflow_dispatch: | ||||
| jobs: | ||||
|   build: | ||||
|     name: Build and Publish | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
| @ -23,7 +22,7 @@ jobs: | ||||
|         with: | ||||
|           node-version-file: web/package.json | ||||
|           registry-url: "https://registry.npmjs.org" | ||||
|       - name: Generate TypeScript API Client | ||||
|       - name: Generate API Client | ||||
|         run: make gen-client-ts | ||||
|       - name: Publish package | ||||
|         working-directory: gen-ts-api/ | ||||
|  | ||||
							
								
								
									
										4
									
								
								.github/workflows/ci-aws-cfn.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										4
									
								
								.github/workflows/ci-aws-cfn.yml
									
									
									
									
										vendored
									
									
								
							| @ -18,7 +18,6 @@ env: | ||||
|  | ||||
| jobs: | ||||
|   check-changes-applied: | ||||
|     name: Check changes applied | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
| @ -34,10 +33,9 @@ jobs: | ||||
|           npm ci | ||||
|       - name: Check changes have been applied | ||||
|         run: | | ||||
|           uv run make aws-cfn | ||||
|           poetry run make aws-cfn | ||||
|           git diff --exit-code | ||||
|   ci-aws-cfn-mark: | ||||
|     name: CI AWS CloudFormation Mark | ||||
|     if: always() | ||||
|     needs: | ||||
|       - check-changes-applied | ||||
|  | ||||
							
								
								
									
										4
									
								
								.github/workflows/ci-main-daily.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										4
									
								
								.github/workflows/ci-main-daily.yml
									
									
									
									
										vendored
									
									
								
							| @ -9,18 +9,16 @@ on: | ||||
|  | ||||
| jobs: | ||||
|   test-container: | ||||
|     name: Test Container ${{ matrix.version }} | ||||
|     runs-on: ubuntu-latest | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         version: | ||||
|           - docs | ||||
|           - version-2025-2 | ||||
|           - version-2024-12 | ||||
|           - version-2024-10 | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|         name: ${{ matrix.version }} Setup | ||||
|       - run: | | ||||
|           current="$(pwd)" | ||||
|           dir="/tmp/authentik/${{ matrix.version }}" | ||||
|  | ||||
							
								
								
									
										85
									
								
								.github/workflows/ci-main.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										85
									
								
								.github/workflows/ci-main.yml
									
									
									
									
										vendored
									
									
								
							| @ -19,7 +19,6 @@ env: | ||||
|  | ||||
| jobs: | ||||
|   lint: | ||||
|     name: Lint | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
| @ -34,19 +33,17 @@ jobs: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Setup authentik env | ||||
|         uses: ./.github/actions/setup | ||||
|       - name: Run job ${{ matrix.job }} | ||||
|         run: uv run make ci-${{ matrix.job }} | ||||
|       - name: run job | ||||
|         run: poetry run make ci-${{ matrix.job }} | ||||
|   test-migrations: | ||||
|     name: Test Migrations | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Setup authentik env | ||||
|         uses: ./.github/actions/setup | ||||
|       - name: run migrations | ||||
|         run: uv run python -m lifecycle.migrate | ||||
|         run: poetry run python -m lifecycle.migrate | ||||
|   test-make-seed: | ||||
|     name: Test Make Seed | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - id: seed | ||||
| @ -55,7 +52,7 @@ jobs: | ||||
|     outputs: | ||||
|       seed: ${{ steps.seed.outputs.seed }} | ||||
|   test-migrations-from-stable: | ||||
|     name: Test Migrations From Stable - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5 | ||||
|     name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5 | ||||
|     runs-on: ubuntu-latest | ||||
|     timeout-minutes: 20 | ||||
|     needs: test-make-seed | ||||
| @ -70,40 +67,40 @@ jobs: | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           fetch-depth: 0 | ||||
|       - name: Checkout Stable | ||||
|       - name: checkout stable | ||||
|         run: | | ||||
|           # Delete all poetry envs | ||||
|           rm -rf /home/runner/.cache/pypoetry | ||||
|           # Copy current, latest config to local | ||||
|           # Temporarly comment the .github backup while migrating to uv | ||||
|           cp authentik/lib/default.yml local.env.yml | ||||
|           # cp -R .github .. | ||||
|           cp -R .github .. | ||||
|           cp -R scripts .. | ||||
|           git checkout $(git tag --sort=version:refname | grep '^version/' | grep -vE -- '-rc[0-9]+$' | tail -n1) | ||||
|           # rm -rf .github/ scripts/ | ||||
|           # mv ../.github ../scripts . | ||||
|           rm -rf scripts/ | ||||
|           mv ../scripts . | ||||
|           rm -rf .github/ scripts/ | ||||
|           mv ../.github ../scripts . | ||||
|       - name: Setup authentik env (stable) | ||||
|         uses: ./.github/actions/setup | ||||
|         with: | ||||
|           postgresql_version: ${{ matrix.psql }} | ||||
|         continue-on-error: true | ||||
|       - name: Run migrations to stable | ||||
|       - name: run migrations to stable | ||||
|         run: poetry run python -m lifecycle.migrate | ||||
|       - name: Checkout current code | ||||
|       - name: checkout current code | ||||
|         run: | | ||||
|           set -x | ||||
|           git fetch | ||||
|           git reset --hard HEAD | ||||
|           git clean -d -fx . | ||||
|           git checkout $GITHUB_SHA | ||||
|           # Delete previous poetry env | ||||
|           rm -rf /home/runner/.cache/pypoetry/virtualenvs/* | ||||
|       - name: Setup authentik env (ensure latest deps are installed) | ||||
|         uses: ./.github/actions/setup | ||||
|         with: | ||||
|           postgresql_version: ${{ matrix.psql }} | ||||
|       - name: Migrate to latest | ||||
|       - name: migrate to latest | ||||
|         run: | | ||||
|           uv run python -m lifecycle.migrate | ||||
|       - name: Run tests | ||||
|           poetry run python -m lifecycle.migrate | ||||
|       - name: run tests | ||||
|         env: | ||||
|           # Test in the main database that we just migrated from the previous stable version | ||||
|           AUTHENTIK_POSTGRESQL__TEST__NAME: authentik | ||||
| @ -111,9 +108,9 @@ jobs: | ||||
|           CI_RUN_ID: ${{ matrix.run_id }} | ||||
|           CI_TOTAL_RUNS: "5" | ||||
|         run: | | ||||
|           uv run make ci-test | ||||
|           poetry run make ci-test | ||||
|   test-unittest: | ||||
|     name: Unit tests - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5 | ||||
|     name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5 | ||||
|     runs-on: ubuntu-latest | ||||
|     timeout-minutes: 20 | ||||
|     needs: test-make-seed | ||||
| @ -126,7 +123,7 @@ jobs: | ||||
|         run_id: [1, 2, 3, 4, 5] | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Setup authentik env (${{ matrix.psql }}) | ||||
|       - name: Setup authentik env | ||||
|         uses: ./.github/actions/setup | ||||
|         with: | ||||
|           postgresql_version: ${{ matrix.psql }} | ||||
| @ -136,7 +133,7 @@ jobs: | ||||
|           CI_RUN_ID: ${{ matrix.run_id }} | ||||
|           CI_TOTAL_RUNS: "5" | ||||
|         run: | | ||||
|           uv run make ci-test | ||||
|           poetry run make ci-test | ||||
|       - if: ${{ always() }} | ||||
|         uses: codecov/codecov-action@v5 | ||||
|         with: | ||||
| @ -149,7 +146,6 @@ jobs: | ||||
|           file: unittest.xml | ||||
|           token: ${{ secrets.CODECOV_TOKEN }} | ||||
|   test-integration: | ||||
|     name: Integration tests | ||||
|     runs-on: ubuntu-latest | ||||
|     timeout-minutes: 30 | ||||
|     steps: | ||||
| @ -158,10 +154,10 @@ jobs: | ||||
|         uses: ./.github/actions/setup | ||||
|       - name: Create k8s Kind Cluster | ||||
|         uses: helm/kind-action@v1.12.0 | ||||
|       - name: Run integration | ||||
|       - name: run integration | ||||
|         run: | | ||||
|           uv run coverage run manage.py test tests/integration | ||||
|           uv run coverage xml | ||||
|           poetry run coverage run manage.py test tests/integration | ||||
|           poetry run coverage xml | ||||
|       - if: ${{ always() }} | ||||
|         uses: codecov/codecov-action@v5 | ||||
|         with: | ||||
| @ -174,34 +170,34 @@ jobs: | ||||
|           file: unittest.xml | ||||
|           token: ${{ secrets.CODECOV_TOKEN }} | ||||
|   test-e2e: | ||||
|     name: Test E2E (${{ matrix.job.name }}) | ||||
|     name: test-e2e (${{ matrix.job.name }}) | ||||
|     runs-on: ubuntu-latest | ||||
|     timeout-minutes: 30 | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         job: | ||||
|           - name: Proxy Provider | ||||
|           - name: proxy | ||||
|             glob: tests/e2e/test_provider_proxy* | ||||
|           - name: OAuth2 Provider | ||||
|           - name: oauth | ||||
|             glob: tests/e2e/test_provider_oauth2* tests/e2e/test_source_oauth* | ||||
|           - name: OIDC Provider | ||||
|           - name: oauth-oidc | ||||
|             glob: tests/e2e/test_provider_oidc* | ||||
|           - name: SAML Provider | ||||
|           - name: saml | ||||
|             glob: tests/e2e/test_provider_saml* tests/e2e/test_source_saml* | ||||
|           - name: LDAP Provider | ||||
|           - name: ldap | ||||
|             glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap* | ||||
|           - name: RADIUS Provider | ||||
|           - name: radius | ||||
|             glob: tests/e2e/test_provider_radius* | ||||
|           - name: SCIM Source | ||||
|           - name: scim | ||||
|             glob: tests/e2e/test_source_scim* | ||||
|           - name: Flows | ||||
|           - name: flows | ||||
|             glob: tests/e2e/test_flows* | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Setup authentik env | ||||
|         uses: ./.github/actions/setup | ||||
|       - name: Setup E2E env (chrome, etc) | ||||
|       - name: Setup e2e env (chrome, etc) | ||||
|         run: | | ||||
|           docker compose -f tests/e2e/docker-compose.yml up -d --quiet-pull | ||||
|       - id: cache-web | ||||
| @ -209,17 +205,17 @@ jobs: | ||||
|         with: | ||||
|           path: web/dist | ||||
|           key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }} | ||||
|       - name: Prepare Web UI | ||||
|       - name: prepare web ui | ||||
|         if: steps.cache-web.outputs.cache-hit != 'true' | ||||
|         working-directory: web | ||||
|         run: | | ||||
|           npm ci | ||||
|           make -C .. gen-client-ts | ||||
|           npm run build | ||||
|       - name: Run E2E tests | ||||
|       - name: run e2e | ||||
|         run: | | ||||
|           uv run coverage run manage.py test ${{ matrix.job.glob }} | ||||
|           uv run coverage xml | ||||
|           poetry run coverage run manage.py test ${{ matrix.job.glob }} | ||||
|           poetry run coverage xml | ||||
|       - if: ${{ always() }} | ||||
|         uses: codecov/codecov-action@v5 | ||||
|         with: | ||||
| @ -232,7 +228,6 @@ jobs: | ||||
|           file: unittest.xml | ||||
|           token: ${{ secrets.CODECOV_TOKEN }} | ||||
|   ci-core-mark: | ||||
|     name: CI Core Mark | ||||
|     if: always() | ||||
|     needs: | ||||
|       - lint | ||||
| @ -247,7 +242,6 @@ jobs: | ||||
|         with: | ||||
|           jobs: ${{ toJSON(needs) }} | ||||
|   build: | ||||
|     name: Build | ||||
|     permissions: | ||||
|       # Needed to upload container images to ghcr.io | ||||
|       packages: write | ||||
| @ -261,7 +255,6 @@ jobs: | ||||
|       image_name: ghcr.io/goauthentik/dev-server | ||||
|       release: false | ||||
|   pr-comment: | ||||
|     name: PR Comment | ||||
|     needs: | ||||
|       - build | ||||
|     runs-on: ubuntu-latest | ||||
| @ -274,7 +267,7 @@ jobs: | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           ref: ${{ github.event.pull_request.head.sha }} | ||||
|       - name: Prepare variables | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|  | ||||
							
								
								
									
										20
									
								
								.github/workflows/ci-outpost.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										20
									
								
								.github/workflows/ci-outpost.yml
									
									
									
									
										vendored
									
									
								
							| @ -14,7 +14,6 @@ on: | ||||
|  | ||||
| jobs: | ||||
|   lint-golint: | ||||
|     name: Lint Go | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
| @ -27,16 +26,15 @@ jobs: | ||||
|           mkdir -p web/dist | ||||
|           mkdir -p website/help | ||||
|           touch web/dist/test website/help/test | ||||
|       - name: Generate Go API Client | ||||
|       - name: Generate API | ||||
|         run: make gen-client-go | ||||
|       - name: golangci-lint | ||||
|         uses: golangci/golangci-lint-action@v7 | ||||
|         uses: golangci/golangci-lint-action@v6 | ||||
|         with: | ||||
|           version: latest | ||||
|           args: --timeout 5000s --verbose | ||||
|           skip-cache: true | ||||
|   test-unittest: | ||||
|     name: Unit Test Go | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
| @ -45,13 +43,12 @@ jobs: | ||||
|           go-version-file: "go.mod" | ||||
|       - name: Setup authentik env | ||||
|         uses: ./.github/actions/setup | ||||
|       - name: Generate Go API Client | ||||
|       - name: Generate API | ||||
|         run: make gen-client-go | ||||
|       - name: Go unittests | ||||
|         run: | | ||||
|           go test -timeout 0 -v -race -coverprofile=coverage.out -covermode=atomic -cover ./... | ||||
|   ci-outpost-mark: | ||||
|     name: CI Outpost Mark | ||||
|     if: always() | ||||
|     needs: | ||||
|       - lint-golint | ||||
| @ -62,7 +59,6 @@ jobs: | ||||
|         with: | ||||
|           jobs: ${{ toJSON(needs) }} | ||||
|   build-container: | ||||
|     name: Build Container | ||||
|     timeout-minutes: 120 | ||||
|     needs: | ||||
|       - ci-outpost-mark | ||||
| @ -86,10 +82,10 @@ jobs: | ||||
|         with: | ||||
|           ref: ${{ github.event.pull_request.head.sha }} | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.6.0 | ||||
|         uses: docker/setup-qemu-action@v3.5.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: Prepare variables | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
| @ -103,7 +99,7 @@ jobs: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
|           password: ${{ secrets.GITHUB_TOKEN }} | ||||
|       - name: Generate Go API Client | ||||
|       - name: Generate API | ||||
|         run: make gen-client-go | ||||
|       - name: Build Docker Image | ||||
|         id: push | ||||
| @ -126,7 +122,6 @@ jobs: | ||||
|           subject-digest: ${{ steps.push.outputs.digest }} | ||||
|           push-to-registry: true | ||||
|   build-binary: | ||||
|     name: Build Binary | ||||
|     timeout-minutes: 120 | ||||
|     needs: | ||||
|       - ci-outpost-mark | ||||
| @ -145,6 +140,7 @@ jobs: | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           ref: ${{ github.event.pull_request.head.sha }} | ||||
|       - uses: actions/setup-go@v5 | ||||
|         with: | ||||
|           go-version-file: "go.mod" | ||||
|       - uses: actions/setup-node@v4 | ||||
| @ -152,7 +148,7 @@ jobs: | ||||
|           node-version-file: web/package.json | ||||
|           cache: "npm" | ||||
|           cache-dependency-path: web/package-lock.json | ||||
|       - name: Generate Go API Client | ||||
|       - name: Generate API | ||||
|         run: make gen-client-go | ||||
|       - name: Build web | ||||
|         working-directory: web/ | ||||
|  | ||||
							
								
								
									
										5
									
								
								.github/workflows/ci-web.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										5
									
								
								.github/workflows/ci-web.yml
									
									
									
									
										vendored
									
									
								
							| @ -13,7 +13,6 @@ on: | ||||
|  | ||||
| jobs: | ||||
|   lint: | ||||
|     name: Lint | ||||
|     runs-on: ubuntu-latest | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
| @ -40,13 +39,12 @@ jobs: | ||||
|       - working-directory: ${{ matrix.project }}/ | ||||
|         run: | | ||||
|           npm ci | ||||
|       - name: Generate TypeScript API | ||||
|       - name: Generate API | ||||
|         run: make gen-client-ts | ||||
|       - name: Lint | ||||
|         working-directory: ${{ matrix.project }}/ | ||||
|         run: npm run ${{ matrix.command }} | ||||
|   build: | ||||
|     name: Build | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
| @ -63,7 +61,6 @@ jobs: | ||||
|         working-directory: web/ | ||||
|         run: npm run build | ||||
|   ci-web-mark: | ||||
|     name: CI Web Mark | ||||
|     if: always() | ||||
|     needs: | ||||
|       - build | ||||
|  | ||||
							
								
								
									
										19
									
								
								.github/workflows/ci-website.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										19
									
								
								.github/workflows/ci-website.yml
									
									
									
									
										vendored
									
									
								
							| @ -13,7 +13,6 @@ on: | ||||
|  | ||||
| jobs: | ||||
|   lint: | ||||
|     name: Lint | ||||
|     runs-on: ubuntu-latest | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
| @ -25,11 +24,10 @@ jobs: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - working-directory: website/ | ||||
|         run: npm ci | ||||
|       - name: Lint ${{ matrix.command }} | ||||
|       - name: Lint | ||||
|         working-directory: website/ | ||||
|         run: npm run ${{ matrix.command }} | ||||
|   test: | ||||
|     name: Test | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
| @ -39,14 +37,18 @@ jobs: | ||||
|           cache: "npm" | ||||
|           cache-dependency-path: website/package-lock.json | ||||
|       - working-directory: website/ | ||||
|         name: Install dependencies | ||||
|         run: npm ci | ||||
|       - name: Documentation test | ||||
|       - name: test | ||||
|         working-directory: website/ | ||||
|         run: npm test | ||||
|   build: | ||||
|     name: Build Docs | ||||
|     runs-on: ubuntu-latest | ||||
|     name: ${{ matrix.job }} | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         job: | ||||
|           - build | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: actions/setup-node@v4 | ||||
| @ -56,11 +58,10 @@ jobs: | ||||
|           cache-dependency-path: website/package-lock.json | ||||
|       - working-directory: website/ | ||||
|         run: npm ci | ||||
|       - name: Build Docusaurus | ||||
|       - name: build | ||||
|         working-directory: website/ | ||||
|         run: npm run build | ||||
|         run: npm run ${{ matrix.job }} | ||||
|   ci-website-mark: | ||||
|     name: Mark CI Website | ||||
|     if: always() | ||||
|     needs: | ||||
|       - lint | ||||
|  | ||||
							
								
								
									
.github/workflows/codeql-analysis.yml (vendored) — 2 changed lines
							| @ -1,4 +1,4 @@ | ||||
| name: CodeQL | ||||
| name: "CodeQL" | ||||
|  | ||||
| on: | ||||
|   push: | ||||
|  | ||||
| @ -2,7 +2,7 @@ name: authentik-gen-update-webauthn-mds | ||||
| on: | ||||
|   workflow_dispatch: | ||||
|   schedule: | ||||
|     - cron: "30 1 1,15 * *" | ||||
|     - cron: '30 1 1,15 * *' | ||||
|  | ||||
| env: | ||||
|   POSTGRES_DB: authentik | ||||
| @ -11,7 +11,6 @@ env: | ||||
|  | ||||
| jobs: | ||||
|   build: | ||||
|     name: Update WebAuthn MDS | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
| @ -25,7 +24,7 @@ jobs: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
|       - name: Setup authentik env | ||||
|         uses: ./.github/actions/setup | ||||
|       - run: uv run ak update_webauthn_mds | ||||
|       - run: poetry run ak update_webauthn_mds | ||||
|       - uses: peter-evans/create-pull-request@v7 | ||||
|         id: cpr | ||||
|         with: | ||||
|  | ||||
							
								
								
									
.github/workflows/gha-cache-cleanup.yml (vendored) — 1 changed line
							| @ -12,7 +12,6 @@ permissions: | ||||
|  | ||||
| jobs: | ||||
|   cleanup: | ||||
|     name: Cleanup Cache | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - name: Check out code | ||||
|  | ||||
							
								
								
									
.github/workflows/image-compress.yml (vendored) — 2 changed lines
							| @ -20,7 +20,7 @@ on: | ||||
|  | ||||
| jobs: | ||||
|   compress: | ||||
|     name: Compress Docker images | ||||
|     name: compress | ||||
|     runs-on: ubuntu-latest | ||||
|     # Don't run on forks. Token will not be available. Will run on main and open a PR anyway | ||||
|     if: | | ||||
|  | ||||
							
								
								
									
.github/workflows/packages-npm-publish.yml (vendored) — 46 changed lines
							| @ -1,46 +0,0 @@ | ||||
| name: authentik-packages-npm-publish | ||||
| on: | ||||
|   push: | ||||
|     branches: [main] | ||||
|     paths: | ||||
|       - packages/docusaurus-config/** | ||||
|       - packages/eslint-config/** | ||||
|       - packages/prettier-config/** | ||||
|       - packages/tsconfig/** | ||||
|   workflow_dispatch: | ||||
| jobs: | ||||
|   publish: | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     runs-on: ubuntu-latest | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         package: | ||||
|           - docusaurus-config | ||||
|           - eslint-config | ||||
|           - prettier-config | ||||
|           - tsconfig | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           fetch-depth: 2 | ||||
|       - uses: actions/setup-node@v4 | ||||
|         name: Setup Node.js | ||||
|         with: | ||||
|           node-version-file: packages/${{ matrix.package }}/package.json | ||||
|           registry-url: "https://registry.npmjs.org" | ||||
|       - name: Changed files (${{ matrix.package }}) | ||||
|         id: changed-files | ||||
|         uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c | ||||
|         with: | ||||
|           files: | | ||||
|             packages/${{ matrix.package }}/package.json | ||||
|       - name: Publish package (${{ matrix.package }}) | ||||
|         if: steps.changed-files.outputs.any_changed == 'true' | ||||
|         working-directory: packages/${{ matrix.package}} | ||||
|         run: | | ||||
|           npm ci | ||||
|           npm run build | ||||
|           npm publish | ||||
|         env: | ||||
|           NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }} | ||||
							
								
								
									
.github/workflows/publish-source-docs.yml (vendored) — 9 changed lines
							| @ -12,7 +12,6 @@ env: | ||||
|  | ||||
| jobs: | ||||
|   publish-source-docs: | ||||
|     name: Publish | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     runs-on: ubuntu-latest | ||||
|     timeout-minutes: 120 | ||||
| @ -20,11 +19,11 @@ jobs: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Setup authentik env | ||||
|         uses: ./.github/actions/setup | ||||
|       - name: Generate docs | ||||
|       - name: generate docs | ||||
|         run: | | ||||
|           uv run make migrate | ||||
|           uv run ak build_source_docs | ||||
|       - name: Deploy to Netlify | ||||
|           poetry run make migrate | ||||
|           poetry run ak build_source_docs | ||||
|       - name: Publish | ||||
|         uses: netlify/actions/cli@master | ||||
|         with: | ||||
|           args: deploy --dir=source_docs --prod | ||||
|  | ||||
							
								
								
									
.github/workflows/release-next-branch.yml (vendored) — 1 changed line
							| @ -11,7 +11,6 @@ permissions: | ||||
|  | ||||
| jobs: | ||||
|   update-next: | ||||
|     name: Update Next Branch | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     runs-on: ubuntu-latest | ||||
|     environment: internal-production | ||||
|  | ||||
							
								
								
									
.github/workflows/release-publish.yml (vendored) — 16 changed lines
							| @ -7,7 +7,6 @@ on: | ||||
|  | ||||
| jobs: | ||||
|   build-server: | ||||
|     name: Build server | ||||
|     uses: ./.github/workflows/_reusable-docker-build.yaml | ||||
|     secrets: inherit | ||||
|     permissions: | ||||
| @ -22,7 +21,6 @@ jobs: | ||||
|       registry_dockerhub: true | ||||
|       registry_ghcr: true | ||||
|   build-outpost: | ||||
|     name: Build outpost | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
|       # Needed to upload container images to ghcr.io | ||||
| @ -44,17 +42,17 @@ jobs: | ||||
|         with: | ||||
|           go-version-file: "go.mod" | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.6.0 | ||||
|         uses: docker/setup-qemu-action@v3.5.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: Prepare variables | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/${{ matrix.type }},beryju/authentik-${{ matrix.type }} | ||||
|       - name: Make empty clients | ||||
|       - name: make empty clients | ||||
|         run: | | ||||
|           mkdir -p ./gen-ts-api | ||||
|           mkdir -p ./gen-go-api | ||||
| @ -87,7 +85,6 @@ jobs: | ||||
|           subject-digest: ${{ steps.push.outputs.digest }} | ||||
|           push-to-registry: true | ||||
|   build-outpost-binary: | ||||
|     name: Build outpost binary | ||||
|     timeout-minutes: 120 | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
| @ -132,7 +129,6 @@ jobs: | ||||
|           asset_name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} | ||||
|           tag: ${{ github.ref }} | ||||
|   upload-aws-cfn-template: | ||||
|     name: Upload AWS CloudFormation template | ||||
|     permissions: | ||||
|       # Needed for AWS login | ||||
|       id-token: write | ||||
| @ -154,7 +150,6 @@ jobs: | ||||
|           aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.${{ github.ref }}.yaml | ||||
|           aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.latest.yaml | ||||
|   test-release: | ||||
|     name: Test release | ||||
|     needs: | ||||
|       - build-server | ||||
|       - build-outpost | ||||
| @ -171,7 +166,6 @@ jobs: | ||||
|           docker compose start postgresql redis | ||||
|           docker compose run -u root server test-all | ||||
|   sentry-release: | ||||
|     name: Sentry release | ||||
|     needs: | ||||
|       - build-server | ||||
|       - build-outpost | ||||
| @ -179,7 +173,7 @@ jobs: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Prepare variables | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
| @ -192,7 +186,7 @@ jobs: | ||||
|           container=$(docker container create ${{ steps.ev.outputs.imageMainName }}) | ||||
|           docker cp ${container}:web/ . | ||||
|       - name: Create a Sentry.io release | ||||
|         uses: getsentry/action-release@v3 | ||||
|         uses: getsentry/action-release@v1 | ||||
|         continue-on-error: true | ||||
|         env: | ||||
|           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} | ||||
|  | ||||
							
								
								
									
.github/workflows/release-tag.yml (vendored) — 2 changed lines
							| @ -20,7 +20,7 @@ jobs: | ||||
|         with: | ||||
|           app_id: ${{ secrets.GH_APP_ID }} | ||||
|           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }} | ||||
|       - name: Prepare variables | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|  | ||||
							
								
								
									
.github/workflows/repo-mirror.yml (vendored) — 1 changed line
							| @ -4,7 +4,6 @@ on: [push, delete] | ||||
|  | ||||
| jobs: | ||||
|   to_internal: | ||||
|     name: Mirror to internal repository | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|  | ||||
							
								
								
									
.github/workflows/repo-stale.yml (vendored) — 1 changed line
							| @ -11,7 +11,6 @@ permissions: | ||||
|  | ||||
| jobs: | ||||
|   stale: | ||||
|     name: Stale Issues | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|  | ||||
							
								
								
									
.github/workflows/semgrep.yml (vendored) — 27 changed lines
							| @ -1,27 +0,0 @@ | ||||
| name: authentik-semgrep | ||||
| on: | ||||
|   workflow_dispatch: {} | ||||
|   pull_request: {} | ||||
|   push: | ||||
|     branches: | ||||
|       - main | ||||
|       - master | ||||
|     paths: | ||||
|       - .github/workflows/semgrep.yml | ||||
|   schedule: | ||||
|     # random HH:MM to avoid a load spike on GitHub Actions at 00:00 | ||||
|     - cron: '12 15 * * *' | ||||
| jobs: | ||||
|   semgrep: | ||||
|     name: semgrep/ci | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
|       contents: read | ||||
|     env: | ||||
|       SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }} | ||||
|     container: | ||||
|       image: semgrep/semgrep | ||||
|     if: (github.actor != 'dependabot[bot]') | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - run: semgrep ci | ||||
							
								
								
									
.github/workflows/translation-advice.yml (vendored) — 1 changed line
							| @ -16,7 +16,6 @@ permissions: | ||||
|  | ||||
| jobs: | ||||
|   post-comment: | ||||
|     name: Post Comment | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - name: Find Comment | ||||
|  | ||||
| @ -16,7 +16,6 @@ env: | ||||
|  | ||||
| jobs: | ||||
|   compile: | ||||
|     name: Compile Translations | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - id: generate_token | ||||
| @ -33,14 +32,14 @@ jobs: | ||||
|         if: ${{ github.event_name == 'pull_request' }} | ||||
|       - name: Setup authentik env | ||||
|         uses: ./.github/actions/setup | ||||
|       - name: Generate TypeScript API | ||||
|       - name: Generate API | ||||
|         run: make gen-client-ts | ||||
|       - name: Extract Translations | ||||
|       - name: run extract | ||||
|         run: | | ||||
|           uv run make i18n-extract | ||||
|       - name: Compile Messages | ||||
|           poetry run make i18n-extract | ||||
|       - name: run compile | ||||
|         run: | | ||||
|           uv run ak compilemessages | ||||
|           poetry run ak compilemessages | ||||
|           make web-check-compile | ||||
|       - name: Create Pull Request | ||||
|         if: ${{ github.event_name != 'pull_request' }} | ||||
|  | ||||
							
								
								
									
.github/workflows/translation-rename.yml (vendored) — 1 changed line
							| @ -12,7 +12,6 @@ permissions: | ||||
|  | ||||
| jobs: | ||||
|   rename_pr: | ||||
|     name: Rename PR | ||||
|     runs-on: ubuntu-latest | ||||
|     if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}} | ||||
|     steps: | ||||
|  | ||||
							
								
								
									
.gitignore (vendored) — 5 changed lines
							| @ -11,10 +11,6 @@ local_settings.py | ||||
| db.sqlite3 | ||||
| media | ||||
|  | ||||
| # Node | ||||
|  | ||||
| node_modules | ||||
|  | ||||
| # If your build process includes running collectstatic, then you probably don't need or want to include staticfiles/ | ||||
| # in your Git repository. Update and uncomment the following line accordingly. | ||||
| # <django-project-name>/staticfiles/ | ||||
| @ -37,7 +33,6 @@ eggs/ | ||||
| lib64/ | ||||
| parts/ | ||||
| dist/ | ||||
| out/ | ||||
| sdist/ | ||||
| var/ | ||||
| wheels/ | ||||
|  | ||||
| @ -1,47 +0,0 @@ | ||||
| # Prettier Ignorefile | ||||
|  | ||||
| ## Static Files | ||||
| **/LICENSE | ||||
|  | ||||
| authentik/stages/**/* | ||||
|  | ||||
| ## Build asset directories | ||||
| coverage | ||||
| dist | ||||
| out | ||||
| .docusaurus | ||||
| website/docs/developer-docs/api/**/* | ||||
|  | ||||
| ## Environment | ||||
| *.env | ||||
|  | ||||
| ## Secrets | ||||
| *.secrets | ||||
|  | ||||
| ## Yarn | ||||
| .yarn/**/* | ||||
|  | ||||
| ## Node | ||||
| node_modules | ||||
| coverage | ||||
|  | ||||
| ## Configs | ||||
| *.log | ||||
| *.yaml | ||||
| *.yml | ||||
|  | ||||
| # Templates | ||||
| # TODO: Rename affected files to *.template.* or similar. | ||||
| *.html | ||||
| *.mdx | ||||
| *.md | ||||
|  | ||||
| ## Import order matters | ||||
| poly.ts | ||||
| src/locale-codes.ts | ||||
| src/locales/ | ||||
|  | ||||
| # Storybook | ||||
| storybook-static/ | ||||
| .storybook/css-import-maps* | ||||
|  | ||||
							
								
								
									
.vscode/settings.json (vendored) — 22 changed lines
							| @ -1,4 +1,26 @@ | ||||
| { | ||||
|     "cSpell.words": [ | ||||
|         "akadmin", | ||||
|         "asgi", | ||||
|         "authentik", | ||||
|         "authn", | ||||
|         "entra", | ||||
|         "goauthentik", | ||||
|         "jwe", | ||||
|         "jwks", | ||||
|         "kubernetes", | ||||
|         "oidc", | ||||
|         "openid", | ||||
|         "passwordless", | ||||
|         "plex", | ||||
|         "saml", | ||||
|         "scim", | ||||
|         "slo", | ||||
|         "sso", | ||||
|         "totp", | ||||
|         "traefik", | ||||
|         "webauthn" | ||||
|     ], | ||||
|     "todo-tree.tree.showCountsInTree": true, | ||||
|     "todo-tree.tree.showBadges": true, | ||||
|     "yaml.customTags": [ | ||||
|  | ||||
							
								
								
									
.vscode/tasks.json (vendored) — 46 changed lines
							| @ -3,13 +3,8 @@ | ||||
|     "tasks": [ | ||||
|         { | ||||
|             "label": "authentik/core: make", | ||||
|             "command": "uv", | ||||
|             "args": [ | ||||
|                 "run", | ||||
|                 "make", | ||||
|                 "lint-fix", | ||||
|                 "lint" | ||||
|             ], | ||||
|             "command": "poetry", | ||||
|             "args": ["run", "make", "lint-fix", "lint"], | ||||
|             "presentation": { | ||||
|                 "panel": "new" | ||||
|             }, | ||||
| @ -17,12 +12,8 @@ | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/core: run", | ||||
|             "command": "uv", | ||||
|             "args": [ | ||||
|                 "run", | ||||
|                 "ak", | ||||
|                 "server" | ||||
|             ], | ||||
|             "command": "poetry", | ||||
|             "args": ["run", "ak", "server"], | ||||
|             "group": "build", | ||||
|             "presentation": { | ||||
|                 "panel": "dedicated", | ||||
| @ -32,17 +23,13 @@ | ||||
|         { | ||||
|             "label": "authentik/web: make", | ||||
|             "command": "make", | ||||
|             "args": [ | ||||
|                 "web" | ||||
|             ], | ||||
|             "args": ["web"], | ||||
|             "group": "build" | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/web: watch", | ||||
|             "command": "make", | ||||
|             "args": [ | ||||
|                 "web-watch" | ||||
|             ], | ||||
|             "args": ["web-watch"], | ||||
|             "group": "build", | ||||
|             "presentation": { | ||||
|                 "panel": "dedicated", | ||||
| @ -52,26 +39,19 @@ | ||||
|         { | ||||
|             "label": "authentik: install", | ||||
|             "command": "make", | ||||
|             "args": [ | ||||
|                 "install", | ||||
|                 "-j4" | ||||
|             ], | ||||
|             "args": ["install", "-j4"], | ||||
|             "group": "build" | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/website: make", | ||||
|             "command": "make", | ||||
|             "args": [ | ||||
|                 "website" | ||||
|             ], | ||||
|             "args": ["website"], | ||||
|             "group": "build" | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/website: watch", | ||||
|             "command": "make", | ||||
|             "args": [ | ||||
|                 "website-watch" | ||||
|             ], | ||||
|             "args": ["website-watch"], | ||||
|             "group": "build", | ||||
|             "presentation": { | ||||
|                 "panel": "dedicated", | ||||
| @ -80,12 +60,8 @@ | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/api: generate", | ||||
|             "command": "uv", | ||||
|             "args": [ | ||||
|                 "run", | ||||
|                 "make", | ||||
|                 "gen" | ||||
|             ], | ||||
|             "command": "poetry", | ||||
|             "args": ["run", "make", "gen"], | ||||
|             "group": "build" | ||||
|         } | ||||
|     ] | ||||
|  | ||||
| @ -10,7 +10,7 @@ schemas/                        @goauthentik/backend | ||||
| scripts/                        @goauthentik/backend | ||||
| tests/                          @goauthentik/backend | ||||
| pyproject.toml                  @goauthentik/backend | ||||
| uv.lock                         @goauthentik/backend | ||||
| poetry.lock                     @goauthentik/backend | ||||
| go.mod                          @goauthentik/backend | ||||
| go.sum                          @goauthentik/backend | ||||
| # Infrastructure | ||||
| @ -23,8 +23,6 @@ docker-compose.yml              @goauthentik/infrastructure | ||||
| Makefile                        @goauthentik/infrastructure | ||||
| .editorconfig                   @goauthentik/infrastructure | ||||
| CODEOWNERS                      @goauthentik/infrastructure | ||||
| # Web packages | ||||
| packages/                       @goauthentik/frontend | ||||
| # Web | ||||
| web/                            @goauthentik/frontend | ||||
| tests/wdio/                     @goauthentik/frontend | ||||
|  | ||||
| @ -5,7 +5,7 @@ | ||||
| We as members, contributors, and leaders pledge to make participation in our | ||||
| community a harassment-free experience for everyone, regardless of age, body | ||||
| size, visible or invisible disability, ethnicity, sex characteristics, gender | ||||
| identity and expression, level of experience, education, socioeconomic status, | ||||
| identity and expression, level of experience, education, socio-economic status, | ||||
| nationality, personal appearance, race, religion, or sexual identity | ||||
| and orientation. | ||||
|  | ||||
|  | ||||
							
								
								
									
Dockerfile — 89 changed lines
							| @ -43,7 +43,7 @@ COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api | ||||
| RUN npm run build | ||||
|  | ||||
| # Stage 3: Build go proxy | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.24-bookworm AS go-builder | ||||
| FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/oss/go/microsoft/golang:1.23-fips-bookworm AS go-builder | ||||
|  | ||||
| ARG TARGETOS | ||||
| ARG TARGETARCH | ||||
| @ -76,7 +76,7 @@ COPY ./go.sum /go/src/goauthentik.io/go.sum | ||||
| RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \ | ||||
|     --mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \ | ||||
|     if [ "$TARGETARCH" = "arm64" ]; then export CC=aarch64-linux-gnu-gcc && export CC_FOR_TARGET=gcc-aarch64-linux-gnu; fi && \ | ||||
|     CGO_ENABLED=1 GOFIPS140=latest GOARM="${TARGETVARIANT#v}" \ | ||||
|     CGO_ENABLED=1 GOEXPERIMENT="systemcrypto" GOFLAGS="-tags=requirefips" GOARM="${TARGETVARIANT#v}" \ | ||||
|     go build -o /go/authentik ./cmd/server | ||||
|  | ||||
| # Stage 4: MaxMind GeoIP | ||||
| @ -93,59 +93,53 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ | ||||
|     mkdir -p /usr/share/GeoIP && \ | ||||
|     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" | ||||
|  | ||||
| # Stage 5: Download uv | ||||
| FROM ghcr.io/astral-sh/uv:0.6.17 AS uv | ||||
| # Stage 6: Base python image | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.10-slim-bookworm-fips AS python-base | ||||
|  | ||||
| ENV VENV_PATH="/ak-root/.venv" \ | ||||
|     PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \ | ||||
|     UV_COMPILE_BYTECODE=1 \ | ||||
|     UV_LINK_MODE=copy \ | ||||
|     UV_NATIVE_TLS=1 \ | ||||
|     UV_PYTHON_DOWNLOADS=0 | ||||
|  | ||||
| WORKDIR /ak-root/ | ||||
|  | ||||
| COPY --from=uv /uv /uvx /bin/ | ||||
|  | ||||
| # Stage 7: Python dependencies | ||||
| FROM python-base AS python-deps | ||||
| # Stage 5: Python dependencies | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS python-deps | ||||
|  | ||||
| ARG TARGETARCH | ||||
| ARG TARGETVARIANT | ||||
|  | ||||
| RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache | ||||
| WORKDIR /ak-root/poetry | ||||
|  | ||||
| ENV PATH="/root/.cargo/bin:$PATH" | ||||
| ENV VENV_PATH="/ak-root/venv" \ | ||||
|     POETRY_VIRTUALENVS_CREATE=false \ | ||||
|     PATH="/ak-root/venv/bin:$PATH" | ||||
|  | ||||
| RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache | ||||
|  | ||||
| RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \ | ||||
|     apt-get update && \ | ||||
|     # Required for installing pip packages | ||||
|     apt-get install -y --no-install-recommends build-essential pkg-config libpq-dev libkrb5-dev | ||||
|  | ||||
| RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \ | ||||
|     --mount=type=bind,target=./poetry.lock,src=./poetry.lock \ | ||||
|     --mount=type=cache,target=/root/.cache/pip \ | ||||
|     --mount=type=cache,target=/root/.cache/pypoetry \ | ||||
|     pip install --no-cache cffi && \ | ||||
|     apt-get update && \ | ||||
|     apt-get install -y --no-install-recommends \ | ||||
|     # Build essentials | ||||
|     build-essential pkg-config libffi-dev git \ | ||||
|     # cryptography | ||||
|     curl \ | ||||
|     # libxml | ||||
|     libxslt-dev zlib1g-dev \ | ||||
|     # postgresql | ||||
|     libpq-dev \ | ||||
|     # python-kadmin-rs | ||||
|     clang libkrb5-dev sccache \ | ||||
|     # xmlsec | ||||
|     libltdl-dev && \ | ||||
|     curl https://sh.rustup.rs -sSf | sh -s -- -y | ||||
|         build-essential libffi-dev \ | ||||
|         # Required for cryptography | ||||
|         curl pkg-config \ | ||||
|         # Required for lxml | ||||
|         libxslt-dev zlib1g-dev \ | ||||
|         # Required for xmlsec | ||||
|         libltdl-dev \ | ||||
|         # Required for kadmin | ||||
|         sccache clang && \ | ||||
|     curl https://sh.rustup.rs -sSf | sh -s -- -y && \ | ||||
|     . "$HOME/.cargo/env" && \ | ||||
|     python -m venv /ak-root/venv/ && \ | ||||
|     bash -c "source ${VENV_PATH}/bin/activate && \ | ||||
|     pip3 install --upgrade pip poetry && \ | ||||
|     poetry config --local installer.no-binary cryptography,xmlsec,lxml,python-kadmin-rs && \ | ||||
|     poetry install --only=main --no-ansi --no-interaction --no-root && \ | ||||
|     pip uninstall cryptography -y && \ | ||||
|     poetry install --only=main --no-ansi --no-interaction --no-root" | ||||
|  | ||||
| ENV UV_NO_BINARY_PACKAGE="cryptography lxml python-kadmin-rs xmlsec" | ||||
|  | ||||
| RUN --mount=type=bind,target=pyproject.toml,src=pyproject.toml \ | ||||
|     --mount=type=bind,target=uv.lock,src=uv.lock \ | ||||
|     --mount=type=cache,target=/root/.cache/uv \ | ||||
|     uv sync --frozen --no-install-project --no-dev | ||||
|  | ||||
| # Stage 8: Run | ||||
| FROM python-base AS final-image | ||||
| # Stage 6: Run | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS final-image | ||||
|  | ||||
| ARG VERSION | ||||
| ARG GIT_BUILD_HASH | ||||
| @ -177,7 +171,7 @@ RUN apt-get update && \ | ||||
|  | ||||
| COPY ./authentik/ /authentik | ||||
| COPY ./pyproject.toml / | ||||
| COPY ./uv.lock / | ||||
| COPY ./poetry.lock / | ||||
| COPY ./schemas /schemas | ||||
| COPY ./locale /locale | ||||
| COPY ./tests /tests | ||||
| @ -186,7 +180,7 @@ COPY ./blueprints /blueprints | ||||
| COPY ./lifecycle/ /lifecycle | ||||
| COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf | ||||
| COPY --from=go-builder /go/authentik /bin/authentik | ||||
| COPY --from=python-deps /ak-root/.venv /ak-root/.venv | ||||
| COPY --from=python-deps /ak-root/venv /ak-root/venv | ||||
| COPY --from=web-builder /work/web/dist/ /web/dist/ | ||||
| COPY --from=web-builder /work/web/authentik/ /web/authentik/ | ||||
| COPY --from=website-builder /work/website/build/ /website/help/ | ||||
| @ -197,6 +191,9 @@ USER 1000 | ||||
| ENV TMPDIR=/dev/shm/ \ | ||||
|     PYTHONDONTWRITEBYTECODE=1 \ | ||||
|     PYTHONUNBUFFERED=1 \ | ||||
|     PATH="/ak-root/venv/bin:/lifecycle:$PATH" \ | ||||
|     VENV_PATH="/ak-root/venv" \ | ||||
|     POETRY_VIRTUALENVS_CREATE=false \ | ||||
|     GOFIPS=1 | ||||
|  | ||||
| HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ] | ||||
|  | ||||
							
								
								
									
										72
									
								
								Makefile
									
									
									
									
									
								
							
							
						
						
									
										72
									
								
								Makefile
									
									
									
									
									
								
							| @ -4,17 +4,34 @@ | ||||
| PWD = $(shell pwd) | ||||
| UID = $(shell id -u) | ||||
| GID = $(shell id -g) | ||||
| NPM_VERSION = $(shell python -m scripts.generate_semver) | ||||
| NPM_VERSION = $(shell python -m scripts.npm_version) | ||||
| PY_SOURCES = authentik tests scripts lifecycle .github | ||||
| GO_SOURCES = cmd internal | ||||
| WEB_SOURCES = web/src web/packages | ||||
| DOCKER_IMAGE ?= "authentik:test" | ||||
|  | ||||
| GEN_API_TS = "gen-ts-api" | ||||
| GEN_API_PY = "gen-py-api" | ||||
| GEN_API_GO = "gen-go-api" | ||||
|  | ||||
| pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null) | ||||
| pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null) | ||||
| pg_name := $(shell uv run python -m authentik.lib.config postgresql.name 2>/dev/null) | ||||
| pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null) | ||||
| pg_host := $(shell python -m authentik.lib.config postgresql.host 2>/dev/null) | ||||
| pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null) | ||||
|  | ||||
| CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \ | ||||
| 		-I .github/codespell-words.txt \ | ||||
| 		-S 'web/src/locales/**' \ | ||||
| 		-S 'website/docs/developer-docs/api/reference/**' \ | ||||
| 		-S '**/node_modules/**' \ | ||||
| 		-S '**/dist/**' \ | ||||
| 		$(PY_SOURCES) \ | ||||
| 		$(GO_SOURCES) \ | ||||
| 		$(WEB_SOURCES) \ | ||||
| 		website/src \ | ||||
| 		website/blog \ | ||||
| 		website/docs \ | ||||
| 		website/integrations \ | ||||
| 		website/src | ||||
|  | ||||
| all: lint-fix lint test gen web  ## Lint, build, and test everything | ||||
|  | ||||
| @ -32,37 +49,34 @@ go-test: | ||||
| 	go test -timeout 0 -v -race -cover ./... | ||||
|  | ||||
| test: ## Run the server tests and produce a coverage report (locally) | ||||
| 	uv run coverage run manage.py test --keepdb authentik | ||||
| 	uv run coverage html | ||||
| 	uv run coverage report | ||||
| 	coverage run manage.py test --keepdb authentik | ||||
| 	coverage html | ||||
| 	coverage report | ||||
|  | ||||
| lint-fix: lint-codespell  ## Lint and automatically fix errors in the python source code. Reports spelling errors. | ||||
| 	uv run black $(PY_SOURCES) | ||||
| 	uv run ruff check --fix $(PY_SOURCES) | ||||
| 	black $(PY_SOURCES) | ||||
| 	ruff check --fix $(PY_SOURCES) | ||||
|  | ||||
| lint-codespell:  ## Reports spelling errors. | ||||
| 	uv run codespell -w | ||||
| 	codespell -w $(CODESPELL_ARGS) | ||||
|  | ||||
| lint: ## Lint the python and golang sources | ||||
| 	uv run bandit -c pyproject.toml -r $(PY_SOURCES) | ||||
| 	bandit -r $(PY_SOURCES) -x web/node_modules -x tests/wdio/node_modules -x website/node_modules | ||||
| 	golangci-lint run -v | ||||
|  | ||||
| core-install: | ||||
| 	uv sync --frozen | ||||
| 	poetry install | ||||
|  | ||||
| migrate: ## Run the Authentik Django server's migrations | ||||
| 	uv run python -m lifecycle.migrate | ||||
| 	python -m lifecycle.migrate | ||||
|  | ||||
| i18n-extract: core-i18n-extract web-i18n-extract  ## Extract strings that require translation into files to send to a translation service | ||||
|  | ||||
| aws-cfn: | ||||
| 	cd lifecycle/aws && npm run aws-cfn | ||||
|  | ||||
| run:  ## Run the main authentik server process | ||||
| 	uv run ak server | ||||
|  | ||||
| core-i18n-extract: | ||||
| 	uv run ak makemessages \ | ||||
| 	ak makemessages \ | ||||
| 		--add-location file \ | ||||
| 		--no-obsolete \ | ||||
| 		--ignore web \ | ||||
| @ -93,11 +107,11 @@ gen-build:  ## Extract the schema from the database | ||||
| 	AUTHENTIK_DEBUG=true \ | ||||
| 		AUTHENTIK_TENANTS__ENABLED=true \ | ||||
| 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | ||||
| 		uv run ak make_blueprint_schema > blueprints/schema.json | ||||
| 		ak make_blueprint_schema > blueprints/schema.json | ||||
| 	AUTHENTIK_DEBUG=true \ | ||||
| 		AUTHENTIK_TENANTS__ENABLED=true \ | ||||
| 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | ||||
| 		uv run ak spectacular --file schema.yml | ||||
| 		ak spectacular --file schema.yml | ||||
|  | ||||
| gen-changelog:  ## (Release) generate the changelog based from the commits since the last tag | ||||
| 	git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md | ||||
| @ -148,7 +162,7 @@ gen-client-py: gen-clean-py ## Build and install the authentik API for Python | ||||
| 	docker run \ | ||||
| 		--rm -v ${PWD}:/local \ | ||||
| 		--user ${UID}:${GID} \ | ||||
| 		docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \ | ||||
| 		docker.io/openapitools/openapi-generator-cli:v7.4.0 generate \ | ||||
| 		-i /local/schema.yml \ | ||||
| 		-g python \ | ||||
| 		-o /local/${GEN_API_PY} \ | ||||
| @ -176,7 +190,7 @@ gen-client-go: gen-clean-go  ## Build and install the authentik API for Golang | ||||
| 	rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/ | ||||
|  | ||||
| gen-dev-config:  ## Generate a local development config file | ||||
| 	uv run scripts/generate_config.py | ||||
| 	python -m scripts.generate_config | ||||
|  | ||||
| gen: gen-build gen-client-ts | ||||
|  | ||||
| @ -257,21 +271,21 @@ ci--meta-debug: | ||||
| 	node --version | ||||
|  | ||||
| ci-black: ci--meta-debug | ||||
| 	uv run black --check $(PY_SOURCES) | ||||
| 	black --check $(PY_SOURCES) | ||||
|  | ||||
| ci-ruff: ci--meta-debug | ||||
| 	uv run ruff check $(PY_SOURCES) | ||||
| 	ruff check $(PY_SOURCES) | ||||
|  | ||||
| ci-codespell: ci--meta-debug | ||||
| 	uv run codespell -s | ||||
| 	codespell $(CODESPELL_ARGS) -s | ||||
|  | ||||
| ci-bandit: ci--meta-debug | ||||
| 	uv run bandit -r $(PY_SOURCES) | ||||
| 	bandit -r $(PY_SOURCES) | ||||
|  | ||||
| ci-pending-migrations: ci--meta-debug | ||||
| 	uv run ak makemigrations --check | ||||
| 	ak makemigrations --check | ||||
|  | ||||
| ci-test: ci--meta-debug | ||||
| 	uv run coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik | ||||
| 	uv run coverage report | ||||
| 	uv run coverage xml | ||||
| 	coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik | ||||
| 	coverage report | ||||
| 	coverage xml | ||||
|  | ||||
| @ -2,7 +2,7 @@ authentik takes security very seriously. We follow the rules of [responsible di | ||||
|  | ||||
| ## Independent audits and pentests | ||||
|  | ||||
| We are committed to engaging in regular pentesting and security audits of authentik. Defining and adhering to a cadence of external testing ensures a stronger probability that our code base, our features, and our architecture is as secure and non-exploitable as possible. For more details about specific audits and pentests, refer to "Audits and Certificates" in our [Security documentation](https://docs.goauthentik.io/docs/security). | ||||
| We are committed to engaging in regular pentesting and security audits of authentik. Defining and adhering to a cadence of external testing ensures a stronger probability that our code base, our features, and our architecture is as secure and non-exploitable as possible. For more details about specfic audits and pentests, refer to "Audits and Certificates" in our [Security documentation](https://docs.goauthentik.io/docs/security). | ||||
|  | ||||
| ## What authentik classifies as a CVE | ||||
|  | ||||
|  | ||||
| @ -2,7 +2,7 @@ | ||||
|  | ||||
| from os import environ | ||||
|  | ||||
| __version__ = "2025.2.4" | ||||
| __version__ = "2025.2.1" | ||||
| ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -59,7 +59,7 @@ class SystemInfoSerializer(PassiveSerializer): | ||||
|             if not isinstance(value, str): | ||||
|                 continue | ||||
|             actual_value = value | ||||
|             if raw_session is not None and raw_session in actual_value: | ||||
|             if raw_session in actual_value: | ||||
|                 actual_value = actual_value.replace( | ||||
|                     raw_session, SafeExceptionReporterFilter.cleansed_substitute | ||||
|                 ) | ||||
|  | ||||
| @ -7,7 +7,7 @@ from rest_framework.exceptions import ValidationError | ||||
| from rest_framework.fields import CharField, DateTimeField | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.serializers import ListSerializer | ||||
| from rest_framework.serializers import ListSerializer, ModelSerializer | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
|  | ||||
| from authentik.blueprints.models import BlueprintInstance | ||||
| @ -15,7 +15,7 @@ from authentik.blueprints.v1.importer import Importer | ||||
| from authentik.blueprints.v1.oci import OCI_PREFIX | ||||
| from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer | ||||
| from authentik.core.api.utils import JSONDictField, PassiveSerializer | ||||
| from authentik.rbac.decorators import permission_required | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -36,7 +36,6 @@ from authentik.core.models import ( | ||||
|     GroupSourceConnection, | ||||
|     PropertyMapping, | ||||
|     Provider, | ||||
|     Session, | ||||
|     Source, | ||||
|     User, | ||||
|     UserSourceConnection, | ||||
| @ -109,7 +108,6 @@ def excluded_models() -> list[type[Model]]: | ||||
|         Policy, | ||||
|         PolicyBindingModel, | ||||
|         # Classes that have other dependencies | ||||
|         Session, | ||||
|         AuthenticatedSession, | ||||
|         # Classes which are only internally managed | ||||
|         # FIXME: these shouldn't need to be explicitly listed, but rather based off of a mixin | ||||
|  | ||||
| @ -49,8 +49,6 @@ class BrandSerializer(ModelSerializer): | ||||
|             "branding_title", | ||||
|             "branding_logo", | ||||
|             "branding_favicon", | ||||
|             "branding_custom_css", | ||||
|             "branding_default_flow_background", | ||||
|             "flow_authentication", | ||||
|             "flow_invalidation", | ||||
|             "flow_recovery", | ||||
| @ -88,7 +86,6 @@ class CurrentBrandSerializer(PassiveSerializer): | ||||
|     branding_title = CharField() | ||||
|     branding_logo = CharField(source="branding_logo_url") | ||||
|     branding_favicon = CharField(source="branding_favicon_url") | ||||
|     branding_custom_css = CharField() | ||||
|     ui_footer_links = ListField( | ||||
|         child=FooterLinkSerializer(), | ||||
|         read_only=True, | ||||
| @ -128,7 +125,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet): | ||||
|         "branding_title", | ||||
|         "branding_logo", | ||||
|         "branding_favicon", | ||||
|         "branding_default_flow_background", | ||||
|         "flow_authentication", | ||||
|         "flow_invalidation", | ||||
|         "flow_recovery", | ||||
|  | ||||
| @ -1,35 +0,0 @@ | ||||
| # Generated by Django 5.0.12 on 2025-02-22 01:51 | ||||
|  | ||||
| from pathlib import Path | ||||
| from django.db import migrations, models | ||||
| from django.apps.registry import Apps | ||||
|  | ||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||
|  | ||||
|  | ||||
| def migrate_custom_css(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||
|     Brand = apps.get_model("authentik_brands", "brand") | ||||
|  | ||||
|     db_alias = schema_editor.connection.alias | ||||
|  | ||||
|     path = Path("/web/dist/custom.css") | ||||
|     if not path.exists(): | ||||
|         return | ||||
|     css = path.read_text() | ||||
|     Brand.objects.using(db_alias).update(branding_custom_css=css) | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_brands", "0007_brand_default_application"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="brand", | ||||
|             name="branding_custom_css", | ||||
|             field=models.TextField(blank=True, default=""), | ||||
|         ), | ||||
|         migrations.RunPython(migrate_custom_css), | ||||
|     ] | ||||
| @ -1,18 +0,0 @@ | ||||
| # Generated by Django 5.0.13 on 2025-03-19 22:54 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_brands", "0008_brand_branding_custom_css"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="brand", | ||||
|             name="branding_default_flow_background", | ||||
|             field=models.TextField(default="/static/dist/assets/images/flow_background.jpg"), | ||||
|         ), | ||||
|     ] | ||||
| @ -33,10 +33,6 @@ class Brand(SerializerModel): | ||||
|  | ||||
|     branding_logo = models.TextField(default="/static/dist/assets/icons/icon_left_brand.svg") | ||||
|     branding_favicon = models.TextField(default="/static/dist/assets/icons/icon.png") | ||||
|     branding_custom_css = models.TextField(default="", blank=True) | ||||
|     branding_default_flow_background = models.TextField( | ||||
|         default="/static/dist/assets/images/flow_background.jpg" | ||||
|     ) | ||||
|  | ||||
|     flow_authentication = models.ForeignKey( | ||||
|         Flow, null=True, on_delete=models.SET_NULL, related_name="brand_authentication" | ||||
| @ -88,12 +84,6 @@ class Brand(SerializerModel): | ||||
|             return CONFIG.get("web.path", "/")[:-1] + self.branding_favicon | ||||
|         return self.branding_favicon | ||||
|  | ||||
|     def branding_default_flow_background_url(self) -> str: | ||||
|         """Get branding_default_flow_background with the correct prefix""" | ||||
|         if self.branding_default_flow_background.startswith("/static"): | ||||
|             return CONFIG.get("web.path", "/")[:-1] + self.branding_default_flow_background | ||||
|         return self.branding_default_flow_background | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> Serializer: | ||||
|         from authentik.brands.api import BrandSerializer | ||||
|  | ||||
| @ -24,7 +24,6 @@ class TestBrands(APITestCase): | ||||
|                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", | ||||
|                 "branding_favicon": "/static/dist/assets/icons/icon.png", | ||||
|                 "branding_title": "authentik", | ||||
|                 "branding_custom_css": "", | ||||
|                 "matched_domain": brand.domain, | ||||
|                 "ui_footer_links": [], | ||||
|                 "ui_theme": Themes.AUTOMATIC, | ||||
| @ -44,7 +43,6 @@ class TestBrands(APITestCase): | ||||
|                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", | ||||
|                 "branding_favicon": "/static/dist/assets/icons/icon.png", | ||||
|                 "branding_title": "custom", | ||||
|                 "branding_custom_css": "", | ||||
|                 "matched_domain": "bar.baz", | ||||
|                 "ui_footer_links": [], | ||||
|                 "ui_theme": Themes.AUTOMATIC, | ||||
| @ -61,7 +59,6 @@ class TestBrands(APITestCase): | ||||
|                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", | ||||
|                 "branding_favicon": "/static/dist/assets/icons/icon.png", | ||||
|                 "branding_title": "authentik", | ||||
|                 "branding_custom_css": "", | ||||
|                 "matched_domain": "fallback", | ||||
|                 "ui_footer_links": [], | ||||
|                 "ui_theme": Themes.AUTOMATIC, | ||||
| @ -124,27 +121,3 @@ class TestBrands(APITestCase): | ||||
|                 "subject": None, | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_branding_url(self): | ||||
|         """Test branding attributes return correct values""" | ||||
|         brand = create_test_brand() | ||||
|         brand.branding_default_flow_background = "https://goauthentik.io/img/icon.png" | ||||
|         brand.branding_favicon = "https://goauthentik.io/img/icon.png" | ||||
|         brand.branding_logo = "https://goauthentik.io/img/icon.png" | ||||
|         brand.save() | ||||
|         self.assertEqual( | ||||
|             brand.branding_default_flow_background_url(), "https://goauthentik.io/img/icon.png" | ||||
|         ) | ||||
|         self.assertJSONEqual( | ||||
|             self.client.get(reverse("authentik_api:brand-current")).content.decode(), | ||||
|             { | ||||
|                 "branding_logo": "https://goauthentik.io/img/icon.png", | ||||
|                 "branding_favicon": "https://goauthentik.io/img/icon.png", | ||||
|                 "branding_title": "authentik", | ||||
|                 "branding_custom_css": "", | ||||
|                 "matched_domain": brand.domain, | ||||
|                 "ui_footer_links": [], | ||||
|                 "ui_theme": Themes.AUTOMATIC, | ||||
|                 "default_locale": "", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
| @ -46,7 +46,7 @@ LOGGER = get_logger() | ||||
|  | ||||
| def user_app_cache_key(user_pk: str, page_number: int | None = None) -> str: | ||||
|     """Cache key where application list for user is saved""" | ||||
|     key = f"{CACHE_PREFIX}app_access/{user_pk}" | ||||
|     key = f"{CACHE_PREFIX}/app_access/{user_pk}" | ||||
|     if page_number: | ||||
|         key += f"/{page_number}" | ||||
|     return key | ||||
|  | ||||
| @ -5,7 +5,6 @@ from typing import TypedDict | ||||
| from rest_framework import mixins | ||||
| from rest_framework.fields import SerializerMethodField | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.serializers import CharField, DateTimeField, IPAddressField | ||||
| from rest_framework.viewsets import GenericViewSet | ||||
| from ua_parser import user_agent_parser | ||||
|  | ||||
| @ -55,11 +54,6 @@ class UserAgentDict(TypedDict): | ||||
| class AuthenticatedSessionSerializer(ModelSerializer): | ||||
|     """AuthenticatedSession Serializer""" | ||||
|  | ||||
|     expires = DateTimeField(source="session.expires", read_only=True) | ||||
|     last_ip = IPAddressField(source="session.last_ip", read_only=True) | ||||
|     last_user_agent = CharField(source="session.last_user_agent", read_only=True) | ||||
|     last_used = DateTimeField(source="session.last_used", read_only=True) | ||||
|  | ||||
|     current = SerializerMethodField() | ||||
|     user_agent = SerializerMethodField() | ||||
|     geo_ip = SerializerMethodField() | ||||
| @ -68,19 +62,19 @@ class AuthenticatedSessionSerializer(ModelSerializer): | ||||
|     def get_current(self, instance: AuthenticatedSession) -> bool: | ||||
|         """Check if session is currently active session""" | ||||
|         request: Request = self.context["request"] | ||||
|         return request._request.session.session_key == instance.session.session_key | ||||
|         return request._request.session.session_key == instance.session_key | ||||
|  | ||||
|     def get_user_agent(self, instance: AuthenticatedSession) -> UserAgentDict: | ||||
|         """Get parsed user agent""" | ||||
|         return user_agent_parser.Parse(instance.session.last_user_agent) | ||||
|         return user_agent_parser.Parse(instance.last_user_agent) | ||||
|  | ||||
|     def get_geo_ip(self, instance: AuthenticatedSession) -> GeoIPDict | None:  # pragma: no cover | ||||
|         """Get GeoIP Data""" | ||||
|         return GEOIP_CONTEXT_PROCESSOR.city_dict(instance.session.last_ip) | ||||
|         return GEOIP_CONTEXT_PROCESSOR.city_dict(instance.last_ip) | ||||
|  | ||||
|     def get_asn(self, instance: AuthenticatedSession) -> ASNDict | None:  # pragma: no cover | ||||
|         """Get ASN Data""" | ||||
|         return ASN_CONTEXT_PROCESSOR.asn_dict(instance.session.last_ip) | ||||
|         return ASN_CONTEXT_PROCESSOR.asn_dict(instance.last_ip) | ||||
|  | ||||
|     class Meta: | ||||
|         model = AuthenticatedSession | ||||
| @ -96,7 +90,6 @@ class AuthenticatedSessionSerializer(ModelSerializer): | ||||
|             "last_used", | ||||
|             "expires", | ||||
|         ] | ||||
|         extra_args = {"uuid": {"read_only": True}} | ||||
|  | ||||
|  | ||||
| class AuthenticatedSessionViewSet( | ||||
| @ -108,10 +101,9 @@ class AuthenticatedSessionViewSet( | ||||
| ): | ||||
|     """AuthenticatedSession Viewset""" | ||||
|  | ||||
|     lookup_field = "uuid" | ||||
|     queryset = AuthenticatedSession.objects.select_related("session").all() | ||||
|     queryset = AuthenticatedSession.objects.all() | ||||
|     serializer_class = AuthenticatedSessionSerializer | ||||
|     search_fields = ["user__username", "session__last_ip", "session__last_user_agent"] | ||||
|     filterset_fields = ["user__username", "session__last_ip", "session__last_user_agent"] | ||||
|     search_fields = ["user__username", "last_ip", "last_user_agent"] | ||||
|     filterset_fields = ["user__username", "last_ip", "last_user_agent"] | ||||
|     ordering = ["user__username"] | ||||
|     owner_field = "user" | ||||
|  | ||||
| @ -5,7 +5,6 @@ from collections.abc import Iterable | ||||
| from drf_spectacular.utils import OpenApiResponse, extend_schema | ||||
| from rest_framework import mixins | ||||
| from rest_framework.decorators import action | ||||
| from rest_framework.exceptions import ValidationError | ||||
| from rest_framework.fields import CharField, ReadOnlyField, SerializerMethodField | ||||
| from rest_framework.parsers import MultiPartParser | ||||
| from rest_framework.request import Request | ||||
| @ -155,17 +154,6 @@ class SourceViewSet( | ||||
|             matching_sources.append(source_settings.validated_data) | ||||
|         return Response(matching_sources) | ||||
|  | ||||
|     def destroy(self, request: Request, *args, **kwargs): | ||||
|         """Prevent deletion of built-in sources""" | ||||
|         instance: Source = self.get_object() | ||||
|  | ||||
|         if instance.managed == Source.MANAGED_INBUILT: | ||||
|             raise ValidationError( | ||||
|                 {"detail": "Built-in sources cannot be deleted"}, code="protected" | ||||
|             ) | ||||
|  | ||||
|         return super().destroy(request, *args, **kwargs) | ||||
|  | ||||
|  | ||||
| class UserSourceConnectionSerializer(SourceSerializer): | ||||
|     """User source connection""" | ||||
| @ -179,13 +167,10 @@ class UserSourceConnectionSerializer(SourceSerializer): | ||||
|             "user", | ||||
|             "source", | ||||
|             "source_obj", | ||||
|             "identifier", | ||||
|             "created", | ||||
|             "last_updated", | ||||
|         ] | ||||
|         extra_kwargs = { | ||||
|             "created": {"read_only": True}, | ||||
|             "last_updated": {"read_only": True}, | ||||
|         } | ||||
|  | ||||
|  | ||||
| @ -202,7 +187,7 @@ class UserSourceConnectionViewSet( | ||||
|     queryset = UserSourceConnection.objects.all() | ||||
|     serializer_class = UserSourceConnectionSerializer | ||||
|     filterset_fields = ["user", "source__slug"] | ||||
|     search_fields = ["user__username", "source__slug", "identifier"] | ||||
|     search_fields = ["source__slug"] | ||||
|     ordering = ["source__slug", "pk"] | ||||
|     owner_field = "user" | ||||
|  | ||||
| @ -221,11 +206,9 @@ class GroupSourceConnectionSerializer(SourceSerializer): | ||||
|             "source_obj", | ||||
|             "identifier", | ||||
|             "created", | ||||
|             "last_updated", | ||||
|         ] | ||||
|         extra_kwargs = { | ||||
|             "created": {"read_only": True}, | ||||
|             "last_updated": {"read_only": True}, | ||||
|         } | ||||
|  | ||||
|  | ||||
| @ -242,5 +225,6 @@ class GroupSourceConnectionViewSet( | ||||
|     queryset = GroupSourceConnection.objects.all() | ||||
|     serializer_class = GroupSourceConnectionSerializer | ||||
|     filterset_fields = ["group", "source__slug"] | ||||
|     search_fields = ["group__name", "source__slug", "identifier"] | ||||
|     search_fields = ["source__slug"] | ||||
|     ordering = ["source__slug", "pk"] | ||||
|     owner_field = "user" | ||||
|  | ||||
| @ -6,6 +6,8 @@ from typing import Any | ||||
|  | ||||
| from django.contrib.auth import update_session_auth_hash | ||||
| from django.contrib.auth.models import Permission | ||||
| from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||
| from django.core.cache import cache | ||||
| from django.db.models.functions import ExtractHour | ||||
| from django.db.transaction import atomic | ||||
| from django.db.utils import IntegrityError | ||||
| @ -69,8 +71,8 @@ from authentik.core.middleware import ( | ||||
| from authentik.core.models import ( | ||||
|     USER_ATTRIBUTE_TOKEN_EXPIRING, | ||||
|     USER_PATH_SERVICE_ACCOUNT, | ||||
|     AuthenticatedSession, | ||||
|     Group, | ||||
|     Session, | ||||
|     Token, | ||||
|     TokenIntents, | ||||
|     User, | ||||
| @ -224,7 +226,6 @@ class UserSerializer(ModelSerializer): | ||||
|             "name", | ||||
|             "is_active", | ||||
|             "last_login", | ||||
|             "date_joined", | ||||
|             "is_superuser", | ||||
|             "groups", | ||||
|             "groups_obj", | ||||
| @ -239,7 +240,6 @@ class UserSerializer(ModelSerializer): | ||||
|         ] | ||||
|         extra_kwargs = { | ||||
|             "name": {"allow_blank": True}, | ||||
|             "date_joined": {"read_only": True}, | ||||
|             "password_change_date": {"read_only": True}, | ||||
|         } | ||||
|  | ||||
| @ -373,7 +373,7 @@ class UsersFilter(FilterSet): | ||||
|         method="filter_attributes", | ||||
|     ) | ||||
|  | ||||
|     is_superuser = BooleanFilter(field_name="ak_groups", method="filter_is_superuser") | ||||
|     is_superuser = BooleanFilter(field_name="ak_groups", lookup_expr="is_superuser") | ||||
|     uuid = UUIDFilter(field_name="uuid") | ||||
|  | ||||
|     path = CharFilter(field_name="path") | ||||
| @ -391,11 +391,6 @@ class UsersFilter(FilterSet): | ||||
|         queryset=Group.objects.all().order_by("name"), | ||||
|     ) | ||||
|  | ||||
|     def filter_is_superuser(self, queryset, name, value): | ||||
|         if value: | ||||
|             return queryset.filter(ak_groups__is_superuser=True).distinct() | ||||
|         return queryset.exclude(ak_groups__is_superuser=True).distinct() | ||||
|  | ||||
|     def filter_attributes(self, queryset, name, value): | ||||
|         """Filter attributes by query args""" | ||||
|         try: | ||||
| @ -772,6 +767,9 @@ class UserViewSet(UsedByMixin, ModelViewSet): | ||||
|         response = super().partial_update(request, *args, **kwargs) | ||||
|         instance: User = self.get_object() | ||||
|         if not instance.is_active: | ||||
|             Session.objects.filter(authenticatedsession__user=instance).delete() | ||||
|             sessions = AuthenticatedSession.objects.filter(user=instance) | ||||
|             session_ids = sessions.values_list("session_key", flat=True) | ||||
|             cache.delete_many(f"{KEY_PREFIX}{session}" for session in session_ids) | ||||
|             sessions.delete() | ||||
|             LOGGER.debug("Deleted user's sessions", user=instance.username) | ||||
|         return response | ||||
|  | ||||
| @ -20,8 +20,6 @@ from rest_framework.serializers import ( | ||||
|     raise_errors_on_nested_writes, | ||||
| ) | ||||
|  | ||||
| from authentik.rbac.permissions import assign_initial_permissions | ||||
|  | ||||
|  | ||||
| def is_dict(value: Any): | ||||
|     """Ensure a value is a dictionary, useful for JSONFields""" | ||||
| @ -31,14 +29,6 @@ def is_dict(value: Any): | ||||
|  | ||||
|  | ||||
| class ModelSerializer(BaseModelSerializer): | ||||
|     def create(self, validated_data): | ||||
|         instance = super().create(validated_data) | ||||
|  | ||||
|         request = self.context.get("request") | ||||
|         if request and hasattr(request, "user") and not request.user.is_anonymous: | ||||
|             assign_initial_permissions(request.user, instance) | ||||
|  | ||||
|         return instance | ||||
|  | ||||
|     def update(self, instance: Model, validated_data): | ||||
|         raise_errors_on_nested_writes("update", self, validated_data) | ||||
|  | ||||
| @ -32,5 +32,5 @@ class AuthentikCoreConfig(ManagedAppConfig): | ||||
|                 "name": "authentik Built-in", | ||||
|                 "slug": "authentik-built-in", | ||||
|             }, | ||||
|             managed=Source.MANAGED_INBUILT, | ||||
|             managed="goauthentik.io/sources/inbuilt", | ||||
|         ) | ||||
|  | ||||
| @ -24,15 +24,6 @@ class InbuiltBackend(ModelBackend): | ||||
|         self.set_method("password", request) | ||||
|         return user | ||||
|  | ||||
|     async def aauthenticate( | ||||
|         self, request: HttpRequest, username: str | None, password: str | None, **kwargs: Any | ||||
|     ) -> User | None: | ||||
|         user = await super().aauthenticate(request, username=username, password=password, **kwargs) | ||||
|         if not user: | ||||
|             return None | ||||
|         self.set_method("password", request) | ||||
|         return user | ||||
|  | ||||
|     def set_method(self, method: str, request: HttpRequest | None, **kwargs): | ||||
|         """Set method data on current flow, if possbiel""" | ||||
|         if not request: | ||||
|  | ||||
| @ -1,15 +0,0 @@ | ||||
| """Change user type""" | ||||
|  | ||||
| from importlib import import_module | ||||
|  | ||||
| from django.conf import settings | ||||
|  | ||||
| from authentik.tenants.management import TenantCommand | ||||
|  | ||||
|  | ||||
| class Command(TenantCommand): | ||||
|     """Delete all sessions""" | ||||
|  | ||||
|     def handle_per_tenant(self, **options): | ||||
|         engine = import_module(settings.SESSION_ENGINE) | ||||
|         engine.SessionStore.clear_expired() | ||||
| @ -2,14 +2,9 @@ | ||||
|  | ||||
| from collections.abc import Callable | ||||
| from contextvars import ContextVar | ||||
| from functools import partial | ||||
| from uuid import uuid4 | ||||
|  | ||||
| from django.contrib.auth.models import AnonymousUser | ||||
| from django.core.exceptions import ImproperlyConfigured | ||||
| from django.http import HttpRequest, HttpResponse | ||||
| from django.utils.deprecation import MiddlewareMixin | ||||
| from django.utils.functional import SimpleLazyObject | ||||
| from django.utils.translation import override | ||||
| from sentry_sdk.api import set_tag | ||||
| from structlog.contextvars import STRUCTLOG_KEY_PREFIX | ||||
| @ -25,40 +20,6 @@ CTX_HOST = ContextVar[str | None](STRUCTLOG_KEY_PREFIX + "host", default=None) | ||||
| CTX_AUTH_VIA = ContextVar[str | None](STRUCTLOG_KEY_PREFIX + KEY_AUTH_VIA, default=None) | ||||
|  | ||||
|  | ||||
| def get_user(request): | ||||
|     if not hasattr(request, "_cached_user"): | ||||
|         user = None | ||||
|         if (authenticated_session := request.session.get("authenticatedsession", None)) is not None: | ||||
|             user = authenticated_session.user | ||||
|         request._cached_user = user or AnonymousUser() | ||||
|     return request._cached_user | ||||
|  | ||||
|  | ||||
| async def aget_user(request): | ||||
|     if not hasattr(request, "_cached_user"): | ||||
|         user = None | ||||
|         if ( | ||||
|             authenticated_session := await request.session.aget("authenticatedsession", None) | ||||
|         ) is not None: | ||||
|             user = authenticated_session.user | ||||
|         request._cached_user = user or AnonymousUser() | ||||
|     return request._cached_user | ||||
|  | ||||
|  | ||||
| class AuthenticationMiddleware(MiddlewareMixin): | ||||
|     def process_request(self, request): | ||||
|         if not hasattr(request, "session"): | ||||
|             raise ImproperlyConfigured( | ||||
|                 "The Django authentication middleware requires session " | ||||
|                 "middleware to be installed. Edit your MIDDLEWARE setting to " | ||||
|                 "insert " | ||||
|                 "'authentik.root.middleware.SessionMiddleware' before " | ||||
|                 "'authentik.core.middleware.AuthenticationMiddleware'." | ||||
|             ) | ||||
|         request.user = SimpleLazyObject(lambda: get_user(request)) | ||||
|         request.auser = partial(aget_user, request) | ||||
|  | ||||
|  | ||||
| class ImpersonateMiddleware: | ||||
|     """Middleware to impersonate users""" | ||||
|  | ||||
|  | ||||
| @ -1,19 +0,0 @@ | ||||
| # Generated by Django 5.0.13 on 2025-04-07 14:04 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_core", "0043_alter_group_options"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="usersourceconnection", | ||||
|             name="new_identifier", | ||||
|             field=models.TextField(default=""), | ||||
|             preserve_default=False, | ||||
|         ), | ||||
|     ] | ||||
| @ -1,30 +0,0 @@ | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_core", "0044_usersourceconnection_new_identifier"), | ||||
|         ("authentik_sources_kerberos", "0003_migrate_userkerberossourceconnection_identifier"), | ||||
|         ("authentik_sources_oauth", "0009_migrate_useroauthsourceconnection_identifier"), | ||||
|         ("authentik_sources_plex", "0005_migrate_userplexsourceconnection_identifier"), | ||||
|         ("authentik_sources_saml", "0019_migrate_usersamlsourceconnection_identifier"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RenameField( | ||||
|             model_name="usersourceconnection", | ||||
|             old_name="new_identifier", | ||||
|             new_name="identifier", | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="usersourceconnection", | ||||
|             index=models.Index(fields=["identifier"], name="authentik_c_identif_59226f_idx"), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="usersourceconnection", | ||||
|             index=models.Index( | ||||
|                 fields=["source", "identifier"], name="authentik_c_source__649e04_idx" | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @ -1,238 +0,0 @@ | ||||
| # Generated by Django 5.0.11 on 2025-01-27 12:58 | ||||
|  | ||||
| import uuid | ||||
| import pickle  # nosec | ||||
| from django.core import signing | ||||
| from django.contrib.auth import BACKEND_SESSION_KEY, HASH_SESSION_KEY, SESSION_KEY | ||||
| from django.db import migrations, models | ||||
| import django.db.models.deletion | ||||
| from django.conf import settings | ||||
| from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||
| from django.utils.timezone import now, timedelta | ||||
| from authentik.lib.migrations import progress_bar | ||||
| from authentik.root.middleware import ClientIPMiddleware | ||||
|  | ||||
|  | ||||
| SESSION_CACHE_ALIAS = "default" | ||||
|  | ||||
|  | ||||
| class PickleSerializer: | ||||
|     """ | ||||
|     Simple wrapper around pickle to be used in signing.dumps()/loads() and | ||||
|     cache backends. | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, protocol=None): | ||||
|         self.protocol = pickle.HIGHEST_PROTOCOL if protocol is None else protocol | ||||
|  | ||||
|     def dumps(self, obj): | ||||
|         """Pickle data to be stored in redis""" | ||||
|         return pickle.dumps(obj, self.protocol) | ||||
|  | ||||
|     def loads(self, data): | ||||
|         """Unpickle data to be loaded from redis""" | ||||
|         return pickle.loads(data)  # nosec | ||||
|  | ||||
|  | ||||
| def _migrate_session( | ||||
|     apps, | ||||
|     db_alias, | ||||
|     session_key, | ||||
|     session_data, | ||||
|     expires, | ||||
| ): | ||||
|     Session = apps.get_model("authentik_core", "Session") | ||||
|     OldAuthenticatedSession = apps.get_model("authentik_core", "OldAuthenticatedSession") | ||||
|     AuthenticatedSession = apps.get_model("authentik_core", "AuthenticatedSession") | ||||
|  | ||||
|     old_auth_session = ( | ||||
|         OldAuthenticatedSession.objects.using(db_alias).filter(session_key=session_key).first() | ||||
|     ) | ||||
|  | ||||
|     args = { | ||||
|         "session_key": session_key, | ||||
|         "expires": expires, | ||||
|         "last_ip": ClientIPMiddleware.default_ip, | ||||
|         "last_user_agent": "", | ||||
|         "session_data": {}, | ||||
|     } | ||||
|     for k, v in session_data.items(): | ||||
|         if k == "authentik/stages/user_login/last_ip": | ||||
|             args["last_ip"] = v | ||||
|         elif k in ["last_user_agent", "last_used"]: | ||||
|             args[k] = v | ||||
|         elif args in [SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY]: | ||||
|             pass | ||||
|         else: | ||||
|             args["session_data"][k] = v | ||||
|     if old_auth_session: | ||||
|         args["last_user_agent"] = old_auth_session.last_user_agent | ||||
|         args["last_used"] = old_auth_session.last_used | ||||
|  | ||||
|     args["session_data"] = pickle.dumps(args["session_data"]) | ||||
|     session = Session.objects.using(db_alias).create(**args) | ||||
|  | ||||
|     if old_auth_session: | ||||
|         AuthenticatedSession.objects.using(db_alias).create( | ||||
|             session=session, | ||||
|             user=old_auth_session.user, | ||||
|         ) | ||||
|  | ||||
|  | ||||
| def migrate_redis_sessions(apps, schema_editor): | ||||
|     from django.core.cache import caches | ||||
|  | ||||
|     db_alias = schema_editor.connection.alias | ||||
|     cache = caches[SESSION_CACHE_ALIAS] | ||||
|  | ||||
|     # Not a redis cache, skipping | ||||
|     if not hasattr(cache, "keys"): | ||||
|         return | ||||
|  | ||||
|     print("\nMigrating Redis sessions to database, this might take a couple of minutes...") | ||||
|     for key, session_data in progress_bar(cache.get_many(cache.keys(f"{KEY_PREFIX}*")).items()): | ||||
|         _migrate_session( | ||||
|             apps=apps, | ||||
|             db_alias=db_alias, | ||||
|             session_key=key.removeprefix(KEY_PREFIX), | ||||
|             session_data=session_data, | ||||
|             expires=now() + timedelta(seconds=cache.ttl(key)), | ||||
|         ) | ||||
|  | ||||
|  | ||||
| def migrate_database_sessions(apps, schema_editor): | ||||
|     DjangoSession = apps.get_model("sessions", "Session") | ||||
|     db_alias = schema_editor.connection.alias | ||||
|  | ||||
|     print("\nMigration database sessions, this might take a couple of minutes...") | ||||
|     for django_session in progress_bar(DjangoSession.objects.using(db_alias).all()): | ||||
|         session_data = signing.loads( | ||||
|             django_session.session_data, | ||||
|             salt="django.contrib.sessions.SessionStore", | ||||
|             serializer=PickleSerializer, | ||||
|         ) | ||||
|         _migrate_session( | ||||
|             apps=apps, | ||||
|             db_alias=db_alias, | ||||
|             session_key=django_session.session_key, | ||||
|             session_data=session_data, | ||||
|             expires=django_session.expire_date, | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("sessions", "0001_initial"), | ||||
|         ("authentik_core", "0045_rename_new_identifier_usersourceconnection_identifier_and_more"), | ||||
|         ("authentik_providers_oauth2", "0027_accesstoken_authentik_p_expires_9f24a5_idx_and_more"), | ||||
|         ("authentik_providers_rac", "0006_connectiontoken_authentik_p_expires_91f148_idx_and_more"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         # Rename AuthenticatedSession to OldAuthenticatedSession | ||||
|         migrations.RenameModel( | ||||
|             old_name="AuthenticatedSession", | ||||
|             new_name="OldAuthenticatedSession", | ||||
|         ), | ||||
|         migrations.RenameIndex( | ||||
|             model_name="oldauthenticatedsession", | ||||
|             new_name="authentik_c_expires_cf4f72_idx", | ||||
|             old_name="authentik_c_expires_08251d_idx", | ||||
|         ), | ||||
|         migrations.RenameIndex( | ||||
|             model_name="oldauthenticatedsession", | ||||
|             new_name="authentik_c_expirin_c1f17f_idx", | ||||
|             old_name="authentik_c_expirin_9cd839_idx", | ||||
|         ), | ||||
|         migrations.RenameIndex( | ||||
|             model_name="oldauthenticatedsession", | ||||
|             new_name="authentik_c_expirin_e04f5d_idx", | ||||
|             old_name="authentik_c_expirin_195a84_idx", | ||||
|         ), | ||||
|         migrations.RenameIndex( | ||||
|             model_name="oldauthenticatedsession", | ||||
|             new_name="authentik_c_session_a44819_idx", | ||||
|             old_name="authentik_c_session_d0f005_idx", | ||||
|         ), | ||||
|         migrations.RunSQL( | ||||
|             sql="ALTER INDEX authentik_core_authenticatedsession_user_id_5055b6cf RENAME TO authentik_core_oldauthenticatedsession_user_id_5055b6cf", | ||||
|             reverse_sql="ALTER INDEX authentik_core_oldauthenticatedsession_user_id_5055b6cf RENAME TO authentik_core_authenticatedsession_user_id_5055b6cf", | ||||
|         ), | ||||
|         # Create new Session and AuthenticatedSession models | ||||
|         migrations.CreateModel( | ||||
|             name="Session", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "session_key", | ||||
|                     models.CharField( | ||||
|                         max_length=40, primary_key=True, serialize=False, verbose_name="session key" | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("expires", models.DateTimeField(default=None, null=True)), | ||||
|                 ("expiring", models.BooleanField(default=True)), | ||||
|                 ("session_data", models.BinaryField(verbose_name="session data")), | ||||
|                 ("last_ip", models.GenericIPAddressField()), | ||||
|                 ("last_user_agent", models.TextField(blank=True)), | ||||
|                 ("last_used", models.DateTimeField(auto_now=True)), | ||||
|             ], | ||||
|             options={ | ||||
|                 "default_permissions": [], | ||||
|                 "verbose_name": "Session", | ||||
|                 "verbose_name_plural": "Sessions", | ||||
|             }, | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="session", | ||||
|             index=models.Index(fields=["expires"], name="authentik_c_expires_d2f607_idx"), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="session", | ||||
|             index=models.Index(fields=["expiring"], name="authentik_c_expirin_7c2cfb_idx"), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="session", | ||||
|             index=models.Index( | ||||
|                 fields=["expiring", "expires"], name="authentik_c_expirin_1ab2e4_idx" | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="session", | ||||
|             index=models.Index( | ||||
|                 fields=["expires", "session_key"], name="authentik_c_expires_c49143_idx" | ||||
|             ), | ||||
|         ), | ||||
|         migrations.CreateModel( | ||||
|             name="AuthenticatedSession", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "session", | ||||
|                     models.OneToOneField( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         primary_key=True, | ||||
|                         serialize=False, | ||||
|                         to="authentik_core.session", | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("uuid", models.UUIDField(default=uuid.uuid4, unique=True)), | ||||
|                 ( | ||||
|                     "user", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|             options={ | ||||
|                 "verbose_name": "Authenticated Session", | ||||
|                 "verbose_name_plural": "Authenticated Sessions", | ||||
|             }, | ||||
|         ), | ||||
|         migrations.RunPython( | ||||
|             code=migrate_redis_sessions, | ||||
|             reverse_code=migrations.RunPython.noop, | ||||
|         ), | ||||
|         migrations.RunPython( | ||||
|             code=migrate_database_sessions, | ||||
|             reverse_code=migrations.RunPython.noop, | ||||
|         ), | ||||
|     ] | ||||
| @ -1,18 +0,0 @@ | ||||
| # Generated by Django 5.0.11 on 2025-01-27 13:02 | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_core", "0046_session_and_more"), | ||||
|         ("authentik_providers_rac", "0007_migrate_session"), | ||||
|         ("authentik_providers_oauth2", "0028_migrate_session"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.DeleteModel( | ||||
|             name="OldAuthenticatedSession", | ||||
|         ), | ||||
|     ] | ||||
| @ -1,7 +1,6 @@ | ||||
| """authentik core models""" | ||||
|  | ||||
| from datetime import datetime | ||||
| from enum import StrEnum | ||||
| from hashlib import sha256 | ||||
| from typing import Any, Optional, Self | ||||
| from uuid import uuid4 | ||||
| @ -10,7 +9,6 @@ from deepmerge import always_merger | ||||
| from django.contrib.auth.hashers import check_password | ||||
| from django.contrib.auth.models import AbstractUser | ||||
| from django.contrib.auth.models import UserManager as DjangoUserManager | ||||
| from django.contrib.sessions.base_session import AbstractBaseSession | ||||
| from django.db import models | ||||
| from django.db.models import Q, QuerySet, options | ||||
| from django.db.models.constants import LOOKUP_SEP | ||||
| @ -648,30 +646,19 @@ class SourceUserMatchingModes(models.TextChoices): | ||||
|     """Different modes a source can handle new/returning users""" | ||||
|  | ||||
|     IDENTIFIER = "identifier", _("Use the source-specific identifier") | ||||
|     EMAIL_LINK = ( | ||||
|         "email_link", | ||||
|         _( | ||||
|             "Link to a user with identical email address. Can have security implications " | ||||
|             "when a source doesn't validate email addresses." | ||||
|         ), | ||||
|     EMAIL_LINK = "email_link", _( | ||||
|         "Link to a user with identical email address. Can have security implications " | ||||
|         "when a source doesn't validate email addresses." | ||||
|     ) | ||||
|     EMAIL_DENY = ( | ||||
|         "email_deny", | ||||
|         _( | ||||
|             "Use the user's email address, but deny enrollment when the email address already " | ||||
|             "exists." | ||||
|         ), | ||||
|     EMAIL_DENY = "email_deny", _( | ||||
|         "Use the user's email address, but deny enrollment when the email address already exists." | ||||
|     ) | ||||
|     USERNAME_LINK = ( | ||||
|         "username_link", | ||||
|         _( | ||||
|             "Link to a user with identical username. Can have security implications " | ||||
|             "when a username is used with another source." | ||||
|         ), | ||||
|     USERNAME_LINK = "username_link", _( | ||||
|         "Link to a user with identical username. Can have security implications " | ||||
|         "when a username is used with another source." | ||||
|     ) | ||||
|     USERNAME_DENY = ( | ||||
|         "username_deny", | ||||
|         _("Use the user's username, but deny enrollment when the username already exists."), | ||||
|     USERNAME_DENY = "username_deny", _( | ||||
|         "Use the user's username, but deny enrollment when the username already exists." | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @ -679,24 +666,18 @@ class SourceGroupMatchingModes(models.TextChoices): | ||||
|     """Different modes a source can handle new/returning groups""" | ||||
|  | ||||
|     IDENTIFIER = "identifier", _("Use the source-specific identifier") | ||||
|     NAME_LINK = ( | ||||
|         "name_link", | ||||
|         _( | ||||
|             "Link to a group with identical name. Can have security implications " | ||||
|             "when a group name is used with another source." | ||||
|         ), | ||||
|     NAME_LINK = "name_link", _( | ||||
|         "Link to a group with identical name. Can have security implications " | ||||
|         "when a group name is used with another source." | ||||
|     ) | ||||
|     NAME_DENY = ( | ||||
|         "name_deny", | ||||
|         _("Use the group name, but deny enrollment when the name already exists."), | ||||
|     NAME_DENY = "name_deny", _( | ||||
|         "Use the group name, but deny enrollment when the name already exists." | ||||
|     ) | ||||
|  | ||||
|  | ||||
| class Source(ManagedModel, SerializerModel, PolicyBindingModel): | ||||
|     """Base Authentication source, i.e. an OAuth Provider, SAML Remote or LDAP Server""" | ||||
|  | ||||
|     MANAGED_INBUILT = "goauthentik.io/sources/inbuilt" | ||||
|  | ||||
|     name = models.TextField(help_text=_("Source's display Name.")) | ||||
|     slug = models.SlugField(help_text=_("Internal source name, used in URLs."), unique=True) | ||||
|  | ||||
| @ -747,7 +728,8 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | ||||
|         choices=SourceGroupMatchingModes.choices, | ||||
|         default=SourceGroupMatchingModes.IDENTIFIER, | ||||
|         help_text=_( | ||||
|             "How the source determines if an existing group should be used or a new group created." | ||||
|             "How the source determines if an existing group should be used or " | ||||
|             "a new group created." | ||||
|         ), | ||||
|     ) | ||||
|  | ||||
| @ -777,17 +759,11 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | ||||
|     @property | ||||
|     def component(self) -> str: | ||||
|         """Return component used to edit this object""" | ||||
|         if self.managed == self.MANAGED_INBUILT: | ||||
|             return "" | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     @property | ||||
|     def property_mapping_type(self) -> "type[PropertyMapping]": | ||||
|         """Return property mapping type used by this object""" | ||||
|         if self.managed == self.MANAGED_INBUILT: | ||||
|             from authentik.core.models import PropertyMapping | ||||
|  | ||||
|             return PropertyMapping | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     def ui_login_button(self, request: HttpRequest) -> UILoginButton | None: | ||||
| @ -802,14 +778,10 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | ||||
|  | ||||
|     def get_base_user_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]: | ||||
|         """Get base properties for a user to build final properties upon.""" | ||||
|         if self.managed == self.MANAGED_INBUILT: | ||||
|             return {} | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     def get_base_group_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]: | ||||
|         """Get base properties for a group to build final properties upon.""" | ||||
|         if self.managed == self.MANAGED_INBUILT: | ||||
|             return {} | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     def __str__(self): | ||||
| @ -840,7 +812,6 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel): | ||||
|  | ||||
|     user = models.ForeignKey(User, on_delete=models.CASCADE) | ||||
|     source = models.ForeignKey(Source, on_delete=models.CASCADE) | ||||
|     identifier = models.TextField() | ||||
|  | ||||
|     objects = InheritanceManager() | ||||
|  | ||||
| @ -854,10 +825,6 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel): | ||||
|  | ||||
|     class Meta: | ||||
|         unique_together = (("user", "source"),) | ||||
|         indexes = ( | ||||
|             models.Index(fields=("identifier",)), | ||||
|             models.Index(fields=("source", "identifier")), | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class GroupSourceConnection(SerializerModel, CreatedUpdatedModel): | ||||
| @ -1028,75 +995,45 @@ class PropertyMapping(SerializerModel, ManagedModel): | ||||
|         verbose_name_plural = _("Property Mappings") | ||||
|  | ||||
|  | ||||
| class Session(ExpiringModel, AbstractBaseSession): | ||||
|     """User session with extra fields for fast access""" | ||||
| class AuthenticatedSession(ExpiringModel): | ||||
|     """Additional session class for authenticated users. Augments the standard django session | ||||
|     to achieve the following: | ||||
|         - Make it queryable by user | ||||
|         - Have a direct connection to user objects | ||||
|         - Allow users to view their own sessions and terminate them | ||||
|         - Save structured and well-defined information. | ||||
|     """ | ||||
|  | ||||
|     # Remove upstream field because we're using our own ExpiringModel | ||||
|     expire_date = None | ||||
|     session_data = models.BinaryField(_("session data")) | ||||
|     uuid = models.UUIDField(default=uuid4, primary_key=True) | ||||
|  | ||||
|     # Keep in sync with Session.Keys | ||||
|     last_ip = models.GenericIPAddressField() | ||||
|     session_key = models.CharField(max_length=40) | ||||
|     user = models.ForeignKey(User, on_delete=models.CASCADE) | ||||
|  | ||||
|     last_ip = models.TextField() | ||||
|     last_user_agent = models.TextField(blank=True) | ||||
|     last_used = models.DateTimeField(auto_now=True) | ||||
|  | ||||
|     class Meta: | ||||
|         verbose_name = _("Session") | ||||
|         verbose_name_plural = _("Sessions") | ||||
|         indexes = ExpiringModel.Meta.indexes + [ | ||||
|             models.Index(fields=["expires", "session_key"]), | ||||
|         ] | ||||
|         default_permissions = [] | ||||
|  | ||||
|     def __str__(self): | ||||
|         return self.session_key | ||||
|  | ||||
|     class Keys(StrEnum): | ||||
|         """ | ||||
|         Keys to be set with the session interface for the fields above to be updated. | ||||
|  | ||||
|         If a field is added here that needs to be initialized when the session is initialized, | ||||
|         it must also be reflected in authentik.root.middleware.SessionMiddleware.process_request | ||||
|         and in authentik.core.sessions.SessionStore.__init__ | ||||
|         """ | ||||
|  | ||||
|         LAST_IP = "last_ip" | ||||
|         LAST_USER_AGENT = "last_user_agent" | ||||
|         LAST_USED = "last_used" | ||||
|  | ||||
|     @classmethod | ||||
|     def get_session_store_class(cls): | ||||
|         from authentik.core.sessions import SessionStore | ||||
|  | ||||
|         return SessionStore | ||||
|  | ||||
|     def get_decoded(self): | ||||
|         raise NotImplementedError | ||||
|  | ||||
|  | ||||
| class AuthenticatedSession(SerializerModel): | ||||
|     session = models.OneToOneField(Session, on_delete=models.CASCADE, primary_key=True) | ||||
|     # We use the session as primary key, but we need the API to be able to reference | ||||
|     # this object uniquely without exposing the session key | ||||
|     uuid = models.UUIDField(default=uuid4, unique=True) | ||||
|  | ||||
|     user = models.ForeignKey(User, on_delete=models.CASCADE) | ||||
|  | ||||
|     class Meta: | ||||
|         verbose_name = _("Authenticated Session") | ||||
|         verbose_name_plural = _("Authenticated Sessions") | ||||
|         indexes = ExpiringModel.Meta.indexes + [ | ||||
|             models.Index(fields=["session_key"]), | ||||
|         ] | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         return f"Authenticated Session {str(self.pk)[:10]}" | ||||
|         return f"Authenticated Session {self.session_key[:10]}" | ||||
|  | ||||
|     @staticmethod | ||||
|     def from_request(request: HttpRequest, user: User) -> Optional["AuthenticatedSession"]: | ||||
|         """Create a new session from a http request""" | ||||
|         if not hasattr(request, "session") or not request.session.exists( | ||||
|             request.session.session_key | ||||
|         ): | ||||
|         from authentik.root.middleware import ClientIPMiddleware | ||||
|  | ||||
|         if not hasattr(request, "session") or not request.session.session_key: | ||||
|             return None | ||||
|         return AuthenticatedSession( | ||||
|             session=Session.objects.filter(session_key=request.session.session_key).first(), | ||||
|             session_key=request.session.session_key, | ||||
|             user=user, | ||||
|             last_ip=ClientIPMiddleware.get_client_ip(request), | ||||
|             last_user_agent=request.META.get("HTTP_USER_AGENT", ""), | ||||
|             expires=request.session.get_expiry_date(), | ||||
|         ) | ||||
|  | ||||
| @ -1,168 +0,0 @@ | ||||
| """authentik sessions engine""" | ||||
|  | ||||
| import pickle  # nosec | ||||
|  | ||||
| from django.contrib.auth import BACKEND_SESSION_KEY, HASH_SESSION_KEY, SESSION_KEY | ||||
| from django.contrib.sessions.backends.db import SessionStore as SessionBase | ||||
| from django.core.exceptions import SuspiciousOperation | ||||
| from django.utils import timezone | ||||
| from django.utils.functional import cached_property | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.root.middleware import ClientIPMiddleware | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| class SessionStore(SessionBase): | ||||
|     def __init__(self, session_key=None, last_ip=None, last_user_agent=""): | ||||
|         super().__init__(session_key) | ||||
|         self._create_kwargs = { | ||||
|             "last_ip": last_ip or ClientIPMiddleware.default_ip, | ||||
|             "last_user_agent": last_user_agent, | ||||
|         } | ||||
|  | ||||
|     @classmethod | ||||
|     def get_model_class(cls): | ||||
|         from authentik.core.models import Session | ||||
|  | ||||
|         return Session | ||||
|  | ||||
|     @cached_property | ||||
|     def model_fields(self): | ||||
|         return [k.value for k in self.model.Keys] | ||||
|  | ||||
|     def _get_session_from_db(self): | ||||
|         try: | ||||
|             return ( | ||||
|                 self.model.objects.select_related( | ||||
|                     "authenticatedsession", | ||||
|                     "authenticatedsession__user", | ||||
|                 ) | ||||
|                 .prefetch_related( | ||||
|                     "authenticatedsession__user__groups", | ||||
|                     "authenticatedsession__user__user_permissions", | ||||
|                 ) | ||||
|                 .get( | ||||
|                     session_key=self.session_key, | ||||
|                     expires__gt=timezone.now(), | ||||
|                 ) | ||||
|             ) | ||||
|         except (self.model.DoesNotExist, SuspiciousOperation) as exc: | ||||
|             if isinstance(exc, SuspiciousOperation): | ||||
|                 LOGGER.warning(str(exc)) | ||||
|             self._session_key = None | ||||
|  | ||||
|     async def _aget_session_from_db(self): | ||||
|         try: | ||||
|             return ( | ||||
|                 await self.model.objects.select_related( | ||||
|                     "authenticatedsession", | ||||
|                     "authenticatedsession__user", | ||||
|                 ) | ||||
|                 .prefetch_related( | ||||
|                     "authenticatedsession__user__groups", | ||||
|                     "authenticatedsession__user__user_permissions", | ||||
|                 ) | ||||
|                 .aget( | ||||
|                     session_key=self.session_key, | ||||
|                     expires__gt=timezone.now(), | ||||
|                 ) | ||||
|             ) | ||||
|         except (self.model.DoesNotExist, SuspiciousOperation) as exc: | ||||
|             if isinstance(exc, SuspiciousOperation): | ||||
|                 LOGGER.warning(str(exc)) | ||||
|             self._session_key = None | ||||
|  | ||||
|     def encode(self, session_dict): | ||||
|         return pickle.dumps(session_dict, protocol=pickle.HIGHEST_PROTOCOL) | ||||
|  | ||||
|     def decode(self, session_data): | ||||
|         try: | ||||
|             return pickle.loads(session_data)  # nosec | ||||
|         except pickle.PickleError: | ||||
|             # ValueError, unpickling exceptions. If any of these happen, just return an empty | ||||
|             # dictionary (an empty session) | ||||
|             pass | ||||
|         return {} | ||||
|  | ||||
|     def load(self): | ||||
|         s = self._get_session_from_db() | ||||
|         if s: | ||||
|             return { | ||||
|                 "authenticatedsession": getattr(s, "authenticatedsession", None), | ||||
|                 **{k: getattr(s, k) for k in self.model_fields}, | ||||
|                 **self.decode(s.session_data), | ||||
|             } | ||||
|         else: | ||||
|             return {} | ||||
|  | ||||
|     async def aload(self): | ||||
|         s = await self._aget_session_from_db() | ||||
|         if s: | ||||
|             return { | ||||
|                 "authenticatedsession": getattr(s, "authenticatedsession", None), | ||||
|                 **{k: getattr(s, k) for k in self.model_fields}, | ||||
|                 **self.decode(s.session_data), | ||||
|             } | ||||
|         else: | ||||
|             return {} | ||||
|  | ||||
|     def create_model_instance(self, data): | ||||
|         args = { | ||||
|             "session_key": self._get_or_create_session_key(), | ||||
|             "expires": self.get_expiry_date(), | ||||
|             "session_data": {}, | ||||
|             **self._create_kwargs, | ||||
|         } | ||||
|         for k, v in data.items(): | ||||
|             # Don't save: | ||||
|             # - unused auth data | ||||
|             # - related models | ||||
|             if k in [SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY, "authenticatedsession"]: | ||||
|                 pass | ||||
|             elif k in self.model_fields: | ||||
|                 args[k] = v | ||||
|             else: | ||||
|                 args["session_data"][k] = v | ||||
|         args["session_data"] = self.encode(args["session_data"]) | ||||
|         return self.model(**args) | ||||
|  | ||||
|     async def acreate_model_instance(self, data): | ||||
|         args = { | ||||
|             "session_key": await self._aget_or_create_session_key(), | ||||
|             "expires": await self.aget_expiry_date(), | ||||
|             "session_data": {}, | ||||
|             **self._create_kwargs, | ||||
|         } | ||||
|         for k, v in data.items(): | ||||
|             # Don't save: | ||||
|             # - unused auth data | ||||
|             # - related models | ||||
|             if k in [SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY, "authenticatedsession"]: | ||||
|                 pass | ||||
|             elif k in self.model_fields: | ||||
|                 args[k] = v | ||||
|             else: | ||||
|                 args["session_data"][k] = v | ||||
|         args["session_data"] = self.encode(args["session_data"]) | ||||
|         return self.model(**args) | ||||
|  | ||||
|     @classmethod | ||||
|     def clear_expired(cls): | ||||
|         cls.get_model_class().objects.filter(expires__lt=timezone.now()).delete() | ||||
|  | ||||
|     @classmethod | ||||
|     async def aclear_expired(cls): | ||||
|         await cls.get_model_class().objects.filter(expires__lt=timezone.now()).adelete() | ||||
|  | ||||
|     def cycle_key(self): | ||||
|         data = self._session | ||||
|         key = self.session_key | ||||
|         self.create() | ||||
|         self._session_cache = data | ||||
|         if key: | ||||
|             self.delete(key) | ||||
|         if (authenticated_session := data.get("authenticatedsession")) is not None: | ||||
|             authenticated_session.session_id = self.session_key | ||||
|             authenticated_session.save(force_insert=True) | ||||
| @ -1,10 +1,11 @@ | ||||
| """authentik core signals""" | ||||
|  | ||||
| from django.contrib.auth.signals import user_logged_in | ||||
| from django.contrib.auth.signals import user_logged_in, user_logged_out | ||||
| from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||
| from django.core.cache import cache | ||||
| from django.core.signals import Signal | ||||
| from django.db.models import Model | ||||
| from django.db.models.signals import post_delete, post_save, pre_save | ||||
| from django.db.models.signals import post_save, pre_delete, pre_save | ||||
| from django.dispatch import receiver | ||||
| from django.http.request import HttpRequest | ||||
| from structlog.stdlib import get_logger | ||||
| @ -14,7 +15,6 @@ from authentik.core.models import ( | ||||
|     AuthenticatedSession, | ||||
|     BackchannelProvider, | ||||
|     ExpiringModel, | ||||
|     Session, | ||||
|     User, | ||||
|     default_token_duration, | ||||
| ) | ||||
| @ -49,10 +49,19 @@ def user_logged_in_session(sender, request: HttpRequest, user: User, **_): | ||||
|         session.save() | ||||
|  | ||||
|  | ||||
| @receiver(post_delete, sender=AuthenticatedSession) | ||||
| @receiver(user_logged_out) | ||||
| def user_logged_out_session(sender, request: HttpRequest, user: User, **_): | ||||
|     """Delete AuthenticatedSession if it exists""" | ||||
|     if not request.session or not request.session.session_key: | ||||
|         return | ||||
|     AuthenticatedSession.objects.filter(session_key=request.session.session_key).delete() | ||||
|  | ||||
|  | ||||
| @receiver(pre_delete, sender=AuthenticatedSession) | ||||
| def authenticated_session_delete(sender: type[Model], instance: "AuthenticatedSession", **_): | ||||
|     """Delete session when authenticated session is deleted""" | ||||
|     Session.objects.filter(session_key=instance.pk).delete() | ||||
|     cache_key = f"{KEY_PREFIX}{instance.session_key}" | ||||
|     cache.delete(cache_key) | ||||
|  | ||||
|  | ||||
| @receiver(pre_save) | ||||
|  | ||||
| @ -48,7 +48,6 @@ LOGGER = get_logger() | ||||
|  | ||||
| PLAN_CONTEXT_SOURCE_GROUPS = "source_groups" | ||||
| SESSION_KEY_SOURCE_FLOW_STAGES = "authentik/flows/source_flow_stages" | ||||
| SESSION_KEY_SOURCE_FLOW_CONTEXT = "authentik/flows/source_flow_context" | ||||
| SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token"  # nosec | ||||
|  | ||||
|  | ||||
| @ -262,7 +261,6 @@ class SourceFlowManager: | ||||
|                 plan.append_stage(stage) | ||||
|         for stage in self.request.session.get(SESSION_KEY_SOURCE_FLOW_STAGES, []): | ||||
|             plan.append_stage(stage) | ||||
|         plan.context.update(self.request.session.get(SESSION_KEY_SOURCE_FLOW_CONTEXT, {})) | ||||
|         return plan.to_redirect(self.request, flow) | ||||
|  | ||||
|     def handle_auth( | ||||
|  | ||||
| @ -2,16 +2,22 @@ | ||||
|  | ||||
| from datetime import datetime, timedelta | ||||
|  | ||||
| from django.conf import ImproperlyConfigured | ||||
| from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||
| from django.contrib.sessions.backends.db import SessionStore as DBSessionStore | ||||
| from django.core.cache import cache | ||||
| from django.utils.timezone import now | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.core.models import ( | ||||
|     USER_ATTRIBUTE_EXPIRES, | ||||
|     USER_ATTRIBUTE_GENERATED, | ||||
|     AuthenticatedSession, | ||||
|     ExpiringModel, | ||||
|     User, | ||||
| ) | ||||
| from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.root.celery import CELERY_APP | ||||
|  | ||||
| LOGGER = get_logger() | ||||
| @ -32,6 +38,40 @@ def clean_expired_models(self: SystemTask): | ||||
|             obj.expire_action() | ||||
|         LOGGER.debug("Expired models", model=cls, amount=amount) | ||||
|         messages.append(f"Expired {amount} {cls._meta.verbose_name_plural}") | ||||
|     # Special case | ||||
|     amount = 0 | ||||
|  | ||||
|     for session in AuthenticatedSession.objects.all(): | ||||
|         match CONFIG.get("session_storage", "cache"): | ||||
|             case "cache": | ||||
|                 cache_key = f"{KEY_PREFIX}{session.session_key}" | ||||
|                 value = None | ||||
|                 try: | ||||
|                     value = cache.get(cache_key) | ||||
|  | ||||
|                 except Exception as exc: | ||||
|                     LOGGER.debug("Failed to get session from cache", exc=exc) | ||||
|                 if not value: | ||||
|                     session.delete() | ||||
|                     amount += 1 | ||||
|             case "db": | ||||
|                 if not ( | ||||
|                     DBSessionStore.get_model_class() | ||||
|                     .objects.filter(session_key=session.session_key, expire_date__gt=now()) | ||||
|                     .exists() | ||||
|                 ): | ||||
|                     session.delete() | ||||
|                     amount += 1 | ||||
|             case _: | ||||
|                 # Should never happen, as we check for other values in authentik/root/settings.py | ||||
|                 raise ImproperlyConfigured( | ||||
|                     "Invalid session_storage setting, allowed values are db and cache" | ||||
|                 ) | ||||
|     if CONFIG.get("session_storage", "cache") == "db": | ||||
|         DBSessionStore.clear_expired() | ||||
|     LOGGER.debug("Expired sessions", model=AuthenticatedSession, amount=amount) | ||||
|  | ||||
|     messages.append(f"Expired {amount} {AuthenticatedSession._meta.verbose_name_plural}") | ||||
|     self.set_status(TaskStatus.SUCCESSFUL, *messages) | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -16,7 +16,7 @@ | ||||
|         {% block head_before %} | ||||
|         {% endblock %} | ||||
|         <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}"> | ||||
|         <style>{{ brand.branding_custom_css }}</style> | ||||
|         <link rel="stylesheet" type="text/css" href="{% static 'dist/custom.css' %}" data-inject> | ||||
|         <script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script> | ||||
|         <script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script> | ||||
|         {% block head %} | ||||
|  | ||||
| @ -4,7 +4,7 @@ | ||||
| {% load i18n %} | ||||
|  | ||||
| {% block head_before %} | ||||
| <link rel="prefetch" href="{{ request.brand.branding_default_flow_background_url }}" /> | ||||
| <link rel="prefetch" href="{% static 'dist/assets/images/flow_background.jpg' %}" /> | ||||
| <link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}"> | ||||
| <link rel="stylesheet" type="text/css" href="{% static 'dist/theme-dark.css' %}" media="(prefers-color-scheme: dark)"> | ||||
| {% include "base/header_js.html" %} | ||||
| @ -13,7 +13,7 @@ | ||||
| {% block head %} | ||||
| <style> | ||||
| :root { | ||||
|     --ak-flow-background: url("{{ request.brand.branding_default_flow_background_url }}"); | ||||
|     --ak-flow-background: url("{% static 'dist/assets/images/flow_background.jpg' %}"); | ||||
|     --pf-c-background-image--BackgroundImage: var(--ak-flow-background); | ||||
|     --pf-c-background-image--BackgroundImage-2x: var(--ak-flow-background); | ||||
|     --pf-c-background-image--BackgroundImage--sm: var(--ak-flow-background); | ||||
|  | ||||
| @ -1,17 +1,9 @@ | ||||
| """Test API Utils""" | ||||
|  | ||||
| from rest_framework.exceptions import ValidationError | ||||
| from rest_framework.serializers import ( | ||||
|     HyperlinkedModelSerializer, | ||||
| ) | ||||
| from rest_framework.serializers import ( | ||||
|     ModelSerializer as BaseModelSerializer, | ||||
| ) | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.api.utils import ModelSerializer as CustomModelSerializer | ||||
| from authentik.core.api.utils import is_dict | ||||
| from authentik.lib.utils.reflection import all_subclasses | ||||
|  | ||||
|  | ||||
| class TestAPIUtils(APITestCase): | ||||
| @ -22,14 +14,3 @@ class TestAPIUtils(APITestCase): | ||||
|         self.assertIsNone(is_dict({})) | ||||
|         with self.assertRaises(ValidationError): | ||||
|             is_dict("foo") | ||||
|  | ||||
|     def test_all_serializers_descend_from_custom(self): | ||||
|         """Test that every serializer we define descends from our own ModelSerializer""" | ||||
|         # Weirdly, there's only one serializer in `rest_framework` which descends from | ||||
|         # ModelSerializer: HyperlinkedModelSerializer | ||||
|         expected = {CustomModelSerializer, HyperlinkedModelSerializer} | ||||
|         actual = set(all_subclasses(BaseModelSerializer)) - set( | ||||
|             all_subclasses(CustomModelSerializer) | ||||
|         ) | ||||
|  | ||||
|         self.assertEqual(expected, actual) | ||||
|  | ||||
| @ -5,7 +5,7 @@ from json import loads | ||||
| from django.urls.base import reverse | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import AuthenticatedSession, Session, User | ||||
| from authentik.core.models import User | ||||
| from authentik.core.tests.utils import create_test_admin_user | ||||
|  | ||||
|  | ||||
| @ -30,18 +30,3 @@ class TestAuthenticatedSessionsAPI(APITestCase): | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         body = loads(response.content.decode()) | ||||
|         self.assertEqual(body["pagination"]["count"], 1) | ||||
|  | ||||
|     def test_delete(self): | ||||
|         """Test deletion""" | ||||
|         self.client.force_login(self.user) | ||||
|         self.assertEqual(AuthenticatedSession.objects.all().count(), 1) | ||||
|         self.assertEqual(Session.objects.all().count(), 1) | ||||
|         response = self.client.delete( | ||||
|             reverse( | ||||
|                 "authentik_api:authenticatedsession-detail", | ||||
|                 kwargs={"uuid": AuthenticatedSession.objects.first().uuid}, | ||||
|             ) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 204) | ||||
|         self.assertEqual(AuthenticatedSession.objects.all().count(), 0) | ||||
|         self.assertEqual(Session.objects.all().count(), 0) | ||||
|  | ||||
| @ -1,19 +0,0 @@ | ||||
| from django.apps import apps | ||||
| from django.urls import reverse | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.tests.utils import create_test_admin_user | ||||
|  | ||||
|  | ||||
| class TestSourceAPI(APITestCase): | ||||
|     def setUp(self) -> None: | ||||
|         self.user = create_test_admin_user() | ||||
|         self.client.force_login(self.user) | ||||
|  | ||||
|     def test_builtin_source_used_by(self): | ||||
|         """Test Providers's types endpoint""" | ||||
|         apps.get_app_config("authentik_core").source_inbuilt() | ||||
|         response = self.client.get( | ||||
|             reverse("authentik_api:source-used-by", kwargs={"slug": "authentik-built-in"}), | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
| @ -13,10 +13,7 @@ from authentik.core.models import ( | ||||
|     TokenIntents, | ||||
|     User, | ||||
| ) | ||||
| from authentik.core.tasks import ( | ||||
|     clean_expired_models, | ||||
|     clean_temporary_users, | ||||
| ) | ||||
| from authentik.core.tasks import clean_expired_models, clean_temporary_users | ||||
| from authentik.core.tests.utils import create_test_admin_user | ||||
| from authentik.lib.generators import generate_id | ||||
|  | ||||
|  | ||||
| @ -1,8 +1,9 @@ | ||||
| """Test Users API""" | ||||
|  | ||||
| from datetime import datetime | ||||
| from json import loads | ||||
|  | ||||
| from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||
| from django.core.cache import cache | ||||
| from django.urls.base import reverse | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| @ -10,17 +11,11 @@ from authentik.brands.models import Brand | ||||
| from authentik.core.models import ( | ||||
|     USER_ATTRIBUTE_TOKEN_EXPIRING, | ||||
|     AuthenticatedSession, | ||||
|     Session, | ||||
|     Token, | ||||
|     User, | ||||
|     UserTypes, | ||||
| ) | ||||
| from authentik.core.tests.utils import ( | ||||
|     create_test_admin_user, | ||||
|     create_test_brand, | ||||
|     create_test_flow, | ||||
|     create_test_user, | ||||
| ) | ||||
| from authentik.core.tests.utils import create_test_admin_user, create_test_brand, create_test_flow | ||||
| from authentik.flows.models import FlowDesignation | ||||
| from authentik.lib.generators import generate_id, generate_key | ||||
| from authentik.stages.email.models import EmailStage | ||||
| @ -31,7 +26,7 @@ class TestUsersAPI(APITestCase): | ||||
|  | ||||
|     def setUp(self) -> None: | ||||
|         self.admin = create_test_admin_user() | ||||
|         self.user = create_test_user() | ||||
|         self.user = User.objects.create(username="test-user") | ||||
|  | ||||
|     def test_filter_type(self): | ||||
|         """Test API filtering by type""" | ||||
| @ -46,35 +41,6 @@ class TestUsersAPI(APITestCase): | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|  | ||||
|     def test_filter_is_superuser(self): | ||||
|         """Test API filtering by superuser status""" | ||||
|         User.objects.all().delete() | ||||
|         admin = create_test_admin_user() | ||||
|         self.client.force_login(admin) | ||||
|         # Test superuser | ||||
|         response = self.client.get( | ||||
|             reverse("authentik_api:user-list"), | ||||
|             data={ | ||||
|                 "is_superuser": True, | ||||
|             }, | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         body = loads(response.content) | ||||
|         self.assertEqual(len(body["results"]), 1) | ||||
|         self.assertEqual(body["results"][0]["username"], admin.username) | ||||
|         # Test non-superuser | ||||
|         user = create_test_user() | ||||
|         response = self.client.get( | ||||
|             reverse("authentik_api:user-list"), | ||||
|             data={ | ||||
|                 "is_superuser": False, | ||||
|             }, | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         body = loads(response.content) | ||||
|         self.assertEqual(len(body["results"]), 1, body) | ||||
|         self.assertEqual(body["results"][0]["username"], user.username) | ||||
|  | ||||
|     def test_list_with_groups(self): | ||||
|         """Test listing with groups""" | ||||
|         self.client.force_login(self.admin) | ||||
| @ -133,8 +99,6 @@ class TestUsersAPI(APITestCase): | ||||
|     def test_recovery_email_no_flow(self): | ||||
|         """Test user recovery link (no recovery flow set)""" | ||||
|         self.client.force_login(self.admin) | ||||
|         self.user.email = "" | ||||
|         self.user.save() | ||||
|         response = self.client.post( | ||||
|             reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk}) | ||||
|         ) | ||||
| @ -380,15 +344,12 @@ class TestUsersAPI(APITestCase): | ||||
|         """Ensure sessions are deleted when a user is deactivated""" | ||||
|         user = create_test_admin_user() | ||||
|         session_id = generate_id() | ||||
|         session = Session.objects.create( | ||||
|             session_key=session_id, | ||||
|             last_ip="255.255.255.255", | ||||
|             last_user_agent="", | ||||
|         ) | ||||
|         AuthenticatedSession.objects.create( | ||||
|             session=session, | ||||
|             user=user, | ||||
|             session_key=session_id, | ||||
|             last_ip="", | ||||
|         ) | ||||
|         cache.set(KEY_PREFIX + session_id, "foo") | ||||
|  | ||||
|         self.client.force_login(self.admin) | ||||
|         response = self.client.patch( | ||||
| @ -399,7 +360,5 @@ class TestUsersAPI(APITestCase): | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|  | ||||
|         self.assertFalse(Session.objects.filter(session_key=session_id).exists()) | ||||
|         self.assertFalse( | ||||
|             AuthenticatedSession.objects.filter(session__session_key=session_id).exists() | ||||
|         ) | ||||
|         self.assertIsNone(cache.get(KEY_PREFIX + session_id)) | ||||
|         self.assertFalse(AuthenticatedSession.objects.filter(session_key=session_id).exists()) | ||||
|  | ||||
| @ -1,5 +1,7 @@ | ||||
| """authentik URL Configuration""" | ||||
|  | ||||
| from channels.auth import AuthMiddleware | ||||
| from channels.sessions import CookieMiddleware | ||||
| from django.conf import settings | ||||
| from django.contrib.auth.decorators import login_required | ||||
| from django.urls import path | ||||
| @ -11,11 +13,7 @@ from authentik.core.api.devices import AdminDeviceViewSet, DeviceViewSet | ||||
| from authentik.core.api.groups import GroupViewSet | ||||
| from authentik.core.api.property_mappings import PropertyMappingViewSet | ||||
| from authentik.core.api.providers import ProviderViewSet | ||||
| from authentik.core.api.sources import ( | ||||
|     GroupSourceConnectionViewSet, | ||||
|     SourceViewSet, | ||||
|     UserSourceConnectionViewSet, | ||||
| ) | ||||
| from authentik.core.api.sources import SourceViewSet, UserSourceConnectionViewSet | ||||
| from authentik.core.api.tokens import TokenViewSet | ||||
| from authentik.core.api.transactional_applications import TransactionalApplicationView | ||||
| from authentik.core.api.users import UserViewSet | ||||
| @ -27,7 +25,7 @@ from authentik.core.views.interface import ( | ||||
|     RootRedirectView, | ||||
| ) | ||||
| from authentik.flows.views.interface import FlowInterfaceView | ||||
| from authentik.root.asgi_middleware import AuthMiddlewareStack | ||||
| from authentik.root.asgi_middleware import SessionMiddleware | ||||
| from authentik.root.messages.consumer import MessageConsumer | ||||
| from authentik.root.middleware import ChannelsLoggingMiddleware | ||||
|  | ||||
| @ -83,7 +81,6 @@ api_urlpatterns = [ | ||||
|     ("core/tokens", TokenViewSet), | ||||
|     ("sources/all", SourceViewSet), | ||||
|     ("sources/user_connections/all", UserSourceConnectionViewSet), | ||||
|     ("sources/group_connections/all", GroupSourceConnectionViewSet), | ||||
|     ("providers/all", ProviderViewSet), | ||||
|     ("propertymappings/all", PropertyMappingViewSet), | ||||
|     ("authenticators/all", DeviceViewSet, "device"), | ||||
| @ -97,7 +94,9 @@ api_urlpatterns = [ | ||||
| websocket_urlpatterns = [ | ||||
|     path( | ||||
|         "ws/client/", | ||||
|         ChannelsLoggingMiddleware(AuthMiddlewareStack(MessageConsumer.as_asgi())), | ||||
|         ChannelsLoggingMiddleware( | ||||
|             CookieMiddleware(SessionMiddleware(AuthMiddleware(MessageConsumer.as_asgi()))) | ||||
|         ), | ||||
|     ), | ||||
| ] | ||||
|  | ||||
|  | ||||
| @ -55,7 +55,7 @@ class RedirectToAppLaunch(View): | ||||
|             ) | ||||
|         except FlowNonApplicableException: | ||||
|             raise Http404 from None | ||||
|         plan.append_stage(in_memory_stage(RedirectToAppStage)) | ||||
|         plan.insert_stage(in_memory_stage(RedirectToAppStage)) | ||||
|         return plan.to_redirect(request, flow) | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -1,27 +0,0 @@ | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
|  | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.enterprise.api import EnterpriseRequiredMixin | ||||
| from authentik.enterprise.policies.unique_password.models import UniquePasswordPolicy | ||||
| from authentik.policies.api.policies import PolicySerializer | ||||
|  | ||||
|  | ||||
| class UniquePasswordPolicySerializer(EnterpriseRequiredMixin, PolicySerializer): | ||||
|     """Password Uniqueness Policy Serializer""" | ||||
|  | ||||
|     class Meta: | ||||
|         model = UniquePasswordPolicy | ||||
|         fields = PolicySerializer.Meta.fields + [ | ||||
|             "password_field", | ||||
|             "num_historical_passwords", | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class UniquePasswordPolicyViewSet(UsedByMixin, ModelViewSet): | ||||
|     """Password Uniqueness Policy Viewset""" | ||||
|  | ||||
|     queryset = UniquePasswordPolicy.objects.all() | ||||
|     serializer_class = UniquePasswordPolicySerializer | ||||
|     filterset_fields = "__all__" | ||||
|     ordering = ["name"] | ||||
|     search_fields = ["name"] | ||||
| @ -1,10 +0,0 @@ | ||||
| """authentik Unique Password policy app config""" | ||||
|  | ||||
| from authentik.enterprise.apps import EnterpriseConfig | ||||
|  | ||||
|  | ||||
| class AuthentikEnterprisePoliciesUniquePasswordConfig(EnterpriseConfig): | ||||
|     name = "authentik.enterprise.policies.unique_password" | ||||
|     label = "authentik_policies_unique_password" | ||||
|     verbose_name = "authentik Enterprise.Policies.Unique Password" | ||||
|     default = True | ||||
| @ -1,81 +0,0 @@ | ||||
| # Generated by Django 5.0.13 on 2025-03-26 23:02 | ||||
|  | ||||
| import django.db.models.deletion | ||||
| from django.conf import settings | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     initial = True | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_policies", "0011_policybinding_failure_result_and_more"), | ||||
|         migrations.swappable_dependency(settings.AUTH_USER_MODEL), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.CreateModel( | ||||
|             name="UniquePasswordPolicy", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "policy_ptr", | ||||
|                     models.OneToOneField( | ||||
|                         auto_created=True, | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         parent_link=True, | ||||
|                         primary_key=True, | ||||
|                         serialize=False, | ||||
|                         to="authentik_policies.policy", | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "password_field", | ||||
|                     models.TextField( | ||||
|                         default="password", | ||||
|                         help_text="Field key to check, field keys defined in Prompt stages are available.", | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "num_historical_passwords", | ||||
|                     models.PositiveIntegerField( | ||||
|                         default=1, help_text="Number of passwords to check against." | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|             options={ | ||||
|                 "verbose_name": "Password Uniqueness Policy", | ||||
|                 "verbose_name_plural": "Password Uniqueness Policies", | ||||
|                 "indexes": [ | ||||
|                     models.Index(fields=["policy_ptr_id"], name="authentik_p_policy__f559aa_idx") | ||||
|                 ], | ||||
|             }, | ||||
|             bases=("authentik_policies.policy",), | ||||
|         ), | ||||
|         migrations.CreateModel( | ||||
|             name="UserPasswordHistory", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         auto_created=True, primary_key=True, serialize=False, verbose_name="ID" | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("old_password", models.CharField(max_length=128)), | ||||
|                 ("created_at", models.DateTimeField(auto_now_add=True)), | ||||
|                 ("hibp_prefix_sha1", models.CharField(max_length=5)), | ||||
|                 ("hibp_pw_hash", models.TextField()), | ||||
|                 ( | ||||
|                     "user", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         related_name="old_passwords", | ||||
|                         to=settings.AUTH_USER_MODEL, | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|             options={ | ||||
|                 "verbose_name": "User Password History", | ||||
|             }, | ||||
|         ), | ||||
|     ] | ||||
| @ -1,151 +0,0 @@ | ||||
| from hashlib import sha1 | ||||
|  | ||||
| from django.contrib.auth.hashers import identify_hasher, make_password | ||||
| from django.db import models | ||||
| from django.utils.translation import gettext as _ | ||||
| from rest_framework.serializers import BaseSerializer | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.core.models import User | ||||
| from authentik.policies.models import Policy | ||||
| from authentik.policies.types import PolicyRequest, PolicyResult | ||||
| from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| class UniquePasswordPolicy(Policy): | ||||
|     """This policy prevents users from reusing old passwords.""" | ||||
|  | ||||
|     password_field = models.TextField( | ||||
|         default="password", | ||||
|         help_text=_("Field key to check, field keys defined in Prompt stages are available."), | ||||
|     ) | ||||
|  | ||||
|     # Limit on the number of previous passwords the policy evaluates | ||||
|     # Also controls number of old passwords the system stores. | ||||
|     num_historical_passwords = models.PositiveIntegerField( | ||||
|         default=1, | ||||
|         help_text=_("Number of passwords to check against."), | ||||
|     ) | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> type[BaseSerializer]: | ||||
|         from authentik.enterprise.policies.unique_password.api import UniquePasswordPolicySerializer | ||||
|  | ||||
|         return UniquePasswordPolicySerializer | ||||
|  | ||||
|     @property | ||||
|     def component(self) -> str: | ||||
|         return "ak-policy-password-uniqueness-form" | ||||
|  | ||||
    def passes(self, request: PolicyRequest) -> PolicyResult:
        """Evaluate whether the candidate password differs from the user's history.

        Fails when the password is missing from the request context, or when it
        matches any of the ``num_historical_passwords`` most recent entries in
        ``UserPasswordHistory`` for the requesting user; passes otherwise.
        """
        from authentik.enterprise.policies.unique_password.models import UserPasswordHistory

        # Prefer the prompt data dict; fall back to a top-level context key
        # under the same configured field name.
        password = request.context.get(PLAN_CONTEXT_PROMPT, {}).get(
            self.password_field, request.context.get(self.password_field)
        )
        if not password:
            LOGGER.warning(
                "Password field not found in request when checking UniquePasswordPolicy",
                field=self.password_field,
                fields=request.context.keys(),
            )
            return PolicyResult(False, _("Password not set in context"))
        password = str(password)

        if not self.num_historical_passwords:
            # Policy not configured to check against any passwords
            return PolicyResult(True)

        # Only the N most recent history rows are considered
        num_to_check = self.num_historical_passwords
        password_history = UserPasswordHistory.objects.filter(user=request.user).order_by(
            "-created_at"
        )[:num_to_check]

        if not password_history:
            return PolicyResult(True)

        for record in password_history:
            # Skip rows without a stored hash
            if not record.old_password:
                continue

            if self._passwords_match(new_password=password, old_password=record.old_password):
                # Return on first match. Authentik does not consider timing attacks
                # on old passwords to be an attack surface.
                return PolicyResult(
                    False,
                    _("This password has been used previously. Please choose a different one."),
                )

        return PolicyResult(True)
|  | ||||
    def _passwords_match(self, *, new_password: str, old_password: str) -> bool:
        """Return True when *new_password* matches the hashed *old_password*.

        Stored hashes whose algorithm cannot be identified (e.g. corrupt or
        legacy rows) are treated as non-matching instead of raising.
        """
        try:
            hasher = identify_hasher(old_password)
        except ValueError:
            LOGGER.warning(
                "Skipping password; could not load hash algorithm",
            )
            return False

        return hasher.verify(new_password, old_password)
|  | ||||
|     @classmethod | ||||
|     def is_in_use(cls): | ||||
|         """Check if any UniquePasswordPolicy is in use, either through policy bindings | ||||
|         or direct attachment to a PromptStage. | ||||
|  | ||||
|         Returns: | ||||
|             bool: True if any policy is in use, False otherwise | ||||
|         """ | ||||
|         from authentik.policies.models import PolicyBinding | ||||
|  | ||||
|         # Check if any policy is in use through bindings | ||||
|         if PolicyBinding.in_use.for_policy(cls).exists(): | ||||
|             return True | ||||
|  | ||||
|         # Check if any policy is attached to a PromptStage | ||||
|         if cls.objects.filter(promptstage__isnull=False).exists(): | ||||
|             return True | ||||
|  | ||||
|         return False | ||||
|  | ||||
    class Meta(Policy.PolicyMeta):
        # Human-readable names shown in admin UI and API schema
        verbose_name = _("Password Uniqueness Policy")
        verbose_name_plural = _("Password Uniqueness Policies")
|  | ||||
|  | ||||
class UserPasswordHistory(models.Model):
    """A single historical password hash for a user.

    Rows are created when a user's password changes while a
    UniquePasswordPolicy is in use, and are trimmed/purged by scheduled tasks.
    """

    user = models.ForeignKey(User, on_delete=models.CASCADE, related_name="old_passwords")
    # Mimics column type of AbstractBaseUser.password
    old_password = models.CharField(max_length=128)
    created_at = models.DateTimeField(auto_now_add=True)

    # First 5 hex chars of the unsalted SHA1 of the plaintext password,
    # used as the k-anonymity prefix for Have I Been Pwned range queries.
    hibp_prefix_sha1 = models.CharField(max_length=5)
    # make_password()-hashed full SHA1 of the plaintext password; compared
    # against HIBP results via check_password (see create_for_user).
    hibp_pw_hash = models.TextField()

    class Meta:
        verbose_name = _("User Password History")

    def __str__(self) -> str:
        # created_at can be None on unsaved instances (auto_now_add not yet applied)
        timestamp = f"{self.created_at:%Y/%m/%d %X}" if self.created_at else "N/A"
        return f"Previous Password (user: {self.user_id}, recorded: {timestamp})"

    @classmethod
    def create_for_user(cls, user: User, password: str):
        """Persist a history row for *user*; *password* is the hashed password string."""
        # To check users' passwords against Have I been Pwned, we need the first 5 chars
        # of the password hashed with SHA1 without a salt...
        pw_hash_sha1 = sha1(password.encode("utf-8")).hexdigest()  # nosec
        # ...however that'll give us a list of hashes from HIBP, and to compare that we still
        # need a full unsalted SHA1 of the password. We don't want to save that directly in
        # the database, so we hash that SHA1 again with a modern hashing alg,
        # and then when we check users' passwords against HIBP we can use `check_password`
        # which will take care of this.
        hibp_hash_hash = make_password(pw_hash_sha1)
        return cls.objects.create(
            user=user,
            old_password=password,
            hibp_prefix_sha1=pw_hash_sha1[:5],
            hibp_pw_hash=hibp_hash_hash,
        )
| @ -1,20 +0,0 @@ | ||||
"""Unique Password Policy settings"""

from celery.schedules import crontab

from authentik.lib.utils.time import fqdn_rand

# Scheduled maintenance for the password-history table:
# - trim_password_histories: every 12 hours, drops per-user rows beyond the
#   largest configured history limit.
# - check_and_purge_password_history: every 24 hours, empties the table
#   entirely when no UniquePasswordPolicy exists anymore.
# fqdn_rand spreads the minute offset per host to avoid thundering herds.
CELERY_BEAT_SCHEDULE = {
    "policies_unique_password_trim_history": {
        "task": "authentik.enterprise.policies.unique_password.tasks.trim_password_histories",
        "schedule": crontab(minute=fqdn_rand("policies_unique_password_trim"), hour="*/12"),
        "options": {"queue": "authentik_scheduled"},
    },
    "policies_unique_password_check_purge": {
        "task": (
            "authentik.enterprise.policies.unique_password.tasks.check_and_purge_password_history"
        ),
        "schedule": crontab(minute=fqdn_rand("policies_unique_password_purge"), hour="*/24"),
        "options": {"queue": "authentik_scheduled"},
    },
}
| @ -1,23 +0,0 @@ | ||||
| """authentik policy signals""" | ||||
|  | ||||
| from django.dispatch import receiver | ||||
|  | ||||
| from authentik.core.models import User | ||||
| from authentik.core.signals import password_changed | ||||
| from authentik.enterprise.policies.unique_password.models import ( | ||||
|     UniquePasswordPolicy, | ||||
|     UserPasswordHistory, | ||||
| ) | ||||
|  | ||||
|  | ||||
@receiver(password_changed)
def copy_password_to_password_history(sender, user: User, *args, **kwargs):
    """Preserve the user's old password if UniquePasswordPolicy is enabled anywhere"""
    # Skip the write entirely when no policy anywhere needs history.
    if not UniquePasswordPolicy.is_in_use():
        return
    # NOTE: Because we run this in a signal after saving the user,
    # we are not atomically guaranteed to save password history.
    UserPasswordHistory.create_for_user(user, user.password)
| @ -1,66 +0,0 @@ | ||||
| from django.db.models.aggregates import Count | ||||
| from structlog import get_logger | ||||
|  | ||||
| from authentik.enterprise.policies.unique_password.models import ( | ||||
|     UniquePasswordPolicy, | ||||
|     UserPasswordHistory, | ||||
| ) | ||||
| from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task | ||||
| from authentik.root.celery import CELERY_APP | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
@CELERY_APP.task(bind=True, base=SystemTask)
@prefill_task
def check_and_purge_password_history(self: SystemTask):
    """Check if any UniquePasswordPolicy exists, and if not, purge the password history table.
    This is run on a schedule instead of being triggered by policy binding deletion.
    """
    # Any existing policy means history must be kept — nothing to do.
    if UniquePasswordPolicy.objects.exists():
        self.set_status(
            TaskStatus.SUCCESSFUL, "Not purging password histories, a unique password policy exists"
        )
        return

    UserPasswordHistory.objects.all().delete()
    LOGGER.debug("Purged UserPasswordHistory table as no policies are in use")
    self.set_status(TaskStatus.SUCCESSFUL, "Successfully purged UserPasswordHistory")
|  | ||||
|  | ||||
@CELERY_APP.task(bind=True, base=SystemTask)
def trim_password_histories(self: SystemTask):
    """Removes rows from UserPasswordHistory older than
    the `n` most recent entries.

    The `n` is defined by the largest configured value for all bound
    UniquePasswordPolicy policies.
    """

    # No policy: the scheduled purge task will clear the table instead.
    if not UniquePasswordPolicy.objects.exists():
        return

    # Keep as many rows per user as the most permissive policy requires.
    num_rows_to_preserve = max(
        (policy.num_historical_passwords for policy in UniquePasswordPolicy.objects.all()),
        default=0,
    )

    all_pks_to_keep = []

    # Distinct users that currently have password history entries.
    # (Previously expressed as values().annotate(Count).filter(count__gt=0),
    # which is equivalent since every present user has at least one row.)
    users_with_history = UserPasswordHistory.objects.values_list("user", flat=True).distinct()
    for user_pk in users_with_history:
        entries = UserPasswordHistory.objects.filter(user__pk=user_pk)
        pks_to_keep = entries.order_by("-created_at")[:num_rows_to_preserve].values_list(
            "pk", flat=True
        )
        all_pks_to_keep.extend(pks_to_keep)

    num_deleted, _ = UserPasswordHistory.objects.exclude(pk__in=all_pks_to_keep).delete()
    LOGGER.debug("Deleted stale password history records", count=num_deleted)
    # Status message grammar fixed ("Delete" -> "Deleted")
    self.set_status(TaskStatus.SUCCESSFUL, f"Deleted {num_deleted} stale password history records")
| @ -1,108 +0,0 @@ | ||||
| """Unique Password Policy flow tests""" | ||||
|  | ||||
| from django.contrib.auth.hashers import make_password | ||||
| from django.urls.base import reverse | ||||
|  | ||||
| from authentik.core.tests.utils import create_test_flow, create_test_user | ||||
| from authentik.enterprise.policies.unique_password.models import ( | ||||
|     UniquePasswordPolicy, | ||||
|     UserPasswordHistory, | ||||
| ) | ||||
| from authentik.flows.models import FlowDesignation, FlowStageBinding | ||||
| from authentik.flows.tests import FlowTestCase | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.stages.prompt.models import FieldTypes, Prompt, PromptStage | ||||
|  | ||||
|  | ||||
class TestUniquePasswordPolicyFlow(FlowTestCase):
    """Test Unique Password Policy in a flow"""

    # Plaintext password seeded into the user's history; reusing it must fail
    REUSED_PASSWORD = "hunter1"  # nosec B105

    def setUp(self) -> None:
        """Build an authentication flow whose prompt stage password field is
        validated by a UniquePasswordPolicy with history depth 1."""
        self.user = create_test_user()
        self.flow = create_test_flow(FlowDesignation.AUTHENTICATION)

        password_prompt = Prompt.objects.create(
            name=generate_id(),
            field_key="password",
            label="PASSWORD_LABEL",
            type=FieldTypes.PASSWORD,
            required=True,
            placeholder="PASSWORD_PLACEHOLDER",
        )

        self.policy = UniquePasswordPolicy.objects.create(
            name="password_must_unique",
            password_field=password_prompt.field_key,
            num_historical_passwords=1,
        )
        stage = PromptStage.objects.create(name="prompt-stage")
        stage.validation_policies.set([self.policy])
        stage.fields.set(
            [
                password_prompt,
            ]
        )
        FlowStageBinding.objects.create(target=self.flow, stage=stage, order=2)

        # Seed the user's password history with the hashed reused password
        UserPasswordHistory.create_for_user(self.user, make_password(self.REUSED_PASSWORD))

    def test_prompt_data(self):
        """Test policy attached to a prompt stage"""
        # Test the policy directly
        from authentik.policies.types import PolicyRequest
        from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT

        # Create a policy request with the reused password
        request = PolicyRequest(user=self.user)
        request.context[PLAN_CONTEXT_PROMPT] = {"password": self.REUSED_PASSWORD}

        # Test the policy directly
        result = self.policy.passes(request)

        # Verify that the policy fails (returns False) with the expected error message
        self.assertFalse(result.passing, "Policy should fail for reused password")
        self.assertEqual(
            result.messages[0],
            "This password has been used previously. Please choose a different one.",
            "Incorrect error message",
        )

        # API-based testing approach: submit the same password through the
        # flow executor and expect the prompt stage re-rendered with an error.

        self.client.force_login(self.user)

        # Send a POST request to the flow executor with the reused password
        response = self.client.post(
            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
            {"password": self.REUSED_PASSWORD},
        )
        self.assertStageResponse(
            response,
            self.flow,
            component="ak-stage-prompt",
            fields=[
                {
                    "choices": None,
                    "field_key": "password",
                    "label": "PASSWORD_LABEL",
                    "order": 0,
                    "placeholder": "PASSWORD_PLACEHOLDER",
                    "initial_value": "",
                    "required": True,
                    "type": "password",
                    "sub_text": "",
                }
            ],
            response_errors={
                "non_field_errors": [
                    {
                        "code": "invalid",
                        "string": "This password has been used previously. "
                        "Please choose a different one.",
                    }
                ]
            },
        )
| @ -1,77 +0,0 @@ | ||||
| """Unique Password Policy tests""" | ||||
|  | ||||
| from django.contrib.auth.hashers import make_password | ||||
| from django.test import TestCase | ||||
| from guardian.shortcuts import get_anonymous_user | ||||
|  | ||||
| from authentik.core.models import User | ||||
| from authentik.enterprise.policies.unique_password.models import ( | ||||
|     UniquePasswordPolicy, | ||||
|     UserPasswordHistory, | ||||
| ) | ||||
| from authentik.policies.types import PolicyRequest, PolicyResult | ||||
| from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT | ||||
|  | ||||
|  | ||||
class TestUniquePasswordPolicy(TestCase):
    """Test Password Uniqueness Policy"""

    def setUp(self) -> None:
        # Policy that checks only the single most recent historical password
        self.policy = UniquePasswordPolicy.objects.create(
            name="test_unique_password", num_historical_passwords=1
        )
        self.user = User.objects.create(username="test-user")

    def test_invalid(self):
        """Test without password present in request"""
        request = PolicyRequest(get_anonymous_user())
        result: PolicyResult = self.policy.passes(request)
        self.assertFalse(result.passing)
        self.assertEqual(result.messages[0], "Password not set in context")

    def test_passes_no_previous_passwords(self):
        """Policy passes when the user has no password history at all"""
        request = PolicyRequest(get_anonymous_user())
        request.context = {PLAN_CONTEXT_PROMPT: {"password": "hunter2"}}
        result: PolicyResult = self.policy.passes(request)
        self.assertTrue(result.passing)

    def test_passes_passwords_are_different(self):
        """Policy passes when the new password differs from the stored one"""
        # Seed database with an old password
        UserPasswordHistory.create_for_user(self.user, make_password("hunter1"))

        request = PolicyRequest(self.user)
        request.context = {PLAN_CONTEXT_PROMPT: {"password": "hunter2"}}
        result: PolicyResult = self.policy.passes(request)
        self.assertTrue(result.passing)

    def test_passes_multiple_old_passwords(self):
        """With num_historical_passwords=1 only the most recent history row is
        checked, so the new password only needs to differ from that one"""
        # Seed with multiple old passwords
        UserPasswordHistory.objects.bulk_create(
            [
                UserPasswordHistory(user=self.user, old_password=make_password("hunter1")),
                UserPasswordHistory(user=self.user, old_password=make_password("hunter2")),
            ]
        )
        request = PolicyRequest(self.user)
        request.context = {PLAN_CONTEXT_PROMPT: {"password": "hunter3"}}
        result: PolicyResult = self.policy.passes(request)
        self.assertTrue(result.passing)

    def test_fails_password_matches_old_password(self):
        """Policy fails when the new password equals a stored one"""
        # Seed database with an old password

        UserPasswordHistory.create_for_user(self.user, make_password("hunter1"))

        request = PolicyRequest(self.user)
        request.context = {PLAN_CONTEXT_PROMPT: {"password": "hunter1"}}
        result: PolicyResult = self.policy.passes(request)
        self.assertFalse(result.passing)

    def test_fails_if_identical_password_with_different_hash_algos(self):
        """Matching is hash-algorithm agnostic: a reused password is rejected
        even when the stored hash was created with a different hasher (scrypt)"""
        UserPasswordHistory.create_for_user(
            self.user, make_password("hunter2", "somesalt", "scrypt")
        )
        request = PolicyRequest(self.user)
        request.context = {PLAN_CONTEXT_PROMPT: {"password": "hunter2"}}
        result: PolicyResult = self.policy.passes(request)
        self.assertFalse(result.passing)
| @ -1,90 +0,0 @@ | ||||
| from django.urls import reverse | ||||
|  | ||||
| from authentik.core.models import Group, Source, User | ||||
| from authentik.core.tests.utils import create_test_flow, create_test_user | ||||
| from authentik.enterprise.policies.unique_password.models import ( | ||||
|     UniquePasswordPolicy, | ||||
|     UserPasswordHistory, | ||||
| ) | ||||
| from authentik.flows.markers import StageMarker | ||||
| from authentik.flows.models import FlowStageBinding | ||||
| from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlan | ||||
| from authentik.flows.tests import FlowTestCase | ||||
| from authentik.flows.views.executor import SESSION_KEY_PLAN | ||||
| from authentik.lib.generators import generate_key | ||||
| from authentik.policies.models import PolicyBinding, PolicyBindingModel | ||||
| from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT | ||||
| from authentik.stages.user_write.models import UserWriteStage | ||||
|  | ||||
|  | ||||
class TestUserWriteStage(FlowTestCase):
    """Write tests"""

    def setUp(self):
        super().setUp()
        self.flow = create_test_flow()
        self.group = Group.objects.create(name="test-group")
        self.other_group = Group.objects.create(name="other-group")
        self.stage: UserWriteStage = UserWriteStage.objects.create(
            name="write", create_users_as_inactive=True, create_users_group=self.group
        )
        self.binding = FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=2)
        self.source = Source.objects.create(name="fake_source")

    def test_save_password_history_if_policy_binding_enforced(self):
        """Test user's new password is recorded when ANY enabled UniquePasswordPolicy exists"""
        unique_password_policy = UniquePasswordPolicy.objects.create(num_historical_passwords=5)
        pbm = PolicyBindingModel.objects.create()
        PolicyBinding.objects.create(
            target=pbm, policy=unique_password_policy, order=0, enabled=True
        )

        test_user = create_test_user()
        # Store original password for verification
        original_password = test_user.password

        # We're changing our own password
        self.client.force_login(test_user)

        new_password = generate_key()
        plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
        plan.context[PLAN_CONTEXT_PENDING_USER] = test_user
        plan.context[PLAN_CONTEXT_PROMPT] = {
            "username": test_user.username,
            "password": new_password,
        }
        session = self.client.session
        session[SESSION_KEY_PLAN] = plan
        session.save()
        # Password history should be recorded
        # NOTE(review): this first history row presumably originates from the
        # password_changed signal fired while creating test_user — confirm.
        user_password_history_qs = UserPasswordHistory.objects.filter(user=test_user)
        self.assertTrue(user_password_history_qs.exists(), "Password history should be recorded")
        self.assertEqual(len(user_password_history_qs), 1, "expected 1 recorded password")

        # Create a password history entry manually to simulate the signal behavior
        # This is what would happen if the signal worked correctly
        UserPasswordHistory.objects.create(user=test_user, old_password=original_password)
        user_password_history_qs = UserPasswordHistory.objects.filter(user=test_user)
        self.assertTrue(user_password_history_qs.exists(), "Password history should be recorded")
        self.assertEqual(len(user_password_history_qs), 2, "expected 2 recorded password")

        # Execute the flow by sending a POST request to the flow executor endpoint
        response = self.client.post(
            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug})
        )

        # Verify that the request was successful
        self.assertEqual(response.status_code, 200)
        user_qs = User.objects.filter(username=plan.context[PLAN_CONTEXT_PROMPT]["username"])
        self.assertTrue(user_qs.exists())

        # Verify the password history entry exists
        user_password_history_qs = UserPasswordHistory.objects.filter(user=test_user)
        self.assertTrue(user_password_history_qs.exists(), "Password history should be recorded")

        self.assertEqual(len(user_password_history_qs), 3, "expected 3 recorded password")
        # Verify that one of the entries contains the original password
        self.assertTrue(
            any(entry.old_password == original_password for entry in user_password_history_qs),
            "original password should be in password history table",
        )
| @ -1,178 +0,0 @@ | ||||
| from datetime import datetime, timedelta | ||||
|  | ||||
| from django.test import TestCase | ||||
|  | ||||
| from authentik.core.tests.utils import create_test_user | ||||
| from authentik.enterprise.policies.unique_password.models import ( | ||||
|     UniquePasswordPolicy, | ||||
|     UserPasswordHistory, | ||||
| ) | ||||
| from authentik.enterprise.policies.unique_password.tasks import ( | ||||
|     check_and_purge_password_history, | ||||
|     trim_password_histories, | ||||
| ) | ||||
| from authentik.policies.models import PolicyBinding, PolicyBindingModel | ||||
|  | ||||
|  | ||||
class TestUniquePasswordPolicyModel(TestCase):
    """Test the UniquePasswordPolicy model methods"""

    def test_is_in_use_with_binding(self):
        """Test is_in_use returns True when a policy binding exists"""
        # Create a UniquePasswordPolicy and a PolicyBinding for it
        policy = UniquePasswordPolicy.objects.create(num_historical_passwords=5)
        pbm = PolicyBindingModel.objects.create()
        PolicyBinding.objects.create(target=pbm, policy=policy, order=0, enabled=True)

        # Verify is_in_use returns True
        self.assertTrue(UniquePasswordPolicy.is_in_use())

    def test_is_in_use_with_promptstage(self):
        """Test is_in_use returns True when attached to a PromptStage"""
        from authentik.stages.prompt.models import PromptStage

        # Create a UniquePasswordPolicy and attach it to a PromptStage
        policy = UniquePasswordPolicy.objects.create(num_historical_passwords=5)
        prompt_stage = PromptStage.objects.create(
            name="Test Prompt Stage",
        )
        # Use the set() method for many-to-many relationships
        prompt_stage.validation_policies.set([policy])

        # Verify is_in_use returns True
        self.assertTrue(UniquePasswordPolicy.is_in_use())
|  | ||||
|  | ||||
class TestTrimAllPasswordHistories(TestCase):
    """Test the task that trims password history for all users"""

    # NOTE(review): this class defines only setUp and no test_* methods, so it
    # currently exercises nothing — confirm whether tests were lost or moved.
    def setUp(self):
        self.user1 = create_test_user("test-user1")
        self.user2 = create_test_user("test-user2")
        self.pbm = PolicyBindingModel.objects.create()
        # Create a policy with a limit of 1 password
        self.policy = UniquePasswordPolicy.objects.create(num_historical_passwords=1)
        PolicyBinding.objects.create(
            target=self.pbm,
            policy=self.policy,
            enabled=True,
            order=0,
        )
|  | ||||
|  | ||||
class TestCheckAndPurgePasswordHistory(TestCase):
    """Test the scheduled task that checks if any policy is in use and purges if not"""

    def setUp(self):
        self.user = create_test_user("test-user")
        self.pbm = PolicyBindingModel.objects.create()

    def test_purge_when_no_policy_in_use(self):
        """Test that the task purges the table when no policy is in use"""
        # Create some password history entries
        UserPasswordHistory.create_for_user(self.user, "hunter2")

        # Verify we have entries
        self.assertTrue(UserPasswordHistory.objects.exists())

        # Run the task - should purge since no policy is in use
        check_and_purge_password_history()

        # Verify the table is empty
        self.assertFalse(UserPasswordHistory.objects.exists())

    def test_no_purge_when_policy_in_use(self):
        """Test that the task doesn't purge when a policy is in use"""
        # Create a policy and binding
        policy = UniquePasswordPolicy.objects.create(num_historical_passwords=5)
        PolicyBinding.objects.create(
            target=self.pbm,
            policy=policy,
            enabled=True,
            order=0,
        )

        # Create some password history entries
        UserPasswordHistory.create_for_user(self.user, "hunter2")

        # Verify we have entries
        self.assertTrue(UserPasswordHistory.objects.exists())

        # Run the task - should NOT purge since a policy is in use
        check_and_purge_password_history()

        # Verify the entries still exist
        self.assertTrue(UserPasswordHistory.objects.exists())
|  | ||||
|  | ||||
class TestTrimPasswordHistory(TestCase):
    """Test password history cleanup task"""

    def setUp(self):
        self.user = create_test_user("test-user")
        self.pbm = PolicyBindingModel.objects.create()

    def test_trim_password_history_ok(self):
        """Test passwords over the defined limit are deleted"""
        _now = datetime.now()
        # NOTE(review): created_at has auto_now_add=True, which typically
        # overrides explicit values on insert — confirm these timestamps
        # actually land in the DB as written.
        UserPasswordHistory.objects.bulk_create(
            [
                UserPasswordHistory(
                    user=self.user,
                    old_password="hunter1",  # nosec B106
                    created_at=_now - timedelta(days=3),
                ),
                UserPasswordHistory(
                    user=self.user,
                    old_password="hunter2",  # nosec B106
                    created_at=_now - timedelta(days=2),
                ),
                UserPasswordHistory(
                    user=self.user,
                    old_password="hunter3",  # nosec B106
                    created_at=_now,
                ),
            ]
        )

        policy = UniquePasswordPolicy.objects.create(num_historical_passwords=1)
        PolicyBinding.objects.create(
            target=self.pbm,
            policy=policy,
            enabled=True,
            order=0,
        )
        trim_password_histories.delay()
        user_pwd_history_qs = UserPasswordHistory.objects.filter(user=self.user)
        self.assertEqual(len(user_pwd_history_qs), 1)

    # NOTE(review): "diabled" typo kept in the method name to avoid changing
    # test IDs referenced elsewhere (CI selections, flaky-test trackers).
    def test_trim_password_history_policy_diabled_no_op(self):
        """Test no passwords removed if policy binding is disabled"""

        # Insert a record to ensure it's not deleted after executing task
        UserPasswordHistory.create_for_user(self.user, "hunter2")

        policy = UniquePasswordPolicy.objects.create(num_historical_passwords=1)
        PolicyBinding.objects.create(
            target=self.pbm,
            policy=policy,
            enabled=False,
            order=0,
        )
        trim_password_histories.delay()
        self.assertTrue(UserPasswordHistory.objects.filter(user=self.user).exists())

    def test_trim_password_history_fewer_records_than_maximum_is_no_op(self):
        """Test no passwords deleted if fewer passwords exist than limit"""

        UserPasswordHistory.create_for_user(self.user, "hunter2")

        policy = UniquePasswordPolicy.objects.create(num_historical_passwords=2)
        PolicyBinding.objects.create(
            target=self.pbm,
            policy=policy,
            enabled=True,
            order=0,
        )
        trim_password_histories.delay()
        self.assertTrue(UserPasswordHistory.objects.filter(user=self.user).exists())
| @ -1,7 +0,0 @@ | ||||
"""API URLs"""

from authentik.enterprise.policies.unique_password.api import UniquePasswordPolicyViewSet

# (prefix, viewset) pairs picked up by authentik's API router
api_urlpatterns = [
    ("policies/unique_password", UniquePasswordPolicyViewSet),
]
| @ -37,7 +37,6 @@ class GoogleWorkspaceProviderSerializer(EnterpriseRequiredMixin, ProviderSeriali | ||||
|             "user_delete_action", | ||||
|             "group_delete_action", | ||||
|             "default_group_email_domain", | ||||
|             "dry_run", | ||||
|         ] | ||||
|         extra_kwargs = {} | ||||
|  | ||||
|  | ||||
| @ -8,10 +8,9 @@ from httplib2 import HttpLib2Error, HttpLib2ErrorWithResponse | ||||
|  | ||||
| from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider | ||||
| from authentik.lib.sync.outgoing import HTTP_CONFLICT | ||||
| from authentik.lib.sync.outgoing.base import SAFE_METHODS, BaseOutgoingSyncClient | ||||
| from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient | ||||
| from authentik.lib.sync.outgoing.exceptions import ( | ||||
|     BadRequestSyncException, | ||||
|     DryRunRejected, | ||||
|     NotFoundSyncException, | ||||
|     ObjectExistsSyncException, | ||||
|     StopSync, | ||||
| @ -44,8 +43,6 @@ class GoogleWorkspaceSyncClient[TModel: Model, TConnection: Model, TSchema: dict | ||||
|             self.domains.append(domain_name) | ||||
|  | ||||
|     def _request(self, request: HttpRequest): | ||||
|         if self.provider.dry_run and request.method.upper() not in SAFE_METHODS: | ||||
|             raise DryRunRejected(request.uri, request.method, request.body) | ||||
|         try: | ||||
|             response = request.execute() | ||||
|         except GoogleAuthError as exc: | ||||
|  | ||||
| @ -1,24 +0,0 @@ | ||||
| # Generated by Django 5.0.12 on 2025-02-24 19:43 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ( | ||||
|             "authentik_providers_google_workspace", | ||||
|             "0003_googleworkspaceprovidergroup_attributes_and_more", | ||||
|         ), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="googleworkspaceprovider", | ||||
|             name="dry_run", | ||||
|             field=models.BooleanField( | ||||
|                 default=False, | ||||
|                 help_text="When enabled, provider will not modify or create objects in the remote system.", | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @ -36,7 +36,6 @@ class MicrosoftEntraProviderSerializer(EnterpriseRequiredMixin, ProviderSerializ | ||||
|             "filter_group", | ||||
|             "user_delete_action", | ||||
|             "group_delete_action", | ||||
|             "dry_run", | ||||
|         ] | ||||
|         extra_kwargs = {} | ||||
|  | ||||
|  | ||||
| @ -3,7 +3,6 @@ from collections.abc import Coroutine | ||||
| from dataclasses import asdict | ||||
| from typing import Any | ||||
|  | ||||
| import httpx | ||||
| from azure.core.exceptions import ( | ||||
|     ClientAuthenticationError, | ||||
|     ServiceRequestError, | ||||
| @ -13,7 +12,6 @@ from azure.identity.aio import ClientSecretCredential | ||||
| from django.db.models import Model | ||||
| from django.http import HttpResponseBadRequest, HttpResponseNotFound | ||||
| from kiota_abstractions.api_error import APIError | ||||
| from kiota_abstractions.request_information import RequestInformation | ||||
| from kiota_authentication_azure.azure_identity_authentication_provider import ( | ||||
|     AzureIdentityAuthenticationProvider, | ||||
| ) | ||||
| @ -23,15 +21,13 @@ from msgraph.generated.models.o_data_errors.o_data_error import ODataError | ||||
| from msgraph.graph_request_adapter import GraphRequestAdapter, options | ||||
| from msgraph.graph_service_client import GraphServiceClient | ||||
| from msgraph_core import GraphClientFactory | ||||
| from opentelemetry import trace | ||||
|  | ||||
| from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider | ||||
| from authentik.events.utils import sanitize_item | ||||
| from authentik.lib.sync.outgoing import HTTP_CONFLICT | ||||
| from authentik.lib.sync.outgoing.base import SAFE_METHODS, BaseOutgoingSyncClient | ||||
| from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient | ||||
| from authentik.lib.sync.outgoing.exceptions import ( | ||||
|     BadRequestSyncException, | ||||
|     DryRunRejected, | ||||
|     NotFoundSyncException, | ||||
|     ObjectExistsSyncException, | ||||
|     StopSync, | ||||
| @ -39,24 +35,20 @@ from authentik.lib.sync.outgoing.exceptions import ( | ||||
| ) | ||||
|  | ||||
|  | ||||
| class AuthentikRequestAdapter(GraphRequestAdapter): | ||||
|     def __init__(self, auth_provider, provider: MicrosoftEntraProvider, client=None): | ||||
|         super().__init__(auth_provider, client) | ||||
|         self._provider = provider | ||||
| def get_request_adapter( | ||||
|     credentials: ClientSecretCredential, scopes: list[str] | None = None | ||||
| ) -> GraphRequestAdapter: | ||||
|     if scopes: | ||||
|         auth_provider = AzureIdentityAuthenticationProvider(credentials=credentials, scopes=scopes) | ||||
|     else: | ||||
|         auth_provider = AzureIdentityAuthenticationProvider(credentials=credentials) | ||||
|  | ||||
|     async def get_http_response_message( | ||||
|         self, | ||||
|         request_info: RequestInformation, | ||||
|         parent_span: trace.Span, | ||||
|         claims: str = "", | ||||
|     ) -> httpx.Response: | ||||
|         if self._provider.dry_run and request_info.http_method.value.upper() not in SAFE_METHODS: | ||||
|             raise DryRunRejected( | ||||
|                 url=request_info.url, | ||||
|                 method=request_info.http_method.value, | ||||
|                 body=request_info.content.decode("utf-8"), | ||||
|             ) | ||||
|         return await super().get_http_response_message(request_info, parent_span, claims=claims) | ||||
|     return GraphRequestAdapter( | ||||
|         auth_provider=auth_provider, | ||||
|         client=GraphClientFactory.create_with_default_middleware( | ||||
|             options=options, client=KiotaClientFactory.get_default_client() | ||||
|         ), | ||||
|     ) | ||||
|  | ||||
|  | ||||
| class MicrosoftEntraSyncClient[TModel: Model, TConnection: Model, TSchema: dict]( | ||||
| @ -71,27 +63,9 @@ class MicrosoftEntraSyncClient[TModel: Model, TConnection: Model, TSchema: dict] | ||||
|         self.credentials = provider.microsoft_credentials() | ||||
|         self.__prefetch_domains() | ||||
|  | ||||
|     def get_request_adapter( | ||||
|         self, credentials: ClientSecretCredential, scopes: list[str] | None = None | ||||
|     ) -> AuthentikRequestAdapter: | ||||
|         if scopes: | ||||
|             auth_provider = AzureIdentityAuthenticationProvider( | ||||
|                 credentials=credentials, scopes=scopes | ||||
|             ) | ||||
|         else: | ||||
|             auth_provider = AzureIdentityAuthenticationProvider(credentials=credentials) | ||||
|  | ||||
|         return AuthentikRequestAdapter( | ||||
|             auth_provider=auth_provider, | ||||
|             provider=self.provider, | ||||
|             client=GraphClientFactory.create_with_default_middleware( | ||||
|                 options=options, client=KiotaClientFactory.get_default_client() | ||||
|             ), | ||||
|         ) | ||||
|  | ||||
|     @property | ||||
|     def client(self): | ||||
|         return GraphServiceClient(request_adapter=self.get_request_adapter(**self.credentials)) | ||||
|         return GraphServiceClient(request_adapter=get_request_adapter(**self.credentials)) | ||||
|  | ||||
|     def _request[T](self, request: Coroutine[Any, Any, T]) -> T: | ||||
|         try: | ||||
|  | ||||
| @ -1,24 +0,0 @@ | ||||
| # Generated by Django 5.0.12 on 2025-02-24 19:43 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ( | ||||
|             "authentik_providers_microsoft_entra", | ||||
|             "0002_microsoftentraprovidergroup_attributes_and_more", | ||||
|         ), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="microsoftentraprovider", | ||||
|             name="dry_run", | ||||
|             field=models.BooleanField( | ||||
|                 default=False, | ||||
|                 help_text="When enabled, provider will not modify or create objects in the remote system.", | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @ -32,6 +32,7 @@ class MicrosoftEntraUserTests(APITestCase): | ||||
|  | ||||
|     @apply_blueprint("system/providers-microsoft-entra.yaml") | ||||
|     def setUp(self) -> None: | ||||
|  | ||||
|         # Delete all users and groups as the mocked HTTP responses only return one ID | ||||
|         # which will cause errors with multiple users | ||||
|         Tenant.objects.update(avatars="none") | ||||
| @ -96,38 +97,6 @@ class MicrosoftEntraUserTests(APITestCase): | ||||
|             self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists()) | ||||
|             user_create.assert_called_once() | ||||
|  | ||||
|     def test_user_create_dry_run(self): | ||||
|         """Test user creation (dry run)""" | ||||
|         self.provider.dry_run = True | ||||
|         self.provider.save() | ||||
|         uid = generate_id() | ||||
|         with ( | ||||
|             patch( | ||||
|                 "authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials", | ||||
|                 MagicMock(return_value={"credentials": self.creds}), | ||||
|             ), | ||||
|             patch( | ||||
|                 "msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get", | ||||
|                 AsyncMock( | ||||
|                     return_value=OrganizationCollectionResponse( | ||||
|                         value=[ | ||||
|                             Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")]) | ||||
|                         ] | ||||
|                     ) | ||||
|                 ), | ||||
|             ), | ||||
|         ): | ||||
|             user = User.objects.create( | ||||
|                 username=uid, | ||||
|                 name=f"{uid} {uid}", | ||||
|                 email=f"{uid}@goauthentik.io", | ||||
|             ) | ||||
|             microsoft_user = MicrosoftEntraProviderUser.objects.filter( | ||||
|                 provider=self.provider, user=user | ||||
|             ).first() | ||||
|             self.assertIsNone(microsoft_user) | ||||
|             self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists()) | ||||
|  | ||||
|     def test_user_not_created(self): | ||||
|         """Test without property mappings, no group is created""" | ||||
|         self.provider.property_mappings.clear() | ||||
|  | ||||
| @ -102,7 +102,7 @@ def ssf_user_session_delete_session_revoked(sender, instance: AuthenticatedSessi | ||||
|             "format": "complex", | ||||
|             "session": { | ||||
|                 "format": "opaque", | ||||
|                 "id": sha256(instance.session.session_key.encode("ascii")).hexdigest(), | ||||
|                 "id": sha256(instance.session_key.encode("ascii")).hexdigest(), | ||||
|             }, | ||||
|             "user": { | ||||
|                 "format": "email", | ||||
|  | ||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user
	