Compare commits
	
		
10 Commits

fix/issue_ ... deny-unaut
| Author | SHA1 | Date |
|---|---|---|
|  | 7d40e00263 |  |
|  | 42501f6d1e |  |
|  | 2759b1c089 |  |
|  | ce6d76babe |  |
|  | 5cc2bd5b36 |  |
|  | bad8a8ead5 |  |
|  | 1f7a2d5194 |  |
|  | 5e328403d6 |  |
|  | f03e56af93 |  |
|  | 516aa9d9b1 |  |
| @ -1,5 +1,5 @@ | |||||||
| [bumpversion] | [bumpversion] | ||||||
| current_version = 2025.2.2 | current_version = 2025.2.1 | ||||||
| tag = True | tag = True | ||||||
| commit = True | commit = True | ||||||
| parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? | parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? | ||||||
|  | |||||||
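For context, the `parse` expression in this file is identical on both sides; only `current_version` differs between the two commits (2025.2.2 vs 2025.2.1). The pattern splits a version into major/minor/patch plus an optional release-candidate suffix. A minimal standalone sketch of how it matches (written with single backslashes here; the config file shows them escaped):

```python
import re

# Same grouping as the parse line above: major.minor.patch with an
# optional "-rcN"-style suffix captured as rc_t / rc_n.
VERSION_RE = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?"
)

for candidate in ("2025.2.2", "2025.2.2-rc1"):
    match = VERSION_RE.fullmatch(candidate)
    print(candidate, match.groupdict() if match else None)
# 2025.2.2     -> rc_t and rc_n stay None
# 2025.2.2-rc1 -> rc_t='rc', rc_n='1'
```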
							
								
								
									
22 .github/ISSUE_TEMPLATE/docs_issue.md (vendored)
							| @ -1,22 +0,0 @@ | |||||||
| --- |  | ||||||
| name: Documentation issue |  | ||||||
| about: Suggest an improvement or report a problem |  | ||||||
| title: "" |  | ||||||
| labels: documentation |  | ||||||
| assignees: "" |  | ||||||
| --- |  | ||||||
|  |  | ||||||
| **Do you see an area that can be clarified or expanded, a technical inaccuracy, or a broken link? Please describe.** |  | ||||||
| A clear and concise description of what the problem is, or where the document can be improved. Ex. I believe we need more details about [...] |  | ||||||
|  |  | ||||||
| **Provide the URL or link to the exact page in the documentation to which you are referring.** |  | ||||||
| If there are multiple pages, list them all, and be sure to state the header or section where the content is. |  | ||||||
|  |  | ||||||
| **Describe the solution you'd like** |  | ||||||
| A clear and concise description of what you want to happen. |  | ||||||
|  |  | ||||||
| **Additional context** |  | ||||||
| Add any other context or screenshots about the documentation issue here. |  | ||||||
|  |  | ||||||
| **Consider opening a PR!** |  | ||||||
| If the issue is one that you can fix, or even make a good pass at, we'd appreciate a PR. For more information about making a contribution to the docs, and using our Style Guide and our templates, refer to ["Writing documentation"](https://docs.goauthentik.io/docs/developer-docs/docs/writing-documentation). |  | ||||||
| @ -44,6 +44,7 @@ if is_release: | |||||||
|         ] |         ] | ||||||
|         if not prerelease: |         if not prerelease: | ||||||
|             image_tags += [ |             image_tags += [ | ||||||
|  |                 f"{name}:latest", | ||||||
|                 f"{name}:{version_family}", |                 f"{name}:{version_family}", | ||||||
|             ] |             ] | ||||||
| else: | else: | ||||||
|  | |||||||
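This hunk sits inside the release script that assembles the list of Docker tags to push: one side additionally appends a floating `{name}:latest` tag for non-prerelease builds, next to the version-family tag. A hedged, self-contained sketch of that selection logic, with `name`, `version`, and `version_family` standing in for the script's variables (the exact-version tag and the surrounding script are assumptions, since they are not shown in this hunk):

```python
def build_image_tags(name: str, version: str, version_family: str,
                     is_release: bool, prerelease: bool) -> list[str]:
    """Sketch of the tag selection suggested by the hunk above."""
    # Assumption: every build gets its exact version tag; only stable
    # (non-prerelease) releases also receive "latest" and the family tag.
    tags = [f"{name}:{version}"]
    if is_release and not prerelease:
        tags += [f"{name}:latest", f"{name}:{version_family}"]
    return tags

print(build_image_tags("ghcr.io/goauthentik/server", "2025.2.1", "2025.2",
                       is_release=True, prerelease=False))
# ['ghcr.io/goauthentik/server:2025.2.1',
#  'ghcr.io/goauthentik/server:latest',
#  'ghcr.io/goauthentik/server:2025.2']
```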
							
								
								
									
20 .github/actions/setup/action.yml (vendored)
							| @ -9,22 +9,17 @@ inputs: | |||||||
| runs: | runs: | ||||||
|   using: "composite" |   using: "composite" | ||||||
|   steps: |   steps: | ||||||
|     - name: Install apt deps |     - name: Install poetry & deps | ||||||
|       shell: bash |       shell: bash | ||||||
|       run: | |       run: | | ||||||
|  |         pipx install poetry || true | ||||||
|         sudo apt-get update |         sudo apt-get update | ||||||
|         sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext libkrb5-dev krb5-kdc krb5-user krb5-admin-server |         sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext libkrb5-dev krb5-kdc krb5-user krb5-admin-server | ||||||
|     - name: Install uv |     - name: Setup python and restore poetry | ||||||
|       uses: astral-sh/setup-uv@v5 |  | ||||||
|       with: |  | ||||||
|         enable-cache: true |  | ||||||
|     - name: Setup python |  | ||||||
|       uses: actions/setup-python@v5 |       uses: actions/setup-python@v5 | ||||||
|       with: |       with: | ||||||
|         python-version-file: "pyproject.toml" |         python-version-file: "pyproject.toml" | ||||||
|     - name: Install Python deps |         cache: "poetry" | ||||||
|       shell: bash |  | ||||||
|       run: uv sync --all-extras --dev --frozen |  | ||||||
|     - name: Setup node |     - name: Setup node | ||||||
|       uses: actions/setup-node@v4 |       uses: actions/setup-node@v4 | ||||||
|       with: |       with: | ||||||
| @ -35,18 +30,15 @@ runs: | |||||||
|       uses: actions/setup-go@v5 |       uses: actions/setup-go@v5 | ||||||
|       with: |       with: | ||||||
|         go-version-file: "go.mod" |         go-version-file: "go.mod" | ||||||
|     - name: Setup docker cache |  | ||||||
|       uses: ScribeMD/docker-cache@0.5.0 |  | ||||||
|       with: |  | ||||||
|         key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }} |  | ||||||
|     - name: Setup dependencies |     - name: Setup dependencies | ||||||
|       shell: bash |       shell: bash | ||||||
|       run: | |       run: | | ||||||
|         export PSQL_TAG=${{ inputs.postgresql_version }} |         export PSQL_TAG=${{ inputs.postgresql_version }} | ||||||
|         docker compose -f .github/actions/setup/docker-compose.yml up -d |         docker compose -f .github/actions/setup/docker-compose.yml up -d | ||||||
|  |         poetry sync | ||||||
|         cd web && npm ci |         cd web && npm ci | ||||||
|     - name: Generate config |     - name: Generate config | ||||||
|       shell: uv run python {0} |       shell: poetry run python {0} | ||||||
|       run: | |       run: | | ||||||
|         from authentik.lib.generators import generate_id |         from authentik.lib.generators import generate_id | ||||||
|         from yaml import safe_dump |         from yaml import safe_dump | ||||||
|  | |||||||
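Both variants of the "Generate config" step rely on GitHub Actions' custom-shell syntax: the `{0}` placeholder in `shell: uv run python {0}` (or `poetry run python {0}`) is replaced with the path of a temporary file containing the step's `run` block, so the YAML body executes as a Python script inside the project environment. The script itself is truncated above; a purely hypothetical, minimal version of such a config generator (the keys authentik actually writes are an assumption, not shown in this diff) might look like:

```python
# Hypothetical sketch only: the real step imports
# authentik.lib.generators.generate_id and its output keys are not
# visible in this diff.
from secrets import token_urlsafe
from yaml import safe_dump

config = {
    "secret_key": token_urlsafe(48),  # stand-in for generate_id()
    "postgresql": {"user": "authentik", "name": "authentik"},
}
with open("local.env.yml", "w", encoding="utf-8") as handle:
    safe_dump(config, handle)
```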
							
								
								
									
2 .github/actions/setup/docker-compose.yml (vendored)
							| @ -11,7 +11,7 @@ services: | |||||||
|       - 5432:5432 |       - 5432:5432 | ||||||
|     restart: always |     restart: always | ||||||
|   redis: |   redis: | ||||||
|     image: docker.io/library/redis:7 |     image: docker.io/library/redis | ||||||
|     ports: |     ports: | ||||||
|       - 6379:6379 |       - 6379:6379 | ||||||
|     restart: always |     restart: always | ||||||
|  | |||||||
							
								
								
									
33 .github/codespell-words.txt (vendored)
							| @ -1,32 +1,7 @@ | |||||||
| akadmin |  | ||||||
| asgi |  | ||||||
| assertIn |  | ||||||
| authentik |  | ||||||
| authn |  | ||||||
| crate |  | ||||||
| docstrings |  | ||||||
| entra |  | ||||||
| goauthentik |  | ||||||
| gunicorn |  | ||||||
| hass |  | ||||||
| jwe |  | ||||||
| jwks |  | ||||||
| keypair | keypair | ||||||
| keypairs | keypairs | ||||||
| kubernetes | hass | ||||||
| oidc |  | ||||||
| ontext |  | ||||||
| openid |  | ||||||
| passwordless |  | ||||||
| plex |  | ||||||
| saml |  | ||||||
| scim |  | ||||||
| singed |  | ||||||
| slo |  | ||||||
| sso |  | ||||||
| totp |  | ||||||
| traefik |  | ||||||
| # https://github.com/codespell-project/codespell/issues/1224 |  | ||||||
| upToDate |  | ||||||
| warmup | warmup | ||||||
| webauthn | ontext | ||||||
|  | singed | ||||||
|  | assertIn | ||||||
|  | |||||||
							
								
								
									
8 .github/dependabot.yml (vendored)
							| @ -82,12 +82,6 @@ updates: | |||||||
|       docusaurus: |       docusaurus: | ||||||
|         patterns: |         patterns: | ||||||
|           - "@docusaurus/*" |           - "@docusaurus/*" | ||||||
|       build: |  | ||||||
|         patterns: |  | ||||||
|           - "@swc/*" |  | ||||||
|           - "swc-*" |  | ||||||
|           - "lightningcss*" |  | ||||||
|           - "@rspack/binding*" |  | ||||||
|   - package-ecosystem: npm |   - package-ecosystem: npm | ||||||
|     directory: "/lifecycle/aws" |     directory: "/lifecycle/aws" | ||||||
|     schedule: |     schedule: | ||||||
| @ -98,7 +92,7 @@ updates: | |||||||
|       prefix: "lifecycle/aws:" |       prefix: "lifecycle/aws:" | ||||||
|     labels: |     labels: | ||||||
|       - dependencies |       - dependencies | ||||||
|   - package-ecosystem: uv |   - package-ecosystem: pip | ||||||
|     directory: "/" |     directory: "/" | ||||||
|     schedule: |     schedule: | ||||||
|       interval: daily |       interval: daily | ||||||
|  | |||||||
| @ -40,7 +40,7 @@ jobs: | |||||||
|       attestations: write |       attestations: write | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|       - uses: docker/setup-qemu-action@v3.6.0 |       - uses: docker/setup-qemu-action@v3.5.0 | ||||||
|       - uses: docker/setup-buildx-action@v3 |       - uses: docker/setup-buildx-action@v3 | ||||||
|       - name: prepare variables |       - name: prepare variables | ||||||
|         uses: ./.github/actions/docker-push-variables |         uses: ./.github/actions/docker-push-variables | ||||||
|  | |||||||
							
								
								
									
2 .github/workflows/ci-aws-cfn.yml (vendored)
							| @ -33,7 +33,7 @@ jobs: | |||||||
|           npm ci |           npm ci | ||||||
|       - name: Check changes have been applied |       - name: Check changes have been applied | ||||||
|         run: | |         run: | | ||||||
|           uv run make aws-cfn |           poetry run make aws-cfn | ||||||
|           git diff --exit-code |           git diff --exit-code | ||||||
|   ci-aws-cfn-mark: |   ci-aws-cfn-mark: | ||||||
|     if: always() |     if: always() | ||||||
|  | |||||||
							
								
								
									
2 .github/workflows/ci-main-daily.yml (vendored)
							| @ -15,8 +15,8 @@ jobs: | |||||||
|       matrix: |       matrix: | ||||||
|         version: |         version: | ||||||
|           - docs |           - docs | ||||||
|           - version-2025-2 |  | ||||||
|           - version-2024-12 |           - version-2024-12 | ||||||
|  |           - version-2024-10 | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|       - run: | |       - run: | | ||||||
|  | |||||||
							
								
								
									
32 .github/workflows/ci-main.yml (vendored)
							| @ -34,7 +34,7 @@ jobs: | |||||||
|       - name: Setup authentik env |       - name: Setup authentik env | ||||||
|         uses: ./.github/actions/setup |         uses: ./.github/actions/setup | ||||||
|       - name: run job |       - name: run job | ||||||
|         run: uv run make ci-${{ matrix.job }} |         run: poetry run make ci-${{ matrix.job }} | ||||||
|   test-migrations: |   test-migrations: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
| @ -42,7 +42,7 @@ jobs: | |||||||
|       - name: Setup authentik env |       - name: Setup authentik env | ||||||
|         uses: ./.github/actions/setup |         uses: ./.github/actions/setup | ||||||
|       - name: run migrations |       - name: run migrations | ||||||
|         run: uv run python -m lifecycle.migrate |         run: poetry run python -m lifecycle.migrate | ||||||
|   test-make-seed: |   test-make-seed: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
| @ -69,21 +69,19 @@ jobs: | |||||||
|           fetch-depth: 0 |           fetch-depth: 0 | ||||||
|       - name: checkout stable |       - name: checkout stable | ||||||
|         run: | |         run: | | ||||||
|  |           # Delete all poetry envs | ||||||
|  |           rm -rf /home/runner/.cache/pypoetry | ||||||
|           # Copy current, latest config to local |           # Copy current, latest config to local | ||||||
|           # Temporarly comment the .github backup while migrating to uv |  | ||||||
|           cp authentik/lib/default.yml local.env.yml |           cp authentik/lib/default.yml local.env.yml | ||||||
|           # cp -R .github .. |           cp -R .github .. | ||||||
|           cp -R scripts .. |           cp -R scripts .. | ||||||
|           git checkout $(git tag --sort=version:refname | grep '^version/' | grep -vE -- '-rc[0-9]+$' | tail -n1) |           git checkout $(git tag --sort=version:refname | grep '^version/' | grep -vE -- '-rc[0-9]+$' | tail -n1) | ||||||
|           # rm -rf .github/ scripts/ |           rm -rf .github/ scripts/ | ||||||
|           # mv ../.github ../scripts . |           mv ../.github ../scripts . | ||||||
|           rm -rf scripts/ |  | ||||||
|           mv ../scripts . |  | ||||||
|       - name: Setup authentik env (stable) |       - name: Setup authentik env (stable) | ||||||
|         uses: ./.github/actions/setup |         uses: ./.github/actions/setup | ||||||
|         with: |         with: | ||||||
|           postgresql_version: ${{ matrix.psql }} |           postgresql_version: ${{ matrix.psql }} | ||||||
|         continue-on-error: true |  | ||||||
|       - name: run migrations to stable |       - name: run migrations to stable | ||||||
|         run: poetry run python -m lifecycle.migrate |         run: poetry run python -m lifecycle.migrate | ||||||
|       - name: checkout current code |       - name: checkout current code | ||||||
| @ -93,13 +91,15 @@ jobs: | |||||||
|           git reset --hard HEAD |           git reset --hard HEAD | ||||||
|           git clean -d -fx . |           git clean -d -fx . | ||||||
|           git checkout $GITHUB_SHA |           git checkout $GITHUB_SHA | ||||||
|  |           # Delete previous poetry env | ||||||
|  |           rm -rf /home/runner/.cache/pypoetry/virtualenvs/* | ||||||
|       - name: Setup authentik env (ensure latest deps are installed) |       - name: Setup authentik env (ensure latest deps are installed) | ||||||
|         uses: ./.github/actions/setup |         uses: ./.github/actions/setup | ||||||
|         with: |         with: | ||||||
|           postgresql_version: ${{ matrix.psql }} |           postgresql_version: ${{ matrix.psql }} | ||||||
|       - name: migrate to latest |       - name: migrate to latest | ||||||
|         run: | |         run: | | ||||||
|           uv run python -m lifecycle.migrate |           poetry run python -m lifecycle.migrate | ||||||
|       - name: run tests |       - name: run tests | ||||||
|         env: |         env: | ||||||
|           # Test in the main database that we just migrated from the previous stable version |           # Test in the main database that we just migrated from the previous stable version | ||||||
| @ -108,7 +108,7 @@ jobs: | |||||||
|           CI_RUN_ID: ${{ matrix.run_id }} |           CI_RUN_ID: ${{ matrix.run_id }} | ||||||
|           CI_TOTAL_RUNS: "5" |           CI_TOTAL_RUNS: "5" | ||||||
|         run: | |         run: | | ||||||
|           uv run make ci-test |           poetry run make ci-test | ||||||
|   test-unittest: |   test-unittest: | ||||||
|     name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5 |     name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5 | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
| @ -133,7 +133,7 @@ jobs: | |||||||
|           CI_RUN_ID: ${{ matrix.run_id }} |           CI_RUN_ID: ${{ matrix.run_id }} | ||||||
|           CI_TOTAL_RUNS: "5" |           CI_TOTAL_RUNS: "5" | ||||||
|         run: | |         run: | | ||||||
|           uv run make ci-test |           poetry run make ci-test | ||||||
|       - if: ${{ always() }} |       - if: ${{ always() }} | ||||||
|         uses: codecov/codecov-action@v5 |         uses: codecov/codecov-action@v5 | ||||||
|         with: |         with: | ||||||
| @ -156,8 +156,8 @@ jobs: | |||||||
|         uses: helm/kind-action@v1.12.0 |         uses: helm/kind-action@v1.12.0 | ||||||
|       - name: run integration |       - name: run integration | ||||||
|         run: | |         run: | | ||||||
|           uv run coverage run manage.py test tests/integration |           poetry run coverage run manage.py test tests/integration | ||||||
|           uv run coverage xml |           poetry run coverage xml | ||||||
|       - if: ${{ always() }} |       - if: ${{ always() }} | ||||||
|         uses: codecov/codecov-action@v5 |         uses: codecov/codecov-action@v5 | ||||||
|         with: |         with: | ||||||
| @ -214,8 +214,8 @@ jobs: | |||||||
|           npm run build |           npm run build | ||||||
|       - name: run e2e |       - name: run e2e | ||||||
|         run: | |         run: | | ||||||
|           uv run coverage run manage.py test ${{ matrix.job.glob }} |           poetry run coverage run manage.py test ${{ matrix.job.glob }} | ||||||
|           uv run coverage xml |           poetry run coverage xml | ||||||
|       - if: ${{ always() }} |       - if: ${{ always() }} | ||||||
|         uses: codecov/codecov-action@v5 |         uses: codecov/codecov-action@v5 | ||||||
|         with: |         with: | ||||||
|  | |||||||
							
								
								
									
4 .github/workflows/ci-outpost.yml (vendored)
							| @ -29,7 +29,7 @@ jobs: | |||||||
|       - name: Generate API |       - name: Generate API | ||||||
|         run: make gen-client-go |         run: make gen-client-go | ||||||
|       - name: golangci-lint |       - name: golangci-lint | ||||||
|         uses: golangci/golangci-lint-action@v7 |         uses: golangci/golangci-lint-action@v6 | ||||||
|         with: |         with: | ||||||
|           version: latest |           version: latest | ||||||
|           args: --timeout 5000s --verbose |           args: --timeout 5000s --verbose | ||||||
| @ -82,7 +82,7 @@ jobs: | |||||||
|         with: |         with: | ||||||
|           ref: ${{ github.event.pull_request.head.sha }} |           ref: ${{ github.event.pull_request.head.sha }} | ||||||
|       - name: Set up QEMU |       - name: Set up QEMU | ||||||
|         uses: docker/setup-qemu-action@v3.6.0 |         uses: docker/setup-qemu-action@v3.5.0 | ||||||
|       - name: Set up Docker Buildx |       - name: Set up Docker Buildx | ||||||
|         uses: docker/setup-buildx-action@v3 |         uses: docker/setup-buildx-action@v3 | ||||||
|       - name: prepare variables |       - name: prepare variables | ||||||
|  | |||||||
| @ -2,7 +2,7 @@ name: authentik-gen-update-webauthn-mds | |||||||
| on: | on: | ||||||
|   workflow_dispatch: |   workflow_dispatch: | ||||||
|   schedule: |   schedule: | ||||||
|     - cron: "30 1 1,15 * *" |     - cron: '30 1 1,15 * *' | ||||||
|  |  | ||||||
| env: | env: | ||||||
|   POSTGRES_DB: authentik |   POSTGRES_DB: authentik | ||||||
| @ -24,7 +24,7 @@ jobs: | |||||||
|           token: ${{ steps.generate_token.outputs.token }} |           token: ${{ steps.generate_token.outputs.token }} | ||||||
|       - name: Setup authentik env |       - name: Setup authentik env | ||||||
|         uses: ./.github/actions/setup |         uses: ./.github/actions/setup | ||||||
|       - run: uv run ak update_webauthn_mds |       - run: poetry run ak update_webauthn_mds | ||||||
|       - uses: peter-evans/create-pull-request@v7 |       - uses: peter-evans/create-pull-request@v7 | ||||||
|         id: cpr |         id: cpr | ||||||
|         with: |         with: | ||||||
|  | |||||||
							
								
								
									
4 .github/workflows/publish-source-docs.yml (vendored)
							| @ -21,8 +21,8 @@ jobs: | |||||||
|         uses: ./.github/actions/setup |         uses: ./.github/actions/setup | ||||||
|       - name: generate docs |       - name: generate docs | ||||||
|         run: | |         run: | | ||||||
|           uv run make migrate |           poetry run make migrate | ||||||
|           uv run ak build_source_docs |           poetry run ak build_source_docs | ||||||
|       - name: Publish |       - name: Publish | ||||||
|         uses: netlify/actions/cli@master |         uses: netlify/actions/cli@master | ||||||
|         with: |         with: | ||||||
|  | |||||||
							
								
								
									
4 .github/workflows/release-publish.yml (vendored)
							| @ -42,7 +42,7 @@ jobs: | |||||||
|         with: |         with: | ||||||
|           go-version-file: "go.mod" |           go-version-file: "go.mod" | ||||||
|       - name: Set up QEMU |       - name: Set up QEMU | ||||||
|         uses: docker/setup-qemu-action@v3.6.0 |         uses: docker/setup-qemu-action@v3.5.0 | ||||||
|       - name: Set up Docker Buildx |       - name: Set up Docker Buildx | ||||||
|         uses: docker/setup-buildx-action@v3 |         uses: docker/setup-buildx-action@v3 | ||||||
|       - name: prepare variables |       - name: prepare variables | ||||||
| @ -186,7 +186,7 @@ jobs: | |||||||
|           container=$(docker container create ${{ steps.ev.outputs.imageMainName }}) |           container=$(docker container create ${{ steps.ev.outputs.imageMainName }}) | ||||||
|           docker cp ${container}:web/ . |           docker cp ${container}:web/ . | ||||||
|       - name: Create a Sentry.io release |       - name: Create a Sentry.io release | ||||||
|         uses: getsentry/action-release@v3 |         uses: getsentry/action-release@v1 | ||||||
|         continue-on-error: true |         continue-on-error: true | ||||||
|         env: |         env: | ||||||
|           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} |           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} | ||||||
|  | |||||||
							
								
								
									
27 .github/workflows/semgrep.yml (vendored)
							| @ -1,27 +0,0 @@ | |||||||
| name: authentik-semgrep |  | ||||||
| on: |  | ||||||
|   workflow_dispatch: {} |  | ||||||
|   pull_request: {} |  | ||||||
|   push: |  | ||||||
|     branches: |  | ||||||
|       - main |  | ||||||
|       - master |  | ||||||
|     paths: |  | ||||||
|       - .github/workflows/semgrep.yml |  | ||||||
|   schedule: |  | ||||||
|     # random HH:MM to avoid a load spike on GitHub Actions at 00:00 |  | ||||||
|     - cron: '12 15 * * *' |  | ||||||
| jobs: |  | ||||||
|   semgrep: |  | ||||||
|     name: semgrep/ci |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|     permissions: |  | ||||||
|       contents: read |  | ||||||
|     env: |  | ||||||
|       SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }} |  | ||||||
|     container: |  | ||||||
|       image: semgrep/semgrep |  | ||||||
|     if: (github.actor != 'dependabot[bot]') |  | ||||||
|     steps: |  | ||||||
|       - uses: actions/checkout@v4 |  | ||||||
|       - run: semgrep ci |  | ||||||
| @ -36,10 +36,10 @@ jobs: | |||||||
|         run: make gen-client-ts |         run: make gen-client-ts | ||||||
|       - name: run extract |       - name: run extract | ||||||
|         run: | |         run: | | ||||||
|           uv run make i18n-extract |           poetry run make i18n-extract | ||||||
|       - name: run compile |       - name: run compile | ||||||
|         run: | |         run: | | ||||||
|           uv run ak compilemessages |           poetry run ak compilemessages | ||||||
|           make web-check-compile |           make web-check-compile | ||||||
|       - name: Create Pull Request |       - name: Create Pull Request | ||||||
|         if: ${{ github.event_name != 'pull_request' }} |         if: ${{ github.event_name != 'pull_request' }} | ||||||
|  | |||||||
							
								
								
									
22 .vscode/settings.json (vendored)
							| @ -1,4 +1,26 @@ | |||||||
| { | { | ||||||
|  |     "cSpell.words": [ | ||||||
|  |         "akadmin", | ||||||
|  |         "asgi", | ||||||
|  |         "authentik", | ||||||
|  |         "authn", | ||||||
|  |         "entra", | ||||||
|  |         "goauthentik", | ||||||
|  |         "jwe", | ||||||
|  |         "jwks", | ||||||
|  |         "kubernetes", | ||||||
|  |         "oidc", | ||||||
|  |         "openid", | ||||||
|  |         "passwordless", | ||||||
|  |         "plex", | ||||||
|  |         "saml", | ||||||
|  |         "scim", | ||||||
|  |         "slo", | ||||||
|  |         "sso", | ||||||
|  |         "totp", | ||||||
|  |         "traefik", | ||||||
|  |         "webauthn" | ||||||
|  |     ], | ||||||
|     "todo-tree.tree.showCountsInTree": true, |     "todo-tree.tree.showCountsInTree": true, | ||||||
|     "todo-tree.tree.showBadges": true, |     "todo-tree.tree.showBadges": true, | ||||||
|     "yaml.customTags": [ |     "yaml.customTags": [ | ||||||
|  | |||||||
							
								
								
									
46 .vscode/tasks.json (vendored)
							| @ -3,13 +3,8 @@ | |||||||
|     "tasks": [ |     "tasks": [ | ||||||
|         { |         { | ||||||
|             "label": "authentik/core: make", |             "label": "authentik/core: make", | ||||||
|             "command": "uv", |             "command": "poetry", | ||||||
|             "args": [ |             "args": ["run", "make", "lint-fix", "lint"], | ||||||
|                 "run", |  | ||||||
|                 "make", |  | ||||||
|                 "lint-fix", |  | ||||||
|                 "lint" |  | ||||||
|             ], |  | ||||||
|             "presentation": { |             "presentation": { | ||||||
|                 "panel": "new" |                 "panel": "new" | ||||||
|             }, |             }, | ||||||
| @ -17,12 +12,8 @@ | |||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/core: run", |             "label": "authentik/core: run", | ||||||
|             "command": "uv", |             "command": "poetry", | ||||||
|             "args": [ |             "args": ["run", "ak", "server"], | ||||||
|                 "run", |  | ||||||
|                 "ak", |  | ||||||
|                 "server" |  | ||||||
|             ], |  | ||||||
|             "group": "build", |             "group": "build", | ||||||
|             "presentation": { |             "presentation": { | ||||||
|                 "panel": "dedicated", |                 "panel": "dedicated", | ||||||
| @ -32,17 +23,13 @@ | |||||||
|         { |         { | ||||||
|             "label": "authentik/web: make", |             "label": "authentik/web: make", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": [ |             "args": ["web"], | ||||||
|                 "web" |  | ||||||
|             ], |  | ||||||
|             "group": "build" |             "group": "build" | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/web: watch", |             "label": "authentik/web: watch", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": [ |             "args": ["web-watch"], | ||||||
|                 "web-watch" |  | ||||||
|             ], |  | ||||||
|             "group": "build", |             "group": "build", | ||||||
|             "presentation": { |             "presentation": { | ||||||
|                 "panel": "dedicated", |                 "panel": "dedicated", | ||||||
| @ -52,26 +39,19 @@ | |||||||
|         { |         { | ||||||
|             "label": "authentik: install", |             "label": "authentik: install", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": [ |             "args": ["install", "-j4"], | ||||||
|                 "install", |  | ||||||
|                 "-j4" |  | ||||||
|             ], |  | ||||||
|             "group": "build" |             "group": "build" | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/website: make", |             "label": "authentik/website: make", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": [ |             "args": ["website"], | ||||||
|                 "website" |  | ||||||
|             ], |  | ||||||
|             "group": "build" |             "group": "build" | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/website: watch", |             "label": "authentik/website: watch", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": [ |             "args": ["website-watch"], | ||||||
|                 "website-watch" |  | ||||||
|             ], |  | ||||||
|             "group": "build", |             "group": "build", | ||||||
|             "presentation": { |             "presentation": { | ||||||
|                 "panel": "dedicated", |                 "panel": "dedicated", | ||||||
| @ -80,12 +60,8 @@ | |||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/api: generate", |             "label": "authentik/api: generate", | ||||||
|             "command": "uv", |             "command": "poetry", | ||||||
|             "args": [ |             "args": ["run", "make", "gen"], | ||||||
|                 "run", |  | ||||||
|                 "make", |  | ||||||
|                 "gen" |  | ||||||
|             ], |  | ||||||
|             "group": "build" |             "group": "build" | ||||||
|         } |         } | ||||||
|     ] |     ] | ||||||
|  | |||||||
| @ -10,7 +10,7 @@ schemas/                        @goauthentik/backend | |||||||
| scripts/                        @goauthentik/backend | scripts/                        @goauthentik/backend | ||||||
| tests/                          @goauthentik/backend | tests/                          @goauthentik/backend | ||||||
| pyproject.toml                  @goauthentik/backend | pyproject.toml                  @goauthentik/backend | ||||||
| uv.lock                         @goauthentik/backend | poetry.lock                     @goauthentik/backend | ||||||
| go.mod                          @goauthentik/backend | go.mod                          @goauthentik/backend | ||||||
| go.sum                          @goauthentik/backend | go.sum                          @goauthentik/backend | ||||||
| # Infrastructure | # Infrastructure | ||||||
|  | |||||||
| @ -5,7 +5,7 @@ | |||||||
| We as members, contributors, and leaders pledge to make participation in our | We as members, contributors, and leaders pledge to make participation in our | ||||||
| community a harassment-free experience for everyone, regardless of age, body | community a harassment-free experience for everyone, regardless of age, body | ||||||
| size, visible or invisible disability, ethnicity, sex characteristics, gender | size, visible or invisible disability, ethnicity, sex characteristics, gender | ||||||
| identity and expression, level of experience, education, socioeconomic status, | identity and expression, level of experience, education, socio-economic status, | ||||||
| nationality, personal appearance, race, religion, or sexual identity | nationality, personal appearance, race, religion, or sexual identity | ||||||
| and orientation. | and orientation. | ||||||
|  |  | ||||||
|  | |||||||
							
								
								
									
85 Dockerfile
							| @ -93,59 +93,53 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ | |||||||
|     mkdir -p /usr/share/GeoIP && \ |     mkdir -p /usr/share/GeoIP && \ | ||||||
|     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" |     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" | ||||||
|  |  | ||||||
| # Stage 5: Download uv | # Stage 5: Python dependencies | ||||||
| FROM ghcr.io/astral-sh/uv:0.6.10 AS uv | FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS python-deps | ||||||
| # Stage 6: Base python image |  | ||||||
| FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS python-base |  | ||||||
|  |  | ||||||
| ENV VENV_PATH="/ak-root/.venv" \ |  | ||||||
|     PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \ |  | ||||||
|     UV_COMPILE_BYTECODE=1 \ |  | ||||||
|     UV_LINK_MODE=copy \ |  | ||||||
|     UV_NATIVE_TLS=1 \ |  | ||||||
|     UV_PYTHON_DOWNLOADS=0 |  | ||||||
|  |  | ||||||
| WORKDIR /ak-root/ |  | ||||||
|  |  | ||||||
| COPY --from=uv /uv /uvx /bin/ |  | ||||||
|  |  | ||||||
| # Stage 7: Python dependencies |  | ||||||
| FROM python-base AS python-deps |  | ||||||
|  |  | ||||||
| ARG TARGETARCH | ARG TARGETARCH | ||||||
| ARG TARGETVARIANT | ARG TARGETVARIANT | ||||||
|  |  | ||||||
| RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache | WORKDIR /ak-root/poetry | ||||||
|  |  | ||||||
| ENV PATH="/root/.cargo/bin:$PATH" | ENV VENV_PATH="/ak-root/venv" \ | ||||||
|  |     POETRY_VIRTUALENVS_CREATE=false \ | ||||||
|  |     PATH="/ak-root/venv/bin:$PATH" | ||||||
|  |  | ||||||
|  | RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache | ||||||
|  |  | ||||||
| RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \ | RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \ | ||||||
|     apt-get update && \ |     apt-get update && \ | ||||||
|     # Required for installing pip packages |     # Required for installing pip packages | ||||||
|  |     apt-get install -y --no-install-recommends build-essential pkg-config libpq-dev libkrb5-dev | ||||||
|  |  | ||||||
|  | RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \ | ||||||
|  |     --mount=type=bind,target=./poetry.lock,src=./poetry.lock \ | ||||||
|  |     --mount=type=cache,target=/root/.cache/pip \ | ||||||
|  |     --mount=type=cache,target=/root/.cache/pypoetry \ | ||||||
|  |     pip install --no-cache cffi && \ | ||||||
|  |     apt-get update && \ | ||||||
|     apt-get install -y --no-install-recommends \ |     apt-get install -y --no-install-recommends \ | ||||||
|     # Build essentials |         build-essential libffi-dev \ | ||||||
|     build-essential pkg-config libffi-dev git \ |         # Required for cryptography | ||||||
|     # cryptography |         curl pkg-config \ | ||||||
|     curl \ |         # Required for lxml | ||||||
|     # libxml |         libxslt-dev zlib1g-dev \ | ||||||
|     libxslt-dev zlib1g-dev \ |         # Required for xmlsec | ||||||
|     # postgresql |         libltdl-dev \ | ||||||
|     libpq-dev \ |         # Required for kadmin | ||||||
|     # python-kadmin-rs |         sccache clang && \ | ||||||
|     clang libkrb5-dev sccache \ |     curl https://sh.rustup.rs -sSf | sh -s -- -y && \ | ||||||
|     # xmlsec |     . "$HOME/.cargo/env" && \ | ||||||
|     libltdl-dev && \ |     python -m venv /ak-root/venv/ && \ | ||||||
|     curl https://sh.rustup.rs -sSf | sh -s -- -y |     bash -c "source ${VENV_PATH}/bin/activate && \ | ||||||
|  |     pip3 install --upgrade pip poetry && \ | ||||||
|  |     poetry config --local installer.no-binary cryptography,xmlsec,lxml,python-kadmin-rs && \ | ||||||
|  |     poetry install --only=main --no-ansi --no-interaction --no-root && \ | ||||||
|  |     pip uninstall cryptography -y && \ | ||||||
|  |     poetry install --only=main --no-ansi --no-interaction --no-root" | ||||||
|  |  | ||||||
| ENV UV_NO_BINARY_PACKAGE="cryptography lxml python-kadmin-rs xmlsec" | # Stage 6: Run | ||||||
|  | FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS final-image | ||||||
| RUN --mount=type=bind,target=pyproject.toml,src=pyproject.toml \ |  | ||||||
|     --mount=type=bind,target=uv.lock,src=uv.lock \ |  | ||||||
|     --mount=type=cache,target=/root/.cache/uv \ |  | ||||||
|     uv sync --frozen --no-install-project --no-dev |  | ||||||
|  |  | ||||||
| # Stage 8: Run |  | ||||||
| FROM python-base AS final-image |  | ||||||
|  |  | ||||||
| ARG VERSION | ARG VERSION | ||||||
| ARG GIT_BUILD_HASH | ARG GIT_BUILD_HASH | ||||||
| @ -177,7 +171,7 @@ RUN apt-get update && \ | |||||||
|  |  | ||||||
| COPY ./authentik/ /authentik | COPY ./authentik/ /authentik | ||||||
| COPY ./pyproject.toml / | COPY ./pyproject.toml / | ||||||
| COPY ./uv.lock / | COPY ./poetry.lock / | ||||||
| COPY ./schemas /schemas | COPY ./schemas /schemas | ||||||
| COPY ./locale /locale | COPY ./locale /locale | ||||||
| COPY ./tests /tests | COPY ./tests /tests | ||||||
| @ -186,7 +180,7 @@ COPY ./blueprints /blueprints | |||||||
| COPY ./lifecycle/ /lifecycle | COPY ./lifecycle/ /lifecycle | ||||||
| COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf | COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf | ||||||
| COPY --from=go-builder /go/authentik /bin/authentik | COPY --from=go-builder /go/authentik /bin/authentik | ||||||
| COPY --from=python-deps /ak-root/.venv /ak-root/.venv | COPY --from=python-deps /ak-root/venv /ak-root/venv | ||||||
| COPY --from=web-builder /work/web/dist/ /web/dist/ | COPY --from=web-builder /work/web/dist/ /web/dist/ | ||||||
| COPY --from=web-builder /work/web/authentik/ /web/authentik/ | COPY --from=web-builder /work/web/authentik/ /web/authentik/ | ||||||
| COPY --from=website-builder /work/website/build/ /website/help/ | COPY --from=website-builder /work/website/build/ /website/help/ | ||||||
| @ -197,6 +191,9 @@ USER 1000 | |||||||
| ENV TMPDIR=/dev/shm/ \ | ENV TMPDIR=/dev/shm/ \ | ||||||
|     PYTHONDONTWRITEBYTECODE=1 \ |     PYTHONDONTWRITEBYTECODE=1 \ | ||||||
|     PYTHONUNBUFFERED=1 \ |     PYTHONUNBUFFERED=1 \ | ||||||
|  |     PATH="/ak-root/venv/bin:/lifecycle:$PATH" \ | ||||||
|  |     VENV_PATH="/ak-root/venv" \ | ||||||
|  |     POETRY_VIRTUALENVS_CREATE=false \ | ||||||
|     GOFIPS=1 |     GOFIPS=1 | ||||||
|  |  | ||||||
| HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ] | HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ] | ||||||
|  | |||||||
							
								
								
									
72 Makefile
							| @ -4,17 +4,34 @@ | |||||||
| PWD = $(shell pwd) | PWD = $(shell pwd) | ||||||
| UID = $(shell id -u) | UID = $(shell id -u) | ||||||
| GID = $(shell id -g) | GID = $(shell id -g) | ||||||
| NPM_VERSION = $(shell python -m scripts.generate_semver) | NPM_VERSION = $(shell python -m scripts.npm_version) | ||||||
| PY_SOURCES = authentik tests scripts lifecycle .github | PY_SOURCES = authentik tests scripts lifecycle .github | ||||||
|  | GO_SOURCES = cmd internal | ||||||
|  | WEB_SOURCES = web/src web/packages | ||||||
| DOCKER_IMAGE ?= "authentik:test" | DOCKER_IMAGE ?= "authentik:test" | ||||||
|  |  | ||||||
| GEN_API_TS = "gen-ts-api" | GEN_API_TS = "gen-ts-api" | ||||||
| GEN_API_PY = "gen-py-api" | GEN_API_PY = "gen-py-api" | ||||||
| GEN_API_GO = "gen-go-api" | GEN_API_GO = "gen-go-api" | ||||||
|  |  | ||||||
| pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null) | pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null) | ||||||
| pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null) | pg_host := $(shell python -m authentik.lib.config postgresql.host 2>/dev/null) | ||||||
| pg_name := $(shell uv run python -m authentik.lib.config postgresql.name 2>/dev/null) | pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null) | ||||||
|  |  | ||||||
|  | CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \ | ||||||
|  | 		-I .github/codespell-words.txt \ | ||||||
|  | 		-S 'web/src/locales/**' \ | ||||||
|  | 		-S 'website/docs/developer-docs/api/reference/**' \ | ||||||
|  | 		-S '**/node_modules/**' \ | ||||||
|  | 		-S '**/dist/**' \ | ||||||
|  | 		$(PY_SOURCES) \ | ||||||
|  | 		$(GO_SOURCES) \ | ||||||
|  | 		$(WEB_SOURCES) \ | ||||||
|  | 		website/src \ | ||||||
|  | 		website/blog \ | ||||||
|  | 		website/docs \ | ||||||
|  | 		website/integrations \ | ||||||
|  | 		website/src | ||||||
|  |  | ||||||
| all: lint-fix lint test gen web  ## Lint, build, and test everything | all: lint-fix lint test gen web  ## Lint, build, and test everything | ||||||
|  |  | ||||||
| @ -32,37 +49,34 @@ go-test: | |||||||
| 	go test -timeout 0 -v -race -cover ./... | 	go test -timeout 0 -v -race -cover ./... | ||||||
|  |  | ||||||
| test: ## Run the server tests and produce a coverage report (locally) | test: ## Run the server tests and produce a coverage report (locally) | ||||||
| 	uv run coverage run manage.py test --keepdb authentik | 	coverage run manage.py test --keepdb authentik | ||||||
| 	uv run coverage html | 	coverage html | ||||||
| 	uv run coverage report | 	coverage report | ||||||
|  |  | ||||||
| lint-fix: lint-codespell  ## Lint and automatically fix errors in the python source code. Reports spelling errors. | lint-fix: lint-codespell  ## Lint and automatically fix errors in the python source code. Reports spelling errors. | ||||||
| 	uv run black $(PY_SOURCES) | 	black $(PY_SOURCES) | ||||||
| 	uv run ruff check --fix $(PY_SOURCES) | 	ruff check --fix $(PY_SOURCES) | ||||||
|  |  | ||||||
| lint-codespell:  ## Reports spelling errors. | lint-codespell:  ## Reports spelling errors. | ||||||
| 	uv run codespell -w | 	codespell -w $(CODESPELL_ARGS) | ||||||
|  |  | ||||||
| lint: ## Lint the python and golang sources | lint: ## Lint the python and golang sources | ||||||
| 	uv run bandit -c pyproject.toml -r $(PY_SOURCES) | 	bandit -r $(PY_SOURCES) -x web/node_modules -x tests/wdio/node_modules -x website/node_modules | ||||||
| 	golangci-lint run -v | 	golangci-lint run -v | ||||||
|  |  | ||||||
| core-install: | core-install: | ||||||
| 	uv sync --frozen | 	poetry install | ||||||
|  |  | ||||||
| migrate: ## Run the Authentik Django server's migrations | migrate: ## Run the Authentik Django server's migrations | ||||||
| 	uv run python -m lifecycle.migrate | 	python -m lifecycle.migrate | ||||||
|  |  | ||||||
| i18n-extract: core-i18n-extract web-i18n-extract  ## Extract strings that require translation into files to send to a translation service | i18n-extract: core-i18n-extract web-i18n-extract  ## Extract strings that require translation into files to send to a translation service | ||||||
|  |  | ||||||
| aws-cfn: | aws-cfn: | ||||||
| 	cd lifecycle/aws && npm run aws-cfn | 	cd lifecycle/aws && npm run aws-cfn | ||||||
|  |  | ||||||
| run:  ## Run the main authentik server process |  | ||||||
| 	uv run ak server |  | ||||||
|  |  | ||||||
| core-i18n-extract: | core-i18n-extract: | ||||||
| 	uv run ak makemessages \ | 	ak makemessages \ | ||||||
| 		--add-location file \ | 		--add-location file \ | ||||||
| 		--no-obsolete \ | 		--no-obsolete \ | ||||||
| 		--ignore web \ | 		--ignore web \ | ||||||
| @ -93,11 +107,11 @@ gen-build:  ## Extract the schema from the database | |||||||
| 	AUTHENTIK_DEBUG=true \ | 	AUTHENTIK_DEBUG=true \ | ||||||
| 		AUTHENTIK_TENANTS__ENABLED=true \ | 		AUTHENTIK_TENANTS__ENABLED=true \ | ||||||
| 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | ||||||
| 		uv run ak make_blueprint_schema > blueprints/schema.json | 		ak make_blueprint_schema > blueprints/schema.json | ||||||
| 	AUTHENTIK_DEBUG=true \ | 	AUTHENTIK_DEBUG=true \ | ||||||
| 		AUTHENTIK_TENANTS__ENABLED=true \ | 		AUTHENTIK_TENANTS__ENABLED=true \ | ||||||
| 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | ||||||
| 		uv run ak spectacular --file schema.yml | 		ak spectacular --file schema.yml | ||||||
|  |  | ||||||
| gen-changelog:  ## (Release) generate the changelog based from the commits since the last tag | gen-changelog:  ## (Release) generate the changelog based from the commits since the last tag | ||||||
| 	git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md | 	git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md | ||||||
| @ -148,7 +162,7 @@ gen-client-py: gen-clean-py ## Build and install the authentik API for Python | |||||||
| 	docker run \ | 	docker run \ | ||||||
| 		--rm -v ${PWD}:/local \ | 		--rm -v ${PWD}:/local \ | ||||||
| 		--user ${UID}:${GID} \ | 		--user ${UID}:${GID} \ | ||||||
| 		docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \ | 		docker.io/openapitools/openapi-generator-cli:v7.4.0 generate \ | ||||||
| 		-i /local/schema.yml \ | 		-i /local/schema.yml \ | ||||||
| 		-g python \ | 		-g python \ | ||||||
| 		-o /local/${GEN_API_PY} \ | 		-o /local/${GEN_API_PY} \ | ||||||
| @ -176,7 +190,7 @@ gen-client-go: gen-clean-go  ## Build and install the authentik API for Golang | |||||||
| 	rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/ | 	rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/ | ||||||
|  |  | ||||||
| gen-dev-config:  ## Generate a local development config file | gen-dev-config:  ## Generate a local development config file | ||||||
| 	uv run scripts/generate_config.py | 	python -m scripts.generate_config | ||||||
|  |  | ||||||
| gen: gen-build gen-client-ts | gen: gen-build gen-client-ts | ||||||
|  |  | ||||||
| @ -257,21 +271,21 @@ ci--meta-debug: | |||||||
| 	node --version | 	node --version | ||||||
|  |  | ||||||
| ci-black: ci--meta-debug | ci-black: ci--meta-debug | ||||||
| 	uv run black --check $(PY_SOURCES) | 	black --check $(PY_SOURCES) | ||||||
|  |  | ||||||
| ci-ruff: ci--meta-debug | ci-ruff: ci--meta-debug | ||||||
| 	uv run ruff check $(PY_SOURCES) | 	ruff check $(PY_SOURCES) | ||||||
|  |  | ||||||
| ci-codespell: ci--meta-debug | ci-codespell: ci--meta-debug | ||||||
| 	uv run codespell -s | 	codespell $(CODESPELL_ARGS) -s | ||||||
|  |  | ||||||
| ci-bandit: ci--meta-debug | ci-bandit: ci--meta-debug | ||||||
| 	uv run bandit -r $(PY_SOURCES) | 	bandit -r $(PY_SOURCES) | ||||||
|  |  | ||||||
| ci-pending-migrations: ci--meta-debug | ci-pending-migrations: ci--meta-debug | ||||||
| 	uv run ak makemigrations --check | 	ak makemigrations --check | ||||||
|  |  | ||||||
| ci-test: ci--meta-debug | ci-test: ci--meta-debug | ||||||
| 	uv run coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik | 	coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik | ||||||
| 	uv run coverage report | 	coverage report | ||||||
| 	uv run coverage xml | 	coverage xml | ||||||
|  | |||||||
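The `NPM_VERSION` line at the top of the Makefile calls a helper module (`scripts.generate_semver` on one side, `scripts.npm_version` on the other) whose job, judging by the names, is to express the release version as an npm-compatible semver string. A hypothetical sketch of such a conversion (the real script's rules are not shown anywhere in this diff):

```python
# Hypothetical: turn "2025.2.2-rc1" into an npm-friendly prerelease string.
import re

VERSION_RE = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?"
)

def to_npm_version(version: str) -> str:
    # Assumption: stable versions pass through unchanged, release
    # candidates become a dotted semver prerelease suffix.
    parts = VERSION_RE.fullmatch(version).groupdict()
    base = "{major}.{minor}.{patch}".format(**parts)
    if parts["rc_t"]:
        return f"{base}-{parts['rc_t']}.{parts['rc_n']}"
    return base

print(to_npm_version("2025.2.2"))      # 2025.2.2
print(to_npm_version("2025.2.2-rc1"))  # 2025.2.2-rc.1
```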
| @ -2,7 +2,7 @@ authentik takes security very seriously. We follow the rules of [responsible di | |||||||
|  |  | ||||||
| ## Independent audits and pentests | ## Independent audits and pentests | ||||||
|  |  | ||||||
| We are committed to engaging in regular pentesting and security audits of authentik. Defining and adhering to a cadence of external testing ensures a stronger probability that our code base, our features, and our architecture is as secure and non-exploitable as possible. For more details about specific audits and pentests, refer to "Audits and Certificates" in our [Security documentation](https://docs.goauthentik.io/docs/security). | We are committed to engaging in regular pentesting and security audits of authentik. Defining and adhering to a cadence of external testing ensures a stronger probability that our code base, our features, and our architecture is as secure and non-exploitable as possible. For more details about specfic audits and pentests, refer to "Audits and Certificates" in our [Security documentation](https://docs.goauthentik.io/docs/security). | ||||||
|  |  | ||||||
| ## What authentik classifies as a CVE | ## What authentik classifies as a CVE | ||||||
|  |  | ||||||
|  | |||||||
| @ -2,7 +2,7 @@ | |||||||
|  |  | ||||||
| from os import environ | from os import environ | ||||||
|  |  | ||||||
| __version__ = "2025.2.2" | __version__ = "2025.2.1" | ||||||
| ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
| @ -59,7 +59,7 @@ class SystemInfoSerializer(PassiveSerializer): | |||||||
|             if not isinstance(value, str): |             if not isinstance(value, str): | ||||||
|                 continue |                 continue | ||||||
|             actual_value = value |             actual_value = value | ||||||
|             if raw_session is not None and raw_session in actual_value: |             if raw_session in actual_value: | ||||||
|                 actual_value = actual_value.replace( |                 actual_value = actual_value.replace( | ||||||
|                     raw_session, SafeExceptionReporterFilter.cleansed_substitute |                     raw_session, SafeExceptionReporterFilter.cleansed_substitute | ||||||
|                 ) |                 ) | ||||||
|  | |||||||
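The `is not None` guard added on one side of this hunk matters because Python's `in` operator on a string raises when the left operand is `None`, so the sanitisation loop would otherwise crash, presumably whenever `raw_session` is absent (for example, a request without a session cookie). A minimal reproduction of the failure mode and the guarded form:

```python
raw_session = None                 # e.g. no session value available
actual_value = "some header value"

try:
    raw_session in actual_value    # what the unguarded code effectively does
except TypeError as exc:
    print(exc)  # 'in <string>' requires string as left operand, not NoneType

# With the added guard, the membership test is simply skipped:
if raw_session is not None and raw_session in actual_value:
    actual_value = actual_value.replace(raw_session, "********")
```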
| @ -49,8 +49,6 @@ class BrandSerializer(ModelSerializer): | |||||||
|             "branding_title", |             "branding_title", | ||||||
|             "branding_logo", |             "branding_logo", | ||||||
|             "branding_favicon", |             "branding_favicon", | ||||||
|             "branding_custom_css", |  | ||||||
|             "branding_default_flow_background", |  | ||||||
|             "flow_authentication", |             "flow_authentication", | ||||||
|             "flow_invalidation", |             "flow_invalidation", | ||||||
|             "flow_recovery", |             "flow_recovery", | ||||||
| @ -88,7 +86,6 @@ class CurrentBrandSerializer(PassiveSerializer): | |||||||
|     branding_title = CharField() |     branding_title = CharField() | ||||||
|     branding_logo = CharField(source="branding_logo_url") |     branding_logo = CharField(source="branding_logo_url") | ||||||
|     branding_favicon = CharField(source="branding_favicon_url") |     branding_favicon = CharField(source="branding_favicon_url") | ||||||
|     branding_custom_css = CharField() |  | ||||||
|     ui_footer_links = ListField( |     ui_footer_links = ListField( | ||||||
|         child=FooterLinkSerializer(), |         child=FooterLinkSerializer(), | ||||||
|         read_only=True, |         read_only=True, | ||||||
| @ -128,7 +125,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet): | |||||||
|         "branding_title", |         "branding_title", | ||||||
|         "branding_logo", |         "branding_logo", | ||||||
|         "branding_favicon", |         "branding_favicon", | ||||||
|         "branding_default_flow_background", |  | ||||||
|         "flow_authentication", |         "flow_authentication", | ||||||
|         "flow_invalidation", |         "flow_invalidation", | ||||||
|         "flow_recovery", |         "flow_recovery", | ||||||
|  | |||||||
| @ -1,35 +0,0 @@ | |||||||
| # Generated by Django 5.0.12 on 2025-02-22 01:51 |  | ||||||
|  |  | ||||||
| from pathlib import Path |  | ||||||
| from django.db import migrations, models |  | ||||||
| from django.apps.registry import Apps |  | ||||||
|  |  | ||||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def migrate_custom_css(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): |  | ||||||
|     Brand = apps.get_model("authentik_brands", "brand") |  | ||||||
|  |  | ||||||
|     db_alias = schema_editor.connection.alias |  | ||||||
|  |  | ||||||
|     path = Path("/web/dist/custom.css") |  | ||||||
|     if not path.exists(): |  | ||||||
|         return |  | ||||||
|     css = path.read_text() |  | ||||||
|     Brand.objects.using(db_alias).update(branding_custom_css=css) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_brands", "0007_brand_default_application"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="brand", |  | ||||||
|             name="branding_custom_css", |  | ||||||
|             field=models.TextField(blank=True, default=""), |  | ||||||
|         ), |  | ||||||
|         migrations.RunPython(migrate_custom_css), |  | ||||||
|     ] |  | ||||||
| @ -1,18 +0,0 @@ | |||||||
| # Generated by Django 5.0.13 on 2025-03-19 22:54 |  | ||||||
|  |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_brands", "0008_brand_branding_custom_css"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="brand", |  | ||||||
|             name="branding_default_flow_background", |  | ||||||
|             field=models.TextField(default="/static/dist/assets/images/flow_background.jpg"), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -33,10 +33,6 @@ class Brand(SerializerModel): | |||||||
|  |  | ||||||
|     branding_logo = models.TextField(default="/static/dist/assets/icons/icon_left_brand.svg") |     branding_logo = models.TextField(default="/static/dist/assets/icons/icon_left_brand.svg") | ||||||
|     branding_favicon = models.TextField(default="/static/dist/assets/icons/icon.png") |     branding_favicon = models.TextField(default="/static/dist/assets/icons/icon.png") | ||||||
|     branding_custom_css = models.TextField(default="", blank=True) |  | ||||||
|     branding_default_flow_background = models.TextField( |  | ||||||
|         default="/static/dist/assets/images/flow_background.jpg" |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     flow_authentication = models.ForeignKey( |     flow_authentication = models.ForeignKey( | ||||||
|         Flow, null=True, on_delete=models.SET_NULL, related_name="brand_authentication" |         Flow, null=True, on_delete=models.SET_NULL, related_name="brand_authentication" | ||||||
| @ -88,12 +84,6 @@ class Brand(SerializerModel): | |||||||
|             return CONFIG.get("web.path", "/")[:-1] + self.branding_favicon |             return CONFIG.get("web.path", "/")[:-1] + self.branding_favicon | ||||||
|         return self.branding_favicon |         return self.branding_favicon | ||||||
|  |  | ||||||
|     def branding_default_flow_background_url(self) -> str: |  | ||||||
|         """Get branding_default_flow_background with the correct prefix""" |  | ||||||
|         if self.branding_default_flow_background.startswith("/static"): |  | ||||||
|             return CONFIG.get("web.path", "/")[:-1] + self.branding_default_flow_background |  | ||||||
|         return self.branding_default_flow_background |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def serializer(self) -> Serializer: |     def serializer(self) -> Serializer: | ||||||
|         from authentik.brands.api import BrandSerializer |         from authentik.brands.api import BrandSerializer | ||||||
|  | |||||||
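The branding_*_url helpers in this file share one small piece of logic: values that still point at the bundled static assets get the deployment sub-path (web.path) prepended, while absolute URLs pass through untouched. A minimal standalone sketch of that behaviour follows; the function and argument names are illustrative, only the "/static" check and the web.path slicing mirror the hunk:

    def prefix_static(value: str, web_path: str = "/") -> str:
        """Prepend the deployment sub-path to bundled static asset paths only."""
        if value.startswith("/static"):
            # the hunk slices off the last character of web.path, which assumes a trailing slash
            return web_path[:-1] + value
        return value

    assert prefix_static("/static/dist/assets/icons/icon.png") == "/static/dist/assets/icons/icon.png"
    assert prefix_static("/static/dist/assets/icons/icon.png", "/authentik/") == (
        "/authentik/static/dist/assets/icons/icon.png"
    )
    assert prefix_static("https://goauthentik.io/img/icon.png", "/authentik/") == (
        "https://goauthentik.io/img/icon.png"
    )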
| @ -24,7 +24,6 @@ class TestBrands(APITestCase): | |||||||
|                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", |                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", | ||||||
|                 "branding_favicon": "/static/dist/assets/icons/icon.png", |                 "branding_favicon": "/static/dist/assets/icons/icon.png", | ||||||
|                 "branding_title": "authentik", |                 "branding_title": "authentik", | ||||||
|                 "branding_custom_css": "", |  | ||||||
|                 "matched_domain": brand.domain, |                 "matched_domain": brand.domain, | ||||||
|                 "ui_footer_links": [], |                 "ui_footer_links": [], | ||||||
|                 "ui_theme": Themes.AUTOMATIC, |                 "ui_theme": Themes.AUTOMATIC, | ||||||
| @ -44,7 +43,6 @@ class TestBrands(APITestCase): | |||||||
|                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", |                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", | ||||||
|                 "branding_favicon": "/static/dist/assets/icons/icon.png", |                 "branding_favicon": "/static/dist/assets/icons/icon.png", | ||||||
|                 "branding_title": "custom", |                 "branding_title": "custom", | ||||||
|                 "branding_custom_css": "", |  | ||||||
|                 "matched_domain": "bar.baz", |                 "matched_domain": "bar.baz", | ||||||
|                 "ui_footer_links": [], |                 "ui_footer_links": [], | ||||||
|                 "ui_theme": Themes.AUTOMATIC, |                 "ui_theme": Themes.AUTOMATIC, | ||||||
| @ -61,7 +59,6 @@ class TestBrands(APITestCase): | |||||||
|                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", |                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg", | ||||||
|                 "branding_favicon": "/static/dist/assets/icons/icon.png", |                 "branding_favicon": "/static/dist/assets/icons/icon.png", | ||||||
|                 "branding_title": "authentik", |                 "branding_title": "authentik", | ||||||
|                 "branding_custom_css": "", |  | ||||||
|                 "matched_domain": "fallback", |                 "matched_domain": "fallback", | ||||||
|                 "ui_footer_links": [], |                 "ui_footer_links": [], | ||||||
|                 "ui_theme": Themes.AUTOMATIC, |                 "ui_theme": Themes.AUTOMATIC, | ||||||
| @ -124,27 +121,3 @@ class TestBrands(APITestCase): | |||||||
|                 "subject": None, |                 "subject": None, | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     def test_branding_url(self): |  | ||||||
|         """Test branding attributes return correct values""" |  | ||||||
|         brand = create_test_brand() |  | ||||||
|         brand.branding_default_flow_background = "https://goauthentik.io/img/icon.png" |  | ||||||
|         brand.branding_favicon = "https://goauthentik.io/img/icon.png" |  | ||||||
|         brand.branding_logo = "https://goauthentik.io/img/icon.png" |  | ||||||
|         brand.save() |  | ||||||
|         self.assertEqual( |  | ||||||
|             brand.branding_default_flow_background_url(), "https://goauthentik.io/img/icon.png" |  | ||||||
|         ) |  | ||||||
|         self.assertJSONEqual( |  | ||||||
|             self.client.get(reverse("authentik_api:brand-current")).content.decode(), |  | ||||||
|             { |  | ||||||
|                 "branding_logo": "https://goauthentik.io/img/icon.png", |  | ||||||
|                 "branding_favicon": "https://goauthentik.io/img/icon.png", |  | ||||||
|                 "branding_title": "authentik", |  | ||||||
|                 "branding_custom_css": "", |  | ||||||
|                 "matched_domain": brand.domain, |  | ||||||
|                 "ui_footer_links": [], |  | ||||||
|                 "ui_theme": Themes.AUTOMATIC, |  | ||||||
|                 "default_locale": "", |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  | |||||||
| @ -6,7 +6,7 @@ from django.utils.translation import gettext_lazy as _ | |||||||
| from django_filters.filters import BooleanFilter | from django_filters.filters import BooleanFilter | ||||||
| from django_filters.filterset import FilterSet | from django_filters.filterset import FilterSet | ||||||
| from rest_framework import mixins | from rest_framework import mixins | ||||||
| from rest_framework.fields import SerializerMethodField | from rest_framework.fields import ReadOnlyField, SerializerMethodField | ||||||
| from rest_framework.viewsets import GenericViewSet | from rest_framework.viewsets import GenericViewSet | ||||||
|  |  | ||||||
| from authentik.core.api.object_types import TypesMixin | from authentik.core.api.object_types import TypesMixin | ||||||
| @ -18,10 +18,10 @@ from authentik.core.models import Provider | |||||||
| class ProviderSerializer(ModelSerializer, MetaNameSerializer): | class ProviderSerializer(ModelSerializer, MetaNameSerializer): | ||||||
|     """Provider Serializer""" |     """Provider Serializer""" | ||||||
|  |  | ||||||
|     assigned_application_slug = SerializerMethodField() |     assigned_application_slug = ReadOnlyField(source="application.slug") | ||||||
|     assigned_application_name = SerializerMethodField() |     assigned_application_name = ReadOnlyField(source="application.name") | ||||||
|     assigned_backchannel_application_slug = SerializerMethodField() |     assigned_backchannel_application_slug = ReadOnlyField(source="backchannel_application.slug") | ||||||
|     assigned_backchannel_application_name = SerializerMethodField() |     assigned_backchannel_application_name = ReadOnlyField(source="backchannel_application.name") | ||||||
|  |  | ||||||
|     component = SerializerMethodField() |     component = SerializerMethodField() | ||||||
|  |  | ||||||
| @ -31,38 +31,6 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer): | |||||||
|             return "" |             return "" | ||||||
|         return obj.component |         return obj.component | ||||||
|  |  | ||||||
|     def get_assigned_application_slug(self, obj: Provider) -> str: |  | ||||||
|         """Get application slug, return empty string if no application exists""" |  | ||||||
|         try: |  | ||||||
|             return obj.application.slug |  | ||||||
|         except Provider.application.RelatedObjectDoesNotExist: |  | ||||||
|             return "" |  | ||||||
|  |  | ||||||
|     def get_assigned_application_name(self, obj: Provider) -> str: |  | ||||||
|         """Get application name, return empty string if no application exists""" |  | ||||||
|         try: |  | ||||||
|             return obj.application.name |  | ||||||
|         except Provider.application.RelatedObjectDoesNotExist: |  | ||||||
|             return "" |  | ||||||
|  |  | ||||||
|     def get_assigned_backchannel_application_slug(self, obj: Provider) -> str: |  | ||||||
|         """Get backchannel application slug. |  | ||||||
|  |  | ||||||
|         Returns an empty string if no backchannel application exists. |  | ||||||
|         """ |  | ||||||
|         if not obj.backchannel_application: |  | ||||||
|             return "" |  | ||||||
|         return obj.backchannel_application.slug or "" |  | ||||||
|  |  | ||||||
|     def get_assigned_backchannel_application_name(self, obj: Provider) -> str: |  | ||||||
|         """Get backchannel application name. |  | ||||||
|  |  | ||||||
|         Returns an empty string if no backchannel application exists. |  | ||||||
|         """ |  | ||||||
|         if not obj.backchannel_application: |  | ||||||
|             return "" |  | ||||||
|         return obj.backchannel_application.name or "" |  | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|         model = Provider |         model = Provider | ||||||
|         fields = [ |         fields = [ | ||||||
|  | |||||||
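Both sides of this hunk expose the same four assigned_* fields; the difference is how a provider with no application serializes. The ReadOnlyField(source="application.slug") form resolves the dotted path directly, while the SerializerMethodField form maps a missing reverse relation to an empty string. A self-contained sketch of that fallback, where the fake classes are stand-ins rather than authentik or DRF types:

    class RelatedObjectDoesNotExist(AttributeError):
        """Stand-in for Provider.application.RelatedObjectDoesNotExist."""

    class FakeProvider:
        def __init__(self, application=None):
            self._application = application

        @property
        def application(self):
            if self._application is None:
                raise RelatedObjectDoesNotExist("Provider has no application")
            return self._application

    def assigned_application_slug(provider) -> str:
        """Mirror of the method-field getter: empty string instead of an exception."""
        try:
            return provider.application.slug
        except RelatedObjectDoesNotExist:
            return ""

    assert assigned_application_slug(FakeProvider()) == ""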
| @ -5,7 +5,6 @@ from collections.abc import Iterable | |||||||
| from drf_spectacular.utils import OpenApiResponse, extend_schema | from drf_spectacular.utils import OpenApiResponse, extend_schema | ||||||
| from rest_framework import mixins | from rest_framework import mixins | ||||||
| from rest_framework.decorators import action | from rest_framework.decorators import action | ||||||
| from rest_framework.exceptions import ValidationError |  | ||||||
| from rest_framework.fields import CharField, ReadOnlyField, SerializerMethodField | from rest_framework.fields import CharField, ReadOnlyField, SerializerMethodField | ||||||
| from rest_framework.parsers import MultiPartParser | from rest_framework.parsers import MultiPartParser | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| @ -155,17 +154,6 @@ class SourceViewSet( | |||||||
|             matching_sources.append(source_settings.validated_data) |             matching_sources.append(source_settings.validated_data) | ||||||
|         return Response(matching_sources) |         return Response(matching_sources) | ||||||
|  |  | ||||||
|     def destroy(self, request: Request, *args, **kwargs): |  | ||||||
|         """Prevent deletion of built-in sources""" |  | ||||||
|         instance: Source = self.get_object() |  | ||||||
|  |  | ||||||
|         if instance.managed == Source.MANAGED_INBUILT: |  | ||||||
|             raise ValidationError( |  | ||||||
|                 {"detail": "Built-in sources cannot be deleted"}, code="protected" |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         return super().destroy(request, *args, **kwargs) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class UserSourceConnectionSerializer(SourceSerializer): | class UserSourceConnectionSerializer(SourceSerializer): | ||||||
|     """User source connection""" |     """User source connection""" | ||||||
|  | |||||||
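One side of this hunk adds a destroy() guard so the built-in source cannot be removed over the API. Stripped of the authentik specifics, the pattern is a standard DRF viewset override; the viewset below is an illustrative sketch, and only the managed-identifier string and the error payload are taken from the hunk:

    from rest_framework.exceptions import ValidationError
    from rest_framework.viewsets import ModelViewSet

    MANAGED_INBUILT = "goauthentik.io/sources/inbuilt"

    class ProtectedSourceViewSet(ModelViewSet):
        """Sketch: refuse deletion of objects flagged as managed/built-in."""

        def destroy(self, request, *args, **kwargs):
            instance = self.get_object()
            if instance.managed == MANAGED_INBUILT:
                raise ValidationError(
                    {"detail": "Built-in sources cannot be deleted"}, code="protected"
                )
            return super().destroy(request, *args, **kwargs)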
| @ -32,5 +32,5 @@ class AuthentikCoreConfig(ManagedAppConfig): | |||||||
|                 "name": "authentik Built-in", |                 "name": "authentik Built-in", | ||||||
|                 "slug": "authentik-built-in", |                 "slug": "authentik-built-in", | ||||||
|             }, |             }, | ||||||
|             managed=Source.MANAGED_INBUILT, |             managed="goauthentik.io/sources/inbuilt", | ||||||
|         ) |         ) | ||||||
|  | |||||||
| @ -678,8 +678,6 @@ class SourceGroupMatchingModes(models.TextChoices): | |||||||
| class Source(ManagedModel, SerializerModel, PolicyBindingModel): | class Source(ManagedModel, SerializerModel, PolicyBindingModel): | ||||||
|     """Base Authentication source, i.e. an OAuth Provider, SAML Remote or LDAP Server""" |     """Base Authentication source, i.e. an OAuth Provider, SAML Remote or LDAP Server""" | ||||||
|  |  | ||||||
|     MANAGED_INBUILT = "goauthentik.io/sources/inbuilt" |  | ||||||
|  |  | ||||||
|     name = models.TextField(help_text=_("Source's display Name.")) |     name = models.TextField(help_text=_("Source's display Name.")) | ||||||
|     slug = models.SlugField(help_text=_("Internal source name, used in URLs."), unique=True) |     slug = models.SlugField(help_text=_("Internal source name, used in URLs."), unique=True) | ||||||
|  |  | ||||||
|  | |||||||
| @ -48,7 +48,6 @@ LOGGER = get_logger() | |||||||
|  |  | ||||||
| PLAN_CONTEXT_SOURCE_GROUPS = "source_groups" | PLAN_CONTEXT_SOURCE_GROUPS = "source_groups" | ||||||
| SESSION_KEY_SOURCE_FLOW_STAGES = "authentik/flows/source_flow_stages" | SESSION_KEY_SOURCE_FLOW_STAGES = "authentik/flows/source_flow_stages" | ||||||
| SESSION_KEY_SOURCE_FLOW_CONTEXT = "authentik/flows/source_flow_context" |  | ||||||
| SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token"  # nosec | SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token"  # nosec | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -262,7 +261,6 @@ class SourceFlowManager: | |||||||
|                 plan.append_stage(stage) |                 plan.append_stage(stage) | ||||||
|         for stage in self.request.session.get(SESSION_KEY_SOURCE_FLOW_STAGES, []): |         for stage in self.request.session.get(SESSION_KEY_SOURCE_FLOW_STAGES, []): | ||||||
|             plan.append_stage(stage) |             plan.append_stage(stage) | ||||||
|         plan.context.update(self.request.session.get(SESSION_KEY_SOURCE_FLOW_CONTEXT, {})) |  | ||||||
|         return plan.to_redirect(self.request, flow) |         return plan.to_redirect(self.request, flow) | ||||||
|  |  | ||||||
|     def handle_auth( |     def handle_auth( | ||||||
|  | |||||||
| @ -16,7 +16,7 @@ | |||||||
|         {% block head_before %} |         {% block head_before %} | ||||||
|         {% endblock %} |         {% endblock %} | ||||||
|         <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}"> |         <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}"> | ||||||
|         <style>{{ brand.branding_custom_css }}</style> |         <link rel="stylesheet" type="text/css" href="{% static 'dist/custom.css' %}" data-inject> | ||||||
|         <script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script> |         <script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script> | ||||||
|         <script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script> |         <script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script> | ||||||
|         {% block head %} |         {% block head %} | ||||||
|  | |||||||
| @ -4,7 +4,7 @@ | |||||||
| {% load i18n %} | {% load i18n %} | ||||||
|  |  | ||||||
| {% block head_before %} | {% block head_before %} | ||||||
| <link rel="prefetch" href="{{ request.brand.branding_default_flow_background_url }}" /> | <link rel="prefetch" href="{% static 'dist/assets/images/flow_background.jpg' %}" /> | ||||||
| <link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}"> | <link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}"> | ||||||
| <link rel="stylesheet" type="text/css" href="{% static 'dist/theme-dark.css' %}" media="(prefers-color-scheme: dark)"> | <link rel="stylesheet" type="text/css" href="{% static 'dist/theme-dark.css' %}" media="(prefers-color-scheme: dark)"> | ||||||
| {% include "base/header_js.html" %} | {% include "base/header_js.html" %} | ||||||
| @ -13,7 +13,7 @@ | |||||||
| {% block head %} | {% block head %} | ||||||
| <style> | <style> | ||||||
| :root { | :root { | ||||||
|     --ak-flow-background: url("{{ request.brand.branding_default_flow_background_url }}"); |     --ak-flow-background: url("{% static 'dist/assets/images/flow_background.jpg' %}"); | ||||||
|     --pf-c-background-image--BackgroundImage: var(--ak-flow-background); |     --pf-c-background-image--BackgroundImage: var(--ak-flow-background); | ||||||
|     --pf-c-background-image--BackgroundImage-2x: var(--ak-flow-background); |     --pf-c-background-image--BackgroundImage-2x: var(--ak-flow-background); | ||||||
|     --pf-c-background-image--BackgroundImage--sm: var(--ak-flow-background); |     --pf-c-background-image--BackgroundImage--sm: var(--ak-flow-background); | ||||||
|  | |||||||
| @ -133,8 +133,6 @@ class TestApplicationsAPI(APITestCase): | |||||||
|                         "provider_obj": { |                         "provider_obj": { | ||||||
|                             "assigned_application_name": "allowed", |                             "assigned_application_name": "allowed", | ||||||
|                             "assigned_application_slug": "allowed", |                             "assigned_application_slug": "allowed", | ||||||
|                             "assigned_backchannel_application_name": "", |  | ||||||
|                             "assigned_backchannel_application_slug": "", |  | ||||||
|                             "authentication_flow": None, |                             "authentication_flow": None, | ||||||
|                             "invalidation_flow": None, |                             "invalidation_flow": None, | ||||||
|                             "authorization_flow": str(self.provider.authorization_flow.pk), |                             "authorization_flow": str(self.provider.authorization_flow.pk), | ||||||
| @ -188,8 +186,6 @@ class TestApplicationsAPI(APITestCase): | |||||||
|                         "provider_obj": { |                         "provider_obj": { | ||||||
|                             "assigned_application_name": "allowed", |                             "assigned_application_name": "allowed", | ||||||
|                             "assigned_application_slug": "allowed", |                             "assigned_application_slug": "allowed", | ||||||
|                             "assigned_backchannel_application_name": "", |  | ||||||
|                             "assigned_backchannel_application_slug": "", |  | ||||||
|                             "authentication_flow": None, |                             "authentication_flow": None, | ||||||
|                             "invalidation_flow": None, |                             "invalidation_flow": None, | ||||||
|                             "authorization_flow": str(self.provider.authorization_flow.pk), |                             "authorization_flow": str(self.provider.authorization_flow.pk), | ||||||
|  | |||||||
| @ -3,8 +3,7 @@ | |||||||
| from django.urls import reverse | from django.urls import reverse | ||||||
| from rest_framework.test import APITestCase | from rest_framework.test import APITestCase | ||||||
|  |  | ||||||
| from authentik.core.api.providers import ProviderSerializer | from authentik.core.models import PropertyMapping | ||||||
| from authentik.core.models import Application, PropertyMapping, Provider |  | ||||||
| from authentik.core.tests.utils import create_test_admin_user | from authentik.core.tests.utils import create_test_admin_user | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -25,51 +24,3 @@ class TestProvidersAPI(APITestCase): | |||||||
|             reverse("authentik_api:provider-types"), |             reverse("authentik_api:provider-types"), | ||||||
|         ) |         ) | ||||||
|         self.assertEqual(response.status_code, 200) |         self.assertEqual(response.status_code, 200) | ||||||
|  |  | ||||||
|     def test_provider_serializer_without_application(self): |  | ||||||
|         """Test that Provider serializer handles missing application gracefully""" |  | ||||||
|         # Create a provider without an application |  | ||||||
|         provider = Provider.objects.create(name="test-provider") |  | ||||||
|  |  | ||||||
|         serializer = ProviderSerializer(instance=provider) |  | ||||||
|         serialized_data = serializer.data |  | ||||||
|  |  | ||||||
|         # Check that fields return empty strings when no application exists |  | ||||||
|         self.assertEqual(serialized_data["assigned_application_slug"], "") |  | ||||||
|         self.assertEqual(serialized_data["assigned_application_name"], "") |  | ||||||
|         self.assertEqual(serialized_data["assigned_backchannel_application_slug"], "") |  | ||||||
|         self.assertEqual(serialized_data["assigned_backchannel_application_name"], "") |  | ||||||
|  |  | ||||||
|     def test_provider_serializer_with_application(self): |  | ||||||
|         """Test that Provider serializer correctly includes application data""" |  | ||||||
|         # Create an application |  | ||||||
|         app = Application.objects.create(name="Test App", slug="test-app") |  | ||||||
|  |  | ||||||
|         # Create a provider with an application |  | ||||||
|         provider = Provider.objects.create(name="test-provider-with-app") |  | ||||||
|         app.provider = provider |  | ||||||
|         app.save() |  | ||||||
|  |  | ||||||
|         serializer = ProviderSerializer(instance=provider) |  | ||||||
|         serialized_data = serializer.data |  | ||||||
|  |  | ||||||
|         # Check that fields return correct values when application exists |  | ||||||
|         self.assertEqual(serialized_data["assigned_application_slug"], "test-app") |  | ||||||
|         self.assertEqual(serialized_data["assigned_application_name"], "Test App") |  | ||||||
|         self.assertEqual(serialized_data["assigned_backchannel_application_slug"], "") |  | ||||||
|         self.assertEqual(serialized_data["assigned_backchannel_application_name"], "") |  | ||||||
|  |  | ||||||
|     def test_provider_api_response(self): |  | ||||||
|         """Test that the API response includes empty strings for missing applications""" |  | ||||||
|         # Create a provider without an application |  | ||||||
|         provider = Provider.objects.create(name="test-provider-api") |  | ||||||
|  |  | ||||||
|         response = self.client.get( |  | ||||||
|             reverse("authentik_api:provider-detail", kwargs={"pk": provider.pk}), |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         self.assertEqual(response.status_code, 200) |  | ||||||
|         self.assertEqual(response.data["assigned_application_slug"], "") |  | ||||||
|         self.assertEqual(response.data["assigned_application_name"], "") |  | ||||||
|         self.assertEqual(response.data["assigned_backchannel_application_slug"], "") |  | ||||||
|         self.assertEqual(response.data["assigned_backchannel_application_name"], "") |  | ||||||
|  | |||||||
| @ -55,7 +55,7 @@ class RedirectToAppLaunch(View): | |||||||
|             ) |             ) | ||||||
|         except FlowNonApplicableException: |         except FlowNonApplicableException: | ||||||
|             raise Http404 from None |             raise Http404 from None | ||||||
|         plan.append_stage(in_memory_stage(RedirectToAppStage)) |         plan.insert_stage(in_memory_stage(RedirectToAppStage)) | ||||||
|         return plan.to_redirect(request, flow) |         return plan.to_redirect(request, flow) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
| @ -37,7 +37,6 @@ class GoogleWorkspaceProviderSerializer(EnterpriseRequiredMixin, ProviderSeriali | |||||||
|             "user_delete_action", |             "user_delete_action", | ||||||
|             "group_delete_action", |             "group_delete_action", | ||||||
|             "default_group_email_domain", |             "default_group_email_domain", | ||||||
|             "dry_run", |  | ||||||
|         ] |         ] | ||||||
|         extra_kwargs = {} |         extra_kwargs = {} | ||||||
|  |  | ||||||
|  | |||||||
| @ -8,10 +8,9 @@ from httplib2 import HttpLib2Error, HttpLib2ErrorWithResponse | |||||||
|  |  | ||||||
| from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider | from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider | ||||||
| from authentik.lib.sync.outgoing import HTTP_CONFLICT | from authentik.lib.sync.outgoing import HTTP_CONFLICT | ||||||
| from authentik.lib.sync.outgoing.base import SAFE_METHODS, BaseOutgoingSyncClient | from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient | ||||||
| from authentik.lib.sync.outgoing.exceptions import ( | from authentik.lib.sync.outgoing.exceptions import ( | ||||||
|     BadRequestSyncException, |     BadRequestSyncException, | ||||||
|     DryRunRejected, |  | ||||||
|     NotFoundSyncException, |     NotFoundSyncException, | ||||||
|     ObjectExistsSyncException, |     ObjectExistsSyncException, | ||||||
|     StopSync, |     StopSync, | ||||||
| @ -44,8 +43,6 @@ class GoogleWorkspaceSyncClient[TModel: Model, TConnection: Model, TSchema: dict | |||||||
|             self.domains.append(domain_name) |             self.domains.append(domain_name) | ||||||
|  |  | ||||||
|     def _request(self, request: HttpRequest): |     def _request(self, request: HttpRequest): | ||||||
|         if self.provider.dry_run and request.method.upper() not in SAFE_METHODS: |  | ||||||
|             raise DryRunRejected(request.uri, request.method, request.body) |  | ||||||
|         try: |         try: | ||||||
|             response = request.execute() |             response = request.execute() | ||||||
|         except GoogleAuthError as exc: |         except GoogleAuthError as exc: | ||||||
|  | |||||||
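The _request change above hinges on a small guard: when dry_run is set on the provider, anything other than a read-only HTTP verb is rejected before it reaches the remote API. A standalone sketch of that guard follows; SAFE_METHODS is assumed to be the usual read-only verbs, since the real constant lives in authentik.lib.sync.outgoing.base and is not shown in this diff:

    SAFE_METHODS = {"GET", "HEAD", "OPTIONS"}  # assumption, see note above

    class DryRunRejected(Exception):
        """Raised instead of performing a mutating request while dry_run is enabled."""

        def __init__(self, url: str, method: str, body: str | None = None):
            super().__init__(f"dry_run rejected {method} {url}")
            self.url, self.method, self.body = url, method, body

    def guard_request(dry_run: bool, method: str, url: str, body: str | None = None) -> None:
        if dry_run and method.upper() not in SAFE_METHODS:
            raise DryRunRejected(url, method, body)

    guard_request(True, "GET", "https://example.com/api/users")   # read: allowed
    # guard_request(True, "POST", "https://example.com/api/users")  # write: raises DryRunRejected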
| @ -1,24 +0,0 @@ | |||||||
| # Generated by Django 5.0.12 on 2025-02-24 19:43 |  | ||||||
|  |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ( |  | ||||||
|             "authentik_providers_google_workspace", |  | ||||||
|             "0003_googleworkspaceprovidergroup_attributes_and_more", |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="googleworkspaceprovider", |  | ||||||
|             name="dry_run", |  | ||||||
|             field=models.BooleanField( |  | ||||||
|                 default=False, |  | ||||||
|                 help_text="When enabled, provider will not modify or create objects in the remote system.", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -36,7 +36,6 @@ class MicrosoftEntraProviderSerializer(EnterpriseRequiredMixin, ProviderSerializ | |||||||
|             "filter_group", |             "filter_group", | ||||||
|             "user_delete_action", |             "user_delete_action", | ||||||
|             "group_delete_action", |             "group_delete_action", | ||||||
|             "dry_run", |  | ||||||
|         ] |         ] | ||||||
|         extra_kwargs = {} |         extra_kwargs = {} | ||||||
|  |  | ||||||
|  | |||||||
| @ -3,7 +3,6 @@ from collections.abc import Coroutine | |||||||
| from dataclasses import asdict | from dataclasses import asdict | ||||||
| from typing import Any | from typing import Any | ||||||
|  |  | ||||||
| import httpx |  | ||||||
| from azure.core.exceptions import ( | from azure.core.exceptions import ( | ||||||
|     ClientAuthenticationError, |     ClientAuthenticationError, | ||||||
|     ServiceRequestError, |     ServiceRequestError, | ||||||
| @ -13,7 +12,6 @@ from azure.identity.aio import ClientSecretCredential | |||||||
| from django.db.models import Model | from django.db.models import Model | ||||||
| from django.http import HttpResponseBadRequest, HttpResponseNotFound | from django.http import HttpResponseBadRequest, HttpResponseNotFound | ||||||
| from kiota_abstractions.api_error import APIError | from kiota_abstractions.api_error import APIError | ||||||
| from kiota_abstractions.request_information import RequestInformation |  | ||||||
| from kiota_authentication_azure.azure_identity_authentication_provider import ( | from kiota_authentication_azure.azure_identity_authentication_provider import ( | ||||||
|     AzureIdentityAuthenticationProvider, |     AzureIdentityAuthenticationProvider, | ||||||
| ) | ) | ||||||
| @ -23,15 +21,13 @@ from msgraph.generated.models.o_data_errors.o_data_error import ODataError | |||||||
| from msgraph.graph_request_adapter import GraphRequestAdapter, options | from msgraph.graph_request_adapter import GraphRequestAdapter, options | ||||||
| from msgraph.graph_service_client import GraphServiceClient | from msgraph.graph_service_client import GraphServiceClient | ||||||
| from msgraph_core import GraphClientFactory | from msgraph_core import GraphClientFactory | ||||||
| from opentelemetry import trace |  | ||||||
|  |  | ||||||
| from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider | from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider | ||||||
| from authentik.events.utils import sanitize_item | from authentik.events.utils import sanitize_item | ||||||
| from authentik.lib.sync.outgoing import HTTP_CONFLICT | from authentik.lib.sync.outgoing import HTTP_CONFLICT | ||||||
| from authentik.lib.sync.outgoing.base import SAFE_METHODS, BaseOutgoingSyncClient | from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient | ||||||
| from authentik.lib.sync.outgoing.exceptions import ( | from authentik.lib.sync.outgoing.exceptions import ( | ||||||
|     BadRequestSyncException, |     BadRequestSyncException, | ||||||
|     DryRunRejected, |  | ||||||
|     NotFoundSyncException, |     NotFoundSyncException, | ||||||
|     ObjectExistsSyncException, |     ObjectExistsSyncException, | ||||||
|     StopSync, |     StopSync, | ||||||
| @ -39,24 +35,20 @@ from authentik.lib.sync.outgoing.exceptions import ( | |||||||
| ) | ) | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikRequestAdapter(GraphRequestAdapter): | def get_request_adapter( | ||||||
|     def __init__(self, auth_provider, provider: MicrosoftEntraProvider, client=None): |     credentials: ClientSecretCredential, scopes: list[str] | None = None | ||||||
|         super().__init__(auth_provider, client) | ) -> GraphRequestAdapter: | ||||||
|         self._provider = provider |     if scopes: | ||||||
|  |         auth_provider = AzureIdentityAuthenticationProvider(credentials=credentials, scopes=scopes) | ||||||
|  |     else: | ||||||
|  |         auth_provider = AzureIdentityAuthenticationProvider(credentials=credentials) | ||||||
|  |  | ||||||
|     async def get_http_response_message( |     return GraphRequestAdapter( | ||||||
|         self, |         auth_provider=auth_provider, | ||||||
|         request_info: RequestInformation, |         client=GraphClientFactory.create_with_default_middleware( | ||||||
|         parent_span: trace.Span, |             options=options, client=KiotaClientFactory.get_default_client() | ||||||
|         claims: str = "", |         ), | ||||||
|     ) -> httpx.Response: |     ) | ||||||
|         if self._provider.dry_run and request_info.http_method.value.upper() not in SAFE_METHODS: |  | ||||||
|             raise DryRunRejected( |  | ||||||
|                 url=request_info.url, |  | ||||||
|                 method=request_info.http_method.value, |  | ||||||
|                 body=request_info.content.decode("utf-8"), |  | ||||||
|             ) |  | ||||||
|         return await super().get_http_response_message(request_info, parent_span, claims=claims) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class MicrosoftEntraSyncClient[TModel: Model, TConnection: Model, TSchema: dict]( | class MicrosoftEntraSyncClient[TModel: Model, TConnection: Model, TSchema: dict]( | ||||||
| @ -71,27 +63,9 @@ class MicrosoftEntraSyncClient[TModel: Model, TConnection: Model, TSchema: dict] | |||||||
|         self.credentials = provider.microsoft_credentials() |         self.credentials = provider.microsoft_credentials() | ||||||
|         self.__prefetch_domains() |         self.__prefetch_domains() | ||||||
|  |  | ||||||
|     def get_request_adapter( |  | ||||||
|         self, credentials: ClientSecretCredential, scopes: list[str] | None = None |  | ||||||
|     ) -> AuthentikRequestAdapter: |  | ||||||
|         if scopes: |  | ||||||
|             auth_provider = AzureIdentityAuthenticationProvider( |  | ||||||
|                 credentials=credentials, scopes=scopes |  | ||||||
|             ) |  | ||||||
|         else: |  | ||||||
|             auth_provider = AzureIdentityAuthenticationProvider(credentials=credentials) |  | ||||||
|  |  | ||||||
|         return AuthentikRequestAdapter( |  | ||||||
|             auth_provider=auth_provider, |  | ||||||
|             provider=self.provider, |  | ||||||
|             client=GraphClientFactory.create_with_default_middleware( |  | ||||||
|                 options=options, client=KiotaClientFactory.get_default_client() |  | ||||||
|             ), |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def client(self): |     def client(self): | ||||||
|         return GraphServiceClient(request_adapter=self.get_request_adapter(**self.credentials)) |         return GraphServiceClient(request_adapter=get_request_adapter(**self.credentials)) | ||||||
|  |  | ||||||
|     def _request[T](self, request: Coroutine[Any, Any, T]) -> T: |     def _request[T](self, request: Coroutine[Any, Any, T]) -> T: | ||||||
|         try: |         try: | ||||||
|  | |||||||
| @ -1,24 +0,0 @@ | |||||||
| # Generated by Django 5.0.12 on 2025-02-24 19:43 |  | ||||||
|  |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ( |  | ||||||
|             "authentik_providers_microsoft_entra", |  | ||||||
|             "0002_microsoftentraprovidergroup_attributes_and_more", |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="microsoftentraprovider", |  | ||||||
|             name="dry_run", |  | ||||||
|             field=models.BooleanField( |  | ||||||
|                 default=False, |  | ||||||
|                 help_text="When enabled, provider will not modify or create objects in the remote system.", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -32,6 +32,7 @@ class MicrosoftEntraUserTests(APITestCase): | |||||||
|  |  | ||||||
|     @apply_blueprint("system/providers-microsoft-entra.yaml") |     @apply_blueprint("system/providers-microsoft-entra.yaml") | ||||||
|     def setUp(self) -> None: |     def setUp(self) -> None: | ||||||
|  |  | ||||||
|         # Delete all users and groups as the mocked HTTP responses only return one ID |         # Delete all users and groups as the mocked HTTP responses only return one ID | ||||||
|         # which will cause errors with multiple users |         # which will cause errors with multiple users | ||||||
|         Tenant.objects.update(avatars="none") |         Tenant.objects.update(avatars="none") | ||||||
| @ -96,38 +97,6 @@ class MicrosoftEntraUserTests(APITestCase): | |||||||
|             self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists()) |             self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists()) | ||||||
|             user_create.assert_called_once() |             user_create.assert_called_once() | ||||||
|  |  | ||||||
|     def test_user_create_dry_run(self): |  | ||||||
|         """Test user creation (dry run)""" |  | ||||||
|         self.provider.dry_run = True |  | ||||||
|         self.provider.save() |  | ||||||
|         uid = generate_id() |  | ||||||
|         with ( |  | ||||||
|             patch( |  | ||||||
|                 "authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials", |  | ||||||
|                 MagicMock(return_value={"credentials": self.creds}), |  | ||||||
|             ), |  | ||||||
|             patch( |  | ||||||
|                 "msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get", |  | ||||||
|                 AsyncMock( |  | ||||||
|                     return_value=OrganizationCollectionResponse( |  | ||||||
|                         value=[ |  | ||||||
|                             Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")]) |  | ||||||
|                         ] |  | ||||||
|                     ) |  | ||||||
|                 ), |  | ||||||
|             ), |  | ||||||
|         ): |  | ||||||
|             user = User.objects.create( |  | ||||||
|                 username=uid, |  | ||||||
|                 name=f"{uid} {uid}", |  | ||||||
|                 email=f"{uid}@goauthentik.io", |  | ||||||
|             ) |  | ||||||
|             microsoft_user = MicrosoftEntraProviderUser.objects.filter( |  | ||||||
|                 provider=self.provider, user=user |  | ||||||
|             ).first() |  | ||||||
|             self.assertIsNone(microsoft_user) |  | ||||||
|             self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists()) |  | ||||||
|  |  | ||||||
|     def test_user_not_created(self): |     def test_user_not_created(self): | ||||||
|         """Test without property mappings, no group is created""" |         """Test without property mappings, no group is created""" | ||||||
|         self.provider.property_mappings.clear() |         self.provider.property_mappings.clear() | ||||||
|  | |||||||
| @ -11,14 +11,13 @@ from guardian.shortcuts import get_anonymous_user | |||||||
| from authentik.core.models import Source, User | from authentik.core.models import Source, User | ||||||
| from authentik.core.sources.flow_manager import ( | from authentik.core.sources.flow_manager import ( | ||||||
|     SESSION_KEY_OVERRIDE_FLOW_TOKEN, |     SESSION_KEY_OVERRIDE_FLOW_TOKEN, | ||||||
|     SESSION_KEY_SOURCE_FLOW_CONTEXT, |  | ||||||
|     SESSION_KEY_SOURCE_FLOW_STAGES, |     SESSION_KEY_SOURCE_FLOW_STAGES, | ||||||
| ) | ) | ||||||
| from authentik.core.types import UILoginButton | from authentik.core.types import UILoginButton | ||||||
| from authentik.enterprise.stages.source.models import SourceStage | from authentik.enterprise.stages.source.models import SourceStage | ||||||
| from authentik.flows.challenge import Challenge, ChallengeResponse | from authentik.flows.challenge import Challenge, ChallengeResponse | ||||||
| from authentik.flows.models import FlowToken, in_memory_stage | from authentik.flows.models import FlowToken, in_memory_stage | ||||||
| from authentik.flows.planner import PLAN_CONTEXT_IS_REDIRECTED, PLAN_CONTEXT_IS_RESTORED | from authentik.flows.planner import PLAN_CONTEXT_IS_RESTORED | ||||||
| from authentik.flows.stage import ChallengeStageView, StageView | from authentik.flows.stage import ChallengeStageView, StageView | ||||||
| from authentik.lib.utils.time import timedelta_from_string | from authentik.lib.utils.time import timedelta_from_string | ||||||
|  |  | ||||||
| @ -54,9 +53,6 @@ class SourceStageView(ChallengeStageView): | |||||||
|         resume_token = self.create_flow_token() |         resume_token = self.create_flow_token() | ||||||
|         self.request.session[SESSION_KEY_OVERRIDE_FLOW_TOKEN] = resume_token |         self.request.session[SESSION_KEY_OVERRIDE_FLOW_TOKEN] = resume_token | ||||||
|         self.request.session[SESSION_KEY_SOURCE_FLOW_STAGES] = [in_memory_stage(SourceStageFinal)] |         self.request.session[SESSION_KEY_SOURCE_FLOW_STAGES] = [in_memory_stage(SourceStageFinal)] | ||||||
|         self.request.session[SESSION_KEY_SOURCE_FLOW_CONTEXT] = { |  | ||||||
|             PLAN_CONTEXT_IS_REDIRECTED: self.executor.flow, |  | ||||||
|         } |  | ||||||
|         return self.login_button.challenge |         return self.login_button.challenge | ||||||
|  |  | ||||||
|     def create_flow_token(self) -> FlowToken: |     def create_flow_token(self) -> FlowToken: | ||||||
| @ -93,9 +89,9 @@ class SourceStageFinal(StageView): | |||||||
|     This stage uses the override flow token to resume execution of the initial flow the |     This stage uses the override flow token to resume execution of the initial flow the | ||||||
|     source stage is bound to.""" |     source stage is bound to.""" | ||||||
|  |  | ||||||
|     def dispatch(self, *args, **kwargs): |     def dispatch(self): | ||||||
|         token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN) |         token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN) | ||||||
|         self.logger.info("Replacing source flow with overridden flow", flow=token.flow.slug) |         self._logger.info("Replacing source flow with overridden flow", flow=token.flow.slug) | ||||||
|         plan = token.plan |         plan = token.plan | ||||||
|         plan.context[PLAN_CONTEXT_IS_RESTORED] = token |         plan.context[PLAN_CONTEXT_IS_RESTORED] = token | ||||||
|         response = plan.to_redirect(self.request, token.flow) |         response = plan.to_redirect(self.request, token.flow) | ||||||
|  | |||||||
| @ -4,8 +4,7 @@ from django.urls import reverse | |||||||
|  |  | ||||||
| from authentik.core.tests.utils import create_test_flow, create_test_user | from authentik.core.tests.utils import create_test_flow, create_test_user | ||||||
| from authentik.enterprise.stages.source.models import SourceStage | from authentik.enterprise.stages.source.models import SourceStage | ||||||
| from authentik.enterprise.stages.source.stage import SourceStageFinal | from authentik.flows.models import FlowDesignation, FlowStageBinding, FlowToken | ||||||
| from authentik.flows.models import FlowDesignation, FlowStageBinding, FlowToken, in_memory_stage |  | ||||||
| from authentik.flows.planner import PLAN_CONTEXT_IS_RESTORED, FlowPlan | from authentik.flows.planner import PLAN_CONTEXT_IS_RESTORED, FlowPlan | ||||||
| from authentik.flows.tests import FlowTestCase | from authentik.flows.tests import FlowTestCase | ||||||
| from authentik.flows.views.executor import SESSION_KEY_PLAN | from authentik.flows.views.executor import SESSION_KEY_PLAN | ||||||
| @ -88,7 +87,6 @@ class TestSourceStage(FlowTestCase): | |||||||
|         self.assertIsNotNone(flow_token) |         self.assertIsNotNone(flow_token) | ||||||
|         session = self.client.session |         session = self.client.session | ||||||
|         plan: FlowPlan = session[SESSION_KEY_PLAN] |         plan: FlowPlan = session[SESSION_KEY_PLAN] | ||||||
|         plan.insert_stage(in_memory_stage(SourceStageFinal), index=0) |  | ||||||
|         plan.context[PLAN_CONTEXT_IS_RESTORED] = flow_token |         plan.context[PLAN_CONTEXT_IS_RESTORED] = flow_token | ||||||
|         session[SESSION_KEY_PLAN] = plan |         session[SESSION_KEY_PLAN] = plan | ||||||
|         session.save() |         session.save() | ||||||
| @ -98,6 +96,4 @@ class TestSourceStage(FlowTestCase): | |||||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}), follow=True |             reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}), follow=True | ||||||
|         ) |         ) | ||||||
|         self.assertEqual(response.status_code, 200) |         self.assertEqual(response.status_code, 200) | ||||||
|         self.assertStageRedirects( |         self.assertStageRedirects(response, reverse("authentik_core:root-redirect")) | ||||||
|             response, reverse("authentik_core:if-flow", kwargs={"flow_slug": flow.slug}) |  | ||||||
|         ) |  | ||||||
|  | |||||||
| @ -50,8 +50,7 @@ class NotificationTransportSerializer(ModelSerializer): | |||||||
|             "mode", |             "mode", | ||||||
|             "mode_verbose", |             "mode_verbose", | ||||||
|             "webhook_url", |             "webhook_url", | ||||||
|             "webhook_mapping_body", |             "webhook_mapping", | ||||||
|             "webhook_mapping_headers", |  | ||||||
|             "send_once", |             "send_once", | ||||||
|         ] |         ] | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,43 +0,0 @@ | |||||||
| # Generated by Django 5.0.13 on 2025-03-20 19:54 |  | ||||||
|  |  | ||||||
| import django.db.models.deletion |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_events", "0008_event_authentik_e_expires_8c73a8_idx_and_more"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.RenameField( |  | ||||||
|             model_name="notificationtransport", |  | ||||||
|             old_name="webhook_mapping", |  | ||||||
|             new_name="webhook_mapping_body", |  | ||||||
|         ), |  | ||||||
|         migrations.AlterField( |  | ||||||
|             model_name="notificationtransport", |  | ||||||
|             name="webhook_mapping_body", |  | ||||||
|             field=models.ForeignKey( |  | ||||||
|                 default=None, |  | ||||||
|                 help_text="Customize the body of the request. Mapping should return data that is JSON-serializable.", |  | ||||||
|                 null=True, |  | ||||||
|                 on_delete=django.db.models.deletion.SET_DEFAULT, |  | ||||||
|                 related_name="+", |  | ||||||
|                 to="authentik_events.notificationwebhookmapping", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="notificationtransport", |  | ||||||
|             name="webhook_mapping_headers", |  | ||||||
|             field=models.ForeignKey( |  | ||||||
|                 default=None, |  | ||||||
|                 help_text="Configure additional headers to be sent. Mapping should return a dictionary of key-value pairs", |  | ||||||
|                 null=True, |  | ||||||
|                 on_delete=django.db.models.deletion.SET_DEFAULT, |  | ||||||
|                 related_name="+", |  | ||||||
|                 to="authentik_events.notificationwebhookmapping", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -336,27 +336,8 @@ class NotificationTransport(SerializerModel): | |||||||
|     mode = models.TextField(choices=TransportMode.choices, default=TransportMode.LOCAL) |     mode = models.TextField(choices=TransportMode.choices, default=TransportMode.LOCAL) | ||||||
|  |  | ||||||
|     webhook_url = models.TextField(blank=True, validators=[DomainlessURLValidator()]) |     webhook_url = models.TextField(blank=True, validators=[DomainlessURLValidator()]) | ||||||
|     webhook_mapping_body = models.ForeignKey( |     webhook_mapping = models.ForeignKey( | ||||||
|         "NotificationWebhookMapping", |         "NotificationWebhookMapping", on_delete=models.SET_DEFAULT, null=True, default=None | ||||||
|         on_delete=models.SET_DEFAULT, |  | ||||||
|         null=True, |  | ||||||
|         default=None, |  | ||||||
|         related_name="+", |  | ||||||
|         help_text=_( |  | ||||||
|             "Customize the body of the request. " |  | ||||||
|             "Mapping should return data that is JSON-serializable." |  | ||||||
|         ), |  | ||||||
|     ) |  | ||||||
|     webhook_mapping_headers = models.ForeignKey( |  | ||||||
|         "NotificationWebhookMapping", |  | ||||||
|         on_delete=models.SET_DEFAULT, |  | ||||||
|         null=True, |  | ||||||
|         default=None, |  | ||||||
|         related_name="+", |  | ||||||
|         help_text=_( |  | ||||||
|             "Configure additional headers to be sent. " |  | ||||||
|             "Mapping should return a dictionary of key-value pairs" |  | ||||||
|         ), |  | ||||||
|     ) |     ) | ||||||
|     send_once = models.BooleanField( |     send_once = models.BooleanField( | ||||||
|         default=False, |         default=False, | ||||||
| @ -379,8 +360,8 @@ class NotificationTransport(SerializerModel): | |||||||
|  |  | ||||||
|     def send_local(self, notification: "Notification") -> list[str]: |     def send_local(self, notification: "Notification") -> list[str]: | ||||||
|         """Local notification delivery""" |         """Local notification delivery""" | ||||||
|         if self.webhook_mapping_body: |         if self.webhook_mapping: | ||||||
|             self.webhook_mapping_body.evaluate( |             self.webhook_mapping.evaluate( | ||||||
|                 user=notification.user, |                 user=notification.user, | ||||||
|                 request=None, |                 request=None, | ||||||
|                 notification=notification, |                 notification=notification, | ||||||
| @ -399,18 +380,9 @@ class NotificationTransport(SerializerModel): | |||||||
|         if notification.event and notification.event.user: |         if notification.event and notification.event.user: | ||||||
|             default_body["event_user_email"] = notification.event.user.get("email", None) |             default_body["event_user_email"] = notification.event.user.get("email", None) | ||||||
|             default_body["event_user_username"] = notification.event.user.get("username", None) |             default_body["event_user_username"] = notification.event.user.get("username", None) | ||||||
|         headers = {} |         if self.webhook_mapping: | ||||||
|         if self.webhook_mapping_body: |  | ||||||
|             default_body = sanitize_item( |             default_body = sanitize_item( | ||||||
|                 self.webhook_mapping_body.evaluate( |                 self.webhook_mapping.evaluate( | ||||||
|                     user=notification.user, |  | ||||||
|                     request=None, |  | ||||||
|                     notification=notification, |  | ||||||
|                 ) |  | ||||||
|             ) |  | ||||||
|         if self.webhook_mapping_headers: |  | ||||||
|             headers = sanitize_item( |  | ||||||
|                 self.webhook_mapping_headers.evaluate( |  | ||||||
|                     user=notification.user, |                     user=notification.user, | ||||||
|                     request=None, |                     request=None, | ||||||
|                     notification=notification, |                     notification=notification, | ||||||
| @ -420,7 +392,6 @@ class NotificationTransport(SerializerModel): | |||||||
|             response = get_http_session().post( |             response = get_http_session().post( | ||||||
|                 self.webhook_url, |                 self.webhook_url, | ||||||
|                 json=default_body, |                 json=default_body, | ||||||
|                 headers=headers, |  | ||||||
|             ) |             ) | ||||||
|             response.raise_for_status() |             response.raise_for_status() | ||||||
|         except RequestException as exc: |         except RequestException as exc: | ||||||
|  | |||||||
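On one side of this hunk the webhook transport gains a second mapping: webhook_mapping_body shapes the JSON payload, while webhook_mapping_headers returns a dict that is sent as extra HTTP headers. A simplified sketch of that flow; the callables stand in for NotificationWebhookMapping.evaluate(...) and nothing here is authentik's actual delivery code:

    import requests

    def send_webhook(url: str, body_mapping=None, header_mapping=None, **context):
        """Post a notification, letting optional mappings override body and headers."""
        body = {"body": "...default notification body..."}
        headers = {}
        if body_mapping is not None:
            body = body_mapping(**context)       # stands in for webhook_mapping_body.evaluate(...)
        if header_mapping is not None:
            headers = header_mapping(**context)  # stands in for webhook_mapping_headers.evaluate(...)
        response = requests.post(url, json=body, headers=headers)
        response.raise_for_status()
        return response

    # Mirroring the test below: a header mapping that returns {"foo": "bar"} shows up as a
    # literal "foo: bar" request header on the outgoing POST.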
| @ -120,7 +120,7 @@ class TestEventsNotifications(APITestCase): | |||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         transport = NotificationTransport.objects.create( |         transport = NotificationTransport.objects.create( | ||||||
|             name=generate_id(), webhook_mapping_body=mapping, mode=TransportMode.LOCAL |             name=generate_id(), webhook_mapping=mapping, mode=TransportMode.LOCAL | ||||||
|         ) |         ) | ||||||
|         NotificationRule.objects.filter(name__startswith="default").delete() |         NotificationRule.objects.filter(name__startswith="default").delete() | ||||||
|         trigger = NotificationRule.objects.create(name=generate_id(), group=self.group) |         trigger = NotificationRule.objects.create(name=generate_id(), group=self.group) | ||||||
|  | |||||||
| @ -60,25 +60,20 @@ class TestEventTransports(TestCase): | |||||||
|  |  | ||||||
|     def test_transport_webhook_mapping(self): |     def test_transport_webhook_mapping(self): | ||||||
|         """Test webhook transport with custom mapping""" |         """Test webhook transport with custom mapping""" | ||||||
|         mapping_body = NotificationWebhookMapping.objects.create( |         mapping = NotificationWebhookMapping.objects.create( | ||||||
|             name=generate_id(), expression="return request.user" |             name=generate_id(), expression="return request.user" | ||||||
|         ) |         ) | ||||||
|         mapping_headers = NotificationWebhookMapping.objects.create( |  | ||||||
|             name=generate_id(), expression="""return {"foo": "bar"}""" |  | ||||||
|         ) |  | ||||||
|         transport: NotificationTransport = NotificationTransport.objects.create( |         transport: NotificationTransport = NotificationTransport.objects.create( | ||||||
|             name=generate_id(), |             name=generate_id(), | ||||||
|             mode=TransportMode.WEBHOOK, |             mode=TransportMode.WEBHOOK, | ||||||
|             webhook_url="http://localhost:1234/test", |             webhook_url="http://localhost:1234/test", | ||||||
|             webhook_mapping_body=mapping_body, |             webhook_mapping=mapping, | ||||||
|             webhook_mapping_headers=mapping_headers, |  | ||||||
|         ) |         ) | ||||||
|         with Mocker() as mocker: |         with Mocker() as mocker: | ||||||
|             mocker.post("http://localhost:1234/test") |             mocker.post("http://localhost:1234/test") | ||||||
|             transport.send(self.notification) |             transport.send(self.notification) | ||||||
|             self.assertEqual(mocker.call_count, 1) |             self.assertEqual(mocker.call_count, 1) | ||||||
|             self.assertEqual(mocker.request_history[0].method, "POST") |             self.assertEqual(mocker.request_history[0].method, "POST") | ||||||
|             self.assertEqual(mocker.request_history[0].headers["foo"], "bar") |  | ||||||
|             self.assertJSONEqual( |             self.assertJSONEqual( | ||||||
|                 mocker.request_history[0].body.decode(), |                 mocker.request_history[0].body.decode(), | ||||||
|                 {"email": self.user.email, "pk": self.user.pk, "username": self.user.username}, |                 {"email": self.user.email, "pk": self.user.pk, "username": self.user.username}, | ||||||
|  | |||||||
| @ -8,7 +8,13 @@ from uuid import UUID | |||||||
| from django.core.serializers.json import DjangoJSONEncoder | from django.core.serializers.json import DjangoJSONEncoder | ||||||
| from django.db import models | from django.db import models | ||||||
| from django.http import JsonResponse | from django.http import JsonResponse | ||||||
| from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField | from rest_framework.fields import ( | ||||||
|  |     BooleanField, | ||||||
|  |     CharField, | ||||||
|  |     ChoiceField, | ||||||
|  |     DictField, | ||||||
|  |     ListField, | ||||||
|  | ) | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
|  |  | ||||||
| from authentik.core.api.utils import PassiveSerializer | from authentik.core.api.utils import PassiveSerializer | ||||||
| @ -39,6 +45,12 @@ class ErrorDetailSerializer(PassiveSerializer): | |||||||
|     code = CharField() |     code = CharField() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class MessageSerializer(PassiveSerializer): | ||||||
|  |     message = CharField() | ||||||
|  |     level = CharField() | ||||||
|  |     tags = ListField(child=CharField()) | ||||||
|  |  | ||||||
|  |  | ||||||
| class ContextualFlowInfo(PassiveSerializer): | class ContextualFlowInfo(PassiveSerializer): | ||||||
|     """Contextual flow information for a challenge""" |     """Contextual flow information for a challenge""" | ||||||
|  |  | ||||||
| @ -55,6 +67,7 @@ class Challenge(PassiveSerializer): | |||||||
|     flow_info = ContextualFlowInfo(required=False) |     flow_info = ContextualFlowInfo(required=False) | ||||||
|     component = CharField(default="") |     component = CharField(default="") | ||||||
|  |  | ||||||
|  |     messages = ListField(child=MessageSerializer(), allow_empty=True, required=False) | ||||||
|     response_errors = DictField( |     response_errors = DictField( | ||||||
|         child=ErrorDetailSerializer(many=True), allow_empty=True, required=False |         child=ErrorDetailSerializer(many=True), allow_empty=True, required=False | ||||||
|     ) |     ) | ||||||
| @ -170,7 +183,6 @@ class FrameChallenge(Challenge): | |||||||
|  |  | ||||||
|  |  | ||||||
| class FrameChallengeResponse(ChallengeResponse): | class FrameChallengeResponse(ChallengeResponse): | ||||||
|  |  | ||||||
|     component = CharField(default="xak-flow-frame") |     component = CharField(default="xak-flow-frame") | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
| @ -6,7 +6,6 @@ from typing import TYPE_CHECKING | |||||||
| from uuid import uuid4 | from uuid import uuid4 | ||||||
|  |  | ||||||
| from django.db import models | from django.db import models | ||||||
| from django.http import HttpRequest |  | ||||||
| from django.utils.translation import gettext_lazy as _ | from django.utils.translation import gettext_lazy as _ | ||||||
| from model_utils.managers import InheritanceManager | from model_utils.managers import InheritanceManager | ||||||
| from rest_framework.serializers import BaseSerializer | from rest_framework.serializers import BaseSerializer | ||||||
| @ -179,12 +178,11 @@ class Flow(SerializerModel, PolicyBindingModel): | |||||||
|         help_text=_("Required level of authentication and authorization to access a flow."), |         help_text=_("Required level of authentication and authorization to access a flow."), | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|     def background_url(self, request: HttpRequest | None = None) -> str: |     @property | ||||||
|  |     def background_url(self) -> str: | ||||||
|         """Get the URL to the background image. If the name is /static or starts with http |         """Get the URL to the background image. If the name is /static or starts with http | ||||||
|         it is returned as-is""" |         it is returned as-is""" | ||||||
|         if not self.background: |         if not self.background: | ||||||
|             if request: |  | ||||||
|                 return request.brand.branding_default_flow_background_url() |  | ||||||
|             return ( |             return ( | ||||||
|                 CONFIG.get("web.path", "/")[:-1] + "/static/dist/assets/images/flow_background.jpg" |                 CONFIG.get("web.path", "/")[:-1] + "/static/dist/assets/images/flow_background.jpg" | ||||||
|             ) |             ) | ||||||
|  | |||||||
| @ -76,10 +76,10 @@ class FlowPlan: | |||||||
|         self.bindings.append(binding) |         self.bindings.append(binding) | ||||||
|         self.markers.append(marker or StageMarker()) |         self.markers.append(marker or StageMarker()) | ||||||
|  |  | ||||||
|     def insert_stage(self, stage: Stage, marker: StageMarker | None = None, index=1): |     def insert_stage(self, stage: Stage, marker: StageMarker | None = None): | ||||||
|         """Insert stage into plan, as immediate next stage""" |         """Insert stage into plan, as immediate next stage""" | ||||||
|         self.bindings.insert(index, FlowStageBinding(stage=stage, order=0)) |         self.bindings.insert(1, FlowStageBinding(stage=stage, order=0)) | ||||||
|         self.markers.insert(index, marker or StageMarker()) |         self.markers.insert(1, marker or StageMarker()) | ||||||
|  |  | ||||||
|     def redirect(self, destination: str): |     def redirect(self, destination: str): | ||||||
|         """Insert a redirect stage as next stage""" |         """Insert a redirect stage as next stage""" | ||||||
|  | |||||||
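The insert_stage hunk above differs only in whether the insertion index is configurable. A tiny standalone sketch of the list semantics involved (stage names and the standalone list are illustrative, not authentik's planner):

```python
bindings = ["identification", "password", "redirect"]


def insert_stage(stage: str, index: int = 1) -> None:
    """Insert a stage so it becomes the (index)-th next step in the plan."""
    bindings.insert(index, stage)


insert_stage("mfa")              # default index=1: immediate next stage
insert_stage("consent", index=3)
print(bindings)  # ['identification', 'mfa', 'password', 'consent', 'redirect']
```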
| @ -4,6 +4,7 @@ from typing import TYPE_CHECKING | |||||||
|  |  | ||||||
| from django.conf import settings | from django.conf import settings | ||||||
| from django.contrib.auth.models import AnonymousUser | from django.contrib.auth.models import AnonymousUser | ||||||
|  | from django.contrib.messages import get_messages | ||||||
| from django.http import HttpRequest | from django.http import HttpRequest | ||||||
| from django.http.request import QueryDict | from django.http.request import QueryDict | ||||||
| from django.http.response import HttpResponse | from django.http.response import HttpResponse | ||||||
| @ -21,6 +22,7 @@ from authentik.flows.challenge import ( | |||||||
|     ChallengeResponse, |     ChallengeResponse, | ||||||
|     ContextualFlowInfo, |     ContextualFlowInfo, | ||||||
|     HttpChallengeResponse, |     HttpChallengeResponse, | ||||||
|  |     MessageSerializer, | ||||||
|     RedirectChallenge, |     RedirectChallenge, | ||||||
|     SessionEndChallenge, |     SessionEndChallenge, | ||||||
|     WithUserInfoChallenge, |     WithUserInfoChallenge, | ||||||
| @ -184,13 +186,29 @@ class ChallengeStageView(StageView): | |||||||
|                 flow_info = ContextualFlowInfo( |                 flow_info = ContextualFlowInfo( | ||||||
|                     data={ |                     data={ | ||||||
|                         "title": self.format_title(), |                         "title": self.format_title(), | ||||||
|                         "background": self.executor.flow.background_url(self.request), |                         "background": self.executor.flow.background_url, | ||||||
|                         "cancel_url": reverse("authentik_flows:cancel"), |                         "cancel_url": reverse("authentik_flows:cancel"), | ||||||
|                         "layout": self.executor.flow.layout, |                         "layout": self.executor.flow.layout, | ||||||
|                     } |                     } | ||||||
|                 ) |                 ) | ||||||
|                 flow_info.is_valid() |                 flow_info.is_valid() | ||||||
|                 challenge.initial_data["flow_info"] = flow_info.data |                 challenge.initial_data["flow_info"] = flow_info.data | ||||||
|  |             if "messages" not in challenge.initial_data and not isinstance( | ||||||
|  |                 challenge, RedirectStage | ||||||
|  |             ): | ||||||
|  |                 messages = MessageSerializer( | ||||||
|  |                     data=[ | ||||||
|  |                         { | ||||||
|  |                             "message": message.message, | ||||||
|  |                             "level": message.level_tag, | ||||||
|  |                             "tags": message.tags, | ||||||
|  |                         } | ||||||
|  |                         for message in get_messages(self.request) | ||||||
|  |                     ], | ||||||
|  |                     many=True, | ||||||
|  |                 ) | ||||||
|  |                 messages.is_valid() | ||||||
|  |                 challenge.initial_data["messages"] = messages.data | ||||||
|             if isinstance(challenge, WithUserInfoChallenge): |             if isinstance(challenge, WithUserInfoChallenge): | ||||||
|                 # If there's a pending user, update the `username` field |                 # If there's a pending user, update the `username` field | ||||||
|                 # this field is only used by password managers. |                 # this field is only used by password managers. | ||||||
|  | |||||||
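For orientation on the `messages` field introduced in the hunks above: a minimal, hedged sketch of the flattening step the executor performs, pulled out as a standalone helper. Only django.contrib.messages and the field names shown above are assumed; the helper name is illustrative.

```python
from django.contrib.messages import get_messages


def messages_for_challenge(request) -> list[dict]:
    """Flatten the messages queued on a request into the {message, level, tags}
    dicts that the MessageSerializer on the challenge expects."""
    return [
        {
            "message": message.message,
            "level": message.level_tag,
            "tags": message.tags,  # Django's combined tag string for the message
        }
        for message in get_messages(request)
    ]
```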
| @ -27,6 +27,7 @@ class FlowTestCase(APITestCase): | |||||||
|         self.assertIsNotNone(raw_response["component"]) |         self.assertIsNotNone(raw_response["component"]) | ||||||
|         if flow: |         if flow: | ||||||
|             self.assertIn("flow_info", raw_response) |             self.assertIn("flow_info", raw_response) | ||||||
|  |             self.assertEqual(raw_response["flow_info"]["background"], flow.background_url) | ||||||
|             self.assertEqual( |             self.assertEqual( | ||||||
|                 raw_response["flow_info"]["cancel_url"], reverse("authentik_flows:cancel") |                 raw_response["flow_info"]["cancel_url"], reverse("authentik_flows:cancel") | ||||||
|             ) |             ) | ||||||
|  | |||||||
| @ -1,11 +1,9 @@ | |||||||
| """API flow tests""" | """API flow tests""" | ||||||
|  |  | ||||||
| from json import loads |  | ||||||
|  |  | ||||||
| from django.urls import reverse | from django.urls import reverse | ||||||
| from rest_framework.test import APITestCase | from rest_framework.test import APITestCase | ||||||
|  |  | ||||||
| from authentik.core.tests.utils import create_test_admin_user, create_test_flow | from authentik.core.tests.utils import create_test_admin_user | ||||||
| from authentik.flows.api.stages import StageSerializer, StageViewSet | from authentik.flows.api.stages import StageSerializer, StageViewSet | ||||||
| from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding, Stage | from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding, Stage | ||||||
| from authentik.lib.generators import generate_id | from authentik.lib.generators import generate_id | ||||||
| @ -79,22 +77,6 @@ class TestFlowsAPI(APITestCase): | |||||||
|         self.assertEqual(response.status_code, 200) |         self.assertEqual(response.status_code, 200) | ||||||
|         self.assertJSONEqual(response.content, {"diagram": DIAGRAM_EXPECTED}) |         self.assertJSONEqual(response.content, {"diagram": DIAGRAM_EXPECTED}) | ||||||
|  |  | ||||||
|     def test_api_background(self): |  | ||||||
|         """Test custom background""" |  | ||||||
|         user = create_test_admin_user() |  | ||||||
|         self.client.force_login(user) |  | ||||||
|  |  | ||||||
|         flow = create_test_flow() |  | ||||||
|         response = self.client.get(reverse("authentik_api:flow-detail", kwargs={"slug": flow.slug})) |  | ||||||
|         body = loads(response.content.decode()) |  | ||||||
|         self.assertEqual(body["background"], "/static/dist/assets/images/flow_background.jpg") |  | ||||||
|  |  | ||||||
|         flow.background = "https://goauthentik.io/img/icon.png" |  | ||||||
|         flow.save() |  | ||||||
|         response = self.client.get(reverse("authentik_api:flow-detail", kwargs={"slug": flow.slug})) |  | ||||||
|         body = loads(response.content.decode()) |  | ||||||
|         self.assertEqual(body["background"], "https://goauthentik.io/img/icon.png") |  | ||||||
|  |  | ||||||
|     def test_api_diagram_no_stages(self): |     def test_api_diagram_no_stages(self): | ||||||
|         """Test flow diagram with no stages.""" |         """Test flow diagram with no stages.""" | ||||||
|         user = create_test_admin_user() |         user = create_test_admin_user() | ||||||
|  | |||||||
| @ -49,12 +49,13 @@ class TestFlowInspector(APITestCase): | |||||||
|                 "captcha_stage": None, |                 "captcha_stage": None, | ||||||
|                 "component": "ak-stage-identification", |                 "component": "ak-stage-identification", | ||||||
|                 "flow_info": { |                 "flow_info": { | ||||||
|                     "background": "/static/dist/assets/images/flow_background.jpg", |                     "background": flow.background_url, | ||||||
|                     "cancel_url": reverse("authentik_flows:cancel"), |                     "cancel_url": reverse("authentik_flows:cancel"), | ||||||
|                     "title": flow.title, |                     "title": flow.title, | ||||||
|                     "layout": "stacked", |                     "layout": "stacked", | ||||||
|                 }, |                 }, | ||||||
|                 "flow_designation": "authentication", |                 "flow_designation": "authentication", | ||||||
|  |                 "messages": [], | ||||||
|                 "password_fields": False, |                 "password_fields": False, | ||||||
|                 "primary_action": "Log in", |                 "primary_action": "Log in", | ||||||
|                 "sources": [], |                 "sources": [], | ||||||
|  | |||||||
| @ -282,14 +282,16 @@ class ConfigLoader: | |||||||
|  |  | ||||||
|     def get_optional_int(self, path: str, default=None) -> int | None: |     def get_optional_int(self, path: str, default=None) -> int | None: | ||||||
|         """Wrapper for get that converts value into int or None if set""" |         """Wrapper for get that converts value into int or None if set""" | ||||||
|         value = self.get(path, UNSET) |         value = self.get(path, default) | ||||||
|         if value is UNSET: |         if value is UNSET: | ||||||
|             return default |             return default | ||||||
|         try: |         try: | ||||||
|             return int(value) |             return int(value) | ||||||
|         except (ValueError, TypeError) as exc: |         except (ValueError, TypeError) as exc: | ||||||
|             if value is None or (isinstance(value, str) and value.lower() == "null"): |             if value is None or (isinstance(value, str) and value.lower() == "null"): | ||||||
|                 return None |                 return default | ||||||
|  |             if value is UNSET: | ||||||
|  |                 return default | ||||||
|             self.log("warning", "Failed to parse config as int", path=path, exc=str(exc)) |             self.log("warning", "Failed to parse config as int", path=path, exc=str(exc)) | ||||||
|             return default |             return default | ||||||
|  |  | ||||||
| @ -370,9 +372,9 @@ def django_db_config(config: ConfigLoader | None = None) -> dict: | |||||||
|                 "sslcert": config.get("postgresql.sslcert"), |                 "sslcert": config.get("postgresql.sslcert"), | ||||||
|                 "sslkey": config.get("postgresql.sslkey"), |                 "sslkey": config.get("postgresql.sslkey"), | ||||||
|             }, |             }, | ||||||
|             "CONN_MAX_AGE": config.get_optional_int("postgresql.conn_max_age", 0), |             "CONN_MAX_AGE": CONFIG.get_optional_int("postgresql.conn_max_age", 0), | ||||||
|             "CONN_HEALTH_CHECKS": config.get_bool("postgresql.conn_health_checks", False), |             "CONN_HEALTH_CHECKS": CONFIG.get_bool("postgresql.conn_health_checks", False), | ||||||
|             "DISABLE_SERVER_SIDE_CURSORS": config.get_bool( |             "DISABLE_SERVER_SIDE_CURSORS": CONFIG.get_bool( | ||||||
|                 "postgresql.disable_server_side_cursors", False |                 "postgresql.disable_server_side_cursors", False | ||||||
|             ), |             ), | ||||||
|             "TEST": { |             "TEST": { | ||||||
| @ -381,8 +383,8 @@ def django_db_config(config: ConfigLoader | None = None) -> dict: | |||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     conn_max_age = config.get_optional_int("postgresql.conn_max_age", UNSET) |     conn_max_age = CONFIG.get_optional_int("postgresql.conn_max_age", UNSET) | ||||||
|     disable_server_side_cursors = config.get_bool("postgresql.disable_server_side_cursors", UNSET) |     disable_server_side_cursors = CONFIG.get_bool("postgresql.disable_server_side_cursors", UNSET) | ||||||
|     if config.get_bool("postgresql.use_pgpool", False): |     if config.get_bool("postgresql.use_pgpool", False): | ||||||
|         db["default"]["DISABLE_SERVER_SIDE_CURSORS"] = True |         db["default"]["DISABLE_SERVER_SIDE_CURSORS"] = True | ||||||
|         if disable_server_side_cursors is not UNSET: |         if disable_server_side_cursors is not UNSET: | ||||||
|  | |||||||
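The get_optional_int change above hinges on a sentinel value so that a missing key, an explicit null, and an unparsable value are handled differently. A minimal standalone sketch of the sentinel variant (the one that distinguishes a missing key from an explicit null), independent of authentik's ConfigLoader:

```python
UNSET = object()  # sentinel: distinguishes "key absent" from "key set to None/null"


def get_optional_int(store: dict, key: str, default=None) -> int | None:
    value = store.get(key, UNSET)
    if value is UNSET:
        return default  # key not configured at all
    if value is None or (isinstance(value, str) and value.lower() == "null"):
        return None  # explicitly disabled
    try:
        return int(value)
    except (ValueError, TypeError):
        return default


assert get_optional_int({}, "conn_max_age", 0) == 0            # missing -> default
assert get_optional_int({"conn_max_age": "null"}, "conn_max_age", 0) is None
assert get_optional_int({"conn_max_age": "300"}, "conn_max_age") == 300
```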
| @ -1,20 +1,5 @@ | |||||||
| # authentik configuration | # update website/docs/install-config/configuration/configuration.mdx | ||||||
| # | # This is the default configuration file | ||||||
| # https://docs.goauthentik.io/docs/install-config/configuration/ |  | ||||||
| # |  | ||||||
| # To override the settings in this file, run the following command from the repository root: |  | ||||||
| # |  | ||||||
| # ```shell |  | ||||||
| # make gen-dev-config |  | ||||||
| # ``` |  | ||||||
| # |  | ||||||
| # You may edit the generated file to override the configuration below.   |  | ||||||
| # |  | ||||||
| # When modifying the default configuration file, |  | ||||||
| # ensure that the corresponding documentation is updated to match. |  | ||||||
| # |  | ||||||
| # @see {@link ../../website/docs/install-config/configuration/configuration.mdx Configuration documentation} for more information. |  | ||||||
|  |  | ||||||
| postgresql: | postgresql: | ||||||
|   host: localhost |   host: localhost | ||||||
|   name: authentik |   name: authentik | ||||||
| @ -60,8 +45,6 @@ redis: | |||||||
| #   url: "" | #   url: "" | ||||||
| #   transport_options: "" | #   transport_options: "" | ||||||
|  |  | ||||||
| http_timeout: 30 |  | ||||||
|  |  | ||||||
| cache: | cache: | ||||||
|   # url: "" |   # url: "" | ||||||
|   timeout: 300 |   timeout: 300 | ||||||
|  | |||||||
| @ -33,7 +33,6 @@ class SyncObjectSerializer(PassiveSerializer): | |||||||
|         ) |         ) | ||||||
|     ) |     ) | ||||||
|     sync_object_id = CharField() |     sync_object_id = CharField() | ||||||
|     override_dry_run = BooleanField(default=False) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class SyncObjectResultSerializer(PassiveSerializer): | class SyncObjectResultSerializer(PassiveSerializer): | ||||||
| @ -99,7 +98,6 @@ class OutgoingSyncProviderStatusMixin: | |||||||
|             page=1, |             page=1, | ||||||
|             provider_pk=provider.pk, |             provider_pk=provider.pk, | ||||||
|             pk=params.validated_data["sync_object_id"], |             pk=params.validated_data["sync_object_id"], | ||||||
|             override_dry_run=params.validated_data["override_dry_run"], |  | ||||||
|         ).get() |         ).get() | ||||||
|         return Response(SyncObjectResultSerializer(instance={"messages": res}).data) |         return Response(SyncObjectResultSerializer(instance={"messages": res}).data) | ||||||
|  |  | ||||||
|  | |||||||
| @ -28,14 +28,6 @@ class Direction(StrEnum): | |||||||
|     remove = "remove" |     remove = "remove" | ||||||
|  |  | ||||||
|  |  | ||||||
| SAFE_METHODS = [ |  | ||||||
|     "GET", |  | ||||||
|     "HEAD", |  | ||||||
|     "OPTIONS", |  | ||||||
|     "TRACE", |  | ||||||
| ] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class BaseOutgoingSyncClient[ | class BaseOutgoingSyncClient[ | ||||||
|     TModel: "Model", TConnection: "Model", TSchema: dict, TProvider: "OutgoingSyncProvider" |     TModel: "Model", TConnection: "Model", TSchema: dict, TProvider: "OutgoingSyncProvider" | ||||||
| ]: | ]: | ||||||
|  | |||||||
| @ -21,22 +21,6 @@ class BadRequestSyncException(BaseSyncException): | |||||||
|     """Exception when invalid data was sent to the remote system""" |     """Exception when invalid data was sent to the remote system""" | ||||||
|  |  | ||||||
|  |  | ||||||
| class DryRunRejected(BaseSyncException): |  | ||||||
|     """When dry_run is enabled and a provider dropped a mutating request""" |  | ||||||
|  |  | ||||||
|     def __init__(self, url: str, method: str, body: dict): |  | ||||||
|         super().__init__() |  | ||||||
|         self.url = url |  | ||||||
|         self.method = method |  | ||||||
|         self.body = body |  | ||||||
|  |  | ||||||
|     def __repr__(self): |  | ||||||
|         return self.__str__() |  | ||||||
|  |  | ||||||
|     def __str__(self): |  | ||||||
|         return f"Dry-run rejected request: {self.method} {self.url}" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class StopSync(BaseSyncException): | class StopSync(BaseSyncException): | ||||||
|     """Exception raised when a configuration error should stop the sync process""" |     """Exception raised when a configuration error should stop the sync process""" | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,9 +1,8 @@ | |||||||
| from typing import Any, Self | from typing import Any, Self | ||||||
|  |  | ||||||
| import pglock | import pglock | ||||||
| from django.db import connection, models | from django.db import connection | ||||||
| from django.db.models import Model, QuerySet, TextChoices | from django.db.models import Model, QuerySet, TextChoices | ||||||
| from django.utils.translation import gettext_lazy as _ |  | ||||||
|  |  | ||||||
| from authentik.core.models import Group, User | from authentik.core.models import Group, User | ||||||
| from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient | from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient | ||||||
| @ -19,14 +18,6 @@ class OutgoingSyncDeleteAction(TextChoices): | |||||||
|  |  | ||||||
|  |  | ||||||
| class OutgoingSyncProvider(Model): | class OutgoingSyncProvider(Model): | ||||||
|     """Base abstract models for providers implementing outgoing sync""" |  | ||||||
|  |  | ||||||
|     dry_run = models.BooleanField( |  | ||||||
|         default=False, |  | ||||||
|         help_text=_( |  | ||||||
|             "When enabled, provider will not modify or create objects in the remote system." |  | ||||||
|         ), |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|         abstract = True |         abstract = True | ||||||
| @ -41,7 +32,7 @@ class OutgoingSyncProvider(Model): | |||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def sync_lock(self) -> pglock.advisory: |     def sync_lock(self) -> pglock.advisory: | ||||||
|         """Postgres lock for syncing to prevent multiple parallel syncs happening""" |         """Postgres lock for syncing SCIM to prevent multiple parallel syncs happening""" | ||||||
|         return pglock.advisory( |         return pglock.advisory( | ||||||
|             lock_id=f"goauthentik.io/{connection.schema_name}/providers/outgoing-sync/{str(self.pk)}", |             lock_id=f"goauthentik.io/{connection.schema_name}/providers/outgoing-sync/{str(self.pk)}", | ||||||
|             timeout=0, |             timeout=0, | ||||||
|  | |||||||
| @ -20,7 +20,6 @@ from authentik.lib.sync.outgoing import PAGE_SIZE, PAGE_TIMEOUT | |||||||
| from authentik.lib.sync.outgoing.base import Direction | from authentik.lib.sync.outgoing.base import Direction | ||||||
| from authentik.lib.sync.outgoing.exceptions import ( | from authentik.lib.sync.outgoing.exceptions import ( | ||||||
|     BadRequestSyncException, |     BadRequestSyncException, | ||||||
|     DryRunRejected, |  | ||||||
|     StopSync, |     StopSync, | ||||||
|     TransientSyncException, |     TransientSyncException, | ||||||
| ) | ) | ||||||
| @ -106,9 +105,7 @@ class SyncTasks: | |||||||
|                 return |                 return | ||||||
|         task.set_status(TaskStatus.SUCCESSFUL, *messages) |         task.set_status(TaskStatus.SUCCESSFUL, *messages) | ||||||
|  |  | ||||||
|     def sync_objects( |     def sync_objects(self, object_type: str, page: int, provider_pk: int, **filter): | ||||||
|         self, object_type: str, page: int, provider_pk: int, override_dry_run=False, **filter |  | ||||||
|     ): |  | ||||||
|         _object_type = path_to_class(object_type) |         _object_type = path_to_class(object_type) | ||||||
|         self.logger = get_logger().bind( |         self.logger = get_logger().bind( | ||||||
|             provider_type=class_to_path(self._provider_model), |             provider_type=class_to_path(self._provider_model), | ||||||
| @ -119,10 +116,6 @@ class SyncTasks: | |||||||
|         provider = self._provider_model.objects.filter(pk=provider_pk).first() |         provider = self._provider_model.objects.filter(pk=provider_pk).first() | ||||||
|         if not provider: |         if not provider: | ||||||
|             return messages |             return messages | ||||||
|         # Override dry run mode if requested, however don't save the provider |  | ||||||
|         # so that scheduled sync tasks still run in dry_run mode |  | ||||||
|         if override_dry_run: |  | ||||||
|             provider.dry_run = False |  | ||||||
|         try: |         try: | ||||||
|             client = provider.client_for_model(_object_type) |             client = provider.client_for_model(_object_type) | ||||||
|         except TransientSyncException: |         except TransientSyncException: | ||||||
| @ -139,22 +132,6 @@ class SyncTasks: | |||||||
|             except SkipObjectException: |             except SkipObjectException: | ||||||
|                 self.logger.debug("skipping object due to SkipObject", obj=obj) |                 self.logger.debug("skipping object due to SkipObject", obj=obj) | ||||||
|                 continue |                 continue | ||||||
|             except DryRunRejected as exc: |  | ||||||
|                 messages.append( |  | ||||||
|                     asdict( |  | ||||||
|                         LogEvent( |  | ||||||
|                             _("Dropping mutating request due to dry run"), |  | ||||||
|                             log_level="info", |  | ||||||
|                             logger=f"{provider._meta.verbose_name}@{object_type}", |  | ||||||
|                             attributes={ |  | ||||||
|                                 "obj": sanitize_item(obj), |  | ||||||
|                                 "method": exc.method, |  | ||||||
|                                 "url": exc.url, |  | ||||||
|                                 "body": exc.body, |  | ||||||
|                             }, |  | ||||||
|                         ) |  | ||||||
|                     ) |  | ||||||
|                 ) |  | ||||||
|             except BadRequestSyncException as exc: |             except BadRequestSyncException as exc: | ||||||
|                 self.logger.warning("failed to sync object", exc=exc, obj=obj) |                 self.logger.warning("failed to sync object", exc=exc, obj=obj) | ||||||
|                 messages.append( |                 messages.append( | ||||||
| @ -254,10 +231,8 @@ class SyncTasks: | |||||||
|                 raise Retry() from exc |                 raise Retry() from exc | ||||||
|             except SkipObjectException: |             except SkipObjectException: | ||||||
|                 continue |                 continue | ||||||
|             except DryRunRejected as exc: |  | ||||||
|                 self.logger.info("Rejected dry-run event", exc=exc) |  | ||||||
|             except StopSync as exc: |             except StopSync as exc: | ||||||
|                 self.logger.warning("Stopping sync", exc=exc, provider_pk=provider.pk) |                 self.logger.warning(exc, provider_pk=provider.pk) | ||||||
|  |  | ||||||
|     def sync_signal_m2m(self, group_pk: str, action: str, pk_set: list[int]): |     def sync_signal_m2m(self, group_pk: str, action: str, pk_set: list[int]): | ||||||
|         self.logger = get_logger().bind( |         self.logger = get_logger().bind( | ||||||
| @ -288,7 +263,5 @@ class SyncTasks: | |||||||
|                 raise Retry() from exc |                 raise Retry() from exc | ||||||
|             except SkipObjectException: |             except SkipObjectException: | ||||||
|                 continue |                 continue | ||||||
|             except DryRunRejected as exc: |  | ||||||
|                 self.logger.info("Rejected dry-run event", exc=exc) |  | ||||||
|             except StopSync as exc: |             except StopSync as exc: | ||||||
|                 self.logger.warning("Stopping sync", exc=exc, provider_pk=provider.pk) |                 self.logger.warning(exc, provider_pk=provider.pk) | ||||||
|  | |||||||
| @ -158,18 +158,6 @@ class TestConfig(TestCase): | |||||||
|             test_obj = Test() |             test_obj = Test() | ||||||
|             dumps(test_obj, indent=4, cls=AttrEncoder) |             dumps(test_obj, indent=4, cls=AttrEncoder) | ||||||
|  |  | ||||||
|     def test_get_optional_int(self): |  | ||||||
|         config = ConfigLoader() |  | ||||||
|         self.assertEqual(config.get_optional_int("foo", 21), 21) |  | ||||||
|         self.assertEqual(config.get_optional_int("foo"), None) |  | ||||||
|         config.set("foo", "21") |  | ||||||
|         self.assertEqual(config.get_optional_int("foo"), 21) |  | ||||||
|         self.assertEqual(config.get_optional_int("foo", 0), 21) |  | ||||||
|         self.assertEqual(config.get_optional_int("foo", "null"), 21) |  | ||||||
|         config.set("foo", "null") |  | ||||||
|         self.assertEqual(config.get_optional_int("foo"), None) |  | ||||||
|         self.assertEqual(config.get_optional_int("foo", 21), None) |  | ||||||
|  |  | ||||||
|     @mock.patch.dict(environ, check_deprecations_env_vars) |     @mock.patch.dict(environ, check_deprecations_env_vars) | ||||||
|     def test_check_deprecations(self): |     def test_check_deprecations(self): | ||||||
|         """Test config key re-write for deprecated env vars""" |         """Test config key re-write for deprecated env vars""" | ||||||
| @ -233,16 +221,6 @@ class TestConfig(TestCase): | |||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     def test_db_conn_max_age(self): |  | ||||||
|         """Test DB conn_max_age Config""" |  | ||||||
|         config = ConfigLoader() |  | ||||||
|         config.set("postgresql.conn_max_age", "null") |  | ||||||
|         conf = django_db_config(config) |  | ||||||
|         self.assertEqual( |  | ||||||
|             conf["default"]["CONN_MAX_AGE"], |  | ||||||
|             None, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_db_read_replicas(self): |     def test_db_read_replicas(self): | ||||||
|         """Test read replicas""" |         """Test read replicas""" | ||||||
|         config = ConfigLoader() |         config = ConfigLoader() | ||||||
|  | |||||||
| @ -16,40 +16,7 @@ def authentik_user_agent() -> str: | |||||||
|     return f"authentik@{get_full_version()}" |     return f"authentik@{get_full_version()}" | ||||||
|  |  | ||||||
|  |  | ||||||
| class TimeoutSession(Session): | class DebugSession(Session): | ||||||
|     """Always set a default HTTP request timeout""" |  | ||||||
|  |  | ||||||
|     def __init__(self, default_timeout=None): |  | ||||||
|         super().__init__() |  | ||||||
|         self.timeout = default_timeout |  | ||||||
|  |  | ||||||
|     def send( |  | ||||||
|         self, |  | ||||||
|         request, |  | ||||||
|         *, |  | ||||||
|         stream=..., |  | ||||||
|         verify=..., |  | ||||||
|         proxies=..., |  | ||||||
|         cert=..., |  | ||||||
|         timeout=..., |  | ||||||
|         allow_redirects=..., |  | ||||||
|         **kwargs, |  | ||||||
|     ): |  | ||||||
|         if not timeout and self.timeout: |  | ||||||
|             timeout = self.timeout |  | ||||||
|         return super().send( |  | ||||||
|             request, |  | ||||||
|             stream=stream, |  | ||||||
|             verify=verify, |  | ||||||
|             proxies=proxies, |  | ||||||
|             cert=cert, |  | ||||||
|             timeout=timeout, |  | ||||||
|             allow_redirects=allow_redirects, |  | ||||||
|             **kwargs, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class DebugSession(TimeoutSession): |  | ||||||
|     """requests session which logs http requests and responses""" |     """requests session which logs http requests and responses""" | ||||||
|  |  | ||||||
|     def send(self, req: PreparedRequest, *args, **kwargs): |     def send(self, req: PreparedRequest, *args, **kwargs): | ||||||
| @ -75,9 +42,8 @@ class DebugSession(TimeoutSession): | |||||||
|  |  | ||||||
| def get_http_session() -> Session: | def get_http_session() -> Session: | ||||||
|     """Get a requests session with common headers""" |     """Get a requests session with common headers""" | ||||||
|     session = TimeoutSession() |     session = Session() | ||||||
|     if CONFIG.get_bool("debug") or CONFIG.get("log_level") == "trace": |     if CONFIG.get_bool("debug") or CONFIG.get("log_level") == "trace": | ||||||
|         session = DebugSession() |         session = DebugSession() | ||||||
|     session.headers["User-Agent"] = authentik_user_agent() |     session.headers["User-Agent"] = authentik_user_agent() | ||||||
|     session.timeout = CONFIG.get_optional_int("http_timeout") |  | ||||||
|     return session |     return session | ||||||
|  | |||||||
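The TimeoutSession shown on one side of the hunk above exists so every outgoing request gets a default timeout unless the caller passes one. A minimal sketch of that idea using only the public requests API; the class name and default value here are illustrative:

```python
import requests


class DefaultTimeoutSession(requests.Session):
    """Session that falls back to a default timeout when none is given."""

    def __init__(self, default_timeout: float | None = 30):
        super().__init__()
        self.default_timeout = default_timeout

    def send(self, request, **kwargs):
        # requests passes timeout=None when the caller did not specify one.
        if kwargs.get("timeout") is None:
            kwargs["timeout"] = self.default_timeout
        return super().send(request, **kwargs)


session = DefaultTimeoutSession(default_timeout=30)
# session.get("https://example.com")  # would use the 30 second default
```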
| @ -13,7 +13,6 @@ from paramiko.ssh_exception import SSHException | |||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
| from yaml import safe_dump | from yaml import safe_dump | ||||||
|  |  | ||||||
| from authentik import __version__ |  | ||||||
| from authentik.outposts.apps import MANAGED_OUTPOST | from authentik.outposts.apps import MANAGED_OUTPOST | ||||||
| from authentik.outposts.controllers.base import BaseClient, BaseController, ControllerException | from authentik.outposts.controllers.base import BaseClient, BaseController, ControllerException | ||||||
| from authentik.outposts.docker_ssh import DockerInlineSSH, SSHManagedExternallyException | from authentik.outposts.docker_ssh import DockerInlineSSH, SSHManagedExternallyException | ||||||
| @ -185,7 +184,7 @@ class DockerController(BaseController): | |||||||
|         try: |         try: | ||||||
|             self.client.images.pull(image) |             self.client.images.pull(image) | ||||||
|         except DockerException:  # pragma: no cover |         except DockerException:  # pragma: no cover | ||||||
|             image = f"ghcr.io/goauthentik/{self.outpost.type}:{__version__}" |             image = f"ghcr.io/goauthentik/{self.outpost.type}:latest" | ||||||
|             self.client.images.pull(image) |             self.client.images.pull(image) | ||||||
|         return image |         return image | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,6 +1,5 @@ | |||||||
| """Base Kubernetes Reconciler""" | """Base Kubernetes Reconciler""" | ||||||
|  |  | ||||||
| import re |  | ||||||
| from dataclasses import asdict | from dataclasses import asdict | ||||||
| from json import dumps | from json import dumps | ||||||
| from typing import TYPE_CHECKING, Generic, TypeVar | from typing import TYPE_CHECKING, Generic, TypeVar | ||||||
| @ -68,8 +67,7 @@ class KubernetesObjectReconciler(Generic[T]): | |||||||
|     @property |     @property | ||||||
|     def name(self) -> str: |     def name(self) -> str: | ||||||
|         """Get the name of the object this reconciler manages""" |         """Get the name of the object this reconciler manages""" | ||||||
|  |         return ( | ||||||
|         base_name = ( |  | ||||||
|             self.controller.outpost.config.object_naming_template |             self.controller.outpost.config.object_naming_template | ||||||
|             % { |             % { | ||||||
|                 "name": slugify(self.controller.outpost.name), |                 "name": slugify(self.controller.outpost.name), | ||||||
| @ -77,16 +75,6 @@ class KubernetesObjectReconciler(Generic[T]): | |||||||
|             } |             } | ||||||
|         ).lower() |         ).lower() | ||||||
|  |  | ||||||
|         formatted = slugify(base_name) |  | ||||||
|         formatted = re.sub(r"[^a-z0-9-]", "-", formatted) |  | ||||||
|         formatted = re.sub(r"-+", "-", formatted) |  | ||||||
|         formatted = formatted[:63] |  | ||||||
|  |  | ||||||
|         if not formatted: |  | ||||||
|             formatted = f"outpost-{self.controller.outpost.uuid.hex}"[:63] |  | ||||||
|  |  | ||||||
|         return formatted |  | ||||||
|  |  | ||||||
|     def get_patched_reference_object(self) -> T: |     def get_patched_reference_object(self) -> T: | ||||||
|         """Get patched reference object""" |         """Get patched reference object""" | ||||||
|         reference = self.get_reference_object() |         reference = self.get_reference_object() | ||||||
| @ -124,6 +112,7 @@ class KubernetesObjectReconciler(Generic[T]): | |||||||
|             try: |             try: | ||||||
|                 current = self.retrieve() |                 current = self.retrieve() | ||||||
|             except (OpenApiException, HTTPError) as exc: |             except (OpenApiException, HTTPError) as exc: | ||||||
|  |  | ||||||
|                 if isinstance(exc, ApiException) and exc.status == HttpResponseNotFound.status_code: |                 if isinstance(exc, ApiException) and exc.status == HttpResponseNotFound.status_code: | ||||||
|                     self.logger.debug("Failed to get current, triggering recreate") |                     self.logger.debug("Failed to get current, triggering recreate") | ||||||
|                     raise NeedsRecreate from exc |                     raise NeedsRecreate from exc | ||||||
| @ -167,6 +156,7 @@ class KubernetesObjectReconciler(Generic[T]): | |||||||
|             self.delete(current) |             self.delete(current) | ||||||
|             self.logger.debug("Removing") |             self.logger.debug("Removing") | ||||||
|         except (OpenApiException, HTTPError) as exc: |         except (OpenApiException, HTTPError) as exc: | ||||||
|  |  | ||||||
|             if isinstance(exc, ApiException) and exc.status == HttpResponseNotFound.status_code: |             if isinstance(exc, ApiException) and exc.status == HttpResponseNotFound.status_code: | ||||||
|                 self.logger.debug("Failed to get current, assuming non-existent") |                 self.logger.debug("Failed to get current, assuming non-existent") | ||||||
|                 return |                 return | ||||||
|  | |||||||
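The name-property hunk above is about Kubernetes object-naming rules: lowercase alphanumerics and dashes only, at most 63 characters, never empty. A standalone sketch of that sanitisation, with an illustrative fallback value (the real reconciler also applies Django's slugify and the outpost naming template first):

```python
import re


def k8s_safe_name(raw: str, fallback: str) -> str:
    """Reduce an arbitrary outpost name to a valid Kubernetes object name."""
    name = re.sub(r"[^a-z0-9-]", "-", raw.lower())  # only lowercase, digits, dashes
    name = re.sub(r"-+", "-", name)[:63]            # collapse runs, cap at 63 chars
    return name or fallback[:63]                    # never return an empty name


print(k8s_safe_name("AK Outpost / Embedded!", "outpost-deadbeef"))
# -> ak-outpost-embedded-
```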
| @ -61,14 +61,9 @@ class KubernetesController(BaseController): | |||||||
|     client: KubernetesClient |     client: KubernetesClient | ||||||
|     connection: KubernetesServiceConnection |     connection: KubernetesServiceConnection | ||||||
|  |  | ||||||
|     def __init__( |     def __init__(self, outpost: Outpost, connection: KubernetesServiceConnection) -> None: | ||||||
|         self, |  | ||||||
|         outpost: Outpost, |  | ||||||
|         connection: KubernetesServiceConnection, |  | ||||||
|         client: KubernetesClient | None = None, |  | ||||||
|     ) -> None: |  | ||||||
|         super().__init__(outpost, connection) |         super().__init__(outpost, connection) | ||||||
|         self.client = client if client else KubernetesClient(connection) |         self.client = KubernetesClient(connection) | ||||||
|         self.reconcilers = { |         self.reconcilers = { | ||||||
|             SecretReconciler.reconciler_name(): SecretReconciler, |             SecretReconciler.reconciler_name(): SecretReconciler, | ||||||
|             DeploymentReconciler.reconciler_name(): DeploymentReconciler, |             DeploymentReconciler.reconciler_name(): DeploymentReconciler, | ||||||
|  | |||||||
| @ -1,44 +0,0 @@ | |||||||
| """Kubernetes controller tests""" |  | ||||||
|  |  | ||||||
| from django.test import TestCase |  | ||||||
|  |  | ||||||
| from authentik.blueprints.tests import reconcile_app |  | ||||||
| from authentik.lib.generators import generate_id |  | ||||||
| from authentik.outposts.apps import MANAGED_OUTPOST |  | ||||||
| from authentik.outposts.controllers.k8s.deployment import DeploymentReconciler |  | ||||||
| from authentik.outposts.controllers.kubernetes import KubernetesController |  | ||||||
| from authentik.outposts.models import KubernetesServiceConnection, Outpost, OutpostType |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class KubernetesControllerTests(TestCase): |  | ||||||
|     """Kubernetes controller tests""" |  | ||||||
|  |  | ||||||
|     @reconcile_app("authentik_outposts") |  | ||||||
|     def setUp(self) -> None: |  | ||||||
|         self.outpost = Outpost.objects.create( |  | ||||||
|             name="test", |  | ||||||
|             type=OutpostType.PROXY, |  | ||||||
|         ) |  | ||||||
|         self.integration = KubernetesServiceConnection(name="test") |  | ||||||
|  |  | ||||||
|     def test_gen_name(self): |  | ||||||
|         """Ensure the generated name is valid""" |  | ||||||
|         controller = KubernetesController( |  | ||||||
|             Outpost.objects.filter(managed=MANAGED_OUTPOST).first(), |  | ||||||
|             self.integration, |  | ||||||
|             # Pass something not-none as client so we don't |  | ||||||
|             # attempt to connect to K8s as that's not needed |  | ||||||
|             client=self, |  | ||||||
|         ) |  | ||||||
|         rec = DeploymentReconciler(controller) |  | ||||||
|         self.assertEqual(rec.name, "ak-outpost-authentik-embedded-outpost") |  | ||||||
|  |  | ||||||
|         controller.outpost.name = generate_id() |  | ||||||
|         self.assertLess(len(rec.name), 64) |  | ||||||
|  |  | ||||||
|         # Test custom naming template |  | ||||||
|         _cfg = controller.outpost.config |  | ||||||
|         _cfg.object_naming_template = "" |  | ||||||
|         controller.outpost.config = _cfg |  | ||||||
|         self.assertEqual(rec.name, f"outpost-{controller.outpost.uuid.hex}") |  | ||||||
|         self.assertLess(len(rec.name), 64) |  | ||||||
| @ -9,12 +9,7 @@ from hashlib import sha256 | |||||||
| from typing import Any | from typing import Any | ||||||
| from urllib.parse import urlparse, urlunparse | from urllib.parse import urlparse, urlunparse | ||||||
|  |  | ||||||
| from cryptography.hazmat.primitives.asymmetric.ec import ( | from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey | ||||||
|     SECP256R1, |  | ||||||
|     SECP384R1, |  | ||||||
|     SECP521R1, |  | ||||||
|     EllipticCurvePrivateKey, |  | ||||||
| ) |  | ||||||
| from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey | from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey | ||||||
| from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes | from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes | ||||||
| from dacite import Config | from dacite import Config | ||||||
| @ -119,22 +114,6 @@ class JWTAlgorithms(models.TextChoices): | |||||||
|     HS256 = "HS256", _("HS256 (Symmetric Encryption)") |     HS256 = "HS256", _("HS256 (Symmetric Encryption)") | ||||||
|     RS256 = "RS256", _("RS256 (Asymmetric Encryption)") |     RS256 = "RS256", _("RS256 (Asymmetric Encryption)") | ||||||
|     ES256 = "ES256", _("ES256 (Asymmetric Encryption)") |     ES256 = "ES256", _("ES256 (Asymmetric Encryption)") | ||||||
|     ES384 = "ES384", _("ES384 (Asymmetric Encryption)") |  | ||||||
|     ES512 = "ES512", _("ES512 (Asymmetric Encryption)") |  | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def from_private_key(cls, private_key: PrivateKeyTypes | None) -> str: |  | ||||||
|         if isinstance(private_key, RSAPrivateKey): |  | ||||||
|             return cls.RS256 |  | ||||||
|         if isinstance(private_key, EllipticCurvePrivateKey): |  | ||||||
|             curve = private_key.curve |  | ||||||
|             if isinstance(curve, SECP256R1): |  | ||||||
|                 return cls.ES256 |  | ||||||
|             if isinstance(curve, SECP384R1): |  | ||||||
|                 return cls.ES384 |  | ||||||
|             if isinstance(curve, SECP521R1): |  | ||||||
|                 return cls.ES512 |  | ||||||
|         raise ValueError(f"Invalid private key type: {type(private_key)}") |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ScopeMapping(PropertyMapping): | class ScopeMapping(PropertyMapping): | ||||||
| @ -284,7 +263,11 @@ class OAuth2Provider(WebfingerProvider, Provider): | |||||||
|             return self.client_secret, JWTAlgorithms.HS256 |             return self.client_secret, JWTAlgorithms.HS256 | ||||||
|         key: CertificateKeyPair = self.signing_key |         key: CertificateKeyPair = self.signing_key | ||||||
|         private_key = key.private_key |         private_key = key.private_key | ||||||
|         return private_key, JWTAlgorithms.from_private_key(private_key) |         if isinstance(private_key, RSAPrivateKey): | ||||||
|  |             return private_key, JWTAlgorithms.RS256 | ||||||
|  |         if isinstance(private_key, EllipticCurvePrivateKey): | ||||||
|  |             return private_key, JWTAlgorithms.ES256 | ||||||
|  |         raise ValueError(f"Invalid private key type: {type(private_key)}") | ||||||
|  |  | ||||||
|     def get_issuer(self, request: HttpRequest) -> str | None: |     def get_issuer(self, request: HttpRequest) -> str | None: | ||||||
|         """Get issuer, based on request""" |         """Get issuer, based on request""" | ||||||
|  | |||||||
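Both sides of the hunk above map a signing key to a JWS algorithm; one side extends the mapping to ES384/ES512 by inspecting the elliptic curve. A standalone sketch of that mapping using the cryptography package directly, independent of the provider model:

```python
from cryptography.hazmat.primitives.asymmetric import ec, rsa


def jws_alg_for(private_key) -> str:
    """Pick the JWS signing algorithm that matches a private key."""
    if isinstance(private_key, rsa.RSAPrivateKey):
        return "RS256"
    if isinstance(private_key, ec.EllipticCurvePrivateKey):
        return {
            "secp256r1": "ES256",
            "secp384r1": "ES384",
            "secp521r1": "ES512",
        }[private_key.curve.name]
    raise ValueError(f"Unsupported private key type: {type(private_key)}")


print(jws_alg_for(ec.generate_private_key(ec.SECP384R1())))  # ES384
```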
| @ -88,6 +88,7 @@ class TesOAuth2DeviceInit(OAuthTestCase): | |||||||
|                     "layout": "stacked", |                     "layout": "stacked", | ||||||
|                     "title": self.device_flow.title, |                     "title": self.device_flow.title, | ||||||
|                 }, |                 }, | ||||||
|  |                 "messages": [], | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|  | |||||||
| @ -254,10 +254,10 @@ class OAuthAuthorizationParams: | |||||||
|             raise AuthorizeError(self.redirect_uri, "invalid_scope", self.grant_type, self.state) |             raise AuthorizeError(self.redirect_uri, "invalid_scope", self.grant_type, self.state) | ||||||
|         if SCOPE_OFFLINE_ACCESS in self.scope: |         if SCOPE_OFFLINE_ACCESS in self.scope: | ||||||
|             # https://openid.net/specs/openid-connect-core-1_0.html#OfflineAccess |             # https://openid.net/specs/openid-connect-core-1_0.html#OfflineAccess | ||||||
|             # Don't explicitly request consent with offline_access, as the spec allows for |             if PROMPT_CONSENT not in self.prompt: | ||||||
|             # "other conditions for processing the request permitting offline access to the |                 # Instead of ignoring the `offline_access` scope when `prompt` | ||||||
|             # requested resources are in place" |                 # isn't set to `consent`, we set override it ourselves | ||||||
|             # which we interpret as "the admin picks an authorization flow with or without consent" |                 self.prompt.add(PROMPT_CONSENT) | ||||||
|             if self.response_type not in [ |             if self.response_type not in [ | ||||||
|                 ResponseTypes.CODE, |                 ResponseTypes.CODE, | ||||||
|                 ResponseTypes.CODE_TOKEN, |                 ResponseTypes.CODE_TOKEN, | ||||||
|  | |||||||
| @ -71,7 +71,7 @@ class CodeValidatorView(PolicyAccessView): | |||||||
|         except FlowNonApplicableException: |         except FlowNonApplicableException: | ||||||
|             LOGGER.warning("Flow not applicable to user") |             LOGGER.warning("Flow not applicable to user") | ||||||
|             return None |             return None | ||||||
|         plan.append_stage(in_memory_stage(OAuthDeviceCodeFinishStage)) |         plan.insert_stage(in_memory_stage(OAuthDeviceCodeFinishStage)) | ||||||
|         return plan.to_redirect(self.request, self.token.provider.authorization_flow) |         return plan.to_redirect(self.request, self.token.provider.authorization_flow) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
| @ -34,5 +34,5 @@ class EndSessionView(PolicyAccessView): | |||||||
|                 PLAN_CONTEXT_APPLICATION: self.application, |                 PLAN_CONTEXT_APPLICATION: self.application, | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|         plan.append_stage(in_memory_stage(SessionEndStage)) |         plan.insert_stage(in_memory_stage(SessionEndStage)) | ||||||
|         return plan.to_redirect(self.request, self.flow) |         return plan.to_redirect(self.request, self.flow) | ||||||
|  | |||||||
| @ -75,7 +75,10 @@ class JWKSView(View): | |||||||
|         key_data = {} |         key_data = {} | ||||||
|  |  | ||||||
|         if use == "sig": |         if use == "sig": | ||||||
|             key_data["alg"] = JWTAlgorithms.from_private_key(private_key) |             if isinstance(private_key, RSAPrivateKey): | ||||||
|  |                 key_data["alg"] = JWTAlgorithms.RS256 | ||||||
|  |             elif isinstance(private_key, EllipticCurvePrivateKey): | ||||||
|  |                 key_data["alg"] = JWTAlgorithms.ES256 | ||||||
|         elif use == "enc": |         elif use == "enc": | ||||||
|             key_data["alg"] = "RSA-OAEP-256" |             key_data["alg"] = "RSA-OAEP-256" | ||||||
|             key_data["enc"] = "A256CBC-HS512" |             key_data["enc"] = "A256CBC-HS512" | ||||||
|  | |||||||
| @ -36,17 +36,17 @@ class IngressReconciler(KubernetesObjectReconciler[V1Ingress]): | |||||||
|     def reconciler_name() -> str: |     def reconciler_name() -> str: | ||||||
|         return "ingress" |         return "ingress" | ||||||
|  |  | ||||||
|     def _check_annotations(self, current: V1Ingress, reference: V1Ingress): |     def _check_annotations(self, reference: V1Ingress): | ||||||
|         """Check that all annotations *we* set are correct""" |         """Check that all annotations *we* set are correct""" | ||||||
|         for key, value in reference.metadata.annotations.items(): |         for key, value in self.get_ingress_annotations().items(): | ||||||
|             if key not in current.metadata.annotations: |             if key not in reference.metadata.annotations: | ||||||
|                 raise NeedsUpdate() |                 raise NeedsUpdate() | ||||||
|             if current.metadata.annotations[key] != value: |             if reference.metadata.annotations[key] != value: | ||||||
|                 raise NeedsUpdate() |                 raise NeedsUpdate() | ||||||
|  |  | ||||||
|     def reconcile(self, current: V1Ingress, reference: V1Ingress): |     def reconcile(self, current: V1Ingress, reference: V1Ingress): | ||||||
|         super().reconcile(current, reference) |         super().reconcile(current, reference) | ||||||
|         self._check_annotations(current, reference) |         self._check_annotations(reference) | ||||||
|         # Create a list of all expected host and tls hosts |         # Create a list of all expected host and tls hosts | ||||||
|         expected_hosts = [] |         expected_hosts = [] | ||||||
|         expected_hosts_tls = [] |         expected_hosts_tls = [] | ||||||
|  | |||||||
| @ -1,9 +1,9 @@ | |||||||
| """RAC app config""" | """RAC app config""" | ||||||
|  |  | ||||||
| from authentik.blueprints.apps import ManagedAppConfig | from django.apps import AppConfig | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikProviderRAC(ManagedAppConfig): | class AuthentikProviderRAC(AppConfig): | ||||||
|     """authentik rac app config""" |     """authentik rac app config""" | ||||||
|  |  | ||||||
|     name = "authentik.providers.rac" |     name = "authentik.providers.rac" | ||||||
|  | |||||||
| @ -4,7 +4,8 @@ from asgiref.sync import async_to_sync | |||||||
| from channels.layers import get_channel_layer | from channels.layers import get_channel_layer | ||||||
| from django.contrib.auth.signals import user_logged_out | from django.contrib.auth.signals import user_logged_out | ||||||
| from django.core.cache import cache | from django.core.cache import cache | ||||||
| from django.db.models.signals import post_delete, post_save, pre_delete | from django.db.models import Model | ||||||
|  | from django.db.models.signals import post_save, pre_delete | ||||||
| from django.dispatch import receiver | from django.dispatch import receiver | ||||||
| from django.http import HttpRequest | from django.http import HttpRequest | ||||||
|  |  | ||||||
| @ -45,8 +46,12 @@ def pre_delete_connection_token_disconnect(sender, instance: ConnectionToken, ** | |||||||
|     ) |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
| @receiver([post_save, post_delete], sender=Endpoint) | @receiver(post_save, sender=Endpoint) | ||||||
| def post_save_post_delete_endpoint(**_): | def post_save_endpoint(sender: type[Model], instance, created: bool, **_): | ||||||
|     """Clear user's endpoint cache upon endpoint creation or deletion""" |     """Clear user's endpoint cache upon endpoint creation""" | ||||||
|  |     if not created:  # pragma: no cover | ||||||
|  |         return | ||||||
|  |  | ||||||
|  |     # Delete user endpoint cache | ||||||
|     keys = cache.keys(user_endpoint_cache_key("*")) |     keys = cache.keys(user_endpoint_cache_key("*")) | ||||||
|     cache.delete_many(keys) |     cache.delete_many(keys) | ||||||
|  | |||||||
| @ -74,8 +74,6 @@ class TestEndpointsAPI(APITestCase): | |||||||
|                             "component": "ak-provider-rac-form", |                             "component": "ak-provider-rac-form", | ||||||
|                             "assigned_application_slug": self.app.slug, |                             "assigned_application_slug": self.app.slug, | ||||||
|                             "assigned_application_name": self.app.name, |                             "assigned_application_name": self.app.name, | ||||||
|                             "assigned_backchannel_application_slug": "", |  | ||||||
|                             "assigned_backchannel_application_name": "", |  | ||||||
|                             "verbose_name": "RAC Provider", |                             "verbose_name": "RAC Provider", | ||||||
|                             "verbose_name_plural": "RAC Providers", |                             "verbose_name_plural": "RAC Providers", | ||||||
|                             "meta_model_name": "authentik_providers_rac.racprovider", |                             "meta_model_name": "authentik_providers_rac.racprovider", | ||||||
| @ -126,8 +124,6 @@ class TestEndpointsAPI(APITestCase): | |||||||
|                             "component": "ak-provider-rac-form", |                             "component": "ak-provider-rac-form", | ||||||
|                             "assigned_application_slug": self.app.slug, |                             "assigned_application_slug": self.app.slug, | ||||||
|                             "assigned_application_name": self.app.name, |                             "assigned_application_name": self.app.name, | ||||||
|                             "assigned_backchannel_application_slug": "", |  | ||||||
|                             "assigned_backchannel_application_name": "", |  | ||||||
|                             "connection_expiry": "hours=8", |                             "connection_expiry": "hours=8", | ||||||
|                             "delete_token_on_disconnect": False, |                             "delete_token_on_disconnect": False, | ||||||
|                             "verbose_name": "RAC Provider", |                             "verbose_name": "RAC Provider", | ||||||
| @ -157,8 +153,6 @@ class TestEndpointsAPI(APITestCase): | |||||||
|                             "component": "ak-provider-rac-form", |                             "component": "ak-provider-rac-form", | ||||||
|                             "assigned_application_slug": self.app.slug, |                             "assigned_application_slug": self.app.slug, | ||||||
|                             "assigned_application_name": self.app.name, |                             "assigned_application_name": self.app.name, | ||||||
|                             "assigned_backchannel_application_slug": "", |  | ||||||
|                             "assigned_backchannel_application_name": "", |  | ||||||
|                             "connection_expiry": "hours=8", |                             "connection_expiry": "hours=8", | ||||||
|                             "delete_token_on_disconnect": False, |                             "delete_token_on_disconnect": False, | ||||||
|                             "verbose_name": "RAC Provider", |                             "verbose_name": "RAC Provider", | ||||||
|  | |||||||
| @ -46,7 +46,7 @@ class RACStartView(PolicyAccessView): | |||||||
|             ) |             ) | ||||||
|         except FlowNonApplicableException: |         except FlowNonApplicableException: | ||||||
|             raise Http404 from None |             raise Http404 from None | ||||||
|         plan.append_stage( |         plan.insert_stage( | ||||||
|             in_memory_stage( |             in_memory_stage( | ||||||
|                 RACFinalStage, |                 RACFinalStage, | ||||||
|                 application=self.application, |                 application=self.application, | ||||||
|  | |||||||
| @ -180,7 +180,6 @@ class SAMLProviderSerializer(ProviderSerializer): | |||||||
|             "session_valid_not_on_or_after", |             "session_valid_not_on_or_after", | ||||||
|             "property_mappings", |             "property_mappings", | ||||||
|             "name_id_mapping", |             "name_id_mapping", | ||||||
|             "authn_context_class_ref_mapping", |  | ||||||
|             "digest_algorithm", |             "digest_algorithm", | ||||||
|             "signature_algorithm", |             "signature_algorithm", | ||||||
|             "signing_kp", |             "signing_kp", | ||||||
|  | |||||||
| @ -1,28 +0,0 @@ | |||||||
| # Generated by Django 5.0.13 on 2025-03-18 17:41 |  | ||||||
|  |  | ||||||
| import django.db.models.deletion |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_providers_saml", "0016_samlprovider_encryption_kp_and_more"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="samlprovider", |  | ||||||
|             name="authn_context_class_ref_mapping", |  | ||||||
|             field=models.ForeignKey( |  | ||||||
|                 blank=True, |  | ||||||
|                 default=None, |  | ||||||
|                 help_text="Configure how the AuthnContextClassRef value will be created. When left empty, the AuthnContextClassRef will be set based on which authentication methods the user used to authenticate.", |  | ||||||
|                 null=True, |  | ||||||
|                 on_delete=django.db.models.deletion.SET_DEFAULT, |  | ||||||
|                 related_name="+", |  | ||||||
|                 to="authentik_providers_saml.samlpropertymapping", |  | ||||||
|                 verbose_name="AuthnContextClassRef Property Mapping", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -71,20 +71,6 @@ class SAMLProvider(Provider): | |||||||
|             "the NameIDPolicy of the incoming request will be considered" |             "the NameIDPolicy of the incoming request will be considered" | ||||||
|         ), |         ), | ||||||
|     ) |     ) | ||||||
|     authn_context_class_ref_mapping = models.ForeignKey( |  | ||||||
|         "SAMLPropertyMapping", |  | ||||||
|         default=None, |  | ||||||
|         blank=True, |  | ||||||
|         null=True, |  | ||||||
|         on_delete=models.SET_DEFAULT, |  | ||||||
|         verbose_name=_("AuthnContextClassRef Property Mapping"), |  | ||||||
|         related_name="+", |  | ||||||
|         help_text=_( |  | ||||||
|             "Configure how the AuthnContextClassRef value will be created. When left empty, " |  | ||||||
|             "the AuthnContextClassRef will be set based on which authentication methods the user " |  | ||||||
|             "used to authenticate." |  | ||||||
|         ), |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     assertion_valid_not_before = models.TextField( |     assertion_valid_not_before = models.TextField( | ||||||
|         default="minutes=-5", |         default="minutes=-5", | ||||||
| @ -184,6 +170,7 @@ class SAMLProvider(Provider): | |||||||
|     def launch_url(self) -> str | None: |     def launch_url(self) -> str | None: | ||||||
|         """Use IDP-Initiated SAML flow as launch URL""" |         """Use IDP-Initiated SAML flow as launch URL""" | ||||||
|         try: |         try: | ||||||
|  |  | ||||||
|             return reverse( |             return reverse( | ||||||
|                 "authentik_providers_saml:sso-init", |                 "authentik_providers_saml:sso-init", | ||||||
|                 kwargs={"application_slug": self.application.slug}, |                 kwargs={"application_slug": self.application.slug}, | ||||||
|  | |||||||
| @ -1,6 +1,5 @@ | |||||||
| """SAML Assertion generator""" | """SAML Assertion generator""" | ||||||
|  |  | ||||||
| from datetime import datetime |  | ||||||
| from hashlib import sha256 | from hashlib import sha256 | ||||||
| from types import GeneratorType | from types import GeneratorType | ||||||
|  |  | ||||||
| @ -53,7 +52,6 @@ class AssertionProcessor: | |||||||
|     _assertion_id: str |     _assertion_id: str | ||||||
|     _response_id: str |     _response_id: str | ||||||
|  |  | ||||||
|     _auth_instant: str |  | ||||||
|     _valid_not_before: str |     _valid_not_before: str | ||||||
|     _session_not_on_or_after: str |     _session_not_on_or_after: str | ||||||
|     _valid_not_on_or_after: str |     _valid_not_on_or_after: str | ||||||
| @ -67,11 +65,6 @@ class AssertionProcessor: | |||||||
|         self._assertion_id = get_random_id() |         self._assertion_id = get_random_id() | ||||||
|         self._response_id = get_random_id() |         self._response_id = get_random_id() | ||||||
|  |  | ||||||
|         _login_event = get_login_event(self.http_request) |  | ||||||
|         _login_time = datetime.now() |  | ||||||
|         if _login_event: |  | ||||||
|             _login_time = _login_event.created |  | ||||||
|         self._auth_instant = get_time_string(_login_time) |  | ||||||
|         self._valid_not_before = get_time_string( |         self._valid_not_before = get_time_string( | ||||||
|             timedelta_from_string(self.provider.assertion_valid_not_before) |             timedelta_from_string(self.provider.assertion_valid_not_before) | ||||||
|         ) |         ) | ||||||
| @ -138,7 +131,7 @@ class AssertionProcessor: | |||||||
|     def get_assertion_auth_n_statement(self) -> Element: |     def get_assertion_auth_n_statement(self) -> Element: | ||||||
|         """Generate AuthnStatement with AuthnContext and ContextClassRef Elements.""" |         """Generate AuthnStatement with AuthnContext and ContextClassRef Elements.""" | ||||||
|         auth_n_statement = Element(f"{{{NS_SAML_ASSERTION}}}AuthnStatement") |         auth_n_statement = Element(f"{{{NS_SAML_ASSERTION}}}AuthnStatement") | ||||||
|         auth_n_statement.attrib["AuthnInstant"] = self._auth_instant |         auth_n_statement.attrib["AuthnInstant"] = self._valid_not_before | ||||||
|         auth_n_statement.attrib["SessionIndex"] = sha256( |         auth_n_statement.attrib["SessionIndex"] = sha256( | ||||||
|             self.http_request.session.session_key.encode("ascii") |             self.http_request.session.session_key.encode("ascii") | ||||||
|         ).hexdigest() |         ).hexdigest() | ||||||
| @ -165,28 +158,6 @@ class AssertionProcessor: | |||||||
|                 auth_n_context_class_ref.text = ( |                 auth_n_context_class_ref.text = ( | ||||||
|                     "urn:oasis:names:tc:SAML:2.0:ac:classes:MobileOneFactorContract" |                     "urn:oasis:names:tc:SAML:2.0:ac:classes:MobileOneFactorContract" | ||||||
|                 ) |                 ) | ||||||
|         if self.provider.authn_context_class_ref_mapping: |  | ||||||
|             try: |  | ||||||
|                 value = self.provider.authn_context_class_ref_mapping.evaluate( |  | ||||||
|                     user=self.http_request.user, |  | ||||||
|                     request=self.http_request, |  | ||||||
|                     provider=self.provider, |  | ||||||
|                 ) |  | ||||||
|                 if value is not None: |  | ||||||
|                     auth_n_context_class_ref.text = str(value) |  | ||||||
|                 return auth_n_statement |  | ||||||
|             except PropertyMappingExpressionException as exc: |  | ||||||
|                 Event.new( |  | ||||||
|                     EventAction.CONFIGURATION_ERROR, |  | ||||||
|                     message=( |  | ||||||
|                         "Failed to evaluate property-mapping: " |  | ||||||
|                         f"'{self.provider.authn_context_class_ref_mapping.name}'" |  | ||||||
|                     ), |  | ||||||
|                     provider=self.provider, |  | ||||||
|                     mapping=self.provider.authn_context_class_ref_mapping, |  | ||||||
|                 ).from_http(self.http_request) |  | ||||||
|                 LOGGER.warning("Failed to evaluate property mapping", exc=exc) |  | ||||||
|                 return auth_n_statement |  | ||||||
|         return auth_n_statement |         return auth_n_statement | ||||||
|  |  | ||||||
|     def get_assertion_conditions(self) -> Element: |     def get_assertion_conditions(self) -> Element: | ||||||
|  | |||||||
| @ -294,61 +294,6 @@ class TestAuthNRequest(TestCase): | |||||||
|         self.assertEqual(parsed_request.id, "aws_LDxLGeubpc5lx12gxCgS6uPbix1yd5re") |         self.assertEqual(parsed_request.id, "aws_LDxLGeubpc5lx12gxCgS6uPbix1yd5re") | ||||||
|         self.assertEqual(parsed_request.name_id_policy, SAML_NAME_ID_FORMAT_EMAIL) |         self.assertEqual(parsed_request.name_id_policy, SAML_NAME_ID_FORMAT_EMAIL) | ||||||
|  |  | ||||||
|     def test_authn_context_class_ref_mapping(self): |  | ||||||
|         """Test custom authn_context_class_ref""" |  | ||||||
|         authn_context_class_ref = generate_id() |  | ||||||
|         mapping = SAMLPropertyMapping.objects.create( |  | ||||||
|             name=generate_id(), expression=f"""return '{authn_context_class_ref}'""" |  | ||||||
|         ) |  | ||||||
|         self.provider.authn_context_class_ref_mapping = mapping |  | ||||||
|         self.provider.save() |  | ||||||
|         user = create_test_admin_user() |  | ||||||
|         http_request = get_request("/", user=user) |  | ||||||
|  |  | ||||||
|         # First create an AuthNRequest |  | ||||||
|         request_proc = RequestProcessor(self.source, http_request, "test_state") |  | ||||||
|         request = request_proc.build_auth_n() |  | ||||||
|  |  | ||||||
|         # To get an assertion we need a parsed request (parsed by provider) |  | ||||||
|         parsed_request = AuthNRequestParser(self.provider).parse( |  | ||||||
|             b64encode(request.encode()).decode(), "test_state" |  | ||||||
|         ) |  | ||||||
|         # Now create a response and convert it to string (provider) |  | ||||||
|         response_proc = AssertionProcessor(self.provider, http_request, parsed_request) |  | ||||||
|         response = response_proc.build_response() |  | ||||||
|         self.assertIn(user.username, response) |  | ||||||
|         self.assertIn(authn_context_class_ref, response) |  | ||||||
|  |  | ||||||
|     def test_authn_context_class_ref_mapping_invalid(self): |  | ||||||
|         """Test custom authn_context_class_ref (invalid)""" |  | ||||||
|         mapping = SAMLPropertyMapping.objects.create(name=generate_id(), expression="q") |  | ||||||
|         self.provider.authn_context_class_ref_mapping = mapping |  | ||||||
|         self.provider.save() |  | ||||||
|         user = create_test_admin_user() |  | ||||||
|         http_request = get_request("/", user=user) |  | ||||||
|  |  | ||||||
|         # First create an AuthNRequest |  | ||||||
|         request_proc = RequestProcessor(self.source, http_request, "test_state") |  | ||||||
|         request = request_proc.build_auth_n() |  | ||||||
|  |  | ||||||
|         # To get an assertion we need a parsed request (parsed by provider) |  | ||||||
|         parsed_request = AuthNRequestParser(self.provider).parse( |  | ||||||
|             b64encode(request.encode()).decode(), "test_state" |  | ||||||
|         ) |  | ||||||
|         # Now create a response and convert it to string (provider) |  | ||||||
|         response_proc = AssertionProcessor(self.provider, http_request, parsed_request) |  | ||||||
|         response = response_proc.build_response() |  | ||||||
|         self.assertIn(user.username, response) |  | ||||||
|  |  | ||||||
|         events = Event.objects.filter( |  | ||||||
|             action=EventAction.CONFIGURATION_ERROR, |  | ||||||
|         ) |  | ||||||
|         self.assertTrue(events.exists()) |  | ||||||
|         self.assertEqual( |  | ||||||
|             events.first().context["message"], |  | ||||||
|             f"Failed to evaluate property-mapping: '{mapping.name}'", |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_request_attributes(self): |     def test_request_attributes(self): | ||||||
|         """Test full SAML Request/Response flow, fully signed""" |         """Test full SAML Request/Response flow, fully signed""" | ||||||
|         user = create_test_admin_user() |         user = create_test_admin_user() | ||||||
| @ -376,10 +321,8 @@ class TestAuthNRequest(TestCase): | |||||||
|         request = request_proc.build_auth_n() |         request = request_proc.build_auth_n() | ||||||
|  |  | ||||||
|         # Create invalid PropertyMapping |         # Create invalid PropertyMapping | ||||||
|         mapping = SAMLPropertyMapping.objects.create( |         scope = SAMLPropertyMapping.objects.create(name="test", saml_name="test", expression="q") | ||||||
|             name=generate_id(), saml_name="test", expression="q" |         self.provider.property_mappings.add(scope) | ||||||
|         ) |  | ||||||
|         self.provider.property_mappings.add(mapping) |  | ||||||
|  |  | ||||||
|         # To get an assertion we need a parsed request (parsed by provider) |         # To get an assertion we need a parsed request (parsed by provider) | ||||||
|         parsed_request = AuthNRequestParser(self.provider).parse( |         parsed_request = AuthNRequestParser(self.provider).parse( | ||||||
| @ -395,7 +338,7 @@ class TestAuthNRequest(TestCase): | |||||||
|         self.assertTrue(events.exists()) |         self.assertTrue(events.exists()) | ||||||
|         self.assertEqual( |         self.assertEqual( | ||||||
|             events.first().context["message"], |             events.first().context["message"], | ||||||
|             f"Failed to evaluate property-mapping: '{mapping.name}'", |             "Failed to evaluate property-mapping: 'test'", | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     def test_idp_initiated(self): |     def test_idp_initiated(self): | ||||||
|  | |||||||
| @ -1,16 +1,12 @@ | |||||||
| """Time utilities""" | """Time utilities""" | ||||||
|  |  | ||||||
| from datetime import datetime, timedelta | import datetime | ||||||
|  |  | ||||||
| from django.utils.timezone import now |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_time_string(delta: timedelta | datetime | None = None) -> str: | def get_time_string(delta: datetime.timedelta | None = None) -> str: | ||||||
|     """Get Data formatted in SAML format""" |     """Get Data formatted in SAML format""" | ||||||
|     if delta is None: |     if delta is None: | ||||||
|         delta = timedelta() |         delta = datetime.timedelta() | ||||||
|     if isinstance(delta, timedelta): |     now = datetime.datetime.now() | ||||||
|         final = now() + delta |     final = now + delta | ||||||
|     else: |  | ||||||
|         final = delta |  | ||||||
|     return final.strftime("%Y-%m-%dT%H:%M:%SZ") |     return final.strftime("%Y-%m-%dT%H:%M:%SZ") | ||||||
|  | |||||||
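Either variant of `get_time_string` above produces the SAML timestamp format; the wider signature on the left also accepts a concrete `datetime` (used for values such as a login instant) instead of only an offset from now. A short usage sketch, assuming a configured Django settings module so `django.utils.timezone.now()` is usable:

```python
# Usage sketch for the wider get_time_string signature shown on the left of the
# hunk above. Assumes Django settings are configured (timezone.now() needs them).
from datetime import datetime, timedelta

from django.utils.timezone import now


def get_time_string(delta: timedelta | datetime | None = None) -> str:
    """Get Data formatted in SAML format"""
    if delta is None:
        delta = timedelta()
    if isinstance(delta, timedelta):
        final = now() + delta  # offsets are applied to the current time
    else:
        final = delta  # concrete datetimes (e.g. a login instant) pass through
    return final.strftime("%Y-%m-%dT%H:%M:%SZ")


# Five minutes before "now", as used for assertion_valid_not_before="minutes=-5":
print(get_time_string(timedelta(minutes=-5)))
# An explicit instant, e.g. the creation time of a login event:
print(get_time_string(datetime(2025, 3, 18, 17, 41, 0)))
```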
| @ -61,7 +61,7 @@ class SAMLSLOView(PolicyAccessView): | |||||||
|                 PLAN_CONTEXT_APPLICATION: self.application, |                 PLAN_CONTEXT_APPLICATION: self.application, | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|         plan.append_stage(in_memory_stage(SessionEndStage)) |         plan.insert_stage(in_memory_stage(SessionEndStage)) | ||||||
|         return plan.to_redirect(self.request, self.flow) |         return plan.to_redirect(self.request, self.flow) | ||||||
|  |  | ||||||
|     def post(self, request: HttpRequest, application_slug: str) -> HttpResponse: |     def post(self, request: HttpRequest, application_slug: str) -> HttpResponse: | ||||||
|  | |||||||
| @ -24,9 +24,7 @@ class SCIMProviderGroupSerializer(ModelSerializer): | |||||||
|             "group", |             "group", | ||||||
|             "group_obj", |             "group_obj", | ||||||
|             "provider", |             "provider", | ||||||
|             "attributes", |  | ||||||
|         ] |         ] | ||||||
|         extra_kwargs = {"attributes": {"read_only": True}} |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class SCIMProviderGroupViewSet( | class SCIMProviderGroupViewSet( | ||||||
|  | |||||||
| @ -28,10 +28,8 @@ class SCIMProviderSerializer(ProviderSerializer): | |||||||
|             "url", |             "url", | ||||||
|             "verify_certificates", |             "verify_certificates", | ||||||
|             "token", |             "token", | ||||||
|             "compatibility_mode", |  | ||||||
|             "exclude_users_service_account", |             "exclude_users_service_account", | ||||||
|             "filter_group", |             "filter_group", | ||||||
|             "dry_run", |  | ||||||
|         ] |         ] | ||||||
|         extra_kwargs = {} |         extra_kwargs = {} | ||||||
|  |  | ||||||
|  | |||||||
| @ -24,9 +24,7 @@ class SCIMProviderUserSerializer(ModelSerializer): | |||||||
|             "user", |             "user", | ||||||
|             "user_obj", |             "user_obj", | ||||||
|             "provider", |             "provider", | ||||||
|             "attributes", |  | ||||||
|         ] |         ] | ||||||
|         extra_kwargs = {"attributes": {"read_only": True}} |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class SCIMProviderUserViewSet( | class SCIMProviderUserViewSet( | ||||||
|  | |||||||
| @ -12,9 +12,8 @@ from authentik.lib.sync.outgoing import ( | |||||||
|     HTTP_SERVICE_UNAVAILABLE, |     HTTP_SERVICE_UNAVAILABLE, | ||||||
|     HTTP_TOO_MANY_REQUESTS, |     HTTP_TOO_MANY_REQUESTS, | ||||||
| ) | ) | ||||||
| from authentik.lib.sync.outgoing.base import SAFE_METHODS, BaseOutgoingSyncClient | from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient | ||||||
| from authentik.lib.sync.outgoing.exceptions import ( | from authentik.lib.sync.outgoing.exceptions import ( | ||||||
|     DryRunRejected, |  | ||||||
|     NotFoundSyncException, |     NotFoundSyncException, | ||||||
|     ObjectExistsSyncException, |     ObjectExistsSyncException, | ||||||
|     TransientSyncException, |     TransientSyncException, | ||||||
| @ -22,7 +21,7 @@ from authentik.lib.sync.outgoing.exceptions import ( | |||||||
| from authentik.lib.utils.http import get_http_session | from authentik.lib.utils.http import get_http_session | ||||||
| from authentik.providers.scim.clients.exceptions import SCIMRequestException | from authentik.providers.scim.clients.exceptions import SCIMRequestException | ||||||
| from authentik.providers.scim.clients.schema import ServiceProviderConfiguration | from authentik.providers.scim.clients.schema import ServiceProviderConfiguration | ||||||
| from authentik.providers.scim.models import SCIMCompatibilityMode, SCIMProvider | from authentik.providers.scim.models import SCIMProvider | ||||||
|  |  | ||||||
| if TYPE_CHECKING: | if TYPE_CHECKING: | ||||||
|     from django.db.models import Model |     from django.db.models import Model | ||||||
| @ -55,8 +54,6 @@ class SCIMClient[TModel: "Model", TConnection: "Model", TSchema: "BaseModel"]( | |||||||
|  |  | ||||||
|     def _request(self, method: str, path: str, **kwargs) -> dict: |     def _request(self, method: str, path: str, **kwargs) -> dict: | ||||||
|         """Wrapper to send a request to the full URL""" |         """Wrapper to send a request to the full URL""" | ||||||
|         if self.provider.dry_run and method.upper() not in SAFE_METHODS: |  | ||||||
|             raise DryRunRejected(f"{self.base_url}{path}", method, body=kwargs.get("json")) |  | ||||||
|         try: |         try: | ||||||
|             response = self._session.request( |             response = self._session.request( | ||||||
|                 method, |                 method, | ||||||
| @ -90,14 +87,9 @@ class SCIMClient[TModel: "Model", TConnection: "Model", TSchema: "BaseModel"]( | |||||||
|         """Get Service provider config""" |         """Get Service provider config""" | ||||||
|         default_config = ServiceProviderConfiguration.default() |         default_config = ServiceProviderConfiguration.default() | ||||||
|         try: |         try: | ||||||
|             config = ServiceProviderConfiguration.model_validate( |             return ServiceProviderConfiguration.model_validate( | ||||||
|                 self._request("GET", "/ServiceProviderConfig") |                 self._request("GET", "/ServiceProviderConfig") | ||||||
|             ) |             ) | ||||||
|             if self.provider.compatibility_mode == SCIMCompatibilityMode.AWS: |  | ||||||
|                 config.patch.supported = False |  | ||||||
|             if self.provider.compatibility_mode == SCIMCompatibilityMode.SLACK: |  | ||||||
|                 config.filter.supported = True |  | ||||||
|             return config |  | ||||||
|         except (ValidationError, SCIMRequestException, NotFoundSyncException) as exc: |         except (ValidationError, SCIMRequestException, NotFoundSyncException) as exc: | ||||||
|             self.logger.warning("failed to get ServiceProviderConfig", exc=exc) |             self.logger.warning("failed to get ServiceProviderConfig", exc=exc) | ||||||
|             return default_config |             return default_config | ||||||
|  | |||||||
| @ -102,7 +102,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]): | |||||||
|         if not scim_id or scim_id == "": |         if not scim_id or scim_id == "": | ||||||
|             raise StopSync("SCIM Response with missing or invalid `id`") |             raise StopSync("SCIM Response with missing or invalid `id`") | ||||||
|         connection = SCIMProviderGroup.objects.create( |         connection = SCIMProviderGroup.objects.create( | ||||||
|             provider=self.provider, group=group, scim_id=scim_id, attributes=response |             provider=self.provider, group=group, scim_id=scim_id | ||||||
|         ) |         ) | ||||||
|         users = list(group.users.order_by("id").values_list("id", flat=True)) |         users = list(group.users.order_by("id").values_list("id", flat=True)) | ||||||
|         self._patch_add_users(connection, users) |         self._patch_add_users(connection, users) | ||||||
| @ -243,10 +243,9 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]): | |||||||
|             if user.value not in users_should: |             if user.value not in users_should: | ||||||
|                 users_to_remove.append(user.value) |                 users_to_remove.append(user.value) | ||||||
|         # Check users that should be in the group and add them |         # Check users that should be in the group and add them | ||||||
|         if current_group.members is not None: |         for user in users_should: | ||||||
|             for user in users_should: |             if len([x for x in current_group.members if x.value == user]) < 1: | ||||||
|                 if len([x for x in current_group.members if x.value == user]) < 1: |                 users_to_add.append(user) | ||||||
|                     users_to_add.append(user) |  | ||||||
|         # Only send request if we need to make changes |         # Only send request if we need to make changes | ||||||
|         if len(users_to_add) < 1 and len(users_to_remove) < 1: |         if len(users_to_add) < 1 and len(users_to_remove) < 1: | ||||||
|             return |             return | ||||||
|  | |||||||
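The group-membership hunk above computes `users_to_remove` and `users_to_add`, and the left-hand variant additionally guards against `current_group.members` being `None` before iterating. A simplified, standalone sketch of that diffing logic, using plain strings instead of SCIM member objects (names here are illustrative only):

```python
# Simplified sketch of the membership diff in the hunk above: plain strings
# stand in for SCIM member objects, and the `or []` mirrors the None-guard on
# current_group.members.
def diff_members(current: list[str] | None, should: list[str]) -> tuple[list[str], list[str]]:
    """Return (users_to_add, users_to_remove) so `current` ends up matching `should`."""
    current = current or []  # a group with no members may report None instead of []
    users_to_remove = [m for m in current if m not in should]
    users_to_add = [m for m in should if m not in current]
    return users_to_add, users_to_remove


# Only send a PATCH request when at least one of the lists is non-empty:
assert diff_members(None, ["alice"]) == (["alice"], [])
assert diff_members(["alice", "bob"], ["bob", "carol"]) == (["carol"], ["alice"])
```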
| @ -1,12 +1,10 @@ | |||||||
| """User client""" | """User client""" | ||||||
|  |  | ||||||
| from django.db import transaction |  | ||||||
| from django.utils.http import urlencode |  | ||||||
| from pydantic import ValidationError | from pydantic import ValidationError | ||||||
|  |  | ||||||
| from authentik.core.models import User | from authentik.core.models import User | ||||||
| from authentik.lib.sync.mapper import PropertyMappingManager | from authentik.lib.sync.mapper import PropertyMappingManager | ||||||
| from authentik.lib.sync.outgoing.exceptions import ObjectExistsSyncException, StopSync | from authentik.lib.sync.outgoing.exceptions import StopSync | ||||||
| from authentik.policies.utils import delete_none_values | from authentik.policies.utils import delete_none_values | ||||||
| from authentik.providers.scim.clients.base import SCIMClient | from authentik.providers.scim.clients.base import SCIMClient | ||||||
| from authentik.providers.scim.clients.schema import SCIM_USER_SCHEMA | from authentik.providers.scim.clients.schema import SCIM_USER_SCHEMA | ||||||
| @ -57,44 +55,24 @@ class SCIMUserClient(SCIMClient[User, SCIMProviderUser, SCIMUserSchema]): | |||||||
|     def create(self, user: User): |     def create(self, user: User): | ||||||
|         """Create user from scratch and create a connection object""" |         """Create user from scratch and create a connection object""" | ||||||
|         scim_user = self.to_schema(user, None) |         scim_user = self.to_schema(user, None) | ||||||
|         with transaction.atomic(): |         response = self._request( | ||||||
|             try: |             "POST", | ||||||
|                 response = self._request( |             "/Users", | ||||||
|                     "POST", |             json=scim_user.model_dump( | ||||||
|                     "/Users", |                 mode="json", | ||||||
|                     json=scim_user.model_dump( |                 exclude_unset=True, | ||||||
|                         mode="json", |             ), | ||||||
|                         exclude_unset=True, |         ) | ||||||
|                     ), |         scim_id = response.get("id") | ||||||
|                 ) |         if not scim_id or scim_id == "": | ||||||
|             except ObjectExistsSyncException as exc: |             raise StopSync("SCIM Response with missing or invalid `id`") | ||||||
|                 if not self._config.filter.supported: |         return SCIMProviderUser.objects.create(provider=self.provider, user=user, scim_id=scim_id) | ||||||
|                     raise exc |  | ||||||
|                 users = self._request( |  | ||||||
|                     "GET", f"/Users?{urlencode({'filter': f'userName eq {scim_user.userName}'})}" |  | ||||||
|                 ) |  | ||||||
|                 users_res = users.get("Resources", []) |  | ||||||
|                 if len(users_res) < 1: |  | ||||||
|                     raise exc |  | ||||||
|                 return SCIMProviderUser.objects.create( |  | ||||||
|                     provider=self.provider, |  | ||||||
|                     user=user, |  | ||||||
|                     scim_id=users_res[0]["id"], |  | ||||||
|                     attributes=users_res[0], |  | ||||||
|                 ) |  | ||||||
|             else: |  | ||||||
|                 scim_id = response.get("id") |  | ||||||
|                 if not scim_id or scim_id == "": |  | ||||||
|                     raise StopSync("SCIM Response with missing or invalid `id`") |  | ||||||
|                 return SCIMProviderUser.objects.create( |  | ||||||
|                     provider=self.provider, user=user, scim_id=scim_id, attributes=response |  | ||||||
|                 ) |  | ||||||
|  |  | ||||||
|     def update(self, user: User, connection: SCIMProviderUser): |     def update(self, user: User, connection: SCIMProviderUser): | ||||||
|         """Update existing user""" |         """Update existing user""" | ||||||
|         scim_user = self.to_schema(user, connection) |         scim_user = self.to_schema(user, connection) | ||||||
|         scim_user.id = connection.scim_id |         scim_user.id = connection.scim_id | ||||||
|         response = self._request( |         self._request( | ||||||
|             "PUT", |             "PUT", | ||||||
|             f"/Users/{connection.scim_id}", |             f"/Users/{connection.scim_id}", | ||||||
|             json=scim_user.model_dump( |             json=scim_user.model_dump( | ||||||
| @ -102,5 +80,3 @@ class SCIMUserClient(SCIMClient[User, SCIMProviderUser, SCIMUserSchema]): | |||||||
|                 exclude_unset=True, |                 exclude_unset=True, | ||||||
|             ), |             ), | ||||||
|         ) |         ) | ||||||
|         connection.attributes = response |  | ||||||
|         connection.save() |  | ||||||
|  | |||||||
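The `create()` fallback on the left side of the hunk above looks an existing user up with a SCIM filter query when the POST reports a conflict. A small sketch of how that filter string is assembled; the user name is a made-up example, and `urlencode` takes care of escaping the spaces and special characters in the value:

```python
# Sketch of the SCIM filter query built in the create() fallback above.
# urllib.parse.urlencode behaves like django.utils.http.urlencode for this case.
from urllib.parse import urlencode

user_name = "jane.doe@example.com"  # illustrative value, not from the diff
query = urlencode({"filter": f"userName eq {user_name}"})
print(f"/Users?{query}")  # -> /Users?filter=userName+eq+jane.doe%40example.com
```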
| @ -1,21 +0,0 @@ | |||||||
| # Generated by Django 5.0.12 on 2025-02-24 19:43 |  | ||||||
|  |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_providers_scim", "0010_scimprovider_verify_certificates"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="scimprovider", |  | ||||||
|             name="dry_run", |  | ||||||
|             field=models.BooleanField( |  | ||||||
|                 default=False, |  | ||||||
|                 help_text="When enabled, provider will not modify or create objects in the remote system.", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
Some files were not shown because too many files have changed in this diff