# Compare commits

1 commit: website/do...safari-loc

| Author | SHA1 | Date |
| --- | --- | --- |
|  | 9deed34479 |  |
```diff
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2025.6.0
+current_version = 2025.2.4
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
```
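As a side note, the `parse` pattern above is what bumpversion uses to split a version string into named parts. A minimal standalone sketch of how it tokenizes (the pattern written as Python raw strings with single escapes; the sample versions are illustrative):

```python
import re

# The parse pattern from the config above, as Python raw strings.
PARSE = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?"
)

for version in ("2025.2.4", "2025.6.0-rc1"):
    match = PARSE.match(version)
    print(version, "->", match.groupdict() if match else "no match")
# 2025.2.4 -> {'major': '2025', 'minor': '2', 'patch': '4', 'rc_t': None, 'rc_n': None}
# 2025.6.0-rc1 -> {'major': '2025', 'minor': '6', 'patch': '0', 'rc_t': 'rc', 'rc_n': '1'}
```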
**.github/actions/setup/action.yml** (2 changes)

```diff
@@ -36,7 +36,7 @@ runs:
       with:
         go-version-file: "go.mod"
     - name: Setup docker cache
-      uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7
+      uses: ScribeMD/docker-cache@0.5.0
       with:
         key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }}
     - name: Setup dependencies
```
**.github/dependabot.yml** (26 changes)

```diff
@@ -23,13 +23,7 @@ updates:
   - package-ecosystem: npm
     directories:
       - "/web"
-      - "/web/packages/sfe"
-      - "/web/packages/core"
-      - "/web/packages/esbuild-plugin-live-reload"
-      - "/packages/prettier-config"
-      - "/packages/tsconfig"
-      - "/packages/docusaurus-config"
-      - "/packages/eslint-config"
+      - "/web/sfe"
     schedule:
       interval: daily
       time: "04:00"
@@ -74,9 +68,6 @@ updates:
       wdio:
         patterns:
           - "@wdio/*"
-      goauthentik:
-        patterns:
-          - "@goauthentik/*"
   - package-ecosystem: npm
     directory: "/website"
     schedule:
@@ -97,9 +88,6 @@ updates:
           - "swc-*"
           - "lightningcss*"
           - "@rspack/binding*"
-      goauthentik:
-        patterns:
-          - "@goauthentik/*"
   - package-ecosystem: npm
     directory: "/lifecycle/aws"
     schedule:
@@ -130,15 +118,3 @@ updates:
       prefix: "core:"
     labels:
       - dependencies
-  - package-ecosystem: docker-compose
-    directories:
-      # - /scripts # Maybe
-      - /tests/e2e
-    schedule:
-      interval: daily
-      time: "04:00"
-    open-pull-requests-limit: 10
-    commit-message:
-      prefix: "core:"
-    labels:
-      - dependencies
```
**.github/workflows/api-ts-publish.yml** (1 change)

```diff
@@ -53,7 +53,6 @@ jobs:
           signoff: true
           # ID from https://api.github.com/users/authentik-automation[bot]
           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
-          labels: dependencies
       - uses: peter-evans/enable-pull-request-automerge@v3
         with:
           token: ${{ steps.generate_token.outputs.token }}
```
**.github/workflows/ci-main.yml** (17 changes)

```diff
@@ -62,7 +62,6 @@ jobs:
         psql:
           - 15-alpine
           - 16-alpine
-          - 17-alpine
         run_id: [1, 2, 3, 4, 5]
     steps:
       - uses: actions/checkout@v4
@@ -71,18 +70,22 @@
       - name: checkout stable
         run: |
           # Copy current, latest config to local
+          # Temporarly comment the .github backup while migrating to uv
           cp authentik/lib/default.yml local.env.yml
-          cp -R .github ..
+          # cp -R .github ..
           cp -R scripts ..
           git checkout $(git tag --sort=version:refname | grep '^version/' | grep -vE -- '-rc[0-9]+$' | tail -n1)
-          rm -rf .github/ scripts/
-          mv ../.github ../scripts .
+          # rm -rf .github/ scripts/
+          # mv ../.github ../scripts .
+          rm -rf scripts/
+          mv ../scripts .
       - name: Setup authentik env (stable)
         uses: ./.github/actions/setup
         with:
           postgresql_version: ${{ matrix.psql }}
+        continue-on-error: true
       - name: run migrations to stable
-        run: uv run python -m lifecycle.migrate
+        run: poetry run python -m lifecycle.migrate
       - name: checkout current code
         run: |
           set -x
@@ -117,7 +120,6 @@ jobs:
         psql:
           - 15-alpine
           - 16-alpine
-          - 17-alpine
         run_id: [1, 2, 3, 4, 5]
     steps:
       - uses: actions/checkout@v4
@@ -202,7 +204,7 @@ jobs:
         uses: actions/cache@v4
         with:
           path: web/dist
-          key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
+          key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }}
       - name: prepare web ui
         if: steps.cache-web.outputs.cache-hit != 'true'
         working-directory: web
@@ -210,7 +212,6 @@ jobs:
           npm ci
           make -C .. gen-client-ts
           npm run build
-          npm run build:sfe
       - name: run e2e
         run: |
           uv run coverage run manage.py test ${{ matrix.job.glob }}
```
**.github/workflows/ci-outpost.yml** (2 changes)

```diff
@@ -29,7 +29,7 @@ jobs:
       - name: Generate API
         run: make gen-client-go
       - name: golangci-lint
-        uses: golangci/golangci-lint-action@v8
+        uses: golangci/golangci-lint-action@v7
        with:
           version: latest
           args: --timeout 5000s --verbose
```

```diff
@@ -37,7 +37,6 @@ jobs:
           signoff: true
           # ID from https://api.github.com/users/authentik-automation[bot]
           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
-          labels: dependencies
       - uses: peter-evans/enable-pull-request-automerge@v3
         with:
           token: ${{ steps.generate_token.outputs.token }}
```
**.github/workflows/image-compress.yml** (1 change)

```diff
@@ -53,7 +53,6 @@ jobs:
           body: ${{ steps.compress.outputs.markdown }}
           delete-branch: true
           signoff: true
-          labels: dependencies
       - uses: peter-evans/enable-pull-request-automerge@v3
         if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
         with:
```
**.github/workflows/packages-npm-publish.yml** (24 changes)

```diff
@@ -3,11 +3,10 @@ on:
   push:
     branches: [main]
     paths:
-      - packages/docusaurus-config/**
-      - packages/eslint-config/**
-      - packages/prettier-config/**
-      - packages/tsconfig/**
-      - web/packages/esbuild-plugin-live-reload/**
+      - packages/docusaurus-config
+      - packages/eslint-config
+      - packages/prettier-config
+      - packages/tsconfig
   workflow_dispatch:
 jobs:
   publish:
@@ -17,28 +16,27 @@ jobs:
       fail-fast: false
       matrix:
         package:
-          - packages/docusaurus-config
-          - packages/eslint-config
-          - packages/prettier-config
-          - packages/tsconfig
-          - web/packages/esbuild-plugin-live-reload
+          - docusaurus-config
+          - eslint-config
+          - prettier-config
+          - tsconfig
     steps:
       - uses: actions/checkout@v4
         with:
           fetch-depth: 2
       - uses: actions/setup-node@v4
         with:
-          node-version-file: ${{ matrix.package }}/package.json
+          node-version-file: packages/${{ matrix.package }}/package.json
           registry-url: "https://registry.npmjs.org"
       - name: Get changed files
         id: changed-files
         uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c
         with:
           files: |
-            ${{ matrix.package }}/package.json
+            packages/${{ matrix.package }}/package.json
       - name: Publish package
         if: steps.changed-files.outputs.any_changed == 'true'
-        working-directory: ${{ matrix.package }}
+        working-directory: packages/${{ matrix.package}}
         run: |
           npm ci
           npm run build
```
```diff
@@ -52,6 +52,3 @@ jobs:
           body: "core, web: update translations"
           delete-branch: true
           signoff: true
-          labels: dependencies
-          # ID from https://api.github.com/users/authentik-automation[bot]
-          author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
```
**.github/workflows/translation-rename.yml** (15 changes)

```diff
@@ -15,7 +15,6 @@ jobs:
     runs-on: ubuntu-latest
     if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}}
     steps:
-      - uses: actions/checkout@v4
       - id: generate_token
         uses: tibdex/github-app-token@v2
         with:
@@ -26,13 +25,23 @@
         env:
           GH_TOKEN: ${{ steps.generate_token.outputs.token }}
         run: |
-          title=$(gh pr view ${{ github.event.pull_request.number }} --json  "title" -q ".title")
+          title=$(curl -q -L \
+            -H "Accept: application/vnd.github+json" \
+            -H "Authorization: Bearer ${GH_TOKEN}" \
+            -H "X-GitHub-Api-Version: 2022-11-28" \
+            https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} | jq -r .title)
           echo "title=${title}" >> "$GITHUB_OUTPUT"
       - name: Rename
         env:
           GH_TOKEN: ${{ steps.generate_token.outputs.token }}
         run: |
-          gh pr edit ${{ github.event.pull_request.number }} -t "translate: ${{ steps.title.outputs.title }}" --add-label dependencies
+          curl -L \
+            -X PATCH \
+            -H "Accept: application/vnd.github+json" \
+            -H "Authorization: Bearer ${GH_TOKEN}" \
+            -H "X-GitHub-Api-Version: 2022-11-28" \
+            https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} \
+            -d "{\"title\":\"translate: ${{ steps.title.outputs.title }}\"}"
       - uses: peter-evans/enable-pull-request-automerge@v3
         with:
           token: ${{ steps.generate_token.outputs.token }}
```
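The rename step above swaps `gh pr edit` for a raw call to the GitHub REST API. For reference, a minimal Python sketch of the same PATCH request (standard library only; `OWNER/REPO`, the PR number, and the title are placeholder values, and `GH_TOKEN` is assumed to hold a token with pull-request write access):

```python
import json
import os
import urllib.request

# Placeholder values; the workflow derives these from GITHUB_REPOSITORY
# and the pull_request event payload.
repo = "OWNER/REPO"
pr_number = 1234
new_title = "translate: updated translations"

request = urllib.request.Request(
    f"https://api.github.com/repos/{repo}/pulls/{pr_number}",
    method="PATCH",
    headers={
        "Accept": "application/vnd.github+json",
        "Authorization": f"Bearer {os.environ['GH_TOKEN']}",
        "X-GitHub-Api-Version": "2022-11-28",
    },
    data=json.dumps({"title": new_title}).encode(),
)
with urllib.request.urlopen(request) as response:
    print(response.status)  # 200 when the PR was renamed
```

Note that unlike the left-hand `gh pr edit`, the raw PATCH only sets the title; the `--add-label dependencies` behaviour has no equivalent in this single call.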
**.vscode/settings.json** (6 changes)

```diff
@@ -16,7 +16,7 @@
     ],
     "typescript.preferences.importModuleSpecifier": "non-relative",
     "typescript.preferences.importModuleSpecifierEnding": "index",
-    "typescript.tsdk": "./node_modules/typescript/lib",
+    "typescript.tsdk": "./web/node_modules/typescript/lib",
     "typescript.enablePromptUseWorkspaceTsdk": true,
     "yaml.schemas": {
         "./blueprints/schema.json": "blueprints/**/*.yaml"
@@ -30,5 +30,7 @@
         }
     ],
     "go.testFlags": ["-count=1"],
-    "github-actions.workflows.pinned.workflows": [".github/workflows/ci-main.yml"]
+    "github-actions.workflows.pinned.workflows": [
+        ".github/workflows/ci-main.yml"
+    ]
 }
```
**Dockerfile** (14 changes)

```diff
@@ -1,7 +1,7 @@
 # syntax=docker/dockerfile:1

 # Stage 1: Build website
-FROM --platform=${BUILDPLATFORM} docker.io/library/node:24 AS website-builder
+FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS website-builder

 ENV NODE_ENV=production

@@ -20,7 +20,7 @@ COPY ./SECURITY.md /work/
 RUN npm run build-bundled

 # Stage 2: Build webui
-FROM --platform=${BUILDPLATFORM} docker.io/library/node:24 AS web-builder
+FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS web-builder

 ARG GIT_BUILD_HASH
 ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
@@ -40,8 +40,7 @@ COPY ./web /work/web/
 COPY ./website /work/website/
 COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api

-RUN npm run build && \
-    npm run build:sfe
+RUN npm run build

 # Stage 3: Build go proxy
 FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.24-bookworm AS go-builder
@@ -86,17 +85,18 @@ FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip
 ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN"
 ENV GEOIPUPDATE_VERBOSE="1"
 ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID"
+ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY"

 USER root
 RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
     --mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \
     mkdir -p /usr/share/GeoIP && \
-    /bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
+    /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"

 # Stage 5: Download uv
-FROM ghcr.io/astral-sh/uv:0.7.10 AS uv
+FROM ghcr.io/astral-sh/uv:0.6.16 AS uv
 # Stage 6: Base python image
-FROM ghcr.io/goauthentik/fips-python:3.13.3-slim-bookworm-fips AS python-base
+FROM ghcr.io/goauthentik/fips-python:3.12.10-slim-bookworm-fips AS python-base

 ENV VENV_PATH="/ak-root/.venv" \
     PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \
```
**Makefile** (51 changes)

```diff
@@ -1,7 +1,6 @@
 .PHONY: gen dev-reset all clean test web website

-SHELL := /usr/bin/env bash
-.SHELLFLAGS += ${SHELLFLAGS} -e -o pipefail
+.SHELLFLAGS += ${SHELLFLAGS} -e
 PWD = $(shell pwd)
 UID = $(shell id -u)
 GID = $(shell id -g)
@@ -9,9 +8,9 @@ NPM_VERSION = $(shell python -m scripts.generate_semver)
 PY_SOURCES = authentik tests scripts lifecycle .github
 DOCKER_IMAGE ?= "authentik:test"

-GEN_API_TS = gen-ts-api
-GEN_API_PY = gen-py-api
-GEN_API_GO = gen-go-api
+GEN_API_TS = "gen-ts-api"
+GEN_API_PY = "gen-py-api"
+GEN_API_GO = "gen-go-api"

 pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null)
 pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null)
@@ -118,19 +117,14 @@ gen-diff:  ## (Release) generate the changelog diff between the current schema a
 	npx prettier --write diff.md

 gen-clean-ts:  ## Remove generated API client for Typescript
-	rm -rf ${PWD}/${GEN_API_TS}/
-	rm -rf ${PWD}/web/node_modules/@goauthentik/api/
+	rm -rf ./${GEN_API_TS}/
+	rm -rf ./web/node_modules/@goauthentik/api/

 gen-clean-go:  ## Remove generated API client for Go
-	mkdir -p ${PWD}/${GEN_API_GO}
-ifneq ($(wildcard ${PWD}/${GEN_API_GO}/.*),)
-	make -C ${PWD}/${GEN_API_GO} clean
-else
-	rm -rf ${PWD}/${GEN_API_GO}
-endif
+	rm -rf ./${GEN_API_GO}/

 gen-clean-py:  ## Remove generated API client for Python
-	rm -rf ${PWD}/${GEN_API_PY}/
+	rm -rf ./${GEN_API_PY}/

 gen-clean: gen-clean-ts gen-clean-go gen-clean-py  ## Remove generated API clients
@@ -147,8 +141,8 @@ gen-client-ts: gen-clean-ts  ## Build and install the authentik API for Typescri
 		--git-repo-id authentik \
 		--git-user-id goauthentik
 	mkdir -p web/node_modules/@goauthentik/api
-	cd ${PWD}/${GEN_API_TS} && npm i
-	\cp -rf ${PWD}/${GEN_API_TS}/* web/node_modules/@goauthentik/api
+	cd ./${GEN_API_TS} && npm i
+	\cp -rf ./${GEN_API_TS}/* web/node_modules/@goauthentik/api
@@ -162,17 +156,24 @@ gen-client-py: gen-clean-py ## Build and install the authentik API for Python
 		--additional-properties=packageVersion=${NPM_VERSION} \
 		--git-repo-id authentik \
 		--git-user-id goauthentik
+	pip install ./${GEN_API_PY}

 gen-client-go: gen-clean-go  ## Build and install the authentik API for Golang
-	mkdir -p ${PWD}/${GEN_API_GO}
-ifeq ($(wildcard ${PWD}/${GEN_API_GO}/.*),)
-	git clone --depth 1 https://github.com/goauthentik/client-go.git ${PWD}/${GEN_API_GO}
-else
-	cd ${PWD}/${GEN_API_GO} && git pull
-endif
-	cp ${PWD}/schema.yml ${PWD}/${GEN_API_GO}
-	make -C ${PWD}/${GEN_API_GO} build
+	mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates
+	wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./${GEN_API_GO}/config.yaml
+	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./${GEN_API_GO}/templates/README.mustache
+	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./${GEN_API_GO}/templates/go.mod.mustache
+	cp schema.yml ./${GEN_API_GO}/
+	docker run \
+		--rm -v ${PWD}/${GEN_API_GO}:/local \
+		--user ${UID}:${GID} \
+		docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
+		-i /local/schema.yml \
+		-g go \
+		-o /local/ \
+		-c /local/config.yaml
 	go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO}
+	rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/

 gen-dev-config:  ## Generate a local development config file
 	uv run scripts/generate_config.py
@@ -243,7 +244,7 @@ docker:  ## Build a docker image of the current source tree
 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}

 test-docker:
-	BUILD=true ${PWD}/scripts/test_docker.sh
+	BUILD=true ./scripts/test_docker.sh

 #########################
 ## CI
```
```diff
@@ -42,4 +42,4 @@ See [SECURITY.md](SECURITY.md)

 ## Adoption and Contributions

-Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [contribution guide](https://docs.goauthentik.io/docs/developer-docs?utm_source=github).
+Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md).
```
```diff
@@ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni

 | Version   | Supported |
 | --------- | --------- |
-| 2025.4.x  | ✅        |
-| 2025.6.x  | ✅        |
+| 2024.12.x | ✅        |
+| 2025.2.x  | ✅        |

 ## Reporting a Vulnerability

```
```diff
@@ -2,7 +2,7 @@

 from os import environ

-__version__ = "2025.6.0"
+__version__ = "2025.2.4"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

```
```diff
@@ -1,12 +1,9 @@
 """API Authentication"""

 from hmac import compare_digest
-from pathlib import Path
-from tempfile import gettempdir
 from typing import Any

 from django.conf import settings
-from django.contrib.auth.models import AnonymousUser
 from drf_spectacular.extensions import OpenApiAuthenticationExtension
 from rest_framework.authentication import BaseAuthentication, get_authorization_header
 from rest_framework.exceptions import AuthenticationFailed
@@ -14,17 +11,11 @@ from rest_framework.request import Request
 from structlog.stdlib import get_logger

 from authentik.core.middleware import CTX_AUTH_VIA
-from authentik.core.models import Token, TokenIntents, User, UserTypes
+from authentik.core.models import Token, TokenIntents, User
 from authentik.outposts.models import Outpost
 from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API

 LOGGER = get_logger()
-_tmp = Path(gettempdir())
-try:
-    with open(_tmp / "authentik-core-ipc.key") as _f:
-        ipc_key = _f.read()
-except OSError:
-    ipc_key = None


 def validate_auth(header: bytes) -> str | None:
@@ -82,11 +73,6 @@ def auth_user_lookup(raw_header: bytes) -> User | None:
     if user:
         CTX_AUTH_VIA.set("secret_key")
         return user
-    # then try to auth via secret key (for embedded outpost/etc)
-    user = token_ipc(auth_credentials)
-    if user:
-        CTX_AUTH_VIA.set("ipc")
-        return user
     raise AuthenticationFailed("Token invalid/expired")

@@ -104,43 +90,6 @@ def token_secret_key(value: str) -> User | None:
     return outpost.user


-class IPCUser(AnonymousUser):
-    """'Virtual' user for IPC communication between authentik core and the authentik router"""
-
-    username = "authentik:system"
-    is_active = True
-    is_superuser = True
-
-    @property
-    def type(self):
-        return UserTypes.INTERNAL_SERVICE_ACCOUNT
-
-    def has_perm(self, perm, obj=None):
-        return True
-
-    def has_perms(self, perm_list, obj=None):
-        return True
-
-    def has_module_perms(self, module):
-        return True
-
-    @property
-    def is_anonymous(self):
-        return False
-
-    @property
-    def is_authenticated(self):
-        return True
-
-
-def token_ipc(value: str) -> User | None:
-    """Check if the token is the secret key
-    and return the service account for the managed outpost"""
-    if not ipc_key or not compare_digest(value, ipc_key):
-        return None
-    return IPCUser()
-
-
 class TokenAuthentication(BaseAuthentication):
     """Token-based authentication using HTTP Bearer authentication"""

```
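Both the surviving secret-key check and the removed `token_ipc` path compare secrets with `hmac.compare_digest` rather than `==`. A small self-contained sketch of why (the key value is made up; in the removed code it was read from a `authentik-core-ipc.key` file at import time):

```python
from hmac import compare_digest

# Made-up key for illustration only.
ipc_key = "made-up-ipc-key"

def check_token(candidate: str) -> bool:
    # compare_digest runs in time independent of where the first
    # mismatch occurs, so an attacker cannot measure how many leading
    # characters were correct; == may short-circuit on the first byte.
    return compare_digest(candidate, ipc_key)

print(check_token("made-up-ipc-key"))  # True
print(check_token("wrong-key"))        # False
```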
```diff
@@ -54,7 +54,7 @@ def create_component(generator: SchemaGenerator, name, schema, type_=ResolvedCom
     return component


-def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):
+def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):  # noqa: W0613
     """Workaround to set a default response for endpoints.
     Workaround suggested at
     <https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357>
```
```diff
@@ -164,7 +164,9 @@ class BlueprintEntry:
         """Get the blueprint model, with yaml tags resolved if present"""
         return str(self.tag_resolver(self.model, blueprint))

-    def get_permissions(self, blueprint: "Blueprint") -> Generator[BlueprintEntryPermission]:
+    def get_permissions(
+        self, blueprint: "Blueprint"
+    ) -> Generator[BlueprintEntryPermission, None, None]:
         """Get permissions of this entry, with all yaml tags resolved"""
         for perm in self.permissions:
             yield BlueprintEntryPermission(
```
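This change is typing spelling only: `Generator[BlueprintEntryPermission]` leans on default type parameters for the send and return types (accepted by newer Python/typing versions), while the three-argument form spells them out for older tooling. A generic illustration of what the three parameters declare:

```python
from collections.abc import Generator

def count_up(limit: int) -> Generator[int, None, None]:
    # Yields int values, accepts nothing via send(), and returns None,
    # which is exactly what the three type parameters state.
    for i in range(limit):
        yield i

print(list(count_up(3)))  # [0, 1, 2]
```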
```diff
@@ -59,7 +59,6 @@ class BrandSerializer(ModelSerializer):
             "flow_device_code",
             "default_application",
             "web_certificate",
-            "client_certificates",
             "attributes",
         ]
         extra_kwargs = {
@@ -121,7 +120,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet):
         "domain",
         "branding_title",
         "web_certificate__name",
-        "client_certificates__name",
     ]
     filterset_fields = [
         "brand_uuid",
@@ -138,7 +136,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet):
         "flow_user_settings",
         "flow_device_code",
         "web_certificate",
-        "client_certificates",
     ]
     ordering = ["domain"]

```
```diff
@@ -16,7 +16,7 @@ def migrate_custom_css(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
     if not path.exists():
         return
     css = path.read_text()
-    Brand.objects.using(db_alias).all().update(branding_custom_css=css)
+    Brand.objects.using(db_alias).update(branding_custom_css=css)


 class Migration(migrations.Migration):
```
```diff
@@ -1,37 +0,0 @@
-# Generated by Django 5.1.9 on 2025-05-19 15:09
-
-import django.db.models.deletion
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("authentik_brands", "0009_brand_branding_default_flow_background"),
-        ("authentik_crypto", "0004_alter_certificatekeypair_name"),
-    ]
-
-    operations = [
-        migrations.AddField(
-            model_name="brand",
-            name="client_certificates",
-            field=models.ManyToManyField(
-                blank=True,
-                default=None,
-                help_text="Certificates used for client authentication.",
-                to="authentik_crypto.certificatekeypair",
-            ),
-        ),
-        migrations.AlterField(
-            model_name="brand",
-            name="web_certificate",
-            field=models.ForeignKey(
-                default=None,
-                help_text="Web Certificate used by the authentik Core webserver.",
-                null=True,
-                on_delete=django.db.models.deletion.SET_DEFAULT,
-                related_name="+",
-                to="authentik_crypto.certificatekeypair",
-            ),
-        ),
-    ]
```
```diff
@@ -73,13 +73,6 @@ class Brand(SerializerModel):
         default=None,
         on_delete=models.SET_DEFAULT,
         help_text=_("Web Certificate used by the authentik Core webserver."),
-        related_name="+",
-    )
-    client_certificates = models.ManyToManyField(
-        CertificateKeyPair,
-        default=None,
-        blank=True,
-        help_text=_("Certificates used for client authentication."),
     )
     attributes = models.JSONField(default=dict, blank=True)

```
```diff
@@ -5,10 +5,10 @@ from typing import Any
 from django.db.models import F, Q
 from django.db.models import Value as V
 from django.http.request import HttpRequest
+from sentry_sdk import get_current_span

 from authentik import get_full_version
 from authentik.brands.models import Brand
-from authentik.lib.sentry import get_http_meta
 from authentik.tenants.models import Tenant

 _q_default = Q(default=True)
@@ -32,9 +32,13 @@ def context_processor(request: HttpRequest) -> dict[str, Any]:
     """Context Processor that injects brand object into every template"""
     brand = getattr(request, "brand", DEFAULT_BRAND)
     tenant = getattr(request, "tenant", Tenant())
+    trace = ""
+    span = get_current_span()
+    if span:
+        trace = span.to_traceparent()
     return {
         "brand": brand,
         "footer_links": tenant.footer_links,
-        "html_meta": {**get_http_meta()},
+        "sentry_trace": trace,
         "version": get_full_version(),
     }
```
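The right-hand side resolves the active Sentry span into a traceparent string for the template context. Reduced to a sketch (assumes `sentry-sdk` is installed and initialized; `get_current_span()` returns `None` outside an active transaction, hence the guard):

```python
from sentry_sdk import get_current_span

def current_traceparent() -> str:
    """Return the sentry-trace value for the active span, or an empty string."""
    span = get_current_span()
    if span:
        # Rendered as "<trace_id>-<span_id>-<sampled>", the value a
        # frontend SDK reads to join the backend trace.
        return span.to_traceparent()
    return ""
```

The template side of this appears in the base.html hunk at the end of this compare, where the value is emitted as a `<meta name="sentry-trace">` tag.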
```diff
@@ -99,17 +99,18 @@ class GroupSerializer(ModelSerializer):
             if superuser
             else "authentik_core.disable_group_superuser"
         )
-        if self.instance or superuser:
-            has_perm = user.has_perm(perm) or user.has_perm(perm, self.instance)
-            if not has_perm:
-                raise ValidationError(
-                    _(
-                        (
-                            "User does not have permission to set "
-                            "superuser status to {superuser_status}."
-                        ).format_map({"superuser_status": superuser})
-                    )
-                )
+        has_perm = user.has_perm(perm)
+        if self.instance and not has_perm:
+            has_perm = user.has_perm(perm, self.instance)
+        if not has_perm:
+            raise ValidationError(
+                _(
+                    (
+                        "User does not have permission to set "
+                        "superuser status to {superuser_status}."
+                    ).format_map({"superuser_status": superuser})
+                )
+            )
         return superuser

     class Meta:
```
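The restructured check reads: try the global permission first, and only fall back to an object-level check when an existing instance is being updated. A runnable sketch of that control flow (`StubUser` is a made-up stand-in for Django's `User.has_perm`, whose object argument is what routes the check to object-level backends such as django-guardian):

```python
class StubUser:
    """Made-up stand-in for a Django user; not authentik code."""

    def __init__(self, global_perms: set[str], object_perms: set[str]):
        self._global = global_perms
        self._object = object_perms

    def has_perm(self, perm: str, obj=None) -> bool:
        # obj=None -> global check; with an object, Django consults
        # object-level permission backends instead.
        return perm in (self._global if obj is None else self._object)


def may_set_superuser(user: StubUser, instance) -> bool:
    perm = "authentik_core.enable_group_superuser"
    has_perm = user.has_perm(perm)       # global permission first
    if instance and not has_perm:        # object-level fallback on update
        has_perm = user.has_perm(perm, instance)
    return has_perm


group = object()  # placeholder for an existing Group instance
print(may_set_superuser(StubUser(set(), {"authentik_core.enable_group_superuser"}), group))  # True
print(may_set_superuser(StubUser(set(), {"authentik_core.enable_group_superuser"}), None))   # False
```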
```diff
@@ -84,7 +84,6 @@ from authentik.flows.views.executor import QS_KEY_TOKEN
 from authentik.lib.avatars import get_avatar
 from authentik.rbac.decorators import permission_required
 from authentik.rbac.models import get_permission_choices
-from authentik.stages.email.flow import pickle_flow_token_for_email
 from authentik.stages.email.models import EmailStage
 from authentik.stages.email.tasks import send_mails
 from authentik.stages.email.utils import TemplateEmailMessage
@@ -452,7 +451,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
     def list(self, request, *args, **kwargs):
         return super().list(request, *args, **kwargs)

-    def _create_recovery_link(self, for_email=False) -> tuple[str, Token]:
+    def _create_recovery_link(self) -> tuple[str, Token]:
         """Create a recovery link (when the current brand has a recovery flow set),
         that can either be shown to an admin or sent to the user directly"""
         brand: Brand = self.request._request.brand
@@ -474,16 +473,12 @@ class UserViewSet(UsedByMixin, ModelViewSet):
             raise ValidationError(
                 {"non_field_errors": "Recovery flow not applicable to user"}
             ) from None
-        _plan = FlowToken.pickle(plan)
-        if for_email:
-            _plan = pickle_flow_token_for_email(plan)
         token, __ = FlowToken.objects.update_or_create(
             identifier=f"{user.uid}-password-reset",
             defaults={
                 "user": user,
                 "flow": flow,
-                "_plan": _plan,
-                "revoke_on_execution": not for_email,
+                "_plan": FlowToken.pickle(plan),
             },
         )
         querystring = urlencode({QS_KEY_TOKEN: token.key})
@@ -653,7 +648,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
         if for_user.email == "":
             LOGGER.debug("User doesn't have an email address")
             raise ValidationError({"non_field_errors": "User does not have an email address set."})
-        link, token = self._create_recovery_link(for_email=True)
+        link, token = self._create_recovery_link()
         # Lookup the email stage to assure the current user can access it
         stages = get_objects_for_user(
             request.user, "authentik_stages_email.view_emailstage"
```
```diff
@@ -2,7 +2,6 @@

 from django.apps import apps
 from django.contrib.auth.management import create_permissions
-from django.core.management import call_command
 from django.core.management.base import BaseCommand, no_translations
 from guardian.management import create_anonymous_user

@@ -17,10 +16,6 @@ class Command(BaseCommand):
         """Check permissions for all apps"""
         for tenant in Tenant.objects.filter(ready=True):
             with tenant:
-                # See https://code.djangoproject.com/ticket/28417
-                # Remove potential lingering old permissions
-                call_command("remove_stale_contenttypes", "--no-input")
-
                 for app in apps.get_app_configs():
                     self.stdout.write(f"Checking app {app.name} ({app.label})\n")
                     create_permissions(app, verbosity=0)
```
```diff
@@ -31,10 +31,7 @@ class PickleSerializer:

     def loads(self, data):
         """Unpickle data to be loaded from redis"""
-        try:
-            return pickle.loads(data)  # nosec
-        except Exception:
-            return {}
+        return pickle.loads(data)  # nosec


 def _migrate_session(
@@ -79,7 +76,6 @@ def _migrate_session(
         AuthenticatedSession.objects.using(db_alias).create(
             session=session,
             user=old_auth_session.user,
-            uuid=old_auth_session.uuid,
         )

```
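The left-hand `loads` treats an unreadable payload as an empty session instead of raising. A condensed, self-contained sketch of that defensive variant with a round trip (plain `pickle`, mirroring the code above; the `dumps` side is assumed):

```python
import pickle

class PickleSerializer:
    """Serialize session payloads with pickle (trusted data only)."""

    def dumps(self, obj) -> bytes:
        return pickle.dumps(obj, pickle.HIGHEST_PROTOCOL)

    def loads(self, data: bytes):
        try:
            return pickle.loads(data)  # nosec - data comes from our own store
        except Exception:
            # Corrupt or incompatible payloads fall back to an empty
            # session rather than failing the request.
            return {}

serializer = PickleSerializer()
assert serializer.loads(serializer.dumps({"user_id": 42})) == {"user_id": 42}
assert serializer.loads(b"not a pickle") == {}
```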
| @ -1,103 +0,0 @@ | |||||||
| # Generated by Django 5.1.9 on 2025-05-14 11:15 |  | ||||||
|  |  | ||||||
| from django.apps.registry import Apps, apps as global_apps |  | ||||||
| from django.db import migrations |  | ||||||
| from django.contrib.contenttypes.management import create_contenttypes |  | ||||||
| from django.contrib.auth.management import create_permissions |  | ||||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def migrate_authenticated_session_permissions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): |  | ||||||
|     """Migrate permissions from OldAuthenticatedSession to AuthenticatedSession""" |  | ||||||
|     db_alias = schema_editor.connection.alias |  | ||||||
|  |  | ||||||
|     # `apps` here is just an instance of `django.db.migrations.state.AppConfigStub`, we need the |  | ||||||
|     # real config for creating permissions and content types |  | ||||||
|     authentik_core_config = global_apps.get_app_config("authentik_core") |  | ||||||
|     # These are only ran by django after all migrations, but we need them right now. |  | ||||||
|     # `global_apps` is needed, |  | ||||||
-    create_permissions(authentik_core_config, using=db_alias, verbosity=1)
-    create_contenttypes(authentik_core_config, using=db_alias, verbosity=1)
-
-    # But from now on, this is just a regular migration, so use `apps`
-    Permission = apps.get_model("auth", "Permission")
-    ContentType = apps.get_model("contenttypes", "ContentType")
-
-    try:
-        old_ct = ContentType.objects.using(db_alias).get(
-            app_label="authentik_core", model="oldauthenticatedsession"
-        )
-        new_ct = ContentType.objects.using(db_alias).get(
-            app_label="authentik_core", model="authenticatedsession"
-        )
-    except ContentType.DoesNotExist:
-        # This should exist at this point, but if not, let's cut our losses
-        return
-
-    # Get all permissions for the old content type
-    old_perms = Permission.objects.using(db_alias).filter(content_type=old_ct)
-
-    # Create equivalent permissions for the new content type
-    for old_perm in old_perms:
-        new_perm = (
-            Permission.objects.using(db_alias)
-            .filter(
-                content_type=new_ct,
-                codename=old_perm.codename,
-            )
-            .first()
-        )
-        if not new_perm:
-            # This should exist at this point, but if not, let's cut our losses
-            continue
-
-        # Global user permissions
-        User = apps.get_model("authentik_core", "User")
-        User.user_permissions.through.objects.using(db_alias).filter(
-            permission=old_perm
-        ).all().update(permission=new_perm)
-
-        # Global role permissions
-        DjangoGroup = apps.get_model("auth", "Group")
-        DjangoGroup.permissions.through.objects.using(db_alias).filter(
-            permission=old_perm
-        ).all().update(permission=new_perm)
-
-        # Object user permissions
-        UserObjectPermission = apps.get_model("guardian", "UserObjectPermission")
-        UserObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update(
-            permission=new_perm, content_type=new_ct
-        )
-
-        # Object role permissions
-        GroupObjectPermission = apps.get_model("guardian", "GroupObjectPermission")
-        GroupObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update(
-            permission=new_perm, content_type=new_ct
-        )
-
-
-def remove_old_authenticated_session_content_type(
-    apps: Apps, schema_editor: BaseDatabaseSchemaEditor
-):
-    db_alias = schema_editor.connection.alias
-    ContentType = apps.get_model("contenttypes", "ContentType")
-
-    ContentType.objects.using(db_alias).filter(model="oldauthenticatedsession").delete()
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("authentik_core", "0047_delete_oldauthenticatedsession"),
-    ]
-
-    operations = [
-        migrations.RunPython(
-            code=migrate_authenticated_session_permissions,
-            reverse_code=migrations.RunPython.noop,
-        ),
-        migrations.RunPython(
-            code=remove_old_authenticated_session_content_type,
-            reverse_code=migrations.RunPython.noop,
-        ),
-    ]
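
The migration removed above is a standard RunPython data migration. A generic, self-contained sketch of the pattern follows; the app label, model names, and dependency below are illustrative, not taken from the diff:

from django.db import migrations


def copy_permissions(apps, schema_editor):
    """Re-point permissions from an old content type to a new one.

    `apps` is the historical registry, so model state matches this migration,
    and all queries go through the connection alias being migrated.
    """
    db_alias = schema_editor.connection.alias
    ContentType = apps.get_model("contenttypes", "ContentType")
    Permission = apps.get_model("auth", "Permission")

    old_ct = ContentType.objects.using(db_alias).filter(
        app_label="example_app", model="oldmodel"
    ).first()
    new_ct = ContentType.objects.using(db_alias).filter(
        app_label="example_app", model="newmodel"
    ).first()
    if not old_ct or not new_ct:
        return  # nothing to migrate; cut our losses like the original does
    # A real migration (like the deleted one above) must also handle codename
    # collisions when permissions already exist on the new content type.
    Permission.objects.using(db_alias).filter(content_type=old_ct).update(
        content_type=new_ct
    )


class Migration(migrations.Migration):
    dependencies = [("example_app", "0001_initial")]
    operations = [
        migrations.RunPython(
            code=copy_permissions,
            reverse_code=migrations.RunPython.noop,
        ),
    ]
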
@@ -21,9 +21,7 @@
         <script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script>
         {% block head %}
         {% endblock %}
-        {% for key, value in html_meta.items %}
-        <meta name="{{key}}" content="{{ value }}" />
-        {% endfor %}
+        <meta name="sentry-trace" content="{{ sentry_trace }}" />
     </head>
     <body>
         {% block body %}

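The template hunk above trades a generic loop over an `html_meta` context dict (base side) for a hardcoded sentry-trace tag (branch side). As a hedged illustration, only the sentry-trace key is confirmed by the diff and the value here is made up, a view context such as

context = {
    "html_meta": {
        "sentry-trace": "0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-1",
    },
}

would render one <meta name="..." content="..." /> tag per entry under the loop variant.
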
@@ -124,16 +124,6 @@ class TestGroupsAPI(APITestCase):
             {"is_superuser": ["User does not have permission to set superuser status to True."]},
         )

-    def test_superuser_no_perm_no_superuser(self):
-        """Test creating a group without superuser permission and without the superuser flag"""
-        assign_perm("authentik_core.add_group", self.login_user)
-        self.client.force_login(self.login_user)
-        res = self.client.post(
-            reverse("authentik_api:group-list"),
-            data={"name": generate_id(), "is_superuser": False},
-        )
-        self.assertEqual(res.status_code, 201)
-
     def test_superuser_update_no_perm(self):
         """Test updating a superuser group without permission"""
         group = Group.objects.create(name=generate_id(), is_superuser=True)

@@ -30,7 +30,6 @@ from structlog.stdlib import get_logger

 from authentik.core.api.used_by import UsedByMixin
 from authentik.core.api.utils import ModelSerializer, PassiveSerializer
-from authentik.core.models import UserTypes
 from authentik.crypto.apps import MANAGED_KEY
 from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg
 from authentik.crypto.models import CertificateKeyPair
@@ -273,12 +272,11 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
     def view_certificate(self, request: Request, pk: str) -> Response:
         """Return the certificate-key pair's certificate and log access"""
         certificate: CertificateKeyPair = self.get_object()
-        if request.user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT:
-            Event.new(  # noqa # nosec
-                EventAction.SECRET_VIEW,
-                secret=certificate,
-                type="certificate",
-            ).from_http(request)
+        Event.new(  # noqa # nosec
+            EventAction.SECRET_VIEW,
+            secret=certificate,
+            type="certificate",
+        ).from_http(request)
         if "download" in request.query_params:
             # Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html
             response = HttpResponse(
@@ -304,12 +302,11 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
     def view_private_key(self, request: Request, pk: str) -> Response:
         """Return the certificate-key pair's private key and log access"""
         certificate: CertificateKeyPair = self.get_object()
-        if request.user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT:
-            Event.new(  # noqa # nosec
-                EventAction.SECRET_VIEW,
-                secret=certificate,
-                type="private_key",
-            ).from_http(request)
+        Event.new(  # noqa # nosec
+            EventAction.SECRET_VIEW,
+            secret=certificate,
+            type="private_key",
+        ).from_http(request)
         if "download" in request.query_params:
             # Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html
             response = HttpResponse(certificate.key_data, content_type="application/x-pem-file")

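Both crypto hunks show the same guard on the base side: secret-view audit events are skipped for internal service accounts, the user type outposts authenticate as, presumably so routine certificate fetches by outposts do not flood the event log. The guard reduces to a small pattern, sketched here with a stand-in logging function rather than authentik's real Event model:

from enum import Enum


class UserTypes(Enum):
    INTERNAL = "internal"
    EXTERNAL = "external"
    SERVICE_ACCOUNT = "service_account"
    INTERNAL_SERVICE_ACCOUNT = "internal_service_account"


def log_secret_view(user_type: UserTypes, secret_name: str, kind: str) -> None:
    """Stand-in for Event.new(...).from_http(request) in the diff."""
    if user_type == UserTypes.INTERNAL_SERVICE_ACCOUNT:
        # Outposts poll certificates constantly; logging every fetch
        # would drown out secret views performed by humans.
        return
    print(f"SECRET_VIEW: {kind} {secret_name!r} by {user_type.value}")


log_secret_view(UserTypes.INTERNAL, "signing-key", "private_key")  # logged
log_secret_view(UserTypes.INTERNAL_SERVICE_ACCOUNT, "signing-key", "private_key")  # skipped
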
@@ -132,14 +132,13 @@ class LicenseKey:
         """Get a summarized version of all (not expired) licenses"""
         total = LicenseKey(get_license_aud(), 0, "Summarized license", 0, 0)
         for lic in License.objects.all():
-            if lic.is_valid:
-                total.internal_users += lic.internal_users
-                total.external_users += lic.external_users
-                total.license_flags.extend(lic.status.license_flags)
+            total.internal_users += lic.internal_users
+            total.external_users += lic.external_users
             exp_ts = int(mktime(lic.expiry.timetuple()))
             if total.exp == 0:
                 total.exp = exp_ts
             total.exp = max(total.exp, exp_ts)
+            total.license_flags.extend(lic.status.license_flags)
         return total

     @staticmethod

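On the base side of the hunk above, only still-valid licenses contribute users and flags, while every license, expired or not, pushes the summarized expiry forward; note that the `if total.exp == 0` branch is subsumed by the `max` for any post-1970 expiry. A small worked example of that aggregation, independent of the Django models:

from dataclasses import dataclass, field
from datetime import datetime, timedelta


@dataclass
class Lic:
    internal_users: int
    external_users: int
    expiry: datetime
    flags: list[str] = field(default_factory=list)

    @property
    def is_valid(self) -> bool:
        return self.expiry >= datetime.now()


def summarize(licenses: list[Lic]) -> tuple[int, int, float, list[str]]:
    internal = external = 0
    exp = 0.0
    flags: list[str] = []
    for lic in licenses:
        if lic.is_valid:  # expired licenses contribute nothing to the totals
            internal += lic.internal_users
            external += lic.external_users
            flags.extend(lic.flags)
        # ...but every license still pushes the summarized expiry forward
        exp = max(exp, lic.expiry.timestamp())
    return internal, external, exp, flags


now = datetime.now()
licenses = [
    Lic(100, 50, now + timedelta(days=30)),
    Lic(100, 50, now - timedelta(days=1)),  # expired: users not counted
]
print(summarize(licenses))  # -> (100, 50, <timestamp 30 days out>, [])
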
@@ -39,10 +39,6 @@ class License(SerializerModel):
     internal_users = models.BigIntegerField()
     external_users = models.BigIntegerField()

-    @property
-    def is_valid(self) -> bool:
-        return self.expiry >= now()
-
     @property
     def serializer(self) -> type[BaseSerializer]:
         from authentik.enterprise.api import LicenseSerializer

@@ -25,7 +25,7 @@ class GoogleWorkspaceGroupClient(
     """Google client for groups"""

     connection_type = GoogleWorkspaceProviderGroup
-    connection_attr = "googleworkspaceprovidergroup_set"
+    connection_type_query = "group"
     can_discover = True

     def __init__(self, provider: GoogleWorkspaceProvider) -> None:

@@ -20,7 +20,7 @@ class GoogleWorkspaceUserClient(GoogleWorkspaceSyncClient[User, GoogleWorkspaceP
     """Sync authentik users into google workspace"""

     connection_type = GoogleWorkspaceProviderUser
-    connection_attr = "googleworkspaceprovideruser_set"
+    connection_type_query = "user"
     can_discover = True

     def __init__(self, provider: GoogleWorkspaceProvider) -> None:

@@ -132,11 +132,7 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider):
         if type == User:
             # Get queryset of all users with consistent ordering
             # according to the provider's settings
-            base = (
-                User.objects.prefetch_related("googleworkspaceprovideruser_set")
-                .all()
-                .exclude_anonymous()
-            )
+            base = User.objects.all().exclude_anonymous()
             if self.exclude_users_service_account:
                 base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude(
                     type=UserTypes.INTERNAL_SERVICE_ACCOUNT
@@ -146,11 +142,7 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider):
             return base.order_by("pk")
         if type == Group:
             # Get queryset of all groups with consistent ordering
-            return (
-                Group.objects.prefetch_related("googleworkspaceprovidergroup_set")
-                .all()
-                .order_by("pk")
-            )
+            return Group.objects.all().order_by("pk")
         raise ValueError(f"Invalid type {type}")

     def google_credentials(self):

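The base side of the provider hunks above (and of the matching Microsoft Entra hunks below) adds `prefetch_related` on the per-provider connection sets, so the sync client can read each object's existing connection without issuing one query per row. A runnable simulation of the difference, with a toy row list standing in for the connection table:

from collections import defaultdict

# Toy "database": (user_id, connection_id) rows, standing in for the
# provider-connection tables named in the hunks above.
ROWS = [(1, "a"), (1, "b"), (2, "c"), (3, "d")]
QUERIES = 0


def fetch_connections(user_ids):
    """Stand-in for one SQL query against the connection table."""
    global QUERIES
    QUERIES += 1
    return [row for row in ROWS if row[0] in user_ids]


users = [1, 2, 3]

# N+1 pattern (plain queryset): one lookup per user during sync.
for user in users:
    fetch_connections([user])
print(QUERIES)  # 3

# prefetch_related pattern: one batched lookup, then in-memory grouping.
QUERIES = 0
by_user = defaultdict(list)
for user_id, conn in fetch_connections(users):
    by_user[user_id].append(conn)
print(QUERIES)  # 1
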
@@ -29,7 +29,7 @@ class MicrosoftEntraGroupClient(
     """Microsoft client for groups"""

     connection_type = MicrosoftEntraProviderGroup
-    connection_attr = "microsoftentraprovidergroup_set"
+    connection_type_query = "group"
     can_discover = True

     def __init__(self, provider: MicrosoftEntraProvider) -> None:

@@ -24,7 +24,7 @@ class MicrosoftEntraUserClient(MicrosoftEntraSyncClient[User, MicrosoftEntraProv
     """Sync authentik users into microsoft entra"""

     connection_type = MicrosoftEntraProviderUser
-    connection_attr = "microsoftentraprovideruser_set"
+    connection_type_query = "user"
     can_discover = True

     def __init__(self, provider: MicrosoftEntraProvider) -> None:

@@ -121,11 +121,7 @@ class MicrosoftEntraProvider(OutgoingSyncProvider, BackchannelProvider):
         if type == User:
             # Get queryset of all users with consistent ordering
             # according to the provider's settings
-            base = (
-                User.objects.prefetch_related("microsoftentraprovideruser_set")
-                .all()
-                .exclude_anonymous()
-            )
+            base = User.objects.all().exclude_anonymous()
             if self.exclude_users_service_account:
                 base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude(
                     type=UserTypes.INTERNAL_SERVICE_ACCOUNT
@@ -135,11 +131,7 @@ class MicrosoftEntraProvider(OutgoingSyncProvider, BackchannelProvider):
             return base.order_by("pk")
         if type == Group:
             # Get queryset of all groups with consistent ordering
-            return (
-                Group.objects.prefetch_related("microsoftentraprovidergroup_set")
-                .all()
-                .order_by("pk")
-            )
+            return Group.objects.all().order_by("pk")
         raise ValueError(f"Invalid type {type}")

     def microsoft_credentials(self):

@@ -19,7 +19,6 @@ TENANT_APPS = [
     "authentik.enterprise.providers.microsoft_entra",
     "authentik.enterprise.providers.ssf",
     "authentik.enterprise.stages.authenticator_endpoint_gdtc",
-    "authentik.enterprise.stages.mtls",
     "authentik.enterprise.stages.source",
 ]

@@ -1,31 +0,0 @@
-"""Mutual TLS Stage API Views"""
-
-from rest_framework.viewsets import ModelViewSet
-
-from authentik.core.api.used_by import UsedByMixin
-from authentik.enterprise.api import EnterpriseRequiredMixin
-from authentik.enterprise.stages.mtls.models import MutualTLSStage
-from authentik.flows.api.stages import StageSerializer
-
-
-class MutualTLSStageSerializer(EnterpriseRequiredMixin, StageSerializer):
-    """MutualTLSStage Serializer"""
-
-    class Meta:
-        model = MutualTLSStage
-        fields = StageSerializer.Meta.fields + [
-            "mode",
-            "certificate_authorities",
-            "cert_attribute",
-            "user_attribute",
-        ]
-
-
-class MutualTLSStageViewSet(UsedByMixin, ModelViewSet):
-    """MutualTLSStage Viewset"""
-
-    queryset = MutualTLSStage.objects.all()
-    serializer_class = MutualTLSStageSerializer
-    filterset_fields = "__all__"
-    ordering = ["name"]
-    search_fields = ["name"]

@@ -1,12 +0,0 @@
-"""authentik stage app config"""
-
-from authentik.enterprise.apps import EnterpriseConfig
-
-
-class AuthentikEnterpriseStageMTLSConfig(EnterpriseConfig):
-    """authentik MTLS stage config"""
-
-    name = "authentik.enterprise.stages.mtls"
-    label = "authentik_stages_mtls"
-    verbose_name = "authentik Enterprise.Stages.MTLS"
-    default = True

@@ -1,68 +0,0 @@
-# Generated by Django 5.1.9 on 2025-05-19 18:29
-
-import django.db.models.deletion
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    initial = True
-
-    dependencies = [
-        ("authentik_crypto", "0004_alter_certificatekeypair_name"),
-        ("authentik_flows", "0027_auto_20231028_1424"),
-    ]
-
-    operations = [
-        migrations.CreateModel(
-            name="MutualTLSStage",
-            fields=[
-                (
-                    "stage_ptr",
-                    models.OneToOneField(
-                        auto_created=True,
-                        on_delete=django.db.models.deletion.CASCADE,
-                        parent_link=True,
-                        primary_key=True,
-                        serialize=False,
-                        to="authentik_flows.stage",
-                    ),
-                ),
-                (
-                    "mode",
-                    models.TextField(choices=[("optional", "Optional"), ("required", "Required")]),
-                ),
-                (
-                    "cert_attribute",
-                    models.TextField(
-                        choices=[
-                            ("subject", "Subject"),
-                            ("common_name", "Common Name"),
-                            ("email", "Email"),
-                        ]
-                    ),
-                ),
-                (
-                    "user_attribute",
-                    models.TextField(choices=[("username", "Username"), ("email", "Email")]),
-                ),
-                (
-                    "certificate_authorities",
-                    models.ManyToManyField(
-                        blank=True,
-                        default=None,
-                        help_text="Configure certificate authorities to validate the certificate against. This option has a higher priority than the `client_certificate` option on `Brand`.",
-                        to="authentik_crypto.certificatekeypair",
-                    ),
-                ),
-            ],
-            options={
-                "verbose_name": "Mutual TLS Stage",
-                "verbose_name_plural": "Mutual TLS Stages",
-                "permissions": [
-                    ("pass_outpost_certificate", "Permissions to pass Certificates for outposts.")
-                ],
-            },
-            bases=("authentik_flows.stage",),
-        ),
-    ]

@@ -1,71 +0,0 @@
-from django.db import models
-from django.utils.translation import gettext_lazy as _
-from rest_framework.serializers import Serializer
-
-from authentik.crypto.models import CertificateKeyPair
-from authentik.flows.models import Stage
-from authentik.flows.stage import StageView
-
-
-class TLSMode(models.TextChoices):
-    """Modes the TLS Stage can operate in"""
-
-    OPTIONAL = "optional"
-    REQUIRED = "required"
-
-
-class CertAttributes(models.TextChoices):
-    """Certificate attribute used for user matching"""
-
-    SUBJECT = "subject"
-    COMMON_NAME = "common_name"
-    EMAIL = "email"
-
-
-class UserAttributes(models.TextChoices):
-    """User attribute for user matching"""
-
-    USERNAME = "username"
-    EMAIL = "email"
-
-
-class MutualTLSStage(Stage):
-    """Authenticate/enroll users using a client-certificate."""
-
-    mode = models.TextField(choices=TLSMode.choices)
-
-    certificate_authorities = models.ManyToManyField(
-        CertificateKeyPair,
-        default=None,
-        blank=True,
-        help_text=_(
-            "Configure certificate authorities to validate the certificate against. "
-            "This option has a higher priority than the `client_certificate` option on `Brand`."
-        ),
-    )
-
-    cert_attribute = models.TextField(choices=CertAttributes.choices)
-    user_attribute = models.TextField(choices=UserAttributes.choices)
-
-    @property
-    def view(self) -> type[StageView]:
-        from authentik.enterprise.stages.mtls.stage import MTLSStageView
-
-        return MTLSStageView
-
-    @property
-    def serializer(self) -> type[Serializer]:
-        from authentik.enterprise.stages.mtls.api import MutualTLSStageSerializer
-
-        return MutualTLSStageSerializer
-
-    @property
-    def component(self) -> str:
-        return "ak-stage-mtls-form"
-
-    class Meta:
-        verbose_name = _("Mutual TLS Stage")
-        verbose_name_plural = _("Mutual TLS Stages")
-        permissions = [
-            ("pass_outpost_certificate", _("Permissions to pass Certificates for outposts.")),
-        ]

@@ -1,230 +0,0 @@
-from binascii import hexlify
-from urllib.parse import unquote_plus
-
-from cryptography.exceptions import InvalidSignature
-from cryptography.hazmat.primitives import hashes
-from cryptography.x509 import (
-    Certificate,
-    NameOID,
-    ObjectIdentifier,
-    UnsupportedGeneralNameType,
-    load_pem_x509_certificate,
-)
-from cryptography.x509.verification import PolicyBuilder, Store, VerificationError
-from django.utils.translation import gettext_lazy as _
-
-from authentik.brands.models import Brand
-from authentik.core.models import User
-from authentik.crypto.models import CertificateKeyPair
-from authentik.enterprise.stages.mtls.models import (
-    CertAttributes,
-    MutualTLSStage,
-    TLSMode,
-    UserAttributes,
-)
-from authentik.flows.challenge import AccessDeniedChallenge
-from authentik.flows.models import FlowDesignation
-from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
-from authentik.flows.stage import ChallengeStageView
-from authentik.root.middleware import ClientIPMiddleware
-from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS
-from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT
-
-# All of these headers must only be accepted from "trusted" reverse proxies
-# See internal/web/proxy.go:39
-HEADER_PROXY_FORWARDED = "X-Forwarded-Client-Cert"
-HEADER_NGINX_FORWARDED = "SSL-Client-Cert"
-HEADER_TRAEFIK_FORWARDED = "X-Forwarded-TLS-Client-Cert"
-HEADER_OUTPOST_FORWARDED = "X-Authentik-Outpost-Certificate"
-
-
-PLAN_CONTEXT_CERTIFICATE = "certificate"
-
-
-class MTLSStageView(ChallengeStageView):
-
-    def __parse_single_cert(self, raw: str | None) -> list[Certificate]:
-        """Helper to parse a single certificate"""
-        if not raw:
-            return []
-        try:
-            cert = load_pem_x509_certificate(unquote_plus(raw).encode())
-            return [cert]
-        except ValueError as exc:
-            self.logger.info("Failed to parse certificate", exc=exc)
-            return []
-
-    def _parse_cert_xfcc(self) -> list[Certificate]:
-        """Parse certificates in the format given to us
-        by the authentik router/envoy"""
-        xfcc_raw = self.request.headers.get(HEADER_PROXY_FORWARDED)
-        if not xfcc_raw:
-            return []
-        certs = []
-        for r_cert in xfcc_raw.split(","):
-            el = r_cert.split(";")
-            raw_cert = {k.split("=")[0]: k.split("=")[1] for k in el}
-            if "Cert" not in raw_cert:
-                continue
-            certs.extend(self.__parse_single_cert(raw_cert["Cert"]))
-        return certs
-
-    def _parse_cert_nginx(self) -> list[Certificate]:
-        """Parse certificates in the format nginx-ingress gives to us"""
-        sslcc_raw = self.request.headers.get(HEADER_NGINX_FORWARDED)
-        return self.__parse_single_cert(sslcc_raw)
-
-    def _parse_cert_traefik(self) -> list[Certificate]:
-        """Parse certificates in the format traefik gives to us"""
-        ftcc_raw = self.request.headers.get(HEADER_TRAEFIK_FORWARDED)
-        return self.__parse_single_cert(ftcc_raw)
-
-    def _parse_cert_outpost(self) -> list[Certificate]:
-        """Parse certificates in the format outposts give to us, and authenticate
-        the outpost to ensure it has permission to do so"""
-        user = ClientIPMiddleware.get_outpost_user(self.request)
-        if not user:
-            return []
-        if not user.has_perm(
-            "pass_outpost_certificate", self.executor.current_stage
-        ) and not user.has_perm("authentik_stages_mtls.pass_outpost_certificate"):
-            return []
-        outpost_raw = self.request.headers.get(HEADER_OUTPOST_FORWARDED)
-        return self.__parse_single_cert(outpost_raw)
-
-    def get_authorities(self) -> list[CertificateKeyPair] | None:
-        # We can't access `certificate_authorities` on `self.executor.current_stage`, as that would
-        # load the certificate into the directly referenced foreign key, which we have to pickle
-        # as part of the flow plan, and cryptography certs can't be pickled
-        stage: MutualTLSStage = (
-            MutualTLSStage.objects.filter(pk=self.executor.current_stage.pk)
-            .prefetch_related("certificate_authorities")
-            .first()
-        )
-        if stage.certificate_authorities.exists():
-            return stage.certificate_authorities.order_by("name")
-        brand: Brand = self.request.brand
-        if brand.client_certificates.exists():
-            return brand.client_certificates.order_by("name")
-        return None
-
-    def validate_cert(self, authorities: list[CertificateKeyPair], certs: list[Certificate]):
-        authorities_cert = [x.certificate for x in authorities]
-        for _cert in certs:
-            try:
-                PolicyBuilder().store(Store(authorities_cert)).build_client_verifier().verify(
-                    _cert, []
-                )
-                return _cert
-            except (
-                InvalidSignature,
-                TypeError,
-                ValueError,
-                VerificationError,
-                UnsupportedGeneralNameType,
-            ) as exc:
-                self.logger.warning("Discarding invalid certificate", cert=_cert, exc=exc)
-                continue
-        return None
-
-    def check_if_user(self, cert: Certificate):
-        stage: MutualTLSStage = self.executor.current_stage
-        cert_attr = None
-        user_attr = None
-        match stage.cert_attribute:
-            case CertAttributes.SUBJECT:
-                cert_attr = cert.subject.rfc4514_string()
-            case CertAttributes.COMMON_NAME:
-                cert_attr = self.get_cert_attribute(cert, NameOID.COMMON_NAME)
-            case CertAttributes.EMAIL:
-                cert_attr = self.get_cert_attribute(cert, NameOID.EMAIL_ADDRESS)
-        match stage.user_attribute:
-            case UserAttributes.USERNAME:
-                user_attr = "username"
-            case UserAttributes.EMAIL:
-                user_attr = "email"
-        if not user_attr or not cert_attr:
-            return None
-        return User.objects.filter(**{user_attr: cert_attr}).first()
-
-    def _cert_to_dict(self, cert: Certificate) -> dict:
-        """Represent a certificate in a dictionary, as certificate objects cannot be pickled"""
-        return {
-            "serial_number": str(cert.serial_number),
-            "subject": cert.subject.rfc4514_string(),
-            "issuer": cert.issuer.rfc4514_string(),
-            "fingerprint_sha256": hexlify(cert.fingerprint(hashes.SHA256()), ":").decode("utf-8"),
-            "fingerprint_sha1": hexlify(cert.fingerprint(hashes.SHA1()), ":").decode(  # nosec
-                "utf-8"
-            ),
-        }
-
-    def auth_user(self, user: User, cert: Certificate):
-        self.executor.plan.context[PLAN_CONTEXT_PENDING_USER] = user
-        self.executor.plan.context.setdefault(PLAN_CONTEXT_METHOD, "mtls")
-        self.executor.plan.context.setdefault(PLAN_CONTEXT_METHOD_ARGS, {})
-        self.executor.plan.context[PLAN_CONTEXT_METHOD_ARGS].update(
-            {"certificate": self._cert_to_dict(cert)}
-        )
-
-    def enroll_prepare_user(self, cert: Certificate):
-        self.executor.plan.context.setdefault(PLAN_CONTEXT_PROMPT, {})
-        self.executor.plan.context[PLAN_CONTEXT_PROMPT].update(
-            {
-                "email": self.get_cert_attribute(cert, NameOID.EMAIL_ADDRESS),
-                "name": self.get_cert_attribute(cert, NameOID.COMMON_NAME),
-            }
-        )
-        self.executor.plan.context[PLAN_CONTEXT_CERTIFICATE] = self._cert_to_dict(cert)
-
-    def get_cert_attribute(self, cert: Certificate, oid: ObjectIdentifier) -> str | None:
-        attr = cert.subject.get_attributes_for_oid(oid)
-        if len(attr) < 1:
-            return None
-        return str(attr[0].value)
-
-    def dispatch(self, request, *args, **kwargs):
-        stage: MutualTLSStage = self.executor.current_stage
-        certs = [
-            *self._parse_cert_xfcc(),
-            *self._parse_cert_nginx(),
-            *self._parse_cert_traefik(),
-            *self._parse_cert_outpost(),
-        ]
-        authorities = self.get_authorities()
-        if not authorities:
-            self.logger.warning("No Certificate authority found")
-            if stage.mode == TLSMode.OPTIONAL:
-                return self.executor.stage_ok()
-            if stage.mode == TLSMode.REQUIRED:
-                return super().dispatch(request, *args, **kwargs)
-        cert = self.validate_cert(authorities, certs)
-        if not cert and stage.mode == TLSMode.REQUIRED:
-            self.logger.warning("Client certificate required but no certificates given")
-            return super().dispatch(
-                request,
-                *args,
-                error_message=_("Certificate required but no certificate was given."),
-                **kwargs,
-            )
-        if not cert and stage.mode == TLSMode.OPTIONAL:
-            self.logger.info("No certificate given, continuing")
-            return self.executor.stage_ok()
-        existing_user = self.check_if_user(cert)
-        if self.executor.flow.designation == FlowDesignation.ENROLLMENT:
-            self.enroll_prepare_user(cert)
-        elif existing_user:
-            self.auth_user(existing_user, cert)
-        else:
-            return super().dispatch(
-                request, *args, error_message=_("No user found for certificate."), **kwargs
-            )
-        return self.executor.stage_ok()
-
-    def get_challenge(self, *args, error_message: str | None = None, **kwargs):
-        return AccessDeniedChallenge(
-            data={
-                "component": "ak-stage-access-denied",
-                "error_message": str(error_message or "Unknown error"),
-            }
-        )
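
The `_parse_cert_xfcc` method in the deleted stage above splits the Envoy-style X-Forwarded-Client-Cert header on commas (one element per certificate) and on semicolons (key=value pairs per element), keeping only the Cert values. A standalone sketch of that parsing, with a made-up header value; real Envoy headers also carry pairs such as By=... and Hash=...:

from urllib.parse import quote_plus, unquote_plus

# Hypothetical XFCC header carrying one URL-encoded PEM certificate.
PEM = "-----BEGIN CERTIFICATE-----\nMIIB...\n-----END CERTIFICATE-----"
xfcc = f"Hash=deadbeef;Cert={quote_plus(PEM)}"


def extract_pems(header: str) -> list[str]:
    """Mirror the split logic of _parse_cert_xfcc, minus the x509 parsing."""
    pems = []
    for element in header.split(","):       # one element per certificate
        pairs = {}
        for pair in element.split(";"):     # key=value pairs per element
            key, _, value = pair.partition("=")
            pairs[key] = value
        if "Cert" in pairs:
            pems.append(unquote_plus(pairs["Cert"]))
    return pems


print(extract_pems(xfcc)[0].startswith("-----BEGIN CERTIFICATE-----"))  # True

Using partition instead of the original's split("=")[1] also tolerates pairs without a value, which would raise an IndexError in the deleted code.
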
@@ -1,31 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIFXDCCA0SgAwIBAgIUBmV7zREyC1SPr72/75/L9zpwV18wDQYJKoZIhvcNAQEL
-BQAwRjEaMBgGA1UEAwwRYXV0aGVudGlrIFRlc3QgQ0ExEjAQBgNVBAoMCWF1dGhl
-bnRpazEUMBIGA1UECwwLU2VsZi1zaWduZWQwHhcNMjUwNDI3MTgzMDUwWhcNMzUw
-MzA3MTgzMDUwWjBGMRowGAYDVQQDDBFhdXRoZW50aWsgVGVzdCBDQTESMBAGA1UE
-CgwJYXV0aGVudGlrMRQwEgYDVQQLDAtTZWxmLXNpZ25lZDCCAiIwDQYJKoZIhvcN
-AQEBBQADggIPADCCAgoCggIBAMc0NxZj7j1mPu0aRToo8oMPdC3T99xgxnqdr18x
-LV4pWyi/YLghgZHqNQY2xNP6JIlSeUZD6KFUYT2sPL4Av/zSg5zO8bl+/lf7ckje
-O1/Bt5A8xtL0CpmpMDGiI6ibdDElaywM6AohisbxrV29pygSKGq2wugF/urqGtE+
-5z4y5Kt6qMdKkd0iXT+WagbQTIUlykFKgB0+qqTLzDl01lVDa/DoLl8Hqp45mVx2
-pqrGsSa3TCErLIv9hUlZklF7A8UV4ZB4JL20UKcP8dKzQClviNie17tpsUpOuy3A
-SQ6+guWTHTLJNCSdLn1xIqc5q+f5wd2dIDf8zXCTHj+Xp0bJE3Vgaq5R31K9+b+1
-2dDWz1KcNJaLEnw2+b0O8M64wTMLxhqOv7QfLUr6Pmg1ZymghjLcZ6bnU9e31Vza
-hlPKhxjqYQUC4Kq+oaYF6qdUeJy+dsYf0iDv5tTC+eReZDWIjxTPrNpwA773ZwT7
-WVmL7ULGpuP2g9rNvFBcZiN+i6d7CUoN+jd/iRdo79lrI0dfXiyy4bYgW/2HeZfF
-HaOsc1xsoqnJdWbWkX/ooyaCjAfm07kS3HiOzz4q3QW4wgGrwV8lEraLPxYYeOQu
-YcGMOM8NfnVkjc8gmyXUxedCje5Vz/Tu5fKrQEInnCmXxVsWbwr/LzEjMKAM/ivY
-0TXxAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0G
-A1UdDgQWBBTa+Ns6QzqlNvnTGszkouQQtZnVJDANBgkqhkiG9w0BAQsFAAOCAgEA
-NpJEDMXjuEIzSzafkxSshvjnt5sMYmzmvjNoRlkxgN2YcWvPoxbalGAYzcpyggT2
-6xZY8R4tvB1oNTCArqwf860kkofUoJCr88D/pU3Cv4JhjCWs4pmXTsvSqlBSlJbo
-+jPBZwbn6it/6jcit6Be3rW2PtHe8tASd9Lf8/2r1ZvupXwPzcR84R4Z10ve2lqV
-xxcWlMmBh51CaYI0b1/WTe9Ua+wgkCVkxbf9zNcDQXjxw2ICWK+nR/4ld4nmqVm2
-C7nhvXwU8FAHl7ZgR2Z3PLrwPuhd+kd6NXQqNkS9A+n+1vSRLbRjmV8pwIPpdPEq
-nslUAGJJBHDUBArxC3gOJSB+WtmaCfzDu2gepMf9Ng1H2ZhwSF/FH3v3fsJqZkzz
-NBstT9KuNGQRYiCmAPJaoVAc9BoLa+BFML1govtWtpdmbFk8PZEcuUsP7iAZqFF1
-uuldPyZ8huGpQSR6Oq2bILRHowfGY0npTZAyxg0Vs8UMy1HTwNOp9OuRtArMZmsJ
-jFIx1QzRf9S1i6bYpOzOudoXj4ARkS1KmVExGjJFcIT0xlFSSERie2fEKSeEYOyG
-G+PA2qRt/F51FGOMm1ZscjPXqk2kt3C4BFbz6Vvxsq7D3lmhvFLn4jVA8+OidsM0
-YUrVMtWET/RkjEIbADbgRXxNUNo+jtQZDU9C1IiAdfk=
------END CERTIFICATE-----

@@ -1,31 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIFWTCCA0GgAwIBAgIUDEnKCSmIXG/akySGes7bhOGrN/8wDQYJKoZIhvcNAQEL
-BQAwRjEaMBgGA1UEAwwRYXV0aGVudGlrIFRlc3QgQ0ExEjAQBgNVBAoMCWF1dGhl
-bnRpazEUMBIGA1UECwwLU2VsZi1zaWduZWQwHhcNMjUwNTE5MTIzODQ2WhcNMjYw
-NTE1MTIzODQ2WjARMQ8wDQYDVQQDDAZjbGllbnQwggIiMA0GCSqGSIb3DQEBAQUA
-A4ICDwAwggIKAoICAQCkPkS1V6l0gj0ulxMznkxkgrw4p9Tjd8teSsGZt02A2Eo6
-7D8FbJ7pp3d5fYW/TWuEKVBLWTID6rijW5EGcdgTM5Jxf/QR+aZTEK6umQxUd4yO
-mOtp+xVS3KlcsSej2dFpeE5h5VkZizHpvh5xkoAP8W5VtQLOVF0hIeumHnJmaeLj
-+mhK9PBFpO7k9SFrYYhd/uLrYbIdANihbIO2Q74rNEJHewhFNM7oNSjjEWzRd/7S
-qNdQij9JGrVG7u8YJJscEQHqyHMYFVCEMjxmsge5BO6Vx5OWmUE3wXPzb5TbyTS4
-+yg88g9rYTUXrzz+poCyKpaur45qBsdw35lJ8nq69VJj2xJLGQDwoTgGSXRuPciC
-3OilQI+Ma+j8qQGJxJ8WJxISlf1cuhp+V4ZUd1lawlM5hAXyXmHRlH4pun4y+g7O
-O34+fE3pK25JjVCicMT/rC2A/sb95j/fHTzzJpbB70U0I50maTcIsOkyw6aiF//E
-0ShTDz14x22SCMolUc6hxTDZvBB6yrcJHd7d9CCnFH2Sgo13QrtNJ/atXgm13HGh
-wBzRwK38XUGl/J4pJaxAupTVCPriStUM3m0EYHNelRRUE91pbyeGT0rvOuv00uLw
-Rj7K7hJZR8avTKWmKrVBVpq+gSojGW1DwBS0NiDNkZs0d/IjB1wkzczEgdZjXwID
-AQABo3QwcjAfBgNVHSMEGDAWgBTa+Ns6QzqlNvnTGszkouQQtZnVJDAdBgNVHSUE
-FjAUBggrBgEFBQcDAgYIKwYBBQUHAwEwEQYDVR0RBAowCIIGY2xpZW50MB0GA1Ud
-DgQWBBT1xg5sXkypRBwvCxBuyfoanaiZ5jANBgkqhkiG9w0BAQsFAAOCAgEAvUAz
-YwIjxY/0KHZDU8owdILVqKChzfLcy9OHNPyEI3TSOI8X6gNtBO+HE6r8aWGcC9vw
-zzeIsNQ3UEjvRWi2r+vUVbiPTbFdZboNDSZv6ZmGHxwd85VsjXRGoXV6koCT/9zi
-9/lCM1DwqwYSwBphMJdRVFRUMluSYk1oHflGeA18xgGuts4eFivJwhabGm1AdVVQ
-/CYvqCuTxd/DCzWZBdyxYpDru64i/kyeJCt1pThKEFDWmpumFdBI4CxJ0OhxVSGp
-dOXzK+Y6ULepxCvi6/OpSog52jQ6PnNd1ghiYtq7yO1T4GQz65M1vtHHVvQ3gfBE
-AuKYQp6io7ypitRx+LpjsBQenyP4FFGfrq7pm90nLluOBOArfSdF0N+CP2wo/YFV
-9BGf89OtvRi3BXCm2NXkE/Sc4We26tY8x7xNLOmNs8YOT0O3r/EQ690W9GIwRMx0
-m0r/RXWn5V3o4Jib9r8eH9NzaDstD8g9dECcGfM4fHoM/DAGFaRrNcjMsS1APP3L
-jp7+BfBSXtrz9V6rVJ3CBLXlLK0AuSm7bqd1MJsGA9uMLpsVZIUA+KawcmPGdPU+
-NxdpBCtzyurQSUyaTLtVqSeP35gMAwaNzUDph8Uh+vHz+kRwgXS19OQvTaud5LJu
-nQe4JNS+u5e2VDEBWUxt8NTpu6eShDN0iIEHtxA=
------END CERTIFICATE-----

@@ -1,228 +0,0 @@
-from unittest.mock import MagicMock, patch
-from urllib.parse import quote_plus
-
-from django.urls import reverse
-from guardian.shortcuts import assign_perm
-
-from authentik.core.models import User
-from authentik.core.tests.utils import (
-    create_test_brand,
-    create_test_cert,
-    create_test_flow,
-    create_test_user,
-)
-from authentik.crypto.models import CertificateKeyPair
-from authentik.enterprise.stages.mtls.models import (
-    CertAttributes,
-    MutualTLSStage,
-    TLSMode,
-    UserAttributes,
-)
-from authentik.enterprise.stages.mtls.stage import PLAN_CONTEXT_CERTIFICATE
-from authentik.flows.models import FlowDesignation, FlowStageBinding
-from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
-from authentik.flows.tests import FlowTestCase
-from authentik.lib.generators import generate_id
-from authentik.lib.tests.utils import load_fixture
-from authentik.outposts.models import Outpost, OutpostType
-from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT
-
-
-class MTLSStageTests(FlowTestCase):
-
-    def setUp(self):
-        super().setUp()
-        self.flow = create_test_flow(FlowDesignation.AUTHENTICATION)
-        self.ca = CertificateKeyPair.objects.create(
-            name=generate_id(),
-            certificate_data=load_fixture("fixtures/ca.pem"),
-        )
-        self.stage = MutualTLSStage.objects.create(
-            name=generate_id(),
-            mode=TLSMode.REQUIRED,
-            cert_attribute=CertAttributes.COMMON_NAME,
-            user_attribute=UserAttributes.USERNAME,
-        )
-
-        self.stage.certificate_authorities.add(self.ca)
-        self.binding = FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=0)
-        self.client_cert = load_fixture("fixtures/cert_client.pem")
-        # User matching the certificate
-        User.objects.filter(username="client").delete()
-        self.cert_user = create_test_user(username="client")
-
-    def test_parse_xfcc(self):
-        """Test authentik Proxy/Envoy's XFCC format"""
-        with self.assertFlowFinishes() as plan:
-            res = self.client.get(
-                reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
-                headers={"X-Forwarded-Client-Cert": f"Cert={quote_plus(self.client_cert)}"},
-            )
-            self.assertEqual(res.status_code, 200)
-            self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
-        self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)
-
-    def test_parse_nginx(self):
-        """Test nginx's format"""
-        with self.assertFlowFinishes() as plan:
-            res = self.client.get(
-                reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
-                headers={"SSL-Client-Cert": quote_plus(self.client_cert)},
-            )
-            self.assertEqual(res.status_code, 200)
-            self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
-        self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)
-
-    def test_parse_traefik(self):
-        """Test traefik's format"""
-        with self.assertFlowFinishes() as plan:
-            res = self.client.get(
-                reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
-                headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
-            )
-            self.assertEqual(res.status_code, 200)
-            self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
-        self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)
-
-    def test_parse_outpost_object(self):
-        """Test the outpost certificate format with an object-level permission"""
-        outpost = Outpost.objects.create(name=generate_id(), type=OutpostType.PROXY)
-        assign_perm("pass_outpost_certificate", outpost.user, self.stage)
-        with patch(
-            "authentik.root.middleware.ClientIPMiddleware.get_outpost_user",
-            MagicMock(return_value=outpost.user),
-        ):
-            with self.assertFlowFinishes() as plan:
-                res = self.client.get(
-                    reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
-                    headers={"X-Authentik-Outpost-Certificate": quote_plus(self.client_cert)},
-                )
-                self.assertEqual(res.status_code, 200)
-                self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
-            self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)
-
-    def test_parse_outpost_global(self):
-        """Test the outpost certificate format with a global permission"""
-        outpost = Outpost.objects.create(name=generate_id(), type=OutpostType.PROXY)
-        assign_perm("authentik_stages_mtls.pass_outpost_certificate", outpost.user)
-        with patch(
-            "authentik.root.middleware.ClientIPMiddleware.get_outpost_user",
-            MagicMock(return_value=outpost.user),
-        ):
-            with self.assertFlowFinishes() as plan:
-                res = self.client.get(
-                    reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
-                    headers={"X-Authentik-Outpost-Certificate": quote_plus(self.client_cert)},
-                )
-                self.assertEqual(res.status_code, 200)
-                self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
-            self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)
-
-    def test_parse_outpost_no_perm(self):
-        """Test the outpost certificate format without permission"""
-        outpost = Outpost.objects.create(name=generate_id(), type=OutpostType.PROXY)
-        with patch(
-            "authentik.root.middleware.ClientIPMiddleware.get_outpost_user",
-            MagicMock(return_value=outpost.user),
-        ):
-            res = self.client.get(
-                reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
-                headers={"X-Authentik-Outpost-Certificate": quote_plus(self.client_cert)},
-            )
-            self.assertEqual(res.status_code, 200)
-            self.assertStageResponse(res, self.flow, component="ak-stage-access-denied")
-
-    def test_invalid_cert(self):
-        """Test invalid certificate"""
-        cert = create_test_cert()
-        with self.assertFlowFinishes() as plan:
-            res = self.client.get(
-                reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
-                headers={"X-Forwarded-TLS-Client-Cert": quote_plus(cert.certificate_data)},
-            )
-            self.assertEqual(res.status_code, 200)
-            self.assertStageResponse(res, self.flow, component="ak-stage-access-denied")
-        self.assertNotIn(PLAN_CONTEXT_PENDING_USER, plan().context)
-
-    def test_auth_no_user(self):
-        """Test auth with no user"""
-        User.objects.filter(username="client").delete()
-        res = self.client.get(
-            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
-            headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
-        )
-        self.assertEqual(res.status_code, 200)
-        self.assertStageResponse(res, self.flow, component="ak-stage-access-denied")
-
-    def test_brand_ca(self):
-        """Test using a CA from the brand"""
-        self.stage.certificate_authorities.clear()
-
-        brand = create_test_brand()
-        brand.client_certificates.add(self.ca)
-        with self.assertFlowFinishes() as plan:
-            res = self.client.get(
-                reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
-                headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
-            )
-            self.assertEqual(res.status_code, 200)
-            self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
-        self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)
-
-    def test_no_ca_optional(self):
-        """Test with no CA set (optional mode)"""
-        self.stage.mode = TLSMode.OPTIONAL
-        self.stage.certificate_authorities.clear()
-        self.stage.save()
-        res = self.client.get(
-            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
-            headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
-        )
-        self.assertEqual(res.status_code, 200)
-        self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
-
-    def test_no_ca_required(self):
-        """Test with no CA set (required mode)"""
-        self.stage.certificate_authorities.clear()
-        self.stage.save()
-        res = self.client.get(
-            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
-            headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
-        )
-        self.assertEqual(res.status_code, 200)
-        self.assertStageResponse(res, self.flow, component="ak-stage-access-denied")
-
-    def test_no_cert_optional(self):
-        """Test with no certificate presented (optional mode)"""
|         self.stage.mode = TLSMode.OPTIONAL |  | ||||||
|         self.stage.save() |  | ||||||
|         res = self.client.get( |  | ||||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(res.status_code, 200) |  | ||||||
|         self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) |  | ||||||
|  |  | ||||||
|     def test_enroll(self): |  | ||||||
|         """Test Enrollment flow""" |  | ||||||
|         self.flow.designation = FlowDesignation.ENROLLMENT |  | ||||||
|         self.flow.save() |  | ||||||
|         with self.assertFlowFinishes() as plan: |  | ||||||
|             res = self.client.get( |  | ||||||
|                 reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}), |  | ||||||
|                 headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)}, |  | ||||||
|             ) |  | ||||||
|             self.assertEqual(res.status_code, 200) |  | ||||||
|             self.assertStageRedirects(res, reverse("authentik_core:root-redirect")) |  | ||||||
|         self.assertEqual(plan().context[PLAN_CONTEXT_PROMPT], {"email": None, "name": "client"}) |  | ||||||
|         self.assertEqual( |  | ||||||
|             plan().context[PLAN_CONTEXT_CERTIFICATE], |  | ||||||
|             { |  | ||||||
|                 "fingerprint_sha1": "52:39:ca:1e:3a:1f:78:3a:9f:26:3b:c2:84:99:48:68:99:99:81:8a", |  | ||||||
|                 "fingerprint_sha256": ( |  | ||||||
|                     "c1:07:8b:7c:e9:02:57:87:1e:92:e5:81:83:21:bc:92:c7:47:65:e3:97:fb:05:97:6f:36:9e:b5:31:77:98:b7" |  | ||||||
|                 ), |  | ||||||
|                 "issuer": "OU=Self-signed,O=authentik,CN=authentik Test CA", |  | ||||||
|                 "serial_number": "70153443448884702681996102271549704759327537151", |  | ||||||
|                 "subject": "CN=client", |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
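Note on the fingerprint assertions above: the colon-separated SHA-1 and SHA-256 values stored in PLAN_CONTEXT_CERTIFICATE can be reproduced from a PEM certificate with the `cryptography` package. A minimal sketch for reference only, not the stage's actual implementation:

    from cryptography import x509
    from cryptography.hazmat.primitives import hashes

    def cert_fingerprints(pem_data: bytes) -> dict[str, str]:
        """Colon-separated SHA-1/SHA-256 fingerprints of a PEM certificate."""
        cert = x509.load_pem_x509_certificate(pem_data)
        return {
            "fingerprint_sha1": cert.fingerprint(hashes.SHA1()).hex(":"),
            "fingerprint_sha256": cert.fingerprint(hashes.SHA256()).hex(":"),
        }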
| @ -1,5 +0,0 @@ | |||||||
| """API URLs""" |  | ||||||
|  |  | ||||||
| from authentik.enterprise.stages.mtls.api import MutualTLSStageViewSet |  | ||||||
|  |  | ||||||
| api_urlpatterns = [("stages/mtls", MutualTLSStageViewSet)] |  | ||||||
| @ -8,7 +8,6 @@ from django.test import TestCase | |||||||
| from django.utils.timezone import now | from django.utils.timezone import now | ||||||
| from rest_framework.exceptions import ValidationError | from rest_framework.exceptions import ValidationError | ||||||
|  |  | ||||||
| from authentik.core.models import User |  | ||||||
| from authentik.enterprise.license import LicenseKey | from authentik.enterprise.license import LicenseKey | ||||||
| from authentik.enterprise.models import ( | from authentik.enterprise.models import ( | ||||||
|     THRESHOLD_READ_ONLY_WEEKS, |     THRESHOLD_READ_ONLY_WEEKS, | ||||||
| @ -72,9 +71,9 @@ class TestEnterpriseLicense(TestCase): | |||||||
|     ) |     ) | ||||||
|     def test_valid_multiple(self): |     def test_valid_multiple(self): | ||||||
|         """Check license verification""" |         """Check license verification""" | ||||||
|         lic = License.objects.create(key=generate_id(), expiry=expiry_valid) |         lic = License.objects.create(key=generate_id()) | ||||||
|         self.assertTrue(lic.status.status().is_valid) |         self.assertTrue(lic.status.status().is_valid) | ||||||
|         lic2 = License.objects.create(key=generate_id(), expiry=expiry_valid) |         lic2 = License.objects.create(key=generate_id()) | ||||||
|         self.assertTrue(lic2.status.status().is_valid) |         self.assertTrue(lic2.status.status().is_valid) | ||||||
|         total = LicenseKey.get_total() |         total = LicenseKey.get_total() | ||||||
|         self.assertEqual(total.internal_users, 200) |         self.assertEqual(total.internal_users, 200) | ||||||
| @ -233,9 +232,7 @@ class TestEnterpriseLicense(TestCase): | |||||||
|     ) |     ) | ||||||
|     def test_expiry_expired(self): |     def test_expiry_expired(self): | ||||||
|         """Check license verification""" |         """Check license verification""" | ||||||
|         User.objects.all().delete() |         License.objects.create(key=generate_id()) | ||||||
|         License.objects.all().delete() |  | ||||||
|         License.objects.create(key=generate_id(), expiry=expiry_expired) |  | ||||||
|         self.assertEqual(LicenseKey.get_total().summary().status, LicenseUsageStatus.EXPIRED) |         self.assertEqual(LicenseKey.get_total().summary().status, LicenseUsageStatus.EXPIRED) | ||||||
|  |  | ||||||
|     @patch( |     @patch( | ||||||
|  | |||||||
| @ -57,7 +57,7 @@ class LogEventSerializer(PassiveSerializer): | |||||||
|  |  | ||||||
|  |  | ||||||
| @contextmanager | @contextmanager | ||||||
| def capture_logs(log_default_output=True) -> Generator[list[LogEvent]]: | def capture_logs(log_default_output=True) -> Generator[list[LogEvent], None, None]: | ||||||
|     """Capture log entries created""" |     """Capture log entries created""" | ||||||
|     logs = [] |     logs = [] | ||||||
|     cap = LogCapture() |     cap = LogCapture() | ||||||
|  | |||||||
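The `capture_logs` annotation change above is purely cosmetic on Python 3.13+: PEP 696 gives `Generator` default values for its send and return type parameters, so `Generator[list[LogEvent]]` and `Generator[list[LogEvent], None, None]` describe the same contract. A minimal sketch, assuming Python 3.13:

    from collections.abc import Generator

    def numbers(limit: int) -> Generator[int]:  # same as Generator[int, None, None]
        """Yield 0..limit-1; the generator neither receives nor returns values."""
        yield from range(limit)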
| @ -1,18 +0,0 @@ | |||||||
| # Generated by Django 5.1.9 on 2025-05-27 12:52 |  | ||||||
|  |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_flows", "0027_auto_20231028_1424"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="flowtoken", |  | ||||||
|             name="revoke_on_execution", |  | ||||||
|             field=models.BooleanField(default=True), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -303,10 +303,9 @@ class FlowToken(Token): | |||||||
|  |  | ||||||
|     flow = models.ForeignKey(Flow, on_delete=models.CASCADE) |     flow = models.ForeignKey(Flow, on_delete=models.CASCADE) | ||||||
|     _plan = models.TextField() |     _plan = models.TextField() | ||||||
|     revoke_on_execution = models.BooleanField(default=True) |  | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     def pickle(plan: "FlowPlan") -> str: |     def pickle(plan) -> str: | ||||||
|         """Pickle into string""" |         """Pickle into string""" | ||||||
|         data = dumps(plan) |         data = dumps(plan) | ||||||
|         return b64encode(data).decode() |         return b64encode(data).decode() | ||||||
|  | |||||||
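`FlowToken.pickle()` above stores a FlowPlan as base64-encoded pickle data in `_plan`. For orientation, the inverse direction would look roughly like the sketch below; the actual deserialization lives elsewhere in authentik:

    from base64 import b64decode
    from pickle import loads  # nosec

    def unpickle_plan(data: str):
        """Reverse of FlowToken.pickle(): base64-decode, then unpickle."""
        return loads(b64decode(data.encode()))  # nosec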
| @ -99,10 +99,9 @@ class ChallengeStageView(StageView): | |||||||
|             self.logger.debug("Got StageInvalidException", exc=exc) |             self.logger.debug("Got StageInvalidException", exc=exc) | ||||||
|             return self.executor.stage_invalid() |             return self.executor.stage_invalid() | ||||||
|         if not challenge.is_valid(): |         if not challenge.is_valid(): | ||||||
|             self.logger.error( |             self.logger.warning( | ||||||
|                 "f(ch): Invalid challenge", |                 "f(ch): Invalid challenge", | ||||||
|                 errors=challenge.errors, |                 errors=challenge.errors, | ||||||
|                 challenge=challenge.data, |  | ||||||
|             ) |             ) | ||||||
|         return HttpChallengeResponse(challenge) |         return HttpChallengeResponse(challenge) | ||||||
|  |  | ||||||
|  | |||||||
| @ -15,7 +15,6 @@ | |||||||
|         {% endblock %} |         {% endblock %} | ||||||
|         <link rel="stylesheet" type="text/css" href="{% static 'dist/sfe/bootstrap.min.css' %}"> |         <link rel="stylesheet" type="text/css" href="{% static 'dist/sfe/bootstrap.min.css' %}"> | ||||||
|         <meta name="sentry-trace" content="{{ sentry_trace }}" /> |         <meta name="sentry-trace" content="{{ sentry_trace }}" /> | ||||||
|         <link rel="prefetch" href="{{ flow_background_url }}" /> |  | ||||||
|         {% include "base/header_js.html" %} |         {% include "base/header_js.html" %} | ||||||
|         <style> |         <style> | ||||||
|           html, |           html, | ||||||
| @ -23,7 +22,7 @@ | |||||||
|             height: 100%; |             height: 100%; | ||||||
|           } |           } | ||||||
|           body { |           body { | ||||||
|             background-image: url("{{ flow_background_url }}"); |             background-image: url("{{ flow.background_url }}"); | ||||||
|             background-repeat: no-repeat; |             background-repeat: no-repeat; | ||||||
|             background-size: cover; |             background-size: cover; | ||||||
|           } |           } | ||||||
|  | |||||||
| @ -5,9 +5,9 @@ | |||||||
|  |  | ||||||
| {% block head_before %} | {% block head_before %} | ||||||
| {{ block.super }} | {{ block.super }} | ||||||
| <link rel="prefetch" href="{{ flow_background_url }}" /> | <link rel="prefetch" href="{{ flow.background_url }}" /> | ||||||
| {% if flow.compatibility_mode and not inspector %} | {% if flow.compatibility_mode and not inspector %} | ||||||
| <script>ShadyDOM = { force: true };</script> | <script>ShadyDOM = { force: !navigator.webdriver };</script> | ||||||
| {% endif %} | {% endif %} | ||||||
| {% include "base/header_js.html" %} | {% include "base/header_js.html" %} | ||||||
| <script> | <script> | ||||||
| @ -21,7 +21,7 @@ window.authentik.flow = { | |||||||
| <script src="{% versioned_script 'dist/flow/FlowInterface-%v.js' %}" type="module"></script> | <script src="{% versioned_script 'dist/flow/FlowInterface-%v.js' %}" type="module"></script> | ||||||
| <style> | <style> | ||||||
| :root { | :root { | ||||||
|     --ak-flow-background: url("{{ flow_background_url }}"); |     --ak-flow-background: url("{{ flow.background_url }}"); | ||||||
| } | } | ||||||
| </style> | </style> | ||||||
| {% endblock %} | {% endblock %} | ||||||
|  | |||||||
| @ -1,10 +1,7 @@ | |||||||
| """Test helpers""" | """Test helpers""" | ||||||
|  |  | ||||||
| from collections.abc import Callable, Generator |  | ||||||
| from contextlib import contextmanager |  | ||||||
| from json import loads | from json import loads | ||||||
| from typing import Any | from typing import Any | ||||||
| from unittest.mock import MagicMock, patch |  | ||||||
|  |  | ||||||
| from django.http.response import HttpResponse | from django.http.response import HttpResponse | ||||||
| from django.urls.base import reverse | from django.urls.base import reverse | ||||||
| @ -12,8 +9,6 @@ from rest_framework.test import APITestCase | |||||||
|  |  | ||||||
| from authentik.core.models import User | from authentik.core.models import User | ||||||
| from authentik.flows.models import Flow | from authentik.flows.models import Flow | ||||||
| from authentik.flows.planner import FlowPlan |  | ||||||
| from authentik.flows.views.executor import SESSION_KEY_PLAN |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class FlowTestCase(APITestCase): | class FlowTestCase(APITestCase): | ||||||
| @ -49,12 +44,3 @@ class FlowTestCase(APITestCase): | |||||||
|     def assertStageRedirects(self, response: HttpResponse, to: str) -> dict[str, Any]: |     def assertStageRedirects(self, response: HttpResponse, to: str) -> dict[str, Any]: | ||||||
|         """Wrapper around assertStageResponse that checks for a redirect""" |         """Wrapper around assertStageResponse that checks for a redirect""" | ||||||
|         return self.assertStageResponse(response, component="xak-flow-redirect", to=to) |         return self.assertStageResponse(response, component="xak-flow-redirect", to=to) | ||||||
|  |  | ||||||
|     @contextmanager |  | ||||||
|     def assertFlowFinishes(self) -> Generator[Callable[[], FlowPlan]]: |  | ||||||
|         """Capture the flow plan before the flow finishes and return it""" |  | ||||||
|         try: |  | ||||||
|             with patch("authentik.flows.views.executor.FlowExecutorView.cancel", MagicMock()): |  | ||||||
|                 yield lambda: self.client.session.get(SESSION_KEY_PLAN) |  | ||||||
|         finally: |  | ||||||
|             pass |  | ||||||
|  | |||||||
| @ -146,8 +146,7 @@ class FlowExecutorView(APIView): | |||||||
|         except (AttributeError, EOFError, ImportError, IndexError) as exc: |         except (AttributeError, EOFError, ImportError, IndexError) as exc: | ||||||
|             LOGGER.warning("f(exec): Failed to restore token plan", exc=exc) |             LOGGER.warning("f(exec): Failed to restore token plan", exc=exc) | ||||||
|         finally: |         finally: | ||||||
|             if token.revoke_on_execution: |             token.delete() | ||||||
|                 token.delete() |  | ||||||
|         if not isinstance(plan, FlowPlan): |         if not isinstance(plan, FlowPlan): | ||||||
|             return None |             return None | ||||||
|         plan.context[PLAN_CONTEXT_IS_RESTORED] = token |         plan.context[PLAN_CONTEXT_IS_RESTORED] = token | ||||||
|  | |||||||
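The `finally` block above is where the new `revoke_on_execution` flag takes effect: tokens with the flag cleared survive plan restoration and remain usable. A reduced stand-in showing just that pattern (the real FlowToken is a full Django model):

    class Token:
        """Illustrative stand-in for authentik's FlowToken."""

        def __init__(self, revoke_on_execution: bool = True) -> None:
            self.revoke_on_execution = revoke_on_execution
            self.deleted = False

        def delete(self) -> None:
            self.deleted = True

    def restore_plan(token: Token) -> None:
        try:
            pass  # plan restoration elided
        finally:
            if token.revoke_on_execution:  # reusable tokens skip deletion
                token.delete()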
| @ -13,9 +13,7 @@ class FlowInterfaceView(InterfaceView): | |||||||
|     """Flow interface""" |     """Flow interface""" | ||||||
|  |  | ||||||
|     def get_context_data(self, **kwargs: Any) -> dict[str, Any]: |     def get_context_data(self, **kwargs: Any) -> dict[str, Any]: | ||||||
|         flow = get_object_or_404(Flow, slug=self.kwargs.get("flow_slug")) |         kwargs["flow"] = get_object_or_404(Flow, slug=self.kwargs.get("flow_slug")) | ||||||
|         kwargs["flow"] = flow |  | ||||||
|         kwargs["flow_background_url"] = flow.background_url(self.request) |  | ||||||
|         kwargs["inspector"] = "inspector" in self.request.GET |         kwargs["inspector"] = "inspector" in self.request.GET | ||||||
|         return super().get_context_data(**kwargs) |         return super().get_context_data(**kwargs) | ||||||
|  |  | ||||||
|  | |||||||
| @ -363,9 +363,6 @@ def django_db_config(config: ConfigLoader | None = None) -> dict: | |||||||
|         pool_options = config.get_dict_from_b64_json("postgresql.pool_options", True) |         pool_options = config.get_dict_from_b64_json("postgresql.pool_options", True) | ||||||
|         if not pool_options: |         if not pool_options: | ||||||
|             pool_options = True |             pool_options = True | ||||||
|     # FIXME: Temporarily force pool to be deactivated. |  | ||||||
|     # See https://github.com/goauthentik/authentik/issues/14320 |  | ||||||
|     pool_options = False |  | ||||||
|  |  | ||||||
|     db = { |     db = { | ||||||
|         "default": { |         "default": { | ||||||
|  | |||||||
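`postgresql.pool_options` above is read via `get_dict_from_b64_json`, i.e. as a base64-encoded JSON object. Producing such a value could look like this (the environment variable name assumes authentik's usual `AUTHENTIK_` double-underscore naming):

    import base64
    import json

    pool_options = {"max_size": 15}
    encoded = base64.b64encode(json.dumps(pool_options).encode()).decode()
    # e.g. AUTHENTIK_POSTGRESQL__POOL_OPTIONS=<encoded>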
| @ -81,6 +81,7 @@ debugger: false | |||||||
|  |  | ||||||
| log_level: info | log_level: info | ||||||
|  |  | ||||||
|  | session_storage: cache | ||||||
| sessions: | sessions: | ||||||
|   unauthenticated_age: days=1 |   unauthenticated_age: days=1 | ||||||
|  |  | ||||||
|  | |||||||
| @ -17,7 +17,7 @@ from ldap3.core.exceptions import LDAPException | |||||||
| from redis.exceptions import ConnectionError as RedisConnectionError | from redis.exceptions import ConnectionError as RedisConnectionError | ||||||
| from redis.exceptions import RedisError, ResponseError | from redis.exceptions import RedisError, ResponseError | ||||||
| from rest_framework.exceptions import APIException | from rest_framework.exceptions import APIException | ||||||
| from sentry_sdk import HttpTransport, get_current_scope | from sentry_sdk import HttpTransport | ||||||
| from sentry_sdk import init as sentry_sdk_init | from sentry_sdk import init as sentry_sdk_init | ||||||
| from sentry_sdk.api import set_tag | from sentry_sdk.api import set_tag | ||||||
| from sentry_sdk.integrations.argv import ArgvIntegration | from sentry_sdk.integrations.argv import ArgvIntegration | ||||||
| @ -27,7 +27,6 @@ from sentry_sdk.integrations.redis import RedisIntegration | |||||||
| from sentry_sdk.integrations.socket import SocketIntegration | from sentry_sdk.integrations.socket import SocketIntegration | ||||||
| from sentry_sdk.integrations.stdlib import StdlibIntegration | from sentry_sdk.integrations.stdlib import StdlibIntegration | ||||||
| from sentry_sdk.integrations.threading import ThreadingIntegration | from sentry_sdk.integrations.threading import ThreadingIntegration | ||||||
| from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME |  | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
| from websockets.exceptions import WebSocketException | from websockets.exceptions import WebSocketException | ||||||
|  |  | ||||||
| @ -96,8 +95,6 @@ def traces_sampler(sampling_context: dict) -> float: | |||||||
|         return 0 |         return 0 | ||||||
|     if _type == "websocket": |     if _type == "websocket": | ||||||
|         return 0 |         return 0 | ||||||
|     if CONFIG.get_bool("debug"): |  | ||||||
|         return 1 |  | ||||||
|     return float(CONFIG.get("error_reporting.sample_rate", 0.1)) |     return float(CONFIG.get("error_reporting.sample_rate", 0.1)) | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -170,14 +167,3 @@ def before_send(event: dict, hint: dict) -> dict | None: | |||||||
|     if settings.DEBUG: |     if settings.DEBUG: | ||||||
|         return None |         return None | ||||||
|     return event |     return event | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_http_meta(): |  | ||||||
|     """Get sentry-related meta key-values""" |  | ||||||
|     scope = get_current_scope() |  | ||||||
|     meta = { |  | ||||||
|         SENTRY_TRACE_HEADER_NAME: scope.get_traceparent() or "", |  | ||||||
|     } |  | ||||||
|     if bag := scope.get_baggage(): |  | ||||||
|         meta[BAGGAGE_HEADER_NAME] = bag.serialize() |  | ||||||
|     return meta |  | ||||||
|  | |||||||
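`get_http_meta()` above collects the `sentry-trace` and `baggage` header values for the current Sentry scope. One plausible consumer, matching the `<meta name="sentry-trace">` tag in the flow templates shown earlier (the helper name here is hypothetical):

    def sentry_template_context() -> dict[str, str]:
        """Expose Sentry trace propagation values to a template context."""
        meta = get_http_meta()
        return {"sentry_trace": meta.get("sentry-trace", "")}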
| @ -59,7 +59,7 @@ class PropertyMappingManager: | |||||||
|         request: HttpRequest | None, |         request: HttpRequest | None, | ||||||
|         return_mapping: bool = False, |         return_mapping: bool = False, | ||||||
|         **kwargs, |         **kwargs, | ||||||
|     ) -> Generator[tuple[dict, PropertyMapping]]: |     ) -> Generator[tuple[dict, PropertyMapping], None]: | ||||||
|         """Iterate over all mappings that were pre-compiled and |         """Iterate over all mappings that were pre-compiled and | ||||||
|         execute all of them with the given context""" |         execute all of them with the given context""" | ||||||
|         if not self.__has_compiled: |         if not self.__has_compiled: | ||||||
|  | |||||||
| @ -23,6 +23,7 @@ if TYPE_CHECKING: | |||||||
|  |  | ||||||
|  |  | ||||||
| class Direction(StrEnum): | class Direction(StrEnum): | ||||||
|  |  | ||||||
|     add = "add" |     add = "add" | ||||||
|     remove = "remove" |     remove = "remove" | ||||||
|  |  | ||||||
| @ -36,16 +37,13 @@ SAFE_METHODS = [ | |||||||
|  |  | ||||||
|  |  | ||||||
| class BaseOutgoingSyncClient[ | class BaseOutgoingSyncClient[ | ||||||
|     TModel: "Model", |     TModel: "Model", TConnection: "Model", TSchema: dict, TProvider: "OutgoingSyncProvider" | ||||||
|     TConnection: "Model", |  | ||||||
|     TSchema: dict, |  | ||||||
|     TProvider: "OutgoingSyncProvider", |  | ||||||
| ]: | ]: | ||||||
|     """Basic Outgoing sync client Client""" |     """Basic Outgoing sync client Client""" | ||||||
|  |  | ||||||
|     provider: TProvider |     provider: TProvider | ||||||
|     connection_type: type[TConnection] |     connection_type: type[TConnection] | ||||||
|     connection_attr: str |     connection_type_query: str | ||||||
|     mapper: PropertyMappingManager |     mapper: PropertyMappingManager | ||||||
|  |  | ||||||
|     can_discover = False |     can_discover = False | ||||||
| @ -65,7 +63,9 @@ class BaseOutgoingSyncClient[ | |||||||
|     def write(self, obj: TModel) -> tuple[TConnection, bool]: |     def write(self, obj: TModel) -> tuple[TConnection, bool]: | ||||||
|         """Write object to destination. Uses self.create and self.update, but |         """Write object to destination. Uses self.create and self.update, but | ||||||
|         can be overridden for further logic""" |         can be overridden for further logic""" | ||||||
|         connection = getattr(obj, self.connection_attr).filter(provider=self.provider).first() |         connection = self.connection_type.objects.filter( | ||||||
|  |             provider=self.provider, **{self.connection_type_query: obj} | ||||||
|  |         ).first() | ||||||
|         try: |         try: | ||||||
|             if not connection: |             if not connection: | ||||||
|                 connection = self.create(obj) |                 connection = self.create(obj) | ||||||
|  | |||||||
| @ -1,7 +1,6 @@ | |||||||
| from collections.abc import Callable | from collections.abc import Callable | ||||||
| from dataclasses import asdict | from dataclasses import asdict | ||||||
|  |  | ||||||
| from celery import group |  | ||||||
| from celery.exceptions import Retry | from celery.exceptions import Retry | ||||||
| from celery.result import allow_join_result | from celery.result import allow_join_result | ||||||
| from django.core.paginator import Paginator | from django.core.paginator import Paginator | ||||||
| @ -83,41 +82,21 @@ class SyncTasks: | |||||||
|                 self.logger.debug("Failed to acquire sync lock, skipping", provider=provider.name) |                 self.logger.debug("Failed to acquire sync lock, skipping", provider=provider.name) | ||||||
|                 return |                 return | ||||||
|             try: |             try: | ||||||
|                 messages.append(_("Syncing users")) |                 for page in users_paginator.page_range: | ||||||
|                 user_results = ( |                     messages.append(_("Syncing page {page} of users".format(page=page))) | ||||||
|                     group( |                     for msg in sync_objects.apply_async( | ||||||
|                         [ |                         args=(class_to_path(User), page, provider_pk), | ||||||
|                             sync_objects.signature( |                         time_limit=PAGE_TIMEOUT, | ||||||
|                                 args=(class_to_path(User), page, provider_pk), |                         soft_time_limit=PAGE_TIMEOUT, | ||||||
|                                 time_limit=PAGE_TIMEOUT, |                     ).get(): | ||||||
|                                 soft_time_limit=PAGE_TIMEOUT, |  | ||||||
|                             ) |  | ||||||
|                             for page in users_paginator.page_range |  | ||||||
|                         ] |  | ||||||
|                     ) |  | ||||||
|                     .apply_async() |  | ||||||
|                     .get() |  | ||||||
|                 ) |  | ||||||
|                 for result in user_results: |  | ||||||
|                     for msg in result: |  | ||||||
|                         messages.append(LogEvent(**msg)) |                         messages.append(LogEvent(**msg)) | ||||||
|                 messages.append(_("Syncing groups")) |                 for page in groups_paginator.page_range: | ||||||
|                 group_results = ( |                     messages.append(_("Syncing page {page} of groups".format(page=page))) | ||||||
|                     group( |                     for msg in sync_objects.apply_async( | ||||||
|                         [ |                         args=(class_to_path(Group), page, provider_pk), | ||||||
|                             sync_objects.signature( |                         time_limit=PAGE_TIMEOUT, | ||||||
|                                 args=(class_to_path(Group), page, provider_pk), |                         soft_time_limit=PAGE_TIMEOUT, | ||||||
|                                 time_limit=PAGE_TIMEOUT, |                     ).get(): | ||||||
|                                 soft_time_limit=PAGE_TIMEOUT, |  | ||||||
|                             ) |  | ||||||
|                             for page in groups_paginator.page_range |  | ||||||
|                         ] |  | ||||||
|                     ) |  | ||||||
|                     .apply_async() |  | ||||||
|                     .get() |  | ||||||
|                 ) |  | ||||||
|                 for result in group_results: |  | ||||||
|                     for msg in result: |  | ||||||
|                         messages.append(LogEvent(**msg)) |                         messages.append(LogEvent(**msg)) | ||||||
|             except TransientSyncException as exc: |             except TransientSyncException as exc: | ||||||
|                 self.logger.warning("transient sync exception", exc=exc) |                 self.logger.warning("transient sync exception", exc=exc) | ||||||
| @ -130,7 +109,7 @@ class SyncTasks: | |||||||
|     def sync_objects( |     def sync_objects( | ||||||
|         self, object_type: str, page: int, provider_pk: int, override_dry_run=False, **filter |         self, object_type: str, page: int, provider_pk: int, override_dry_run=False, **filter | ||||||
|     ): |     ): | ||||||
|         _object_type: type[Model] = path_to_class(object_type) |         _object_type = path_to_class(object_type) | ||||||
|         self.logger = get_logger().bind( |         self.logger = get_logger().bind( | ||||||
|             provider_type=class_to_path(self._provider_model), |             provider_type=class_to_path(self._provider_model), | ||||||
|             provider_pk=provider_pk, |             provider_pk=provider_pk, | ||||||
| @ -153,19 +132,6 @@ class SyncTasks: | |||||||
|             self.logger.debug("starting discover") |             self.logger.debug("starting discover") | ||||||
|             client.discover() |             client.discover() | ||||||
|         self.logger.debug("starting sync for page", page=page) |         self.logger.debug("starting sync for page", page=page) | ||||||
|         messages.append( |  | ||||||
|             asdict( |  | ||||||
|                 LogEvent( |  | ||||||
|                     _( |  | ||||||
|                         "Syncing page {page} of {object_type}".format( |  | ||||||
|                             page=page, object_type=_object_type._meta.verbose_name_plural |  | ||||||
|                         ) |  | ||||||
|                     ), |  | ||||||
|                     log_level="info", |  | ||||||
|                     logger=f"{provider._meta.verbose_name}@{object_type}", |  | ||||||
|                 ) |  | ||||||
|             ) |  | ||||||
|         ) |  | ||||||
|         for obj in paginator.page(page).object_list: |         for obj in paginator.page(page).object_list: | ||||||
|             obj: Model |             obj: Model | ||||||
|             try: |             try: | ||||||
|  | |||||||
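The left column replaces the per-page `apply_async(...).get()` loop with a Celery `group`, so all pages of a sync fan out concurrently and are joined once. A reduced sketch of that pattern with made-up app and task names (running it needs a configured broker and result backend, and joining from inside a task additionally requires `allow_join_result`, as imported above):

    from celery import Celery, group

    app = Celery("sync-demo")

    @app.task
    def sync_page(page: int) -> list[str]:
        return [f"synced page {page}"]

    def sync_all(num_pages: int) -> list[str]:
        """Fan out one task per page, then flatten the per-page messages."""
        results = group(
            sync_page.s(page) for page in range(1, num_pages + 1)
        ).apply_async().get()
        return [msg for page_msgs in results for msg in page_msgs]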
| @ -494,88 +494,86 @@ class TestConfig(TestCase): | |||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     # FIXME: Temporarily force pool to be deactivated. |     def test_db_pool(self): | ||||||
|     # See https://github.com/goauthentik/authentik/issues/14320 |         """Test DB Config with pool""" | ||||||
|     # def test_db_pool(self): |         config = ConfigLoader() | ||||||
|     #     """Test DB Config with pool""" |         config.set("postgresql.host", "foo") | ||||||
|     #     config = ConfigLoader() |         config.set("postgresql.name", "foo") | ||||||
|     #     config.set("postgresql.host", "foo") |         config.set("postgresql.user", "foo") | ||||||
|     #     config.set("postgresql.name", "foo") |         config.set("postgresql.password", "foo") | ||||||
|     #     config.set("postgresql.user", "foo") |         config.set("postgresql.port", "foo") | ||||||
|     #     config.set("postgresql.password", "foo") |         config.set("postgresql.test.name", "foo") | ||||||
|     #     config.set("postgresql.port", "foo") |         config.set("postgresql.use_pool", True) | ||||||
|     #     config.set("postgresql.test.name", "foo") |         conf = django_db_config(config) | ||||||
|     #     config.set("postgresql.use_pool", True) |         self.assertEqual( | ||||||
|     #     conf = django_db_config(config) |             conf, | ||||||
|     #     self.assertEqual( |             { | ||||||
|     #         conf, |                 "default": { | ||||||
|     #         { |                     "ENGINE": "authentik.root.db", | ||||||
|     #             "default": { |                     "HOST": "foo", | ||||||
|     #                 "ENGINE": "authentik.root.db", |                     "NAME": "foo", | ||||||
|     #                 "HOST": "foo", |                     "OPTIONS": { | ||||||
|     #                 "NAME": "foo", |                         "pool": True, | ||||||
|     #                 "OPTIONS": { |                         "sslcert": None, | ||||||
|     #                     "pool": True, |                         "sslkey": None, | ||||||
|     #                     "sslcert": None, |                         "sslmode": None, | ||||||
|     #                     "sslkey": None, |                         "sslrootcert": None, | ||||||
|     #                     "sslmode": None, |                     }, | ||||||
|     #                     "sslrootcert": None, |                     "PASSWORD": "foo", | ||||||
|     #                 }, |                     "PORT": "foo", | ||||||
|     #                 "PASSWORD": "foo", |                     "TEST": {"NAME": "foo"}, | ||||||
|     #                 "PORT": "foo", |                     "USER": "foo", | ||||||
|     #                 "TEST": {"NAME": "foo"}, |                     "CONN_MAX_AGE": 0, | ||||||
|     #                 "USER": "foo", |                     "CONN_HEALTH_CHECKS": False, | ||||||
|     #                 "CONN_MAX_AGE": 0, |                     "DISABLE_SERVER_SIDE_CURSORS": False, | ||||||
|     #                 "CONN_HEALTH_CHECKS": False, |                 } | ||||||
|     #                 "DISABLE_SERVER_SIDE_CURSORS": False, |             }, | ||||||
|     #             } |         ) | ||||||
|     #         }, |  | ||||||
|     #     ) |  | ||||||
|  |  | ||||||
|     # def test_db_pool_options(self): |     def test_db_pool_options(self): | ||||||
|     #     """Test DB Config with pool""" |         """Test DB Config with pool""" | ||||||
|     #     config = ConfigLoader() |         config = ConfigLoader() | ||||||
|     #     config.set("postgresql.host", "foo") |         config.set("postgresql.host", "foo") | ||||||
|     #     config.set("postgresql.name", "foo") |         config.set("postgresql.name", "foo") | ||||||
|     #     config.set("postgresql.user", "foo") |         config.set("postgresql.user", "foo") | ||||||
|     #     config.set("postgresql.password", "foo") |         config.set("postgresql.password", "foo") | ||||||
|     #     config.set("postgresql.port", "foo") |         config.set("postgresql.port", "foo") | ||||||
|     #     config.set("postgresql.test.name", "foo") |         config.set("postgresql.test.name", "foo") | ||||||
|     #     config.set("postgresql.use_pool", True) |         config.set("postgresql.use_pool", True) | ||||||
|     #     config.set( |         config.set( | ||||||
|     #         "postgresql.pool_options", |             "postgresql.pool_options", | ||||||
|     #         base64.b64encode( |             base64.b64encode( | ||||||
|     #             dumps( |                 dumps( | ||||||
|     #                 { |                     { | ||||||
|     #                     "max_size": 15, |                         "max_size": 15, | ||||||
|     #                 } |                     } | ||||||
|     #             ).encode() |                 ).encode() | ||||||
|     #         ).decode(), |             ).decode(), | ||||||
|     #     ) |         ) | ||||||
|     #     conf = django_db_config(config) |         conf = django_db_config(config) | ||||||
|     #     self.assertEqual( |         self.assertEqual( | ||||||
|     #         conf, |             conf, | ||||||
|     #         { |             { | ||||||
|     #             "default": { |                 "default": { | ||||||
|     #                 "ENGINE": "authentik.root.db", |                     "ENGINE": "authentik.root.db", | ||||||
|     #                 "HOST": "foo", |                     "HOST": "foo", | ||||||
|     #                 "NAME": "foo", |                     "NAME": "foo", | ||||||
|     #                 "OPTIONS": { |                     "OPTIONS": { | ||||||
|     #                     "pool": { |                         "pool": { | ||||||
|     #                         "max_size": 15, |                             "max_size": 15, | ||||||
|     #                     }, |                         }, | ||||||
|     #                     "sslcert": None, |                         "sslcert": None, | ||||||
|     #                     "sslkey": None, |                         "sslkey": None, | ||||||
|     #                     "sslmode": None, |                         "sslmode": None, | ||||||
|     #                     "sslrootcert": None, |                         "sslrootcert": None, | ||||||
|     #                 }, |                     }, | ||||||
|     #                 "PASSWORD": "foo", |                     "PASSWORD": "foo", | ||||||
|     #                 "PORT": "foo", |                     "PORT": "foo", | ||||||
|     #                 "TEST": {"NAME": "foo"}, |                     "TEST": {"NAME": "foo"}, | ||||||
|     #                 "USER": "foo", |                     "USER": "foo", | ||||||
|     #                 "CONN_MAX_AGE": 0, |                     "CONN_MAX_AGE": 0, | ||||||
|     #                 "CONN_HEALTH_CHECKS": False, |                     "CONN_HEALTH_CHECKS": False, | ||||||
|     #                 "DISABLE_SERVER_SIDE_CURSORS": False, |                     "DISABLE_SERVER_SIDE_CURSORS": False, | ||||||
|     #             } |                 } | ||||||
|     #         }, |             }, | ||||||
|     #     ) |         ) | ||||||
|  | |||||||
| @ -74,8 +74,6 @@ class OutpostConfig: | |||||||
|     kubernetes_ingress_annotations: dict[str, str] = field(default_factory=dict) |     kubernetes_ingress_annotations: dict[str, str] = field(default_factory=dict) | ||||||
|     kubernetes_ingress_secret_name: str = field(default="authentik-outpost-tls") |     kubernetes_ingress_secret_name: str = field(default="authentik-outpost-tls") | ||||||
|     kubernetes_ingress_class_name: str | None = field(default=None) |     kubernetes_ingress_class_name: str | None = field(default=None) | ||||||
|     kubernetes_httproute_annotations: dict[str, str] = field(default_factory=dict) |  | ||||||
|     kubernetes_httproute_parent_refs: list[dict[str, str]] = field(default_factory=list) |  | ||||||
|     kubernetes_service_type: str = field(default="ClusterIP") |     kubernetes_service_type: str = field(default="ClusterIP") | ||||||
|     kubernetes_disabled_components: list[str] = field(default_factory=list) |     kubernetes_disabled_components: list[str] = field(default_factory=list) | ||||||
|     kubernetes_image_pull_secrets: list[str] = field(default_factory=list) |     kubernetes_image_pull_secrets: list[str] = field(default_factory=list) | ||||||
|  | |||||||
| @ -1,11 +1,9 @@ | |||||||
| """Websocket tests""" | """Websocket tests""" | ||||||
|  |  | ||||||
| from dataclasses import asdict | from dataclasses import asdict | ||||||
| from unittest.mock import patch |  | ||||||
|  |  | ||||||
| from channels.routing import URLRouter | from channels.routing import URLRouter | ||||||
| from channels.testing import WebsocketCommunicator | from channels.testing import WebsocketCommunicator | ||||||
| from django.contrib.contenttypes.models import ContentType |  | ||||||
| from django.test import TransactionTestCase | from django.test import TransactionTestCase | ||||||
|  |  | ||||||
| from authentik import __version__ | from authentik import __version__ | ||||||
| @ -16,12 +14,6 @@ from authentik.providers.proxy.models import ProxyProvider | |||||||
| from authentik.root import websocket | from authentik.root import websocket | ||||||
|  |  | ||||||
|  |  | ||||||
| def patched__get_ct_cached(app_label, codename): |  | ||||||
|     """Caches `ContentType` instances like its `QuerySet` does.""" |  | ||||||
|     return ContentType.objects.get(app_label=app_label, permission__codename=codename) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @patch("guardian.shortcuts._get_ct_cached", patched__get_ct_cached) |  | ||||||
| class TestOutpostWS(TransactionTestCase): | class TestOutpostWS(TransactionTestCase): | ||||||
|     """Websocket tests""" |     """Websocket tests""" | ||||||
|  |  | ||||||
| @ -46,7 +38,6 @@ class TestOutpostWS(TransactionTestCase): | |||||||
|         ) |         ) | ||||||
|         connected, _ = await communicator.connect() |         connected, _ = await communicator.connect() | ||||||
|         self.assertFalse(connected) |         self.assertFalse(connected) | ||||||
|         await communicator.disconnect() |  | ||||||
|  |  | ||||||
|     async def test_auth_valid(self): |     async def test_auth_valid(self): | ||||||
|         """Test auth with token""" |         """Test auth with token""" | ||||||
| @ -57,7 +48,6 @@ class TestOutpostWS(TransactionTestCase): | |||||||
|         ) |         ) | ||||||
|         connected, _ = await communicator.connect() |         connected, _ = await communicator.connect() | ||||||
|         self.assertTrue(connected) |         self.assertTrue(connected) | ||||||
|         await communicator.disconnect() |  | ||||||
|  |  | ||||||
|     async def test_send(self): |     async def test_send(self): | ||||||
|         """Test sending of Hello""" |         """Test sending of Hello""" | ||||||
|  | |||||||
| @ -7,8 +7,10 @@ from django.db import migrations | |||||||
|  |  | ||||||
|  |  | ||||||
| def migrate_search_group(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | def migrate_search_group(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||||
|  |     from authentik.core.models import User | ||||||
|     from django.apps import apps as real_apps |     from django.apps import apps as real_apps | ||||||
|     from django.contrib.auth.management import create_permissions |     from django.contrib.auth.management import create_permissions | ||||||
|  |     from guardian.shortcuts import UserObjectPermission | ||||||
|  |  | ||||||
|     db_alias = schema_editor.connection.alias |     db_alias = schema_editor.connection.alias | ||||||
|  |  | ||||||
|  | |||||||
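`create_permissions`, imported above, is the standard workaround for data migrations that reference Permission rows: Django only creates permissions in a post_migrate signal, so a migration that assigns them must force creation first. The common pattern looks like:

    from django.apps import apps as real_apps
    from django.contrib.auth.management import create_permissions

    def ensure_permissions(db_alias: str) -> None:
        """Force-create app permissions before a data migration uses them."""
        for app_config in real_apps.get_app_configs():
            app_config.models_module = True
            create_permissions(app_config, using=db_alias, verbosity=0)
            app_config.models_module = None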
| @ -50,4 +50,3 @@ AMR_PASSWORD = "pwd"  # nosec | |||||||
| AMR_MFA = "mfa" | AMR_MFA = "mfa" | ||||||
| AMR_OTP = "otp" | AMR_OTP = "otp" | ||||||
| AMR_WEBAUTHN = "user" | AMR_WEBAUTHN = "user" | ||||||
| AMR_SMART_CARD = "sc" |  | ||||||
|  | |||||||
| @ -16,7 +16,6 @@ from authentik.providers.oauth2.constants import ( | |||||||
|     ACR_AUTHENTIK_DEFAULT, |     ACR_AUTHENTIK_DEFAULT, | ||||||
|     AMR_MFA, |     AMR_MFA, | ||||||
|     AMR_PASSWORD, |     AMR_PASSWORD, | ||||||
|     AMR_SMART_CARD, |  | ||||||
|     AMR_WEBAUTHN, |     AMR_WEBAUTHN, | ||||||
| ) | ) | ||||||
| from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS | from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS | ||||||
| @ -140,10 +139,9 @@ class IDToken: | |||||||
|                 amr.append(AMR_PASSWORD) |                 amr.append(AMR_PASSWORD) | ||||||
|             if method == "auth_webauthn_pwl": |             if method == "auth_webauthn_pwl": | ||||||
|                 amr.append(AMR_WEBAUTHN) |                 amr.append(AMR_WEBAUTHN) | ||||||
|             if "certificate" in method_args: |  | ||||||
|                 amr.append(AMR_SMART_CARD) |  | ||||||
|             if "mfa_devices" in method_args: |             if "mfa_devices" in method_args: | ||||||
|                 amr.append(AMR_MFA) |                 if len(amr) > 0: | ||||||
|  |                     amr.append(AMR_MFA) | ||||||
|             if amr: |             if amr: | ||||||
|                 id_token.amr = amr |                 id_token.amr = amr | ||||||
|  |  | ||||||
|  | |||||||
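The left column extends the `amr` claim with `sc` (smart card, per RFC 8176) and records `mfa` unconditionally whenever MFA devices were used, rather than only when another method was already present. The same accumulation, extracted into a standalone function for illustration:

    def build_amr(method: str, method_args: dict) -> list[str]:
        """Accumulate RFC 8176 authentication method reference values."""
        amr: list[str] = []
        if method == "password":
            amr.append("pwd")
        if method == "auth_webauthn_pwl":
            amr.append("user")
        if "certificate" in method_args:
            amr.append("sc")
        if "mfa_devices" in method_args:
            amr.append("mfa")
        return amr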
| @ -1,234 +0,0 @@ | |||||||
| from dataclasses import asdict, dataclass, field |  | ||||||
| from typing import TYPE_CHECKING |  | ||||||
| from urllib.parse import urlparse |  | ||||||
|  |  | ||||||
| from dacite.core import from_dict |  | ||||||
| from kubernetes.client import ApiextensionsV1Api, CustomObjectsApi, V1ObjectMeta |  | ||||||
|  |  | ||||||
| from authentik.outposts.controllers.base import FIELD_MANAGER |  | ||||||
| from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler |  | ||||||
| from authentik.outposts.controllers.k8s.triggers import NeedsUpdate |  | ||||||
| from authentik.outposts.controllers.kubernetes import KubernetesController |  | ||||||
| from authentik.providers.proxy.models import ProxyMode, ProxyProvider |  | ||||||
|  |  | ||||||
| if TYPE_CHECKING: |  | ||||||
|     from authentik.outposts.controllers.kubernetes import KubernetesController |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass(slots=True) |  | ||||||
| class RouteBackendRef: |  | ||||||
|     name: str |  | ||||||
|     port: int |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass(slots=True) |  | ||||||
| class RouteSpecParentRefs: |  | ||||||
|     name: str |  | ||||||
|     sectionName: str | None = None |  | ||||||
|     port: int | None = None |  | ||||||
|     namespace: str | None = None |  | ||||||
|     kind: str = "Gateway" |  | ||||||
|     group: str = "gateway.networking.k8s.io" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass(slots=True) |  | ||||||
| class HTTPRouteSpecRuleMatchPath: |  | ||||||
|     type: str |  | ||||||
|     value: str |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass(slots=True) |  | ||||||
| class HTTPRouteSpecRuleMatchHeader: |  | ||||||
|     name: str |  | ||||||
|     value: str |  | ||||||
|     type: str = "Exact" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass(slots=True) |  | ||||||
| class HTTPRouteSpecRuleMatch: |  | ||||||
|     path: HTTPRouteSpecRuleMatchPath |  | ||||||
|     headers: list[HTTPRouteSpecRuleMatchHeader] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass(slots=True) |  | ||||||
| class HTTPRouteSpecRule: |  | ||||||
|     backendRefs: list[RouteBackendRef] |  | ||||||
|     matches: list[HTTPRouteSpecRuleMatch] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass(slots=True) |  | ||||||
| class HTTPRouteSpec: |  | ||||||
|     parentRefs: list[RouteSpecParentRefs] |  | ||||||
|     hostnames: list[str] |  | ||||||
|     rules: list[HTTPRouteSpecRule] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass(slots=True) |  | ||||||
| class HTTPRouteMetadata: |  | ||||||
|     name: str |  | ||||||
|     namespace: str |  | ||||||
|     annotations: dict = field(default_factory=dict) |  | ||||||
|     labels: dict = field(default_factory=dict) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass(slots=True) |  | ||||||
| class HTTPRoute: |  | ||||||
|     apiVersion: str |  | ||||||
|     kind: str |  | ||||||
|     metadata: HTTPRouteMetadata |  | ||||||
|     spec: HTTPRouteSpec |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class HTTPRouteReconciler(KubernetesObjectReconciler): |  | ||||||
|     """Kubernetes Gateway API HTTPRoute Reconciler""" |  | ||||||
|  |  | ||||||
|     def __init__(self, controller: "KubernetesController") -> None: |  | ||||||
|         super().__init__(controller) |  | ||||||
|         self.api_ex = ApiextensionsV1Api(controller.client) |  | ||||||
|         self.api = CustomObjectsApi(controller.client) |  | ||||||
|         self.crd_group = "gateway.networking.k8s.io" |  | ||||||
|         self.crd_version = "v1" |  | ||||||
|         self.crd_plural = "httproutes" |  | ||||||
|  |  | ||||||
|     @staticmethod |  | ||||||
|     def reconciler_name() -> str: |  | ||||||
|         return "httproute" |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def noop(self) -> bool: |  | ||||||
|         if not self.crd_exists(): |  | ||||||
|             self.logger.debug("CRD doesn't exist") |  | ||||||
|             return True |  | ||||||
|         if not self.controller.outpost.config.kubernetes_httproute_parent_refs: |  | ||||||
|             self.logger.debug("HTTPRoute parentRefs not set.") |  | ||||||
|             return True |  | ||||||
|         return False |  | ||||||
|  |  | ||||||
|     def crd_exists(self) -> bool: |  | ||||||
|         """Check if the Gateway API resources exists""" |  | ||||||
|         return bool( |  | ||||||
|             len( |  | ||||||
|                 self.api_ex.list_custom_resource_definition( |  | ||||||
|                     field_selector=f"metadata.name={self.crd_plural}.{self.crd_group}" |  | ||||||
|                 ).items |  | ||||||
|             ) |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def reconcile(self, current: HTTPRoute, reference: HTTPRoute): |  | ||||||
|         super().reconcile(current, reference) |  | ||||||
|         if current.metadata.annotations != reference.metadata.annotations: |  | ||||||
|             raise NeedsUpdate() |  | ||||||
|         if current.spec.parentRefs != reference.spec.parentRefs: |  | ||||||
|             raise NeedsUpdate() |  | ||||||
|         if current.spec.hostnames != reference.spec.hostnames: |  | ||||||
|             raise NeedsUpdate() |  | ||||||
|         if current.spec.rules != reference.spec.rules: |  | ||||||
|             raise NeedsUpdate() |  | ||||||
|  |  | ||||||
|     def get_object_meta(self, **kwargs) -> V1ObjectMeta: |  | ||||||
|         return super().get_object_meta( |  | ||||||
|             **kwargs, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def get_reference_object(self) -> HTTPRoute: |  | ||||||
|         hostnames = [] |  | ||||||
|         rules = [] |  | ||||||
|  |  | ||||||
|         for proxy_provider in ProxyProvider.objects.filter(outpost__in=[self.controller.outpost]): |  | ||||||
|             proxy_provider: ProxyProvider |  | ||||||
|             external_host_name = urlparse(proxy_provider.external_host) |  | ||||||
|             if proxy_provider.mode in [ProxyMode.FORWARD_SINGLE, ProxyMode.FORWARD_DOMAIN]: |  | ||||||
|                 rule = HTTPRouteSpecRule( |  | ||||||
|                     backendRefs=[RouteBackendRef(name=self.name, port=9000)], |  | ||||||
|                     matches=[ |  | ||||||
|                         HTTPRouteSpecRuleMatch( |  | ||||||
|                             headers=[ |  | ||||||
|                                 HTTPRouteSpecRuleMatchHeader( |  | ||||||
|                                     name="Host", |  | ||||||
|                                     value=external_host_name.hostname, |  | ||||||
|                                 ) |  | ||||||
|                             ], |  | ||||||
|                             path=HTTPRouteSpecRuleMatchPath( |  | ||||||
|                                 type="PathPrefix", value="/outpost.goauthentik.io" |  | ||||||
|                             ), |  | ||||||
|                         ) |  | ||||||
|                     ], |  | ||||||
|                 ) |  | ||||||
|             else: |  | ||||||
|                 rule = HTTPRouteSpecRule( |  | ||||||
|                     backendRefs=[RouteBackendRef(name=self.name, port=9000)], |  | ||||||
|                     matches=[ |  | ||||||
|                         HTTPRouteSpecRuleMatch( |  | ||||||
|                             headers=[ |  | ||||||
|                                 HTTPRouteSpecRuleMatchHeader( |  | ||||||
|                                     name="Host", |  | ||||||
|                                     value=external_host_name.hostname, |  | ||||||
|                                 ) |  | ||||||
|                             ], |  | ||||||
|                             path=HTTPRouteSpecRuleMatchPath(type="PathPrefix", value="/"), |  | ||||||
|                         ) |  | ||||||
|                     ], |  | ||||||
|                 ) |  | ||||||
|             hostnames.append(external_host_name.hostname) |  | ||||||
|             rules.append(rule) |  | ||||||
|  |  | ||||||
|         return HTTPRoute( |  | ||||||
|             apiVersion=f"{self.crd_group}/{self.crd_version}", |  | ||||||
|             kind="HTTPRoute", |  | ||||||
|             metadata=HTTPRouteMetadata( |  | ||||||
|                 name=self.name, |  | ||||||
|                 namespace=self.namespace, |  | ||||||
|                 annotations=self.controller.outpost.config.kubernetes_httproute_annotations, |  | ||||||
|                 labels=self.get_object_meta().labels, |  | ||||||
|             ), |  | ||||||
|             spec=HTTPRouteSpec( |  | ||||||
|                 parentRefs=[ |  | ||||||
|                     from_dict(RouteSpecParentRefs, spec) |  | ||||||
|                     for spec in self.controller.outpost.config.kubernetes_httproute_parent_refs |  | ||||||
|                 ], |  | ||||||
|                 hostnames=hostnames, |  | ||||||
|                 rules=rules, |  | ||||||
|             ), |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def create(self, reference: HTTPRoute): |  | ||||||
|         return self.api.create_namespaced_custom_object( |  | ||||||
|             group=self.crd_group, |  | ||||||
|             version=self.crd_version, |  | ||||||
|             plural=self.crd_plural, |  | ||||||
|             namespace=self.namespace, |  | ||||||
|             body=asdict(reference), |  | ||||||
|             field_manager=FIELD_MANAGER, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def delete(self, reference: HTTPRoute): |  | ||||||
|         return self.api.delete_namespaced_custom_object( |  | ||||||
|             group=self.crd_group, |  | ||||||
|             version=self.crd_version, |  | ||||||
|             plural=self.crd_plural, |  | ||||||
|             namespace=self.namespace, |  | ||||||
|             name=self.name, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def retrieve(self) -> HTTPRoute: |  | ||||||
|         return from_dict( |  | ||||||
|             HTTPRoute, |  | ||||||
|             self.api.get_namespaced_custom_object( |  | ||||||
|                 group=self.crd_group, |  | ||||||
|                 version=self.crd_version, |  | ||||||
|                 plural=self.crd_plural, |  | ||||||
|                 namespace=self.namespace, |  | ||||||
|                 name=self.name, |  | ||||||
|             ), |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def update(self, current: HTTPRoute, reference: HTTPRoute): |  | ||||||
|         return self.api.patch_namespaced_custom_object( |  | ||||||
|             group=self.crd_group, |  | ||||||
|             version=self.crd_version, |  | ||||||
|             plural=self.crd_plural, |  | ||||||
|             namespace=self.namespace, |  | ||||||
|             name=self.name, |  | ||||||
|             body=asdict(reference), |  | ||||||
|             field_manager=FIELD_MANAGER, |  | ||||||
|         ) |  | ||||||
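`retrieve()` and the parentRefs handling above round-trip plain dicts through dacite's `from_dict` into the HTTPRoute dataclasses; this is also how entries in `kubernetes_httproute_parent_refs` become `RouteSpecParentRefs`. A self-contained taste of the pattern, with invented values:

    from dataclasses import dataclass

    from dacite.core import from_dict

    @dataclass(slots=True)
    class ParentRef:
        name: str
        namespace: str | None = None
        kind: str = "Gateway"

    # a plain dict from outpost config becomes a typed dataclass instance
    ref = from_dict(ParentRef, {"name": "shared-gateway", "namespace": "infra"})
    assert ref.kind == "Gateway"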
| @ -47,8 +47,6 @@ class IngressReconciler(KubernetesObjectReconciler[V1Ingress]): | |||||||
|     def reconcile(self, current: V1Ingress, reference: V1Ingress): |     def reconcile(self, current: V1Ingress, reference: V1Ingress): | ||||||
|         super().reconcile(current, reference) |         super().reconcile(current, reference) | ||||||
|         self._check_annotations(current, reference) |         self._check_annotations(current, reference) | ||||||
|         if current.spec.ingress_class_name != reference.spec.ingress_class_name: |  | ||||||
|             raise NeedsUpdate() |  | ||||||
|         # Create a list of all expected host and tls hosts |         # Create a list of all expected host and tls hosts | ||||||
|         expected_hosts = [] |         expected_hosts = [] | ||||||
|         expected_hosts_tls = [] |         expected_hosts_tls = [] | ||||||
|  | |||||||
| @ -3,7 +3,6 @@ | |||||||
| from authentik.outposts.controllers.base import DeploymentPort | from authentik.outposts.controllers.base import DeploymentPort | ||||||
| from authentik.outposts.controllers.kubernetes import KubernetesController | from authentik.outposts.controllers.kubernetes import KubernetesController | ||||||
| from authentik.outposts.models import KubernetesServiceConnection, Outpost | from authentik.outposts.models import KubernetesServiceConnection, Outpost | ||||||
| from authentik.providers.proxy.controllers.k8s.httproute import HTTPRouteReconciler |  | ||||||
| from authentik.providers.proxy.controllers.k8s.ingress import IngressReconciler | from authentik.providers.proxy.controllers.k8s.ingress import IngressReconciler | ||||||
| from authentik.providers.proxy.controllers.k8s.traefik import TraefikMiddlewareReconciler | from authentik.providers.proxy.controllers.k8s.traefik import TraefikMiddlewareReconciler | ||||||
|  |  | ||||||
| @ -19,10 +18,8 @@ class ProxyKubernetesController(KubernetesController): | |||||||
|             DeploymentPort(9443, "https", "tcp"), |             DeploymentPort(9443, "https", "tcp"), | ||||||
|         ] |         ] | ||||||
|         self.reconcilers[IngressReconciler.reconciler_name()] = IngressReconciler |         self.reconcilers[IngressReconciler.reconciler_name()] = IngressReconciler | ||||||
|         self.reconcilers[HTTPRouteReconciler.reconciler_name()] = HTTPRouteReconciler |  | ||||||
|         self.reconcilers[TraefikMiddlewareReconciler.reconciler_name()] = ( |         self.reconcilers[TraefikMiddlewareReconciler.reconciler_name()] = ( | ||||||
|             TraefikMiddlewareReconciler |             TraefikMiddlewareReconciler | ||||||
|         ) |         ) | ||||||
|         self.reconcile_order.append(IngressReconciler.reconciler_name()) |         self.reconcile_order.append(IngressReconciler.reconciler_name()) | ||||||
|         self.reconcile_order.append(HTTPRouteReconciler.reconciler_name()) |  | ||||||
|         self.reconcile_order.append(TraefikMiddlewareReconciler.reconciler_name()) |         self.reconcile_order.append(TraefikMiddlewareReconciler.reconciler_name()) | ||||||
|  | |||||||
| @ -166,6 +166,7 @@ class ConnectionToken(ExpiringModel): | |||||||
|         always_merger.merge(settings, default_settings) |         always_merger.merge(settings, default_settings) | ||||||
|         always_merger.merge(settings, self.endpoint.provider.settings) |         always_merger.merge(settings, self.endpoint.provider.settings) | ||||||
|         always_merger.merge(settings, self.endpoint.settings) |         always_merger.merge(settings, self.endpoint.settings) | ||||||
|  |         always_merger.merge(settings, self.settings) | ||||||
|  |  | ||||||
|         def mapping_evaluator(mappings: QuerySet): |         def mapping_evaluator(mappings: QuerySet): | ||||||
|             for mapping in mappings: |             for mapping in mappings: | ||||||
| @ -190,7 +191,6 @@ class ConnectionToken(ExpiringModel): | |||||||
|         mapping_evaluator( |         mapping_evaluator( | ||||||
|             RACPropertyMapping.objects.filter(endpoint__in=[self.endpoint]).order_by("name") |             RACPropertyMapping.objects.filter(endpoint__in=[self.endpoint]).order_by("name") | ||||||
|         ) |         ) | ||||||
|         always_merger.merge(settings, self.settings) |  | ||||||
|  |  | ||||||
|         settings["drive-path"] = f"/tmp/connection/{self.token}"  # nosec |         settings["drive-path"] = f"/tmp/connection/{self.token}"  # nosec | ||||||
|         settings["create-drive-path"] = "true" |         settings["create-drive-path"] = "true" | ||||||
|  | |||||||
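The move of `always_merger.merge(settings, self.settings)` above changes precedence: deepmerge lets later merges overwrite earlier ones on conflicting keys, so merging the token's settings after the property mappings (left column) gives token-level settings the final say. A reduced illustration:

    from deepmerge import always_merger

    settings: dict = {}
    always_merger.merge(settings, {"level": "provider"})
    always_merger.merge(settings, {"level": "endpoint"})
    always_merger.merge(settings, {"level": "token"})  # the last merge wins
    assert settings["level"] == "token"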
| @ -90,6 +90,23 @@ class TestModels(TransactionTestCase): | |||||||
|                 "resize-method": "display-update", |                 "resize-method": "display-update", | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|  |         # Set settings in token | ||||||
|  |         token.settings = { | ||||||
|  |             "level": "token", | ||||||
|  |         } | ||||||
|  |         token.save() | ||||||
|  |         self.assertEqual( | ||||||
|  |             token.get_settings(), | ||||||
|  |             { | ||||||
|  |                 "hostname": self.endpoint.host.split(":")[0], | ||||||
|  |                 "port": "1324", | ||||||
|  |                 "client-name": f"authentik - {self.user}", | ||||||
|  |                 "drive-path": path, | ||||||
|  |                 "create-drive-path": "true", | ||||||
|  |                 "level": "token", | ||||||
|  |                 "resize-method": "display-update", | ||||||
|  |             }, | ||||||
|  |         ) | ||||||
|         # Set settings in property mapping (provider) |         # Set settings in property mapping (provider) | ||||||
|         mapping = RACPropertyMapping.objects.create( |         mapping = RACPropertyMapping.objects.create( | ||||||
|             name=generate_id(), |             name=generate_id(), | ||||||
| @ -134,22 +151,3 @@ class TestModels(TransactionTestCase): | |||||||
|                 "resize-method": "display-update", |                 "resize-method": "display-update", | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|         # Set settings in token |  | ||||||
|         token.settings = { |  | ||||||
|             "level": "token", |  | ||||||
|         } |  | ||||||
|         token.save() |  | ||||||
|         self.assertEqual( |  | ||||||
|             token.get_settings(), |  | ||||||
|             { |  | ||||||
|                 "hostname": self.endpoint.host.split(":")[0], |  | ||||||
|                 "port": "1324", |  | ||||||
|                 "client-name": f"authentik - {self.user}", |  | ||||||
|                 "drive-path": path, |  | ||||||
|                 "create-drive-path": "true", |  | ||||||
|                 "foo": "true", |  | ||||||
|                 "bar": "6", |  | ||||||
|                 "resize-method": "display-update", |  | ||||||
|                 "level": "token", |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  | |||||||
| @ -34,7 +34,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]): | |||||||
|     """SCIM client for groups""" |     """SCIM client for groups""" | ||||||
|  |  | ||||||
|     connection_type = SCIMProviderGroup |     connection_type = SCIMProviderGroup | ||||||
|     connection_attr = "scimprovidergroup_set" |     connection_type_query = "group" | ||||||
|     mapper: PropertyMappingManager |     mapper: PropertyMappingManager | ||||||
|  |  | ||||||
|     def __init__(self, provider: SCIMProvider): |     def __init__(self, provider: SCIMProvider): | ||||||
| @ -47,16 +47,15 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]): | |||||||
|  |  | ||||||
|     def to_schema(self, obj: Group, connection: SCIMProviderGroup) -> SCIMGroupSchema: |     def to_schema(self, obj: Group, connection: SCIMProviderGroup) -> SCIMGroupSchema: | ||||||
|         """Convert authentik user into SCIM""" |         """Convert authentik user into SCIM""" | ||||||
|         raw_scim_group = super().to_schema(obj, connection) |         raw_scim_group = super().to_schema( | ||||||
|  |             obj, | ||||||
|  |             connection, | ||||||
|  |             schemas=(SCIM_GROUP_SCHEMA,), | ||||||
|  |         ) | ||||||
|         try: |         try: | ||||||
|             scim_group = SCIMGroupSchema.model_validate(delete_none_values(raw_scim_group)) |             scim_group = SCIMGroupSchema.model_validate(delete_none_values(raw_scim_group)) | ||||||
|         except ValidationError as exc: |         except ValidationError as exc: | ||||||
|             raise StopSync(exc, obj) from exc |             raise StopSync(exc, obj) from exc | ||||||
|         if SCIM_GROUP_SCHEMA not in scim_group.schemas: |  | ||||||
|             scim_group.schemas.insert(0, SCIM_GROUP_SCHEMA) |  | ||||||
|         # As this might be unset, we need to tell pydantic it's set to ensure the schemas |  | ||||||
|         # are included, even if it's just the defaults |  | ||||||
|         scim_group.schemas = list(scim_group.schemas) |  | ||||||
|         if not scim_group.externalId: |         if not scim_group.externalId: | ||||||
|             scim_group.externalId = str(obj.pk) |             scim_group.externalId = str(obj.pk) | ||||||
|  |  | ||||||
| @ -200,7 +199,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]): | |||||||
|             chunk_size = len(ops) |             chunk_size = len(ops) | ||||||
|         if len(ops) < 1: |         if len(ops) < 1: | ||||||
|             return |             return | ||||||
|         for chunk in batched(ops, chunk_size, strict=False): |         for chunk in batched(ops, chunk_size): | ||||||
|             req = PatchRequest(Operations=list(chunk)) |             req = PatchRequest(Operations=list(chunk)) | ||||||
|             self._request( |             self._request( | ||||||
|                 "PATCH", |                 "PATCH", | ||||||
|  | |||||||
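On the base side the PATCH operations are chunked with itertools.batched(ops, chunk_size, strict=False); strict=False makes explicit that the final batch may be shorter than chunk_size instead of raising. A small self-contained illustration (batched was added in Python 3.12, its strict= parameter in 3.13):

```
from itertools import batched

ops = [f"op-{i}" for i in range(7)]
for chunk in batched(ops, 3, strict=False):
    # The last chunk may be shorter than 3; strict=True would raise instead.
    print(list(chunk))
# ['op-0', 'op-1', 'op-2'] / ['op-3', 'op-4', 'op-5'] / ['op-6']
```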
| @ -18,7 +18,7 @@ class SCIMUserClient(SCIMClient[User, SCIMProviderUser, SCIMUserSchema]): | |||||||
|     """SCIM client for users""" |     """SCIM client for users""" | ||||||
|  |  | ||||||
|     connection_type = SCIMProviderUser |     connection_type = SCIMProviderUser | ||||||
|     connection_attr = "scimprovideruser_set" |     connection_type_query = "user" | ||||||
|     mapper: PropertyMappingManager |     mapper: PropertyMappingManager | ||||||
|  |  | ||||||
|     def __init__(self, provider: SCIMProvider): |     def __init__(self, provider: SCIMProvider): | ||||||
| @ -31,16 +31,15 @@ class SCIMUserClient(SCIMClient[User, SCIMProviderUser, SCIMUserSchema]): | |||||||
|  |  | ||||||
|     def to_schema(self, obj: User, connection: SCIMProviderUser) -> SCIMUserSchema: |     def to_schema(self, obj: User, connection: SCIMProviderUser) -> SCIMUserSchema: | ||||||
|         """Convert authentik user into SCIM""" |         """Convert authentik user into SCIM""" | ||||||
|         raw_scim_user = super().to_schema(obj, connection) |         raw_scim_user = super().to_schema( | ||||||
|  |             obj, | ||||||
|  |             connection, | ||||||
|  |             schemas=(SCIM_USER_SCHEMA,), | ||||||
|  |         ) | ||||||
|         try: |         try: | ||||||
|             scim_user = SCIMUserSchema.model_validate(delete_none_values(raw_scim_user)) |             scim_user = SCIMUserSchema.model_validate(delete_none_values(raw_scim_user)) | ||||||
|         except ValidationError as exc: |         except ValidationError as exc: | ||||||
|             raise StopSync(exc, obj) from exc |             raise StopSync(exc, obj) from exc | ||||||
|         if SCIM_USER_SCHEMA not in scim_user.schemas: |  | ||||||
|             scim_user.schemas.insert(0, SCIM_USER_SCHEMA) |  | ||||||
|         # As this might be unset, we need to tell pydantic it's set to ensure the schemas |  | ||||||
|         # are included, even if it's just the defaults |  | ||||||
|         scim_user.schemas = list(scim_user.schemas) |  | ||||||
|         if not scim_user.externalId: |         if not scim_user.externalId: | ||||||
|             scim_user.externalId = str(obj.uid) |             scim_user.externalId = str(obj.uid) | ||||||
|         return scim_user |         return scim_user | ||||||
|  | |||||||
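The lines removed on the base side re-assign scim_user.schemas = list(scim_user.schemas) purely to mark the field as explicitly set: in pydantic v2, untouched defaults are dropped by exclude_unset serialization, while an assignment adds the field to model_fields_set. A toy model showing the effect (model name and URN are illustrative):

```
from pydantic import BaseModel


class ToyResource(BaseModel):
    schemas: list[str] = ["urn:example:core"]


toy = ToyResource()
print(toy.model_dump(exclude_unset=True))  # {} - default was never assigned

toy.schemas = list(toy.schemas)  # re-assignment marks the field as set
print(toy.model_dump(exclude_unset=True))  # {'schemas': ['urn:example:core']}
```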
| @ -116,7 +116,7 @@ class SCIMProvider(OutgoingSyncProvider, BackchannelProvider): | |||||||
|         if type == User: |         if type == User: | ||||||
|             # Get queryset of all users with consistent ordering |             # Get queryset of all users with consistent ordering | ||||||
|             # according to the provider's settings |             # according to the provider's settings | ||||||
|             base = User.objects.prefetch_related("scimprovideruser_set").all().exclude_anonymous() |             base = User.objects.all().exclude_anonymous() | ||||||
|             if self.exclude_users_service_account: |             if self.exclude_users_service_account: | ||||||
|                 base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude( |                 base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude( | ||||||
|                     type=UserTypes.INTERNAL_SERVICE_ACCOUNT |                     type=UserTypes.INTERNAL_SERVICE_ACCOUNT | ||||||
| @ -126,7 +126,7 @@ class SCIMProvider(OutgoingSyncProvider, BackchannelProvider): | |||||||
|             return base.order_by("pk") |             return base.order_by("pk") | ||||||
|         if type == Group: |         if type == Group: | ||||||
|             # Get queryset of all groups with consistent ordering |             # Get queryset of all groups with consistent ordering | ||||||
|             return Group.objects.prefetch_related("scimprovidergroup_set").all().order_by("pk") |             return Group.objects.all().order_by("pk") | ||||||
|         raise ValueError(f"Invalid type {type}") |         raise ValueError(f"Invalid type {type}") | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|  | |||||||
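The base side prefetches the reverse connection relation so the sync loop does not issue one extra query per user or group (the classic N+1 pattern). A generic Django sketch of the same idea, using the stock auth models rather than authentik's (assumes a configured Django project):

```
from django.contrib.auth.models import User

# Without prefetch_related, user.groups.all() would hit the database once
# per user; with it, Django bulk-loads the relation in one extra query.
users = User.objects.prefetch_related("groups").order_by("pk")
for user in users:
    names = [group.name for group in user.groups.all()]  # served from cache
    print(user.username, names)
```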
| @ -91,57 +91,6 @@ class SCIMUserTests(TestCase): | |||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     @Mocker() |  | ||||||
|     def test_user_create_custom_schema(self, mock: Mocker): |  | ||||||
|         """Test user creation with custom schema""" |  | ||||||
|         schema = SCIMMapping.objects.create( |  | ||||||
|             name="custom_schema", |  | ||||||
|             expression="""return {"schemas": ["foo"]}""", |  | ||||||
|         ) |  | ||||||
|         self.provider.property_mappings.add(schema) |  | ||||||
|         scim_id = generate_id() |  | ||||||
|         mock.get( |  | ||||||
|             "https://localhost/ServiceProviderConfig", |  | ||||||
|             json={}, |  | ||||||
|         ) |  | ||||||
|         mock.post( |  | ||||||
|             "https://localhost/Users", |  | ||||||
|             json={ |  | ||||||
|                 "id": scim_id, |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|         uid = generate_id() |  | ||||||
|         user = User.objects.create( |  | ||||||
|             username=uid, |  | ||||||
|             name=f"{uid} {uid}", |  | ||||||
|             email=f"{uid}@goauthentik.io", |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(mock.call_count, 2) |  | ||||||
|         self.assertEqual(mock.request_history[0].method, "GET") |  | ||||||
|         self.assertEqual(mock.request_history[1].method, "POST") |  | ||||||
|         self.assertJSONEqual( |  | ||||||
|             mock.request_history[1].body, |  | ||||||
|             { |  | ||||||
|                 "schemas": ["urn:ietf:params:scim:schemas:core:2.0:User", "foo"], |  | ||||||
|                 "active": True, |  | ||||||
|                 "emails": [ |  | ||||||
|                     { |  | ||||||
|                         "primary": True, |  | ||||||
|                         "type": "other", |  | ||||||
|                         "value": f"{uid}@goauthentik.io", |  | ||||||
|                     } |  | ||||||
|                 ], |  | ||||||
|                 "externalId": user.uid, |  | ||||||
|                 "name": { |  | ||||||
|                     "familyName": uid, |  | ||||||
|                     "formatted": f"{uid} {uid}", |  | ||||||
|                     "givenName": uid, |  | ||||||
|                 }, |  | ||||||
|                 "displayName": f"{uid} {uid}", |  | ||||||
|                 "userName": uid, |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     @Mocker() |     @Mocker() | ||||||
|     def test_user_create_different_provider_same_id(self, mock: Mocker): |     def test_user_create_different_provider_same_id(self, mock: Mocker): | ||||||
|         """Test user creation with multiple providers that happen |         """Test user creation with multiple providers that happen | ||||||
| @ -435,7 +384,7 @@ class SCIMUserTests(TestCase): | |||||||
|                 self.assertIn(request.method, SAFE_METHODS) |                 self.assertIn(request.method, SAFE_METHODS) | ||||||
|         task = SystemTask.objects.filter(uid=slugify(self.provider.name)).first() |         task = SystemTask.objects.filter(uid=slugify(self.provider.name)).first() | ||||||
|         self.assertIsNotNone(task) |         self.assertIsNotNone(task) | ||||||
|         drop_msg = task.messages[3] |         drop_msg = task.messages[2] | ||||||
|         self.assertEqual(drop_msg["event"], "Dropping mutating request due to dry run") |         self.assertEqual(drop_msg["event"], "Dropping mutating request due to dry run") | ||||||
|         self.assertIsNotNone(drop_msg["attributes"]["url"]) |         self.assertIsNotNone(drop_msg["attributes"]["url"]) | ||||||
|         self.assertIsNotNone(drop_msg["attributes"]["body"]) |         self.assertIsNotNone(drop_msg["attributes"]["body"]) | ||||||
|  | |||||||
| @ -99,7 +99,6 @@ class RBACPermissionViewSet(ReadOnlyModelViewSet): | |||||||
|     filterset_class = PermissionFilter |     filterset_class = PermissionFilter | ||||||
|     permission_classes = [IsAuthenticated] |     permission_classes = [IsAuthenticated] | ||||||
|     search_fields = [ |     search_fields = [ | ||||||
|         "name", |  | ||||||
|         "codename", |         "codename", | ||||||
|         "content_type__model", |         "content_type__model", | ||||||
|         "content_type__app_label", |         "content_type__app_label", | ||||||
|  | |||||||
| @ -132,7 +132,7 @@ TENANT_CREATION_FAKES_MIGRATIONS = True | |||||||
| TENANT_BASE_SCHEMA = "template" | TENANT_BASE_SCHEMA = "template" | ||||||
| PUBLIC_SCHEMA_NAME = CONFIG.get("postgresql.default_schema") | PUBLIC_SCHEMA_NAME = CONFIG.get("postgresql.default_schema") | ||||||
|  |  | ||||||
| GUARDIAN_MONKEY_PATCH_USER = False | GUARDIAN_MONKEY_PATCH = False | ||||||
|  |  | ||||||
| SPECTACULAR_SETTINGS = { | SPECTACULAR_SETTINGS = { | ||||||
|     "TITLE": "authentik", |     "TITLE": "authentik", | ||||||
| @ -424,7 +424,7 @@ else: | |||||||
|         "BACKEND": "authentik.root.storages.FileStorage", |         "BACKEND": "authentik.root.storages.FileStorage", | ||||||
|         "OPTIONS": { |         "OPTIONS": { | ||||||
|             "location": Path(CONFIG.get("storage.media.file.path")), |             "location": Path(CONFIG.get("storage.media.file.path")), | ||||||
|             "base_url": CONFIG.get("web.path", "/") + "media/", |             "base_url": "/media/", | ||||||
|         }, |         }, | ||||||
|     } |     } | ||||||
|     # Compatibility for apps not supporting top-level STORAGES |     # Compatibility for apps not supporting top-level STORAGES | ||||||
|  | |||||||
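The base side prefixes the media URL with the configured web.path so installations served under a subpath still resolve media files; the head side hardcodes /media/. A tiny illustration of the composed value (paths are made up):

```
def media_base_url(web_path: str) -> str:
    # Mirrors the base side's expression: CONFIG.get("web.path", "/") + "media/"
    return web_path + "media/"


print(media_base_url("/"))            # "/media/"
print(media_base_url("/authentik/"))  # "/authentik/media/"
```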
| @ -31,8 +31,6 @@ class PytestTestRunner(DiscoverRunner):  # pragma: no cover | |||||||
|  |  | ||||||
|         if kwargs.get("randomly_seed", None): |         if kwargs.get("randomly_seed", None): | ||||||
|             self.args.append(f"--randomly-seed={kwargs['randomly_seed']}") |             self.args.append(f"--randomly-seed={kwargs['randomly_seed']}") | ||||||
|         if kwargs.get("no_capture", False): |  | ||||||
|             self.args.append("--capture=no") |  | ||||||
|  |  | ||||||
|         settings.TEST = True |         settings.TEST = True | ||||||
|         settings.CELERY["task_always_eager"] = True |         settings.CELERY["task_always_eager"] = True | ||||||
| @ -66,11 +64,6 @@ class PytestTestRunner(DiscoverRunner):  # pragma: no cover | |||||||
|             "Default behaviour: use random.Random().getrandbits(32), so the seed is" |             "Default behaviour: use random.Random().getrandbits(32), so the seed is" | ||||||
|             "different on each run.", |             "different on each run.", | ||||||
|         ) |         ) | ||||||
|         parser.add_argument( |  | ||||||
|             "--no-capture", |  | ||||||
|             action="store_true", |  | ||||||
|             help="Disable any capturing of stdout/stderr during tests.", |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def run_tests(self, test_labels, extra_tests=None, **kwargs): |     def run_tests(self, test_labels, extra_tests=None, **kwargs): | ||||||
|         """Run pytest and return the exitcode. |         """Run pytest and return the exitcode. | ||||||
|  | |||||||
| @ -317,7 +317,7 @@ class KerberosSource(Source): | |||||||
|                 usage="accept", name=name, store=self.get_gssapi_store() |                 usage="accept", name=name, store=self.get_gssapi_store() | ||||||
|             ) |             ) | ||||||
|         except gssapi.exceptions.GSSError as exc: |         except gssapi.exceptions.GSSError as exc: | ||||||
|             LOGGER.warning("GSSAPI credentials failure", exc=exc) |             LOGGER.warn("GSSAPI credentials failure", exc=exc) | ||||||
|             return None |             return None | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
| @ -103,7 +103,6 @@ class LDAPSourceSerializer(SourceSerializer): | |||||||
|             "user_object_filter", |             "user_object_filter", | ||||||
|             "group_object_filter", |             "group_object_filter", | ||||||
|             "group_membership_field", |             "group_membership_field", | ||||||
|             "user_membership_attribute", |  | ||||||
|             "object_uniqueness_field", |             "object_uniqueness_field", | ||||||
|             "password_login_update_internal_password", |             "password_login_update_internal_password", | ||||||
|             "sync_users", |             "sync_users", | ||||||
| @ -112,7 +111,6 @@ class LDAPSourceSerializer(SourceSerializer): | |||||||
|             "sync_parent_group", |             "sync_parent_group", | ||||||
|             "connectivity", |             "connectivity", | ||||||
|             "lookup_groups_from_user", |             "lookup_groups_from_user", | ||||||
|             "delete_not_found_objects", |  | ||||||
|         ] |         ] | ||||||
|         extra_kwargs = {"bind_password": {"write_only": True}} |         extra_kwargs = {"bind_password": {"write_only": True}} | ||||||
|  |  | ||||||
| @ -140,7 +138,6 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet): | |||||||
|         "user_object_filter", |         "user_object_filter", | ||||||
|         "group_object_filter", |         "group_object_filter", | ||||||
|         "group_membership_field", |         "group_membership_field", | ||||||
|         "user_membership_attribute", |  | ||||||
|         "object_uniqueness_field", |         "object_uniqueness_field", | ||||||
|         "password_login_update_internal_password", |         "password_login_update_internal_password", | ||||||
|         "sync_users", |         "sync_users", | ||||||
| @ -150,7 +147,6 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet): | |||||||
|         "user_property_mappings", |         "user_property_mappings", | ||||||
|         "group_property_mappings", |         "group_property_mappings", | ||||||
|         "lookup_groups_from_user", |         "lookup_groups_from_user", | ||||||
|         "delete_not_found_objects", |  | ||||||
|     ] |     ] | ||||||
|     search_fields = ["name", "slug"] |     search_fields = ["name", "slug"] | ||||||
|     ordering = ["name"] |     ordering = ["name"] | ||||||
|  | |||||||
| @ -1,48 +0,0 @@ | |||||||
| # Generated by Django 5.1.9 on 2025-05-28 08:15 |  | ||||||
|  |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_core", "0048_delete_oldauthenticatedsession_content_type"), |  | ||||||
|         ("authentik_sources_ldap", "0008_groupldapsourceconnection_userldapsourceconnection"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="groupldapsourceconnection", |  | ||||||
|             name="validated_by", |  | ||||||
|             field=models.UUIDField( |  | ||||||
|                 blank=True, |  | ||||||
|                 help_text="Unique ID used while checking if this object still exists in the directory.", |  | ||||||
|                 null=True, |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="ldapsource", |  | ||||||
|             name="delete_not_found_objects", |  | ||||||
|             field=models.BooleanField( |  | ||||||
|                 default=False, |  | ||||||
|                 help_text="Delete authentik users and groups which were previously supplied by this source, but are now missing from it.", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="userldapsourceconnection", |  | ||||||
|             name="validated_by", |  | ||||||
|             field=models.UUIDField( |  | ||||||
|                 blank=True, |  | ||||||
|                 help_text="Unique ID used while checking if this object still exists in the directory.", |  | ||||||
|                 null=True, |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AddIndex( |  | ||||||
|             model_name="groupldapsourceconnection", |  | ||||||
|             index=models.Index(fields=["validated_by"], name="authentik_s_validat_b70447_idx"), |  | ||||||
|         ), |  | ||||||
|         migrations.AddIndex( |  | ||||||
|             model_name="userldapsourceconnection", |  | ||||||
|             index=models.Index(fields=["validated_by"], name="authentik_s_validat_ff2ebc_idx"), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -1,32 +0,0 @@ | |||||||
| # Generated by Django 5.1.9 on 2025-05-29 11:22 |  | ||||||
|  |  | ||||||
| from django.apps.registry import Apps |  | ||||||
| from django.db import migrations, models |  | ||||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def set_user_membership_attribute(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): |  | ||||||
|     LDAPSource = apps.get_model("authentik_sources_ldap", "LDAPSource") |  | ||||||
|     db_alias = schema_editor.connection.alias |  | ||||||
|  |  | ||||||
|     LDAPSource.objects.using(db_alias).filter(group_membership_field="memberUid").all().update( |  | ||||||
|         user_membership_attribute="ldap_uniq" |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_sources_ldap", "0009_groupldapsourceconnection_validated_by_and_more"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="ldapsource", |  | ||||||
|             name="user_membership_attribute", |  | ||||||
|             field=models.TextField( |  | ||||||
|                 default="distinguishedName", |  | ||||||
|                 help_text="Attribute which matches the value of `group_membership_field`.", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.RunPython(set_user_membership_attribute, migrations.RunPython.noop), |  | ||||||
|     ] |  | ||||||
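The removed data migration follows the standard RunPython(forward, RunPython.noop) pattern: the forward function rewrites existing rows through the historical model, and the noop reverse keeps the migration reversible without undoing the data change. A generic sketch of the pattern (app and model names are hypothetical):

```
from django.db import migrations


def forwards(apps, schema_editor):
    # Always use the historical model from `apps`, never a direct import.
    Thing = apps.get_model("myapp", "Thing")
    db_alias = schema_editor.connection.alias
    Thing.objects.using(db_alias).filter(flag=None).update(flag=False)


class Migration(migrations.Migration):
    dependencies = [("myapp", "0001_initial")]
    operations = [migrations.RunPython(forwards, migrations.RunPython.noop)]
```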
| @ -100,10 +100,6 @@ class LDAPSource(Source): | |||||||
|         default="(objectClass=person)", |         default="(objectClass=person)", | ||||||
|         help_text=_("Consider Objects matching this filter to be Users."), |         help_text=_("Consider Objects matching this filter to be Users."), | ||||||
|     ) |     ) | ||||||
|     user_membership_attribute = models.TextField( |  | ||||||
|         default=LDAP_DISTINGUISHED_NAME, |  | ||||||
|         help_text=_("Attribute which matches the value of `group_membership_field`."), |  | ||||||
|     ) |  | ||||||
|     group_membership_field = models.TextField( |     group_membership_field = models.TextField( | ||||||
|         default="member", help_text=_("Field which contains members of a group.") |         default="member", help_text=_("Field which contains members of a group.") | ||||||
|     ) |     ) | ||||||
| @ -141,14 +137,6 @@ class LDAPSource(Source): | |||||||
|         ), |         ), | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|     delete_not_found_objects = models.BooleanField( |  | ||||||
|         default=False, |  | ||||||
|         help_text=_( |  | ||||||
|             "Delete authentik users and groups which were previously supplied by this source, " |  | ||||||
|             "but are now missing from it." |  | ||||||
|         ), |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def component(self) -> str: |     def component(self) -> str: | ||||||
|         return "ak-source-ldap-form" |         return "ak-source-ldap-form" | ||||||
| @ -333,12 +321,6 @@ class LDAPSourcePropertyMapping(PropertyMapping): | |||||||
|  |  | ||||||
|  |  | ||||||
| class UserLDAPSourceConnection(UserSourceConnection): | class UserLDAPSourceConnection(UserSourceConnection): | ||||||
|     validated_by = models.UUIDField( |  | ||||||
|         null=True, |  | ||||||
|         blank=True, |  | ||||||
|         help_text=_("Unique ID used while checking if this object still exists in the directory."), |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def serializer(self) -> type[Serializer]: |     def serializer(self) -> type[Serializer]: | ||||||
|         from authentik.sources.ldap.api import ( |         from authentik.sources.ldap.api import ( | ||||||
| @ -350,18 +332,9 @@ class UserLDAPSourceConnection(UserSourceConnection): | |||||||
|     class Meta: |     class Meta: | ||||||
|         verbose_name = _("User LDAP Source Connection") |         verbose_name = _("User LDAP Source Connection") | ||||||
|         verbose_name_plural = _("User LDAP Source Connections") |         verbose_name_plural = _("User LDAP Source Connections") | ||||||
|         indexes = [ |  | ||||||
|             models.Index(fields=["validated_by"]), |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class GroupLDAPSourceConnection(GroupSourceConnection): | class GroupLDAPSourceConnection(GroupSourceConnection): | ||||||
|     validated_by = models.UUIDField( |  | ||||||
|         null=True, |  | ||||||
|         blank=True, |  | ||||||
|         help_text=_("Unique ID used while checking if this object still exists in the directory."), |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def serializer(self) -> type[Serializer]: |     def serializer(self) -> type[Serializer]: | ||||||
|         from authentik.sources.ldap.api import ( |         from authentik.sources.ldap.api import ( | ||||||
| @ -373,6 +346,3 @@ class GroupLDAPSourceConnection(GroupSourceConnection): | |||||||
|     class Meta: |     class Meta: | ||||||
|         verbose_name = _("Group LDAP Source Connection") |         verbose_name = _("Group LDAP Source Connection") | ||||||
|         verbose_name_plural = _("Group LDAP Source Connections") |         verbose_name_plural = _("Group LDAP Source Connections") | ||||||
|         indexes = [ |  | ||||||
|             models.Index(fields=["validated_by"]), |  | ||||||
|         ] |  | ||||||
|  | |||||||
| @ -9,7 +9,7 @@ from structlog.stdlib import BoundLogger, get_logger | |||||||
| from authentik.core.sources.mapper import SourceMapper | from authentik.core.sources.mapper import SourceMapper | ||||||
| from authentik.lib.config import CONFIG | from authentik.lib.config import CONFIG | ||||||
| from authentik.lib.sync.mapper import PropertyMappingManager | from authentik.lib.sync.mapper import PropertyMappingManager | ||||||
| from authentik.sources.ldap.models import LDAPSource, flatten | from authentik.sources.ldap.models import LDAPSource | ||||||
|  |  | ||||||
|  |  | ||||||
| class BaseLDAPSynchronizer: | class BaseLDAPSynchronizer: | ||||||
| @ -77,16 +77,6 @@ class BaseLDAPSynchronizer: | |||||||
|         """Get objects from LDAP, implemented in subclass""" |         """Get objects from LDAP, implemented in subclass""" | ||||||
|         raise NotImplementedError() |         raise NotImplementedError() | ||||||
|  |  | ||||||
|     def get_attributes(self, object): |  | ||||||
|         if "attributes" not in object: |  | ||||||
|             return |  | ||||||
|         return object.get("attributes", {}) |  | ||||||
|  |  | ||||||
|     def get_identifier(self, attributes: dict): |  | ||||||
|         if not attributes.get(self._source.object_uniqueness_field): |  | ||||||
|             return |  | ||||||
|         return flatten(attributes[self._source.object_uniqueness_field]) |  | ||||||
|  |  | ||||||
|     def search_paginator(  # noqa: PLR0913 |     def search_paginator(  # noqa: PLR0913 | ||||||
|         self, |         self, | ||||||
|         search_base, |         search_base, | ||||||
|  | |||||||
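The base side hoists attribute and identifier extraction into two helpers shared by the user, group, and deletion synchronizers. A condensed sketch of what they do with a raw ldap3 search entry; the entry layout and the flatten stand-in only approximate authentik's helpers:

```
def flatten(value):
    # Stand-in approximating authentik's flatten(): unwrap one-element lists.
    if isinstance(value, list):
        return value[0] if value else None
    return value


def get_attributes(entry: dict) -> dict | None:
    if "attributes" not in entry:
        return None
    return entry.get("attributes", {})


def get_identifier(attributes: dict, uniqueness_field: str = "objectSid"):
    if not attributes.get(uniqueness_field):
        return None
    return flatten(attributes[uniqueness_field])


entry = {
    "dn": "cn=jdoe,ou=users,dc=example,dc=org",
    "attributes": {"objectSid": ["S-1-5-21-1234"], "cn": ["jdoe"]},
}
print(get_identifier(get_attributes(entry)))  # "S-1-5-21-1234"
```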
| @ -1,61 +0,0 @@ | |||||||
| from collections.abc import Generator |  | ||||||
| from itertools import batched |  | ||||||
| from uuid import uuid4 |  | ||||||
|  |  | ||||||
| from ldap3 import SUBTREE |  | ||||||
|  |  | ||||||
| from authentik.core.models import Group |  | ||||||
| from authentik.sources.ldap.models import GroupLDAPSourceConnection |  | ||||||
| from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer |  | ||||||
| from authentik.sources.ldap.sync.forward_delete_users import DELETE_CHUNK_SIZE, UPDATE_CHUNK_SIZE |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class GroupLDAPForwardDeletion(BaseLDAPSynchronizer): |  | ||||||
|     """Delete LDAP Groups from authentik""" |  | ||||||
|  |  | ||||||
|     @staticmethod |  | ||||||
|     def name() -> str: |  | ||||||
|         return "group_deletions" |  | ||||||
|  |  | ||||||
|     def get_objects(self, **kwargs) -> Generator: |  | ||||||
|         if not self._source.sync_groups or not self._source.delete_not_found_objects: |  | ||||||
|             self.message("Group syncing is disabled for this Source") |  | ||||||
|             return iter(()) |  | ||||||
|  |  | ||||||
|         uuid = uuid4() |  | ||||||
|         groups = self._source.connection().extend.standard.paged_search( |  | ||||||
|             search_base=self.base_dn_groups, |  | ||||||
|             search_filter=self._source.group_object_filter, |  | ||||||
|             search_scope=SUBTREE, |  | ||||||
|             attributes=[self._source.object_uniqueness_field], |  | ||||||
|             generator=True, |  | ||||||
|             **kwargs, |  | ||||||
|         ) |  | ||||||
|         for batch in batched(groups, UPDATE_CHUNK_SIZE, strict=False): |  | ||||||
|             identifiers = [] |  | ||||||
|             for group in batch: |  | ||||||
|                 if not (attributes := self.get_attributes(group)): |  | ||||||
|                     continue |  | ||||||
|                 if identifier := self.get_identifier(attributes): |  | ||||||
|                     identifiers.append(identifier) |  | ||||||
|             GroupLDAPSourceConnection.objects.filter(identifier__in=identifiers).update( |  | ||||||
|                 validated_by=uuid |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         return batched( |  | ||||||
|             GroupLDAPSourceConnection.objects.filter(source=self._source) |  | ||||||
|             .exclude(validated_by=uuid) |  | ||||||
|             .values_list("group", flat=True) |  | ||||||
|             .iterator(chunk_size=DELETE_CHUNK_SIZE), |  | ||||||
|             DELETE_CHUNK_SIZE, |  | ||||||
|             strict=False, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def sync(self, group_pks: tuple) -> int: |  | ||||||
|         """Delete authentik groups""" |  | ||||||
|         if not self._source.sync_groups or not self._source.delete_not_found_objects: |  | ||||||
|             self.message("Group syncing is disabled for this Source") |  | ||||||
|             return -1 |  | ||||||
|         self._logger.debug("Deleting groups", group_pks=group_pks) |  | ||||||
|         _, deleted_per_type = Group.objects.filter(pk__in=group_pks).delete() |  | ||||||
|         return deleted_per_type.get(Group._meta.label, 0) |  | ||||||
| @ -1,63 +0,0 @@ | |||||||
| from collections.abc import Generator |  | ||||||
| from itertools import batched |  | ||||||
| from uuid import uuid4 |  | ||||||
|  |  | ||||||
| from ldap3 import SUBTREE |  | ||||||
|  |  | ||||||
| from authentik.core.models import User |  | ||||||
| from authentik.sources.ldap.models import UserLDAPSourceConnection |  | ||||||
| from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer |  | ||||||
|  |  | ||||||
| UPDATE_CHUNK_SIZE = 10_000 |  | ||||||
| DELETE_CHUNK_SIZE = 50 |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class UserLDAPForwardDeletion(BaseLDAPSynchronizer): |  | ||||||
|     """Delete LDAP Users from authentik""" |  | ||||||
|  |  | ||||||
|     @staticmethod |  | ||||||
|     def name() -> str: |  | ||||||
|         return "user_deletions" |  | ||||||
|  |  | ||||||
|     def get_objects(self, **kwargs) -> Generator: |  | ||||||
|         if not self._source.sync_users or not self._source.delete_not_found_objects: |  | ||||||
|             self.message("User syncing is disabled for this Source") |  | ||||||
|             return iter(()) |  | ||||||
|  |  | ||||||
|         uuid = uuid4() |  | ||||||
|         users = self._source.connection().extend.standard.paged_search( |  | ||||||
|             search_base=self.base_dn_users, |  | ||||||
|             search_filter=self._source.user_object_filter, |  | ||||||
|             search_scope=SUBTREE, |  | ||||||
|             attributes=[self._source.object_uniqueness_field], |  | ||||||
|             generator=True, |  | ||||||
|             **kwargs, |  | ||||||
|         ) |  | ||||||
|         for batch in batched(users, UPDATE_CHUNK_SIZE, strict=False): |  | ||||||
|             identifiers = [] |  | ||||||
|             for user in batch: |  | ||||||
|                 if not (attributes := self.get_attributes(user)): |  | ||||||
|                     continue |  | ||||||
|                 if identifier := self.get_identifier(attributes): |  | ||||||
|                     identifiers.append(identifier) |  | ||||||
|             UserLDAPSourceConnection.objects.filter(identifier__in=identifiers).update( |  | ||||||
|                 validated_by=uuid |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         return batched( |  | ||||||
|             UserLDAPSourceConnection.objects.filter(source=self._source) |  | ||||||
|             .exclude(validated_by=uuid) |  | ||||||
|             .values_list("user", flat=True) |  | ||||||
|             .iterator(chunk_size=DELETE_CHUNK_SIZE), |  | ||||||
|             DELETE_CHUNK_SIZE, |  | ||||||
|             strict=False, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def sync(self, user_pks: tuple) -> int: |  | ||||||
|         """Delete authentik users""" |  | ||||||
|         if not self._source.sync_users or not self._source.delete_not_found_objects: |  | ||||||
|             self.message("User syncing is disabled for this Source") |  | ||||||
|             return -1 |  | ||||||
|         self._logger.debug("Deleting users", user_pks=user_pks) |  | ||||||
|         _, deleted_per_type = User.objects.filter(pk__in=user_pks).delete() |  | ||||||
|         return deleted_per_type.get(User._meta.label, 0) |  | ||||||
| @ -58,16 +58,18 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer): | |||||||
|             return -1 |             return -1 | ||||||
|         group_count = 0 |         group_count = 0 | ||||||
|         for group in page_data: |         for group in page_data: | ||||||
|             if (attributes := self.get_attributes(group)) is None: |             if "attributes" not in group: | ||||||
|                 continue |                 continue | ||||||
|  |             attributes = group.get("attributes", {}) | ||||||
|             group_dn = flatten(flatten(group.get("entryDN", group.get("dn")))) |             group_dn = flatten(flatten(group.get("entryDN", group.get("dn")))) | ||||||
|             if not (uniq := self.get_identifier(attributes)): |             if not attributes.get(self._source.object_uniqueness_field): | ||||||
|                 self.message( |                 self.message( | ||||||
|                     f"Uniqueness field not found/not set in attributes: '{group_dn}'", |                     f"Uniqueness field not found/not set in attributes: '{group_dn}'", | ||||||
|                     attributes=attributes.keys(), |                     attributes=attributes.keys(), | ||||||
|                     dn=group_dn, |                     dn=group_dn, | ||||||
|                 ) |                 ) | ||||||
|                 continue |                 continue | ||||||
|  |             uniq = flatten(attributes[self._source.object_uniqueness_field]) | ||||||
|             try: |             try: | ||||||
|                 defaults = { |                 defaults = { | ||||||
|                     k: flatten(v) |                     k: flatten(v) | ||||||
|  | |||||||
| @ -63,19 +63,25 @@ class MembershipLDAPSynchronizer(BaseLDAPSynchronizer): | |||||||
|                     group_member_dn = group_member.get("dn", {}) |                     group_member_dn = group_member.get("dn", {}) | ||||||
|                     members.append(group_member_dn) |                     members.append(group_member_dn) | ||||||
|             else: |             else: | ||||||
|                 if (attributes := self.get_attributes(group)) is None: |                 if "attributes" not in group: | ||||||
|                     continue |                     continue | ||||||
|                 members = attributes.get(self._source.group_membership_field, []) |                 members = group.get("attributes", {}).get(self._source.group_membership_field, []) | ||||||
|  |  | ||||||
|             ak_group = self.get_group(group) |             ak_group = self.get_group(group) | ||||||
|             if not ak_group: |             if not ak_group: | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|  |             membership_mapping_attribute = LDAP_DISTINGUISHED_NAME | ||||||
|  |             if self._source.group_membership_field == "memberUid": | ||||||
|  |                 # If memberships are based on the posixGroup's 'memberUid' | ||||||
|  |                 # attribute, we use the RDN instead of the full DN to look up members. | ||||||
|  |                 membership_mapping_attribute = LDAP_UNIQUENESS | ||||||
|  |  | ||||||
|             users = User.objects.filter( |             users = User.objects.filter( | ||||||
|                 Q(**{f"attributes__{self._source.user_membership_attribute}__in": members}) |                 Q(**{f"attributes__{membership_mapping_attribute}__in": members}) | ||||||
|                 | Q( |                 | Q( | ||||||
|                     **{ |                     **{ | ||||||
|                         f"attributes__{self._source.user_membership_attribute}__isnull": True, |                         f"attributes__{membership_mapping_attribute}__isnull": True, | ||||||
|                         "ak_groups__in": [ak_group], |                         "ak_groups__in": [ak_group], | ||||||
|                     } |                     } | ||||||
|                 ) |                 ) | ||||||
|  | |||||||
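The head side hardcodes the lookup switch: groupOfNames stores full DNs in member, while posixGroup stores bare uid values in memberUid, so matching members against the DN attribute can never succeed and the uniqueness attribute (ldap_uniq) must be used instead; the base side makes this configurable via user_membership_attribute. A toy illustration of the mismatch:

```
member_dn = "cn=jdoe,ou=users,dc=example,dc=org"  # groupOfNames "member" value
member_uid = "jdoe"                               # posixGroup "memberUid" value

user_attrs = {"distinguishedName": member_dn, "ldap_uniq": member_uid}

print(user_attrs["distinguishedName"] in [member_uid])  # False: DN vs bare uid
print(user_attrs["ldap_uniq"] in [member_uid])          # True: uid matches
```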
| @ -60,16 +60,18 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer): | |||||||
|             return -1 |             return -1 | ||||||
|         user_count = 0 |         user_count = 0 | ||||||
|         for user in page_data: |         for user in page_data: | ||||||
|             if (attributes := self.get_attributes(user)) is None: |             if "attributes" not in user: | ||||||
|                 continue |                 continue | ||||||
|  |             attributes = user.get("attributes", {}) | ||||||
|             user_dn = flatten(user.get("entryDN", user.get("dn"))) |             user_dn = flatten(user.get("entryDN", user.get("dn"))) | ||||||
|             if not (uniq := self.get_identifier(attributes)): |             if not attributes.get(self._source.object_uniqueness_field): | ||||||
|                 self.message( |                 self.message( | ||||||
|                     f"Uniqueness field not found/not set in attributes: '{user_dn}'", |                     f"Uniqueness field not found/not set in attributes: '{user_dn}'", | ||||||
|                     attributes=attributes.keys(), |                     attributes=attributes.keys(), | ||||||
|                     dn=user_dn, |                     dn=user_dn, | ||||||
|                 ) |                 ) | ||||||
|                 continue |                 continue | ||||||
|  |             uniq = flatten(attributes[self._source.object_uniqueness_field]) | ||||||
|             try: |             try: | ||||||
|                 defaults = { |                 defaults = { | ||||||
|                     k: flatten(v) |                     k: flatten(v) | ||||||
|  | |||||||
| @ -17,8 +17,6 @@ from authentik.lib.utils.reflection import class_to_path, path_to_class | |||||||
| from authentik.root.celery import CELERY_APP | from authentik.root.celery import CELERY_APP | ||||||
| from authentik.sources.ldap.models import LDAPSource | from authentik.sources.ldap.models import LDAPSource | ||||||
| from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer | from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer | ||||||
| from authentik.sources.ldap.sync.forward_delete_groups import GroupLDAPForwardDeletion |  | ||||||
| from authentik.sources.ldap.sync.forward_delete_users import UserLDAPForwardDeletion |  | ||||||
| from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer | from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer | ||||||
| from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer | from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer | ||||||
| from authentik.sources.ldap.sync.users import UserLDAPSynchronizer | from authentik.sources.ldap.sync.users import UserLDAPSynchronizer | ||||||
| @ -54,11 +52,11 @@ def ldap_connectivity_check(pk: str | None = None): | |||||||
|  |  | ||||||
|  |  | ||||||
| @CELERY_APP.task( | @CELERY_APP.task( | ||||||
|     # We multiply the configured timeout hours by 3.5 as we run user and |     # We multiply the configured timeout hours by 2.5 as we run user and | ||||||
|     # group in parallel and then membership, then deletions, so 3x is to cover the serial tasks, |     # group in parallel and then membership, so 2x is to cover the serial tasks, | ||||||
|     # and 0.5x on top of that to give some more leeway |     # and 0.5x on top of that to give some more leeway | ||||||
|     soft_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 3.5, |     soft_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 2.5, | ||||||
|     task_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 3.5, |     task_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 2.5, | ||||||
| ) | ) | ||||||
| def ldap_sync_single(source_pk: str): | def ldap_sync_single(source_pk: str): | ||||||
|     """Sync a single source""" |     """Sync a single source""" | ||||||
| @ -81,25 +79,6 @@ def ldap_sync_single(source_pk: str): | |||||||
|             group( |             group( | ||||||
|                 ldap_sync_paginator(source, MembershipLDAPSynchronizer), |                 ldap_sync_paginator(source, MembershipLDAPSynchronizer), | ||||||
|             ), |             ), | ||||||
|             # Finally, deletions. What we'd really like to do here is something like |  | ||||||
|             # ``` |  | ||||||
|             # user_identifiers = <ldap query> |  | ||||||
|             # User.objects.exclude( |  | ||||||
|             #     usersourceconnection__identifier__in=user_uniqueness_identifiers, |  | ||||||
|             # ).delete() |  | ||||||
|             # ``` |  | ||||||
|             # This runs into performance issues in large installations. So instead we spread the |  | ||||||
|             # work out into three steps: |  | ||||||
|             # 1. Get every object from the LDAP source. |  | ||||||
|             # 2. Mark every object as "safe" in the database. This is quick, but any error could |  | ||||||
|             #    mean deleting users which should not be deleted, so we do it immediately, in |  | ||||||
|             #    large chunks, and only queue the deletion step afterwards. |  | ||||||
|             # 3. Delete every unmarked item. This is slow, so we spread it over many tasks in |  | ||||||
|             #    small chunks. |  | ||||||
|             group( |  | ||||||
|                 ldap_sync_paginator(source, UserLDAPForwardDeletion) |  | ||||||
|                 + ldap_sync_paginator(source, GroupLDAPForwardDeletion), |  | ||||||
|             ), |  | ||||||
|         ) |         ) | ||||||
|         task() |         task() | ||||||
|  |  | ||||||
|  | |||||||
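The comment removed above documents a mark-and-sweep workaround for the slow single-query delete. A minimal pure-Python sketch of its three steps (fetch identifiers, mark survivors with a fresh UUID in large chunks, delete the unmarked in small chunks), with an in-memory dict standing in for the source-connection table:

```
from itertools import batched
from uuid import uuid4

# identifier -> last validation mark (stand-in for source-connection rows)
connections = {f"user-{i}": None for i in range(5)}
present_in_ldap = ["user-0", "user-2", "user-4"]  # step 1: paged LDAP search

run_id = uuid4()
for chunk in batched(present_in_ldap, 2):  # step 2: mark survivors in bulk
    for identifier in chunk:
        connections[identifier] = run_id

stale = [ident for ident, mark in connections.items() if mark != run_id]
print(stale)  # step 3: delete these in small chunks -> ['user-1', 'user-3']
```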
| @ -2,33 +2,6 @@ | |||||||
|  |  | ||||||
| from ldap3 import MOCK_SYNC, OFFLINE_SLAPD_2_4, Connection, Server | from ldap3 import MOCK_SYNC, OFFLINE_SLAPD_2_4, Connection, Server | ||||||
|  |  | ||||||
| # The mock modifies these in place, so each value is defined as its own string |  | ||||||
| user_in_slapd_dn = "cn=user_in_slapd_cn,ou=users,dc=goauthentik,dc=io" |  | ||||||
| user_in_slapd_cn = "user_in_slapd_cn" |  | ||||||
| user_in_slapd_uid = "user_in_slapd_uid" |  | ||||||
| user_in_slapd_object_class = "person" |  | ||||||
| user_in_slapd = { |  | ||||||
|     "dn": user_in_slapd_dn, |  | ||||||
|     "attributes": { |  | ||||||
|         "cn": user_in_slapd_cn, |  | ||||||
|         "uid": user_in_slapd_uid, |  | ||||||
|         "objectClass": user_in_slapd_object_class, |  | ||||||
|     }, |  | ||||||
| } |  | ||||||
| group_in_slapd_dn = "cn=user_in_slapd_cn,ou=groups,dc=goauthentik,dc=io" |  | ||||||
| group_in_slapd_cn = "group_in_slapd_cn" |  | ||||||
| group_in_slapd_uid = "group_in_slapd_uid" |  | ||||||
| group_in_slapd_object_class = "groupOfNames" |  | ||||||
| group_in_slapd = { |  | ||||||
|     "dn": group_in_slapd_dn, |  | ||||||
|     "attributes": { |  | ||||||
|         "cn": group_in_slapd_cn, |  | ||||||
|         "uid": group_in_slapd_uid, |  | ||||||
|         "objectClass": group_in_slapd_object_class, |  | ||||||
|         "member": [user_in_slapd["dn"]], |  | ||||||
|     }, |  | ||||||
| } |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def mock_slapd_connection(password: str) -> Connection: | def mock_slapd_connection(password: str) -> Connection: | ||||||
|     """Create mock SLAPD connection""" |     """Create mock SLAPD connection""" | ||||||
| @ -123,14 +96,5 @@ def mock_slapd_connection(password: str) -> Connection: | |||||||
|             "objectClass": "posixAccount", |             "objectClass": "posixAccount", | ||||||
|         }, |         }, | ||||||
|     ) |     ) | ||||||
|     # Known user and group |  | ||||||
|     connection.strategy.add_entry( |  | ||||||
|         user_in_slapd["dn"], |  | ||||||
|         user_in_slapd["attributes"], |  | ||||||
|     ) |  | ||||||
|     connection.strategy.add_entry( |  | ||||||
|         group_in_slapd["dn"], |  | ||||||
|         group_in_slapd["attributes"], |  | ||||||
|     ) |  | ||||||
|     connection.bind() |     connection.bind() | ||||||
|     return connection |     return connection | ||||||
|  | |||||||
Some files were not shown because too many files have changed in this diff