Compare commits

39 Commits

version-20...benchmarks
| SHA1 |
|---|
| cde4e395e9 |
| d19c692f81 |
| d5d2be5672 |
| 8597db59f5 |
| 74fb9492bc |
| defbafb55e |
| e2ed7391bc |
| 8dcd0dcaa9 |
| 18eee1b722 |
| d0f6c815c3 |
| b13eba3b0a |
| 77fe4e9fe2 |
| 71fe8b4fb3 |
| b14cb832b2 |
| 24b5296d88 |
| 41b7e50bc6 |
| 6b750d7c59 |
| d268c28934 |
| 688404b6a5 |
| cbd2425a5f |
| 877c264d59 |
| 2575b540fa |
| 0e0b76a62e |
| 6d625fd1d7 |
| bd0630e300 |
| ffb7d44024 |
| 7589b11f98 |
| ad21dfa2bc |
| 95692f5a7c |
| 1f4ed1defa |
| 334b183465 |
| 1f789dd4c5 |
| 057e5747c9 |
| 8717a3aaab |
| 527173236a |
| 3e6eb6f248 |
| 6babf0f1c4 |
| ca7cc30504 |
| a7cb808cad |
| @ -1,5 +1,5 @@ | |||||||
| [bumpversion] | [bumpversion] | ||||||
| current_version = 2024.10.5 | current_version = 2024.2.2 | ||||||
| tag = True | tag = True | ||||||
| commit = True | commit = True | ||||||
| parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? | parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? | ||||||
| @ -17,14 +17,10 @@ optional_value = final | |||||||
|  |  | ||||||
| [bumpversion:file:pyproject.toml] | [bumpversion:file:pyproject.toml] | ||||||
|  |  | ||||||
| [bumpversion:file:package.json] |  | ||||||
|  |  | ||||||
| [bumpversion:file:docker-compose.yml] | [bumpversion:file:docker-compose.yml] | ||||||
|  |  | ||||||
| [bumpversion:file:schema.yml] | [bumpversion:file:schema.yml] | ||||||
|  |  | ||||||
| [bumpversion:file:blueprints/schema.json] |  | ||||||
|  |  | ||||||
| [bumpversion:file:authentik/__init__.py] | [bumpversion:file:authentik/__init__.py] | ||||||
|  |  | ||||||
| [bumpversion:file:internal/constants/constants.go] | [bumpversion:file:internal/constants/constants.go] | ||||||
|  | |||||||
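For reference, a minimal Python sketch of how a `parse` pattern like the one in the `.bumpversion.cfg` hunk above splits a version string into its named groups (the doubled backslash in the config is assumed to read back as a single `\d`):

```python
import re

# Same named groups as the bumpversion parse expression above;
# an optional "-rc1"-style suffix is captured by rc_t / rc_n.
VERSION_RE = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?"
)

print(VERSION_RE.match("2024.2.2").groupdict())
# {'major': '2024', 'minor': '2', 'patch': '2', 'rc_t': None, 'rc_n': None}
print(VERSION_RE.match("2024.10.5-rc1").groupdict())
# {'major': '2024', 'minor': '10', 'patch': '5', 'rc_t': 'rc', 'rc_n': '1'}
```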
| @ -54,7 +54,6 @@ runs: | |||||||
|             authentik: |             authentik: | ||||||
|                 outposts: |                 outposts: | ||||||
|                     container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s |                     container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s | ||||||
|             global: |  | ||||||
|             image: |             image: | ||||||
|                 repository: ghcr.io/goauthentik/dev-server |                 repository: ghcr.io/goauthentik/dev-server | ||||||
|                 tag: ${{ inputs.tag }} |                 tag: ${{ inputs.tag }} | ||||||
| @ -66,7 +65,6 @@ runs: | |||||||
|             authentik: |             authentik: | ||||||
|                 outposts: |                 outposts: | ||||||
|                     container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s |                     container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s | ||||||
|             global: |  | ||||||
|             image: |             image: | ||||||
|                 repository: ghcr.io/goauthentik/dev-server |                 repository: ghcr.io/goauthentik/dev-server | ||||||
|                 tag: ${{ inputs.tag }}-arm64 |                 tag: ${{ inputs.tag }}-arm64 | ||||||
|  | |||||||
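The `%(type)s` and `%(build_hash)s` placeholders in the `container_image_base` value above look like Python %-style interpolation; a purely illustrative rendering (the values used here are hypothetical) would be:

```python
# Illustrative only: shows what the placeholder syntax produces when
# rendered with Python %-style string interpolation.
template = "ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s"
print(template % {"type": "proxy", "build_hash": "abc1234"})
# ghcr.io/goauthentik/dev-proxy:gh-abc1234
```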
| @ -29,15 +29,9 @@ outputs: | |||||||
|   imageTags: |   imageTags: | ||||||
|     description: "Docker image tags" |     description: "Docker image tags" | ||||||
|     value: ${{ steps.ev.outputs.imageTags }} |     value: ${{ steps.ev.outputs.imageTags }} | ||||||
|   attestImageNames: |  | ||||||
|     description: "Docker image names used for attestation" |  | ||||||
|     value: ${{ steps.ev.outputs.attestImageNames }} |  | ||||||
|   imageMainTag: |   imageMainTag: | ||||||
|     description: "Docker image main tag" |     description: "Docker image main tag" | ||||||
|     value: ${{ steps.ev.outputs.imageMainTag }} |     value: ${{ steps.ev.outputs.imageMainTag }} | ||||||
|   imageMainName: |  | ||||||
|     description: "Docker image main name" |  | ||||||
|     value: ${{ steps.ev.outputs.imageMainName }} |  | ||||||
|  |  | ||||||
| runs: | runs: | ||||||
|   using: "composite" |   using: "composite" | ||||||
|  | |||||||
| @ -7,12 +7,12 @@ from time import time | |||||||
| parser = configparser.ConfigParser() | parser = configparser.ConfigParser() | ||||||
| parser.read(".bumpversion.cfg") | parser.read(".bumpversion.cfg") | ||||||
|  |  | ||||||
| should_build = str(len(os.environ.get("DOCKER_USERNAME", "")) > 0).lower() | should_build = str(os.environ.get("DOCKER_USERNAME", None) is not None).lower() | ||||||
|  |  | ||||||
| branch_name = os.environ["GITHUB_REF"] | branch_name = os.environ["GITHUB_REF"] | ||||||
| if os.environ.get("GITHUB_HEAD_REF", "") != "": | if os.environ.get("GITHUB_HEAD_REF", "") != "": | ||||||
|     branch_name = os.environ["GITHUB_HEAD_REF"] |     branch_name = os.environ["GITHUB_HEAD_REF"] | ||||||
| safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-").replace("'", "-") | safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-") | ||||||
|  |  | ||||||
| image_names = os.getenv("IMAGE_NAME").split(",") | image_names = os.getenv("IMAGE_NAME").split(",") | ||||||
| image_arch = os.getenv("IMAGE_ARCH") or None | image_arch = os.getenv("IMAGE_ARCH") or None | ||||||
| @ -50,25 +50,13 @@ else: | |||||||
|             f"{name}:gh-{safe_branch_name}-{int(time())}-{sha[:7]}{suffix}",  # Use by FluxCD |             f"{name}:gh-{safe_branch_name}-{int(time())}-{sha[:7]}{suffix}",  # Use by FluxCD | ||||||
|         ] |         ] | ||||||
|  |  | ||||||
| image_main_tag = image_tags[0].split(":")[-1] | image_main_tag = image_tags[0] | ||||||
|  | image_tags_rendered = ",".join(image_tags) | ||||||
|  |  | ||||||
| def get_attest_image_names(image_with_tags: list[str]): |  | ||||||
|     """Attestation only for GHCR""" |  | ||||||
|     image_tags = [] |  | ||||||
|     for image_name in set(name.split(":")[0] for name in image_with_tags): |  | ||||||
|         if not image_name.startswith("ghcr.io"): |  | ||||||
|             continue |  | ||||||
|         image_tags.append(image_name) |  | ||||||
|     return ",".join(set(image_tags)) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output: | with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output: | ||||||
|     print(f"shouldBuild={should_build}", file=_output) |     print("shouldBuild=%s" % should_build, file=_output) | ||||||
|     print(f"sha={sha}", file=_output) |     print("sha=%s" % sha, file=_output) | ||||||
|     print(f"version={version}", file=_output) |     print("version=%s" % version, file=_output) | ||||||
|     print(f"prerelease={prerelease}", file=_output) |     print("prerelease=%s" % prerelease, file=_output) | ||||||
|     print(f"imageTags={','.join(image_tags)}", file=_output) |     print("imageTags=%s" % image_tags_rendered, file=_output) | ||||||
|     print(f"attestImageNames={get_attest_image_names(image_tags)}", file=_output) |     print("imageMainTag=%s" % image_main_tag, file=_output) | ||||||
|     print(f"imageMainTag={image_main_tag}", file=_output) |  | ||||||
|     print(f"imageMainName={image_tags[0]}", file=_output) |  | ||||||
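The two columns above also differ in how the "main" image reference is derived; a minimal sketch of that difference, using an illustrative sample tag list, is:

```python
# Illustrative input: the first entry of the image_tags list built above.
image_tags = ["ghcr.io/goauthentik/dev-server:gh-abc1234"]

# Left column: imageMainTag keeps only the tag portion, and the full
# reference is emitted separately as imageMainName.
image_main_tag = image_tags[0].split(":")[-1]   # "gh-abc1234"
image_main_name = image_tags[0]                 # "ghcr.io/goauthentik/dev-server:gh-abc1234"

# Right column: imageMainTag is the whole first reference.
image_main_tag_other = image_tags[0]            # "ghcr.io/goauthentik/dev-server:gh-abc1234"
```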

.github/actions/setup/action.yml (4 changed lines)

							| @ -14,7 +14,7 @@ runs: | |||||||
|       run: | |       run: | | ||||||
|         pipx install poetry || true |         pipx install poetry || true | ||||||
|         sudo apt-get update |         sudo apt-get update | ||||||
|         sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext libkrb5-dev krb5-kdc krb5-user krb5-admin-server |         sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext | ||||||
|     - name: Setup python and restore poetry |     - name: Setup python and restore poetry | ||||||
|       uses: actions/setup-python@v5 |       uses: actions/setup-python@v5 | ||||||
|       with: |       with: | ||||||
| @ -35,7 +35,7 @@ runs: | |||||||
|       run: | |       run: | | ||||||
|         export PSQL_TAG=${{ inputs.postgresql_version }} |         export PSQL_TAG=${{ inputs.postgresql_version }} | ||||||
|         docker compose -f .github/actions/setup/docker-compose.yml up -d |         docker compose -f .github/actions/setup/docker-compose.yml up -d | ||||||
|         poetry install --sync |         poetry install | ||||||
|         cd web && npm ci |         cd web && npm ci | ||||||
|     - name: Generate config |     - name: Generate config | ||||||
|       shell: poetry run python {0} |       shell: poetry run python {0} | ||||||

.github/actions/setup/docker-compose.yml (2 changed lines)

							| @ -1,3 +1,5 @@ | |||||||
|  | version: "3.7" | ||||||
|  |  | ||||||
| services: | services: | ||||||
|   postgresql: |   postgresql: | ||||||
|     image: docker.io/library/postgres:${PSQL_TAG:-16} |     image: docker.io/library/postgres:${PSQL_TAG:-16} | ||||||

.github/codespell-words.txt (1 changed line)

							| @ -4,4 +4,3 @@ hass | |||||||
| warmup | warmup | ||||||
| ontext | ontext | ||||||
| singed | singed | ||||||
| assertIn |  | ||||||

.github/dependabot.yml (45 changed lines)

							| @ -21,9 +21,7 @@ updates: | |||||||
|     labels: |     labels: | ||||||
|       - dependencies |       - dependencies | ||||||
|   - package-ecosystem: npm |   - package-ecosystem: npm | ||||||
|     directories: |     directory: "/web" | ||||||
|       - "/web" |  | ||||||
|       - "/web/sfe" |  | ||||||
|     schedule: |     schedule: | ||||||
|       interval: daily |       interval: daily | ||||||
|       time: "04:00" |       time: "04:00" | ||||||
| @ -32,6 +30,7 @@ updates: | |||||||
|     open-pull-requests-limit: 10 |     open-pull-requests-limit: 10 | ||||||
|     commit-message: |     commit-message: | ||||||
|       prefix: "web:" |       prefix: "web:" | ||||||
|  |     # TODO: deduplicate these groups | ||||||
|     groups: |     groups: | ||||||
|       sentry: |       sentry: | ||||||
|         patterns: |         patterns: | ||||||
| @ -43,11 +42,9 @@ updates: | |||||||
|           - "babel-*" |           - "babel-*" | ||||||
|       eslint: |       eslint: | ||||||
|         patterns: |         patterns: | ||||||
|           - "@eslint/*" |  | ||||||
|           - "@typescript-eslint/*" |           - "@typescript-eslint/*" | ||||||
|           - "eslint-*" |  | ||||||
|           - "eslint" |           - "eslint" | ||||||
|           - "typescript-eslint" |           - "eslint-*" | ||||||
|       storybook: |       storybook: | ||||||
|         patterns: |         patterns: | ||||||
|           - "@storybook/*" |           - "@storybook/*" | ||||||
| @ -55,16 +52,42 @@ updates: | |||||||
|       esbuild: |       esbuild: | ||||||
|         patterns: |         patterns: | ||||||
|           - "@esbuild/*" |           - "@esbuild/*" | ||||||
|           - "esbuild*" |  | ||||||
|       rollup: |       rollup: | ||||||
|         patterns: |         patterns: | ||||||
|           - "@rollup/*" |           - "@rollup/*" | ||||||
|           - "rollup-*" |           - "rollup-*" | ||||||
|           - "rollup*" |   - package-ecosystem: npm | ||||||
|       swc: |     directory: "/tests/wdio" | ||||||
|  |     schedule: | ||||||
|  |       interval: daily | ||||||
|  |       time: "04:00" | ||||||
|  |     labels: | ||||||
|  |       - dependencies | ||||||
|  |     open-pull-requests-limit: 10 | ||||||
|  |     commit-message: | ||||||
|  |       prefix: "web:" | ||||||
|  |     # TODO: deduplicate these groups | ||||||
|  |     groups: | ||||||
|  |       sentry: | ||||||
|         patterns: |         patterns: | ||||||
|           - "@swc/*" |           - "@sentry/*" | ||||||
|           - "swc-*" |           - "@spotlightjs/*" | ||||||
|  |       babel: | ||||||
|  |         patterns: | ||||||
|  |           - "@babel/*" | ||||||
|  |           - "babel-*" | ||||||
|  |       eslint: | ||||||
|  |         patterns: | ||||||
|  |           - "@typescript-eslint/*" | ||||||
|  |           - "eslint" | ||||||
|  |           - "eslint-*" | ||||||
|  |       storybook: | ||||||
|  |         patterns: | ||||||
|  |           - "@storybook/*" | ||||||
|  |           - "*storybook*" | ||||||
|  |       esbuild: | ||||||
|  |         patterns: | ||||||
|  |           - "@esbuild/*" | ||||||
|       wdio: |       wdio: | ||||||
|         patterns: |         patterns: | ||||||
|           - "@wdio/*" |           - "@wdio/*" | ||||||

.github/pull_request_template.md (2 changed lines)

							| @ -1,7 +1,7 @@ | |||||||
| <!-- | <!-- | ||||||
| 👋 Hi there! Welcome. | 👋 Hi there! Welcome. | ||||||
|  |  | ||||||
| Please check the Contributing guidelines: https://docs.goauthentik.io/docs/developer-docs/#how-can-i-contribute | Please check the Contributing guidelines: https://goauthentik.io/developer-docs/#how-can-i-contribute | ||||||
| --> | --> | ||||||
|  |  | ||||||
| ## Details | ## Details | ||||||

.github/workflows/api-ts-publish.yml (9 changed lines)

							| @ -31,16 +31,11 @@ jobs: | |||||||
|         env: |         env: | ||||||
|           NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }} |           NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }} | ||||||
|       - name: Upgrade /web |       - name: Upgrade /web | ||||||
|         working-directory: web |         working-directory: web/ | ||||||
|         run: | |         run: | | ||||||
|           export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'` |           export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'` | ||||||
|           npm i @goauthentik/api@$VERSION |           npm i @goauthentik/api@$VERSION | ||||||
|       - name: Upgrade /web/packages/sfe |       - uses: peter-evans/create-pull-request@v6 | ||||||
|         working-directory: web/packages/sfe |  | ||||||
|         run: | |  | ||||||
|           export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'` |  | ||||||
|           npm i @goauthentik/api@$VERSION |  | ||||||
|       - uses: peter-evans/create-pull-request@v7 |  | ||||||
|         id: cpr |         id: cpr | ||||||
|         with: |         with: | ||||||
|           token: ${{ steps.generate_token.outputs.token }} |           token: ${{ steps.generate_token.outputs.token }} | ||||||

.github/workflows/ci-main.yml (46 changed lines)

							| @ -50,6 +50,7 @@ jobs: | |||||||
|       fail-fast: false |       fail-fast: false | ||||||
|       matrix: |       matrix: | ||||||
|         psql: |         psql: | ||||||
|  |           - 12-alpine | ||||||
|           - 15-alpine |           - 15-alpine | ||||||
|           - 16-alpine |           - 16-alpine | ||||||
|     steps: |     steps: | ||||||
| @ -103,6 +104,7 @@ jobs: | |||||||
|       fail-fast: false |       fail-fast: false | ||||||
|       matrix: |       matrix: | ||||||
|         psql: |         psql: | ||||||
|  |           - 12-alpine | ||||||
|           - 15-alpine |           - 15-alpine | ||||||
|           - 16-alpine |           - 16-alpine | ||||||
|     steps: |     steps: | ||||||
| @ -120,12 +122,6 @@ jobs: | |||||||
|         with: |         with: | ||||||
|           flags: unit |           flags: unit | ||||||
|           token: ${{ secrets.CODECOV_TOKEN }} |           token: ${{ secrets.CODECOV_TOKEN }} | ||||||
|       - if: ${{ !cancelled() }} |  | ||||||
|         uses: codecov/test-results-action@v1 |  | ||||||
|         with: |  | ||||||
|           flags: unit |  | ||||||
|           file: unittest.xml |  | ||||||
|           token: ${{ secrets.CODECOV_TOKEN }} |  | ||||||
|   test-integration: |   test-integration: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     timeout-minutes: 30 |     timeout-minutes: 30 | ||||||
| @ -134,7 +130,7 @@ jobs: | |||||||
|       - name: Setup authentik env |       - name: Setup authentik env | ||||||
|         uses: ./.github/actions/setup |         uses: ./.github/actions/setup | ||||||
|       - name: Create k8s Kind Cluster |       - name: Create k8s Kind Cluster | ||||||
|         uses: helm/kind-action@v1.10.0 |         uses: helm/kind-action@v1.9.0 | ||||||
|       - name: run integration |       - name: run integration | ||||||
|         run: | |         run: | | ||||||
|           poetry run coverage run manage.py test tests/integration |           poetry run coverage run manage.py test tests/integration | ||||||
| @ -144,12 +140,6 @@ jobs: | |||||||
|         with: |         with: | ||||||
|           flags: integration |           flags: integration | ||||||
|           token: ${{ secrets.CODECOV_TOKEN }} |           token: ${{ secrets.CODECOV_TOKEN }} | ||||||
|       - if: ${{ !cancelled() }} |  | ||||||
|         uses: codecov/test-results-action@v1 |  | ||||||
|         with: |  | ||||||
|           flags: integration |  | ||||||
|           file: unittest.xml |  | ||||||
|           token: ${{ secrets.CODECOV_TOKEN }} |  | ||||||
|   test-e2e: |   test-e2e: | ||||||
|     name: test-e2e (${{ matrix.job.name }}) |     name: test-e2e (${{ matrix.job.name }}) | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
| @ -180,7 +170,7 @@ jobs: | |||||||
|         uses: ./.github/actions/setup |         uses: ./.github/actions/setup | ||||||
|       - name: Setup e2e env (chrome, etc) |       - name: Setup e2e env (chrome, etc) | ||||||
|         run: | |         run: | | ||||||
|           docker compose -f tests/e2e/docker-compose.yml up -d --quiet-pull |           docker compose -f tests/e2e/docker-compose.yml up -d | ||||||
|       - id: cache-web |       - id: cache-web | ||||||
|         uses: actions/cache@v4 |         uses: actions/cache@v4 | ||||||
|         with: |         with: | ||||||
| @ -202,12 +192,6 @@ jobs: | |||||||
|         with: |         with: | ||||||
|           flags: e2e |           flags: e2e | ||||||
|           token: ${{ secrets.CODECOV_TOKEN }} |           token: ${{ secrets.CODECOV_TOKEN }} | ||||||
|       - if: ${{ !cancelled() }} |  | ||||||
|         uses: codecov/test-results-action@v1 |  | ||||||
|         with: |  | ||||||
|           flags: e2e |  | ||||||
|           file: unittest.xml |  | ||||||
|           token: ${{ secrets.CODECOV_TOKEN }} |  | ||||||
|   ci-core-mark: |   ci-core-mark: | ||||||
|     needs: |     needs: | ||||||
|       - lint |       - lint | ||||||
| @ -231,16 +215,13 @@ jobs: | |||||||
|     permissions: |     permissions: | ||||||
|       # Needed to upload contianer images to ghcr.io |       # Needed to upload contianer images to ghcr.io | ||||||
|       packages: write |       packages: write | ||||||
|       # Needed for attestation |  | ||||||
|       id-token: write |  | ||||||
|       attestations: write |  | ||||||
|     timeout-minutes: 120 |     timeout-minutes: 120 | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|         with: |         with: | ||||||
|           ref: ${{ github.event.pull_request.head.sha }} |           ref: ${{ github.event.pull_request.head.sha }} | ||||||
|       - name: Set up QEMU |       - name: Set up QEMU | ||||||
|         uses: docker/setup-qemu-action@v3.2.0 |         uses: docker/setup-qemu-action@v3.0.0 | ||||||
|       - name: Set up Docker Buildx |       - name: Set up Docker Buildx | ||||||
|         uses: docker/setup-buildx-action@v3 |         uses: docker/setup-buildx-action@v3 | ||||||
|       - name: prepare variables |       - name: prepare variables | ||||||
| @ -261,8 +242,7 @@ jobs: | |||||||
|       - name: generate ts client |       - name: generate ts client | ||||||
|         run: make gen-client-ts |         run: make gen-client-ts | ||||||
|       - name: Build Docker Image |       - name: Build Docker Image | ||||||
|         uses: docker/build-push-action@v6 |         uses: docker/build-push-action@v5 | ||||||
|         id: push |  | ||||||
|         with: |         with: | ||||||
|           context: . |           context: . | ||||||
|           secrets: | |           secrets: | | ||||||
| @ -272,16 +252,9 @@ jobs: | |||||||
|           push: ${{ steps.ev.outputs.shouldBuild == 'true' }} |           push: ${{ steps.ev.outputs.shouldBuild == 'true' }} | ||||||
|           build-args: | |           build-args: | | ||||||
|             GIT_BUILD_HASH=${{ steps.ev.outputs.sha }} |             GIT_BUILD_HASH=${{ steps.ev.outputs.sha }} | ||||||
|           cache-from: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache |           cache-from: type=gha | ||||||
|           cache-to: ${{ steps.ev.outputs.shouldBuild == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache,mode=max' || '' }} |           cache-to: type=gha,mode=max | ||||||
|           platforms: linux/${{ matrix.arch }} |           platforms: linux/${{ matrix.arch }} | ||||||
|       - uses: actions/attest-build-provenance@v1 |  | ||||||
|         id: attest |  | ||||||
|         if: ${{ steps.ev.outputs.shouldBuild == 'true' }} |  | ||||||
|         with: |  | ||||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} |  | ||||||
|           subject-digest: ${{ steps.push.outputs.digest }} |  | ||||||
|           push-to-registry: true |  | ||||||
|   pr-comment: |   pr-comment: | ||||||
|     needs: |     needs: | ||||||
|       - build |       - build | ||||||
| @ -303,7 +276,6 @@ jobs: | |||||||
|         with: |         with: | ||||||
|           image-name: ghcr.io/goauthentik/dev-server |           image-name: ghcr.io/goauthentik/dev-server | ||||||
|       - name: Comment on PR |       - name: Comment on PR | ||||||
|         if: ${{ steps.ev.outputs.shouldBuild == 'true' }} |  | ||||||
|         uses: ./.github/actions/comment-pr-instructions |         uses: ./.github/actions/comment-pr-instructions | ||||||
|         with: |         with: | ||||||
|           tag: ${{ steps.ev.outputs.imageMainTag }} |           tag: gh-${{ steps.ev.outputs.imageMainTag }} | ||||||

.github/workflows/ci-outpost.yml (23 changed lines)

							| @ -29,9 +29,9 @@ jobs: | |||||||
|       - name: Generate API |       - name: Generate API | ||||||
|         run: make gen-client-go |         run: make gen-client-go | ||||||
|       - name: golangci-lint |       - name: golangci-lint | ||||||
|         uses: golangci/golangci-lint-action@v6 |         uses: golangci/golangci-lint-action@v4 | ||||||
|         with: |         with: | ||||||
|           version: latest |           version: v1.54.2 | ||||||
|           args: --timeout 5000s --verbose |           args: --timeout 5000s --verbose | ||||||
|           skip-cache: true |           skip-cache: true | ||||||
|   test-unittest: |   test-unittest: | ||||||
| @ -71,15 +71,12 @@ jobs: | |||||||
|     permissions: |     permissions: | ||||||
|       # Needed to upload contianer images to ghcr.io |       # Needed to upload contianer images to ghcr.io | ||||||
|       packages: write |       packages: write | ||||||
|       # Needed for attestation |  | ||||||
|       id-token: write |  | ||||||
|       attestations: write |  | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|         with: |         with: | ||||||
|           ref: ${{ github.event.pull_request.head.sha }} |           ref: ${{ github.event.pull_request.head.sha }} | ||||||
|       - name: Set up QEMU |       - name: Set up QEMU | ||||||
|         uses: docker/setup-qemu-action@v3.2.0 |         uses: docker/setup-qemu-action@v3.0.0 | ||||||
|       - name: Set up Docker Buildx |       - name: Set up Docker Buildx | ||||||
|         uses: docker/setup-buildx-action@v3 |         uses: docker/setup-buildx-action@v3 | ||||||
|       - name: prepare variables |       - name: prepare variables | ||||||
| @ -99,8 +96,7 @@ jobs: | |||||||
|       - name: Generate API |       - name: Generate API | ||||||
|         run: make gen-client-go |         run: make gen-client-go | ||||||
|       - name: Build Docker Image |       - name: Build Docker Image | ||||||
|         id: push |         uses: docker/build-push-action@v5 | ||||||
|         uses: docker/build-push-action@v6 |  | ||||||
|         with: |         with: | ||||||
|           tags: ${{ steps.ev.outputs.imageTags }} |           tags: ${{ steps.ev.outputs.imageTags }} | ||||||
|           file: ${{ matrix.type }}.Dockerfile |           file: ${{ matrix.type }}.Dockerfile | ||||||
| @ -109,15 +105,8 @@ jobs: | |||||||
|             GIT_BUILD_HASH=${{ steps.ev.outputs.sha }} |             GIT_BUILD_HASH=${{ steps.ev.outputs.sha }} | ||||||
|           platforms: linux/amd64,linux/arm64 |           platforms: linux/amd64,linux/arm64 | ||||||
|           context: . |           context: . | ||||||
|           cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache |           cache-from: type=gha | ||||||
|           cache-to: ${{ steps.ev.outputs.shouldBuild == 'true' && format('type=registry,ref=ghcr.io/goauthentik/dev-{0}:buildcache,mode=max', matrix.type) || '' }} |           cache-to: type=gha,mode=max | ||||||
|       - uses: actions/attest-build-provenance@v1 |  | ||||||
|         id: attest |  | ||||||
|         if: ${{ steps.ev.outputs.shouldBuild == 'true' }} |  | ||||||
|         with: |  | ||||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} |  | ||||||
|           subject-digest: ${{ steps.push.outputs.digest }} |  | ||||||
|           push-to-registry: true |  | ||||||
|   build-binary: |   build-binary: | ||||||
|     timeout-minutes: 120 |     timeout-minutes: 120 | ||||||
|     needs: |     needs: | ||||||

.github/workflows/ci-web.yml (115 changed lines)

							| @ -12,23 +12,14 @@ on: | |||||||
|       - version-* |       - version-* | ||||||
|  |  | ||||||
| jobs: | jobs: | ||||||
|   lint: |   lint-eslint: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     strategy: |     strategy: | ||||||
|       fail-fast: false |       fail-fast: false | ||||||
|       matrix: |       matrix: | ||||||
|         command: |  | ||||||
|           - lint |  | ||||||
|           - lint:lockfile |  | ||||||
|           - tsc |  | ||||||
|           - prettier-check |  | ||||||
|         project: |         project: | ||||||
|           - web |           - web | ||||||
|         include: |           - tests/wdio | ||||||
|           - command: tsc |  | ||||||
|             project: web |  | ||||||
|           - command: lit-analyse |  | ||||||
|             project: web |  | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|       - uses: actions/setup-node@v4 |       - uses: actions/setup-node@v4 | ||||||
| @ -37,14 +28,83 @@ jobs: | |||||||
|           cache: "npm" |           cache: "npm" | ||||||
|           cache-dependency-path: ${{ matrix.project }}/package-lock.json |           cache-dependency-path: ${{ matrix.project }}/package-lock.json | ||||||
|       - working-directory: ${{ matrix.project }}/ |       - working-directory: ${{ matrix.project }}/ | ||||||
|         run: | |         run: npm ci | ||||||
|           npm ci |  | ||||||
|       - name: Generate API |       - name: Generate API | ||||||
|         run: make gen-client-ts |         run: make gen-client-ts | ||||||
|       - name: Lint |       - name: Eslint | ||||||
|         working-directory: ${{ matrix.project }}/ |         working-directory: ${{ matrix.project }}/ | ||||||
|         run: npm run ${{ matrix.command }} |         run: npm run lint | ||||||
|  |   lint-build: | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |     steps: | ||||||
|  |       - uses: actions/checkout@v4 | ||||||
|  |       - uses: actions/setup-node@v4 | ||||||
|  |         with: | ||||||
|  |           node-version-file: web/package.json | ||||||
|  |           cache: "npm" | ||||||
|  |           cache-dependency-path: web/package-lock.json | ||||||
|  |       - working-directory: web/ | ||||||
|  |         run: npm ci | ||||||
|  |       - name: Generate API | ||||||
|  |         run: make gen-client-ts | ||||||
|  |       - name: TSC | ||||||
|  |         working-directory: web/ | ||||||
|  |         run: npm run tsc | ||||||
|  |   lint-prettier: | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |     strategy: | ||||||
|  |       fail-fast: false | ||||||
|  |       matrix: | ||||||
|  |         project: | ||||||
|  |           - web | ||||||
|  |           - tests/wdio | ||||||
|  |     steps: | ||||||
|  |       - uses: actions/checkout@v4 | ||||||
|  |       - uses: actions/setup-node@v4 | ||||||
|  |         with: | ||||||
|  |           node-version-file: ${{ matrix.project }}/package.json | ||||||
|  |           cache: "npm" | ||||||
|  |           cache-dependency-path: ${{ matrix.project }}/package-lock.json | ||||||
|  |       - working-directory: ${{ matrix.project }}/ | ||||||
|  |         run: npm ci | ||||||
|  |       - name: Generate API | ||||||
|  |         run: make gen-client-ts | ||||||
|  |       - name: prettier | ||||||
|  |         working-directory: ${{ matrix.project }}/ | ||||||
|  |         run: npm run prettier-check | ||||||
|  |   lint-lit-analyse: | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |     steps: | ||||||
|  |       - uses: actions/checkout@v4 | ||||||
|  |       - uses: actions/setup-node@v4 | ||||||
|  |         with: | ||||||
|  |           node-version-file: web/package.json | ||||||
|  |           cache: "npm" | ||||||
|  |           cache-dependency-path: web/package-lock.json | ||||||
|  |       - working-directory: web/ | ||||||
|  |         run: | | ||||||
|  |           npm ci | ||||||
|  |           # lit-analyse doesn't understand path rewrites, so make it | ||||||
|  |           # belive it's an actual module | ||||||
|  |           cd node_modules/@goauthentik | ||||||
|  |           ln -s ../../src/ web | ||||||
|  |       - name: Generate API | ||||||
|  |         run: make gen-client-ts | ||||||
|  |       - name: lit-analyse | ||||||
|  |         working-directory: web/ | ||||||
|  |         run: npm run lit-analyse | ||||||
|  |   ci-web-mark: | ||||||
|  |     needs: | ||||||
|  |       - lint-eslint | ||||||
|  |       - lint-prettier | ||||||
|  |       - lint-lit-analyse | ||||||
|  |       - lint-build | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |     steps: | ||||||
|  |       - run: echo mark | ||||||
|   build: |   build: | ||||||
|  |     needs: | ||||||
|  |       - ci-web-mark | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
| @ -60,28 +120,3 @@ jobs: | |||||||
|       - name: build |       - name: build | ||||||
|         working-directory: web/ |         working-directory: web/ | ||||||
|         run: npm run build |         run: npm run build | ||||||
|   ci-web-mark: |  | ||||||
|     needs: |  | ||||||
|       - build |  | ||||||
|       - lint |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|     steps: |  | ||||||
|       - run: echo mark |  | ||||||
|   test: |  | ||||||
|     needs: |  | ||||||
|       - ci-web-mark |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|     steps: |  | ||||||
|       - uses: actions/checkout@v4 |  | ||||||
|       - uses: actions/setup-node@v4 |  | ||||||
|         with: |  | ||||||
|           node-version-file: web/package.json |  | ||||||
|           cache: "npm" |  | ||||||
|           cache-dependency-path: web/package-lock.json |  | ||||||
|       - working-directory: web/ |  | ||||||
|         run: npm ci |  | ||||||
|       - name: Generate API |  | ||||||
|         run: make gen-client-ts |  | ||||||
|       - name: test |  | ||||||
|         working-directory: web/ |  | ||||||
|         run: npm run test || exit 0 |  | ||||||

.github/workflows/ci-website.yml (19 changed lines)

							| @ -12,21 +12,20 @@ on: | |||||||
|       - version-* |       - version-* | ||||||
|  |  | ||||||
| jobs: | jobs: | ||||||
|   lint: |   lint-prettier: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     strategy: |  | ||||||
|       fail-fast: false |  | ||||||
|       matrix: |  | ||||||
|         command: |  | ||||||
|           - lint:lockfile |  | ||||||
|           - prettier-check |  | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|  |       - uses: actions/setup-node@v4 | ||||||
|  |         with: | ||||||
|  |           node-version-file: website/package.json | ||||||
|  |           cache: "npm" | ||||||
|  |           cache-dependency-path: website/package-lock.json | ||||||
|       - working-directory: website/ |       - working-directory: website/ | ||||||
|         run: npm ci |         run: npm ci | ||||||
|       - name: Lint |       - name: prettier | ||||||
|         working-directory: website/ |         working-directory: website/ | ||||||
|         run: npm run ${{ matrix.command }} |         run: npm run prettier-check | ||||||
|   test: |   test: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
| @ -63,7 +62,7 @@ jobs: | |||||||
|         run: npm run ${{ matrix.job }} |         run: npm run ${{ matrix.job }} | ||||||
|   ci-website-mark: |   ci-website-mark: | ||||||
|     needs: |     needs: | ||||||
|       - lint |       - lint-prettier | ||||||
|       - test |       - test | ||||||
|       - build |       - build | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|  | |||||||
| @ -24,7 +24,7 @@ jobs: | |||||||
|       - name: Setup authentik env |       - name: Setup authentik env | ||||||
|         uses: ./.github/actions/setup |         uses: ./.github/actions/setup | ||||||
|       - run: poetry run ak update_webauthn_mds |       - run: poetry run ak update_webauthn_mds | ||||||
|       - uses: peter-evans/create-pull-request@v7 |       - uses: peter-evans/create-pull-request@v6 | ||||||
|         id: cpr |         id: cpr | ||||||
|         with: |         with: | ||||||
|           token: ${{ steps.generate_token.outputs.token }} |           token: ${{ steps.generate_token.outputs.token }} | ||||||

.github/workflows/image-compress.yml (2 changed lines)

							| @ -42,7 +42,7 @@ jobs: | |||||||
|         with: |         with: | ||||||
|           githubToken: ${{ steps.generate_token.outputs.token }} |           githubToken: ${{ steps.generate_token.outputs.token }} | ||||||
|           compressOnly: ${{ github.event_name != 'pull_request' }} |           compressOnly: ${{ github.event_name != 'pull_request' }} | ||||||
|       - uses: peter-evans/create-pull-request@v7 |       - uses: peter-evans/create-pull-request@v6 | ||||||
|         if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}" |         if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}" | ||||||
|         id: cpr |         id: cpr | ||||||
|         with: |         with: | ||||||

.github/workflows/release-publish.yml (40 changed lines)

							| @ -11,13 +11,10 @@ jobs: | |||||||
|     permissions: |     permissions: | ||||||
|       # Needed to upload contianer images to ghcr.io |       # Needed to upload contianer images to ghcr.io | ||||||
|       packages: write |       packages: write | ||||||
|       # Needed for attestation |  | ||||||
|       id-token: write |  | ||||||
|       attestations: write |  | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|       - name: Set up QEMU |       - name: Set up QEMU | ||||||
|         uses: docker/setup-qemu-action@v3.2.0 |         uses: docker/setup-qemu-action@v3.0.0 | ||||||
|       - name: Set up Docker Buildx |       - name: Set up Docker Buildx | ||||||
|         uses: docker/setup-buildx-action@v3 |         uses: docker/setup-buildx-action@v3 | ||||||
|       - name: prepare variables |       - name: prepare variables | ||||||
| @ -43,32 +40,20 @@ jobs: | |||||||
|           mkdir -p ./gen-ts-api |           mkdir -p ./gen-ts-api | ||||||
|           mkdir -p ./gen-go-api |           mkdir -p ./gen-go-api | ||||||
|       - name: Build Docker Image |       - name: Build Docker Image | ||||||
|         uses: docker/build-push-action@v6 |         uses: docker/build-push-action@v5 | ||||||
|         id: push |  | ||||||
|         with: |         with: | ||||||
|           context: . |           context: . | ||||||
|           push: true |           push: true | ||||||
|           secrets: | |           secrets: | | ||||||
|             GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }} |             GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }} | ||||||
|             GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }} |             GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }} | ||||||
|           build-args: | |  | ||||||
|             VERSION=${{ github.ref }} |  | ||||||
|           tags: ${{ steps.ev.outputs.imageTags }} |           tags: ${{ steps.ev.outputs.imageTags }} | ||||||
|           platforms: linux/amd64,linux/arm64 |           platforms: linux/amd64,linux/arm64 | ||||||
|       - uses: actions/attest-build-provenance@v1 |  | ||||||
|         id: attest |  | ||||||
|         with: |  | ||||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} |  | ||||||
|           subject-digest: ${{ steps.push.outputs.digest }} |  | ||||||
|           push-to-registry: true |  | ||||||
|   build-outpost: |   build-outpost: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     permissions: |     permissions: | ||||||
|       # Needed to upload contianer images to ghcr.io |       # Needed to upload contianer images to ghcr.io | ||||||
|       packages: write |       packages: write | ||||||
|       # Needed for attestation |  | ||||||
|       id-token: write |  | ||||||
|       attestations: write |  | ||||||
|     strategy: |     strategy: | ||||||
|       fail-fast: false |       fail-fast: false | ||||||
|       matrix: |       matrix: | ||||||
| @ -83,7 +68,7 @@ jobs: | |||||||
|         with: |         with: | ||||||
|           go-version-file: "go.mod" |           go-version-file: "go.mod" | ||||||
|       - name: Set up QEMU |       - name: Set up QEMU | ||||||
|         uses: docker/setup-qemu-action@v3.2.0 |         uses: docker/setup-qemu-action@v3.0.0 | ||||||
|       - name: Set up Docker Buildx |       - name: Set up Docker Buildx | ||||||
|         uses: docker/setup-buildx-action@v3 |         uses: docker/setup-buildx-action@v3 | ||||||
|       - name: prepare variables |       - name: prepare variables | ||||||
| @ -109,22 +94,13 @@ jobs: | |||||||
|           username: ${{ github.repository_owner }} |           username: ${{ github.repository_owner }} | ||||||
|           password: ${{ secrets.GITHUB_TOKEN }} |           password: ${{ secrets.GITHUB_TOKEN }} | ||||||
|       - name: Build Docker Image |       - name: Build Docker Image | ||||||
|         uses: docker/build-push-action@v6 |         uses: docker/build-push-action@v5 | ||||||
|         id: push |  | ||||||
|         with: |         with: | ||||||
|           push: true |           push: true | ||||||
|           build-args: | |  | ||||||
|             VERSION=${{ github.ref }} |  | ||||||
|           tags: ${{ steps.ev.outputs.imageTags }} |           tags: ${{ steps.ev.outputs.imageTags }} | ||||||
|           file: ${{ matrix.type }}.Dockerfile |           file: ${{ matrix.type }}.Dockerfile | ||||||
|           platforms: linux/amd64,linux/arm64 |           platforms: linux/amd64,linux/arm64 | ||||||
|           context: . |           context: . | ||||||
|       - uses: actions/attest-build-provenance@v1 |  | ||||||
|         id: attest |  | ||||||
|         with: |  | ||||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} |  | ||||||
|           subject-digest: ${{ steps.push.outputs.digest }} |  | ||||||
|           push-to-registry: true |  | ||||||
|   build-outpost-binary: |   build-outpost-binary: | ||||||
|     timeout-minutes: 120 |     timeout-minutes: 120 | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
| @ -179,8 +155,8 @@ jobs: | |||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|       - name: Run test suite in final docker images |       - name: Run test suite in final docker images | ||||||
|         run: | |         run: | | ||||||
|           echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env |           echo "PG_PASS=$(openssl rand -base64 32)" >> .env | ||||||
|           echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env |           echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env | ||||||
|           docker compose pull -q |           docker compose pull -q | ||||||
|           docker compose up --no-start |           docker compose up --no-start | ||||||
|           docker compose start postgresql redis |           docker compose start postgresql redis | ||||||
| @ -202,8 +178,8 @@ jobs: | |||||||
|           image-name: ghcr.io/goauthentik/server |           image-name: ghcr.io/goauthentik/server | ||||||
|       - name: Get static files from docker image |       - name: Get static files from docker image | ||||||
|         run: | |         run: | | ||||||
|           docker pull ${{ steps.ev.outputs.imageMainName }} |           docker pull ${{ steps.ev.outputs.imageMainTag }} | ||||||
|           container=$(docker container create ${{ steps.ev.outputs.imageMainName }}) |           container=$(docker container create ${{ steps.ev.outputs.imageMainTag }}) | ||||||
|           docker cp ${container}:web/ . |           docker cp ${container}:web/ . | ||||||
|       - name: Create a Sentry.io release |       - name: Create a Sentry.io release | ||||||
|         uses: getsentry/action-release@v1 |         uses: getsentry/action-release@v1 | ||||||

.github/workflows/release-tag.yml (6 changed lines)

							| @ -14,11 +14,11 @@ jobs: | |||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|       - name: Pre-release test |       - name: Pre-release test | ||||||
|         run: | |         run: | | ||||||
|           echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env |           echo "PG_PASS=$(openssl rand -base64 32)" >> .env | ||||||
|           echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env |           echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env | ||||||
|           docker buildx install |           docker buildx install | ||||||
|           mkdir -p ./gen-ts-api |           mkdir -p ./gen-ts-api | ||||||
|           docker build --no-cache -t testing:latest . |           docker build -t testing:latest . | ||||||
|           echo "AUTHENTIK_IMAGE=testing" >> .env |           echo "AUTHENTIK_IMAGE=testing" >> .env | ||||||
|           echo "AUTHENTIK_TAG=latest" >> .env |           echo "AUTHENTIK_TAG=latest" >> .env | ||||||
|           docker compose up --no-start |           docker compose up --no-start | ||||||
|  | |||||||
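Both secret-generation variants above write a base64 string derived from 32 random bytes into `.env`; a rough Python equivalent of `openssl rand 32 | base64 -w 0`, shown for illustration only, is:

```python
import base64
import os

# 32 random bytes, base64-encoded on a single line, roughly what
# `openssl rand 32 | base64 -w 0` emits for PG_PASS / AUTHENTIK_SECRET_KEY.
print(base64.b64encode(os.urandom(32)).decode())
```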
| @ -32,7 +32,7 @@ jobs: | |||||||
|           poetry run ak compilemessages |           poetry run ak compilemessages | ||||||
|           make web-check-compile |           make web-check-compile | ||||||
|       - name: Create Pull Request |       - name: Create Pull Request | ||||||
|         uses: peter-evans/create-pull-request@v7 |         uses: peter-evans/create-pull-request@v6 | ||||||
|         with: |         with: | ||||||
|           token: ${{ steps.generate_token.outputs.token }} |           token: ${{ steps.generate_token.outputs.token }} | ||||||
|           branch: extract-compile-backend-translation |           branch: extract-compile-backend-translation | ||||||

.gitignore (7 changed lines)

							| @ -209,3 +209,10 @@ source_docs/ | |||||||
|  |  | ||||||
| ### Golang ### | ### Golang ### | ||||||
| /vendor/ | /vendor/ | ||||||
|  |  | ||||||
|  | ### Benchmark ### | ||||||
|  | tests/benchmark/k6 | ||||||
|  | tests/benchmark/prometheus | ||||||
|  | tests/benchmark/**/*.json | ||||||
|  | tests/benchmark/**/*.ndjson | ||||||
|  | tests/benchmark/**/*.html | ||||||

.vscode/extensions.json (2 changed lines)

							| @ -16,6 +16,6 @@ | |||||||
|         "ms-python.black-formatter", |         "ms-python.black-formatter", | ||||||
|         "redhat.vscode-yaml", |         "redhat.vscode-yaml", | ||||||
|         "Tobermory.es6-string-html", |         "Tobermory.es6-string-html", | ||||||
|         "unifiedjs.vscode-mdx" |         "unifiedjs.vscode-mdx", | ||||||
|     ] |     ] | ||||||
| } | } | ||||||

.vscode/launch.json (2 changed lines)

							| @ -22,6 +22,6 @@ | |||||||
|             }, |             }, | ||||||
|             "justMyCode": true, |             "justMyCode": true, | ||||||
|             "django": true |             "django": true | ||||||
|         } |         }, | ||||||
|     ] |     ] | ||||||
| } | } | ||||||

.vscode/settings.json (33 changed lines)

							| @ -4,36 +4,33 @@ | |||||||
|         "asgi", |         "asgi", | ||||||
|         "authentik", |         "authentik", | ||||||
|         "authn", |         "authn", | ||||||
|         "entra", |  | ||||||
|         "goauthentik", |         "goauthentik", | ||||||
|         "jwe", |  | ||||||
|         "jwks", |         "jwks", | ||||||
|         "kubernetes", |  | ||||||
|         "oidc", |         "oidc", | ||||||
|         "openid", |         "openid", | ||||||
|         "passwordless", |  | ||||||
|         "plex", |         "plex", | ||||||
|         "saml", |         "saml", | ||||||
|         "scim", |  | ||||||
|         "slo", |  | ||||||
|         "sso", |  | ||||||
|         "totp", |         "totp", | ||||||
|  |         "webauthn", | ||||||
|         "traefik", |         "traefik", | ||||||
|         "webauthn" |         "passwordless", | ||||||
|  |         "kubernetes", | ||||||
|  |         "sso", | ||||||
|  |         "slo", | ||||||
|  |         "scim", | ||||||
|     ], |     ], | ||||||
|     "todo-tree.tree.showCountsInTree": true, |     "todo-tree.tree.showCountsInTree": true, | ||||||
|     "todo-tree.tree.showBadges": true, |     "todo-tree.tree.showBadges": true, | ||||||
|     "yaml.customTags": [ |     "yaml.customTags": [ | ||||||
|         "!Condition sequence", |  | ||||||
|         "!Context scalar", |  | ||||||
|         "!Enumerate sequence", |  | ||||||
|         "!Env scalar", |  | ||||||
|         "!Find sequence", |         "!Find sequence", | ||||||
|         "!Format sequence", |  | ||||||
|         "!If sequence", |  | ||||||
|         "!Index scalar", |  | ||||||
|         "!KeyOf scalar", |         "!KeyOf scalar", | ||||||
|         "!Value scalar" |         "!Context scalar", | ||||||
|  |         "!Context sequence", | ||||||
|  |         "!Format sequence", | ||||||
|  |         "!Condition sequence", | ||||||
|  |         "!Env sequence", | ||||||
|  |         "!Env scalar", | ||||||
|  |         "!If sequence" | ||||||
|     ], |     ], | ||||||
|     "typescript.preferences.importModuleSpecifier": "non-relative", |     "typescript.preferences.importModuleSpecifier": "non-relative", | ||||||
|     "typescript.preferences.importModuleSpecifierEnding": "index", |     "typescript.preferences.importModuleSpecifierEnding": "index", | ||||||
| @ -50,7 +47,9 @@ | |||||||
|             "ignoreCase": false |             "ignoreCase": false | ||||||
|         } |         } | ||||||
|     ], |     ], | ||||||
|     "go.testFlags": ["-count=1"], |     "go.testFlags": [ | ||||||
|  |         "-count=1" | ||||||
|  |     ], | ||||||
|     "github-actions.workflows.pinned.workflows": [ |     "github-actions.workflows.pinned.workflows": [ | ||||||
|         ".github/workflows/ci-main.yml" |         ".github/workflows/ci-main.yml" | ||||||
|     ] |     ] | ||||||

.vscode/tasks.json (62 changed lines)

							| @ -2,67 +2,85 @@ | |||||||
|     "version": "2.0.0", |     "version": "2.0.0", | ||||||
|     "tasks": [ |     "tasks": [ | ||||||
|         { |         { | ||||||
|             "label": "authentik/core: make", |             "label": "authentik[core]: format & test", | ||||||
|             "command": "poetry", |             "command": "poetry", | ||||||
|             "args": ["run", "make", "lint-fix", "lint"], |             "args": [ | ||||||
|             "presentation": { |                 "run", | ||||||
|                 "panel": "new" |                 "make" | ||||||
|             }, |             ], | ||||||
|             "group": "test" |             "group": "build", | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/core: run", |             "label": "authentik[core]: run", | ||||||
|             "command": "poetry", |             "command": "poetry", | ||||||
|             "args": ["run", "ak", "server"], |             "args": [ | ||||||
|  |                 "run", | ||||||
|  |                 "make", | ||||||
|  |                 "run", | ||||||
|  |             ], | ||||||
|             "group": "build", |             "group": "build", | ||||||
|             "presentation": { |             "presentation": { | ||||||
|                 "panel": "dedicated", |                 "panel": "dedicated", | ||||||
|                 "group": "running" |                 "group": "running" | ||||||
|             } |             }, | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/web: make", |             "label": "authentik[web]: format", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": ["web"], |             "args": ["web"], | ||||||
|             "group": "build" |             "group": "build", | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/web: watch", |             "label": "authentik[web]: watch", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": ["web-watch"], |             "args": ["web-watch"], | ||||||
|             "group": "build", |             "group": "build", | ||||||
|             "presentation": { |             "presentation": { | ||||||
|                 "panel": "dedicated", |                 "panel": "dedicated", | ||||||
|                 "group": "running" |                 "group": "running" | ||||||
|             } |             }, | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik: install", |             "label": "authentik: install", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": ["install", "-j4"], |             "args": ["install"], | ||||||
|             "group": "build" |             "group": "build", | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/website: make", |             "label": "authentik: i18n-extract", | ||||||
|  |             "command": "poetry", | ||||||
|  |             "args": [ | ||||||
|  |                 "run", | ||||||
|  |                 "make", | ||||||
|  |                 "i18n-extract" | ||||||
|  |             ], | ||||||
|  |             "group": "build", | ||||||
|  |         }, | ||||||
|  |         { | ||||||
|  |             "label": "authentik[website]: format", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": ["website"], |             "args": ["website"], | ||||||
|             "group": "build" |             "group": "build", | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/website: watch", |             "label": "authentik[website]: watch", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": ["website-watch"], |             "args": ["website-watch"], | ||||||
|             "group": "build", |             "group": "build", | ||||||
|             "presentation": { |             "presentation": { | ||||||
|                 "panel": "dedicated", |                 "panel": "dedicated", | ||||||
|                 "group": "running" |                 "group": "running" | ||||||
|             } |             }, | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/api: generate", |             "label": "authentik[api]: generate", | ||||||
|             "command": "poetry", |             "command": "poetry", | ||||||
|             "args": ["run", "make", "gen"], |             "args": [ | ||||||
|  |                 "run", | ||||||
|  |                 "make", | ||||||
|  |                 "gen" | ||||||
|  |             ], | ||||||
|             "group": "build" |             "group": "build" | ||||||
|         } |         }, | ||||||
|     ] |     ] | ||||||
| } | } | ||||||
|  | |||||||
53  Dockerfile
| @ -1,7 +1,7 @@ | |||||||
| # syntax=docker/dockerfile:1 | # syntax=docker/dockerfile:1 | ||||||
|  |  | ||||||
| # Stage 1: Build website | # Stage 1: Build website | ||||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS website-builder | FROM --platform=${BUILDPLATFORM} docker.io/node:21 as website-builder | ||||||
|  |  | ||||||
| ENV NODE_ENV=production | ENV NODE_ENV=production | ||||||
|  |  | ||||||
| @ -20,22 +20,17 @@ COPY ./SECURITY.md /work/ | |||||||
| RUN npm run build-bundled | RUN npm run build-bundled | ||||||
|  |  | ||||||
| # Stage 2: Build webui | # Stage 2: Build webui | ||||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS web-builder | FROM --platform=${BUILDPLATFORM} docker.io/node:21 as web-builder | ||||||
|  |  | ||||||
| ARG GIT_BUILD_HASH |  | ||||||
| ENV GIT_BUILD_HASH=$GIT_BUILD_HASH |  | ||||||
| ENV NODE_ENV=production | ENV NODE_ENV=production | ||||||
|  |  | ||||||
| WORKDIR /work/web | WORKDIR /work/web | ||||||
|  |  | ||||||
| RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \ | RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \ | ||||||
|     --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \ |     --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \ | ||||||
|     --mount=type=bind,target=/work/web/packages/sfe/package.json,src=./web/packages/sfe/package.json \ |  | ||||||
|     --mount=type=bind,target=/work/web/scripts,src=./web/scripts \ |  | ||||||
|     --mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \ |     --mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \ | ||||||
|     npm ci --include=dev |     npm ci --include=dev | ||||||
|  |  | ||||||
| COPY ./package.json /work |  | ||||||
| COPY ./web /work/web/ | COPY ./web /work/web/ | ||||||
| COPY ./website /work/website/ | COPY ./website /work/website/ | ||||||
| COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api | COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api | ||||||
| @ -43,7 +38,7 @@ COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api | |||||||
| RUN npm run build | RUN npm run build | ||||||
|  |  | ||||||
| # Stage 3: Build go proxy | # Stage 3: Build go proxy | ||||||
| FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/oss/go/microsoft/golang:1.23-fips-bookworm AS go-builder | FROM --platform=${BUILDPLATFORM} docker.io/golang:1.22.2-bookworm AS go-builder | ||||||
|  |  | ||||||
| ARG TARGETOS | ARG TARGETOS | ||||||
| ARG TARGETARCH | ARG TARGETARCH | ||||||
| @ -54,11 +49,6 @@ ARG GOARCH=$TARGETARCH | |||||||
|  |  | ||||||
| WORKDIR /go/src/goauthentik.io | WORKDIR /go/src/goauthentik.io | ||||||
|  |  | ||||||
| RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \ |  | ||||||
|     dpkg --add-architecture arm64 && \ |  | ||||||
|     apt-get update && \ |  | ||||||
|     apt-get install -y --no-install-recommends crossbuild-essential-arm64 gcc-aarch64-linux-gnu |  | ||||||
|  |  | ||||||
| RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \ | RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \ | ||||||
|     --mount=type=bind,target=/go/src/goauthentik.io/go.sum,src=./go.sum \ |     --mount=type=bind,target=/go/src/goauthentik.io/go.sum,src=./go.sum \ | ||||||
|     --mount=type=cache,target=/go/pkg/mod \ |     --mount=type=cache,target=/go/pkg/mod \ | ||||||
| @ -73,14 +63,14 @@ COPY ./internal /go/src/goauthentik.io/internal | |||||||
| COPY ./go.mod /go/src/goauthentik.io/go.mod | COPY ./go.mod /go/src/goauthentik.io/go.mod | ||||||
| COPY ./go.sum /go/src/goauthentik.io/go.sum | COPY ./go.sum /go/src/goauthentik.io/go.sum | ||||||
|  |  | ||||||
|  | ENV CGO_ENABLED=0 | ||||||
|  |  | ||||||
| RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \ | RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \ | ||||||
|     --mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \ |     --mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \ | ||||||
|     if [ "$TARGETARCH" = "arm64" ]; then export CC=aarch64-linux-gnu-gcc && export CC_FOR_TARGET=gcc-aarch64-linux-gnu; fi && \ |     GOARM="${TARGETVARIANT#v}" go build -o /go/authentik ./cmd/server | ||||||
|     CGO_ENABLED=1 GOEXPERIMENT="systemcrypto" GOFLAGS="-tags=requirefips" GOARM="${TARGETVARIANT#v}" \ |  | ||||||
|     go build -o /go/authentik ./cmd/server |  | ||||||
|  |  | ||||||
| # Stage 4: MaxMind GeoIP | # Stage 4: MaxMind GeoIP | ||||||
| FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.0.1 AS geoip | FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.0.1 as geoip | ||||||
|  |  | ||||||
| ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN" | ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN" | ||||||
| ENV GEOIPUPDATE_VERBOSE="1" | ENV GEOIPUPDATE_VERBOSE="1" | ||||||
| @ -94,10 +84,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ | |||||||
|     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" |     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" | ||||||
|  |  | ||||||
| # Stage 5: Python dependencies | # Stage 5: Python dependencies | ||||||
| FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips-full AS python-deps | FROM docker.io/python:3.12.3-slim-bookworm AS python-deps | ||||||
|  |  | ||||||
| ARG TARGETARCH |  | ||||||
| ARG TARGETVARIANT |  | ||||||
|  |  | ||||||
| WORKDIR /ak-root/poetry | WORKDIR /ak-root/poetry | ||||||
|  |  | ||||||
| @ -110,7 +97,7 @@ RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloa | |||||||
| RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \ | RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \ | ||||||
|     apt-get update && \ |     apt-get update && \ | ||||||
|     # Required for installing pip packages |     # Required for installing pip packages | ||||||
|     apt-get install -y --no-install-recommends build-essential pkg-config libpq-dev libkrb5-dev |     apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev libpq-dev | ||||||
|  |  | ||||||
| RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \ | RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \ | ||||||
|     --mount=type=bind,target=./poetry.lock,src=./poetry.lock \ |     --mount=type=bind,target=./poetry.lock,src=./poetry.lock \ | ||||||
| @ -120,28 +107,27 @@ RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \ | |||||||
|     bash -c "source ${VENV_PATH}/bin/activate && \ |     bash -c "source ${VENV_PATH}/bin/activate && \ | ||||||
|         pip3 install --upgrade pip && \ |         pip3 install --upgrade pip && \ | ||||||
|         pip3 install poetry && \ |         pip3 install poetry && \ | ||||||
|     poetry install --only=main --no-ansi --no-interaction --no-root && \ |         poetry install --only=main --no-ansi --no-interaction --no-root" | ||||||
|     pip install --force-reinstall /wheels/*" |  | ||||||
|  |  | ||||||
| # Stage 6: Run | # Stage 6: Run | ||||||
| FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips-full AS final-image | FROM docker.io/python:3.12.3-slim-bookworm AS final-image | ||||||
|  |  | ||||||
| ARG VERSION |  | ||||||
| ARG GIT_BUILD_HASH | ARG GIT_BUILD_HASH | ||||||
|  | ARG VERSION | ||||||
| ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ||||||
|  |  | ||||||
| LABEL org.opencontainers.image.url=https://goauthentik.io | LABEL org.opencontainers.image.url https://goauthentik.io | ||||||
| LABEL org.opencontainers.image.description="goauthentik.io Main server image, see https://goauthentik.io for more info." | LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info. | ||||||
| LABEL org.opencontainers.image.source=https://github.com/goauthentik/authentik | LABEL org.opencontainers.image.source https://github.com/goauthentik/authentik | ||||||
| LABEL org.opencontainers.image.version=${VERSION} | LABEL org.opencontainers.image.version ${VERSION} | ||||||
| LABEL org.opencontainers.image.revision=${GIT_BUILD_HASH} | LABEL org.opencontainers.image.revision ${GIT_BUILD_HASH} | ||||||
|  |  | ||||||
| WORKDIR / | WORKDIR / | ||||||
|  |  | ||||||
| # We cannot cache this layer otherwise we'll end up with a bigger image | # We cannot cache this layer otherwise we'll end up with a bigger image | ||||||
| RUN apt-get update && \ | RUN apt-get update && \ | ||||||
|     # Required for runtime |     # Required for runtime | ||||||
|     apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates libkrb5-3 libkadm5clnt-mit12 libkdb5-10 && \ |     apt-get install -y --no-install-recommends libpq5 openssl libxmlsec1-openssl libmaxminddb0 ca-certificates && \ | ||||||
|     # Required for bootstrap & healthcheck |     # Required for bootstrap & healthcheck | ||||||
|     apt-get install -y --no-install-recommends runit && \ |     apt-get install -y --no-install-recommends runit && \ | ||||||
|     apt-get clean && \ |     apt-get clean && \ | ||||||
| @ -161,7 +147,6 @@ COPY ./tests /tests | |||||||
| COPY ./manage.py / | COPY ./manage.py / | ||||||
| COPY ./blueprints /blueprints | COPY ./blueprints /blueprints | ||||||
| COPY ./lifecycle/ /lifecycle | COPY ./lifecycle/ /lifecycle | ||||||
| COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf |  | ||||||
| COPY --from=go-builder /go/authentik /bin/authentik | COPY --from=go-builder /go/authentik /bin/authentik | ||||||
| COPY --from=python-deps /ak-root/venv /ak-root/venv | COPY --from=python-deps /ak-root/venv /ak-root/venv | ||||||
| COPY --from=web-builder /work/web/dist/ /web/dist/ | COPY --from=web-builder /work/web/dist/ /web/dist/ | ||||||
| @ -178,8 +163,6 @@ ENV TMPDIR=/dev/shm/ \ | |||||||
|     VENV_PATH="/ak-root/venv" \ |     VENV_PATH="/ak-root/venv" \ | ||||||
|     POETRY_VIRTUALENVS_CREATE=false |     POETRY_VIRTUALENVS_CREATE=false | ||||||
|  |  | ||||||
| ENV GOFIPS=1 |  | ||||||
|  |  | ||||||
| HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ] | HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ] | ||||||
|  |  | ||||||
| ENTRYPOINT [ "dumb-init", "--", "ak" ] | ENTRYPOINT [ "dumb-init", "--", "ak" ] | ||||||
|  | |||||||
35  Makefile
| @ -19,13 +19,13 @@ pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null) | |||||||
| CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \ | CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \ | ||||||
| 		-I .github/codespell-words.txt \ | 		-I .github/codespell-words.txt \ | ||||||
| 		-S 'web/src/locales/**' \ | 		-S 'web/src/locales/**' \ | ||||||
| 		-S 'website/docs/developer-docs/api/reference/**' \ |  | ||||||
| 		authentik \ | 		authentik \ | ||||||
| 		internal \ | 		internal \ | ||||||
| 		cmd \ | 		cmd \ | ||||||
| 		web/src \ | 		web/src \ | ||||||
| 		website/src \ | 		website/src \ | ||||||
| 		website/blog \ | 		website/blog \ | ||||||
|  | 		website/developer-docs \ | ||||||
| 		website/docs \ | 		website/docs \ | ||||||
| 		website/integrations \ | 		website/integrations \ | ||||||
| 		website/src | 		website/src | ||||||
| @ -42,12 +42,12 @@ help:  ## Show this help | |||||||
| 		sort | 		sort | ||||||
| 	@echo "" | 	@echo "" | ||||||
|  |  | ||||||
| go-test: | test-go: | ||||||
| 	go test -timeout 0 -v -race -cover ./... | 	go test -timeout 0 -v -race -cover ./... | ||||||
|  |  | ||||||
| test-docker:  ## Run all tests in a docker-compose | test-docker:  ## Run all tests in a docker-compose | ||||||
| 	echo "PG_PASS=$(shell openssl rand 32 | base64 -w 0)" >> .env | 	echo "PG_PASS=$(openssl rand -base64 32)" >> .env | ||||||
| 	echo "AUTHENTIK_SECRET_KEY=$(shell openssl rand 32 | base64 -w 0)" >> .env | 	echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env | ||||||
| 	docker compose pull -q | 	docker compose pull -q | ||||||
| 	docker compose up --no-start | 	docker compose up --no-start | ||||||
| 	docker compose start postgresql redis | 	docker compose start postgresql redis | ||||||
| @ -59,11 +59,9 @@ test: ## Run the server tests and produce a coverage report (locally) | |||||||
| 	coverage html | 	coverage html | ||||||
| 	coverage report | 	coverage report | ||||||
|  |  | ||||||
| lint-fix: lint-codespell  ## Lint and automatically fix errors in the python source code. Reports spelling errors. | lint-fix:  ## Lint and automatically fix errors in the python source code. Reports spelling errors. | ||||||
| 	black $(PY_SOURCES) | 	black $(PY_SOURCES) | ||||||
| 	ruff check --fix $(PY_SOURCES) | 	ruff check --fix $(PY_SOURCES) | ||||||
|  |  | ||||||
| lint-codespell:  ## Reports spelling errors. |  | ||||||
| 	codespell -w $(CODESPELL_ARGS) | 	codespell -w $(CODESPELL_ARGS) | ||||||
|  |  | ||||||
| lint: ## Lint the python and golang sources | lint: ## Lint the python and golang sources | ||||||
| @ -209,9 +207,6 @@ web: web-lint-fix web-lint web-check-compile  ## Automatically fix formatting is | |||||||
| web-install:  ## Install the necessary libraries to build the Authentik UI | web-install:  ## Install the necessary libraries to build the Authentik UI | ||||||
| 	cd web && npm ci | 	cd web && npm ci | ||||||
|  |  | ||||||
| web-test: ## Run tests for the Authentik UI |  | ||||||
| 	cd web && npm run test |  | ||||||
|  |  | ||||||
| web-watch:  ## Build and watch the Authentik UI for changes, updating automatically | web-watch:  ## Build and watch the Authentik UI for changes, updating automatically | ||||||
| 	rm -rf web/dist/ | 	rm -rf web/dist/ | ||||||
| 	mkdir web/dist/ | 	mkdir web/dist/ | ||||||
| @ -243,7 +238,7 @@ website: website-lint-fix website-build  ## Automatically fix formatting issues | |||||||
| website-install: | website-install: | ||||||
| 	cd website && npm ci | 	cd website && npm ci | ||||||
|  |  | ||||||
| website-lint-fix: lint-codespell | website-lint-fix: | ||||||
| 	cd website && npm run prettier | 	cd website && npm run prettier | ||||||
|  |  | ||||||
| website-build: | website-build: | ||||||
| @ -257,7 +252,6 @@ website-watch:  ## Build and watch the documentation website, updating automatic | |||||||
| ######################### | ######################### | ||||||
|  |  | ||||||
| docker:  ## Build a docker image of the current source tree | docker:  ## Build a docker image of the current source tree | ||||||
| 	mkdir -p ${GEN_API_TS} |  | ||||||
| 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} | 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} | ||||||
|  |  | ||||||
| ######################### | ######################### | ||||||
| @ -284,3 +278,20 @@ ci-bandit: ci--meta-debug | |||||||
|  |  | ||||||
| ci-pending-migrations: ci--meta-debug | ci-pending-migrations: ci--meta-debug | ||||||
| 	ak makemigrations --check | 	ak makemigrations --check | ||||||
|  |  | ||||||
|  | ######################### | ||||||
|  | ## Benchmark | ||||||
|  | ######################### | ||||||
|  |  | ||||||
|  | benchmark-fixtures-create: | ||||||
|  | 	tests/benchmark/fixtures.py create | ||||||
|  |  | ||||||
|  | benchmark-run: | ||||||
|  | 	docker compose -f tests/benchmark/docker-compose.yml up -d | ||||||
|  | 	sleep 5 | ||||||
|  | 	tests/benchmark/run.sh | ||||||
|  |  | ||||||
|  | benchmark-fixtures-delete: | ||||||
|  | 	tests/benchmark/fixtures.py delete | ||||||
|  |  | ||||||
|  | benchmark: benchmark-fixtures-create benchmark-run benchmark-fixtures-delete | ||||||
|  | |||||||
| @ -15,9 +15,7 @@ | |||||||
|  |  | ||||||
| ## What is authentik? | ## What is authentik? | ||||||
|  |  | ||||||
| authentik is an open-source Identity Provider that emphasizes flexibility and versatility, with support for a wide set of protocols. | authentik is an open-source Identity Provider that emphasizes flexibility and versatility. It can be seamlessly integrated into existing environments to support new protocols. authentik is also a great solution for implementing sign-up, recovery, and other similar features in your application, saving you the hassle of dealing with them. | ||||||
|  |  | ||||||
| Our [enterprise offer](https://goauthentik.io/pricing) can also be used as a self-hosted replacement for large-scale deployments of Okta/Auth0, Entra ID, Ping Identity, or other legacy IdPs for employees and B2B2C use. |  | ||||||
|  |  | ||||||
| ## Installation | ## Installation | ||||||
|  |  | ||||||
| @ -34,7 +32,7 @@ For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/h | |||||||
|  |  | ||||||
| ## Development | ## Development | ||||||
|  |  | ||||||
| See [Developer Documentation](https://docs.goauthentik.io/docs/developer-docs/?utm_source=github) | See [Developer Documentation](https://goauthentik.io/developer-docs/?utm_source=github) | ||||||
|  |  | ||||||
| ## Security | ## Security | ||||||
|  |  | ||||||
|  | |||||||
| @ -19,9 +19,9 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni | |||||||
| (.x being the latest patch release for each version) | (.x being the latest patch release for each version) | ||||||
|  |  | ||||||
| | Version | Supported | | | Version | Supported | | ||||||
| | --------- | --------- | | | --- | --- | | ||||||
| | 2024.8.x  | ✅        | | | 2023.6.x | ✅ | | ||||||
| | 2024.10.x | ✅        | | | 2023.8.x | ✅ | | ||||||
|  |  | ||||||
| ## Reporting a Vulnerability | ## Reporting a Vulnerability | ||||||
|  |  | ||||||
| @ -32,7 +32,7 @@ To report a vulnerability, send an email to [security@goauthentik.io](mailto:se | |||||||
| authentik reserves the right to reclassify CVSS as necessary. To determine severity, we will use the CVSS calculator from NVD (https://nvd.nist.gov/vuln-metrics/cvss/v3-calculator). The calculated CVSS score will then be translated into one of the following categories: | authentik reserves the right to reclassify CVSS as necessary. To determine severity, we will use the CVSS calculator from NVD (https://nvd.nist.gov/vuln-metrics/cvss/v3-calculator). The calculated CVSS score will then be translated into one of the following categories: | ||||||
|  |  | ||||||
| | Score | Severity | | | Score | Severity | | ||||||
| | ---------- | -------- | | | --- | --- | | ||||||
| | 0.0 | None | | | 0.0 | None | | ||||||
| | 0.1 – 3.9 | Low | | | 0.1 – 3.9 | Low | | ||||||
| | 4.0 – 6.9 | Medium | | | 4.0 – 6.9 | Medium | | ||||||
|  | |||||||
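A minimal sketch, for illustration only, of the score-to-severity mapping in the table above; it covers only the bands visible in this hunk, since the higher bands fall outside the excerpted diff context:

    def cvss_severity(score: float) -> str:
        # Only the bands shown in the excerpt above; higher bands are cut off
        # by the diff context and deliberately omitted here.
        if score == 0.0:
            return "None"
        if 0.1 <= score <= 3.9:
            return "Low"
        if 4.0 <= score <= 6.9:
            return "Medium"
        raise ValueError("score outside the bands shown in this excerpt")

    print(cvss_severity(5.0))  # Medium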
| @ -2,7 +2,7 @@ | |||||||
|  |  | ||||||
| from os import environ | from os import environ | ||||||
|  |  | ||||||
| __version__ = "2024.10.5" | __version__ = "2024.2.2" | ||||||
| ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
| @ -2,21 +2,18 @@ | |||||||
|  |  | ||||||
| import platform | import platform | ||||||
| from datetime import datetime | from datetime import datetime | ||||||
| from ssl import OPENSSL_VERSION |  | ||||||
| from sys import version as python_version | from sys import version as python_version | ||||||
| from typing import TypedDict | from typing import TypedDict | ||||||
|  |  | ||||||
| from cryptography.hazmat.backends.openssl.backend import backend |  | ||||||
| from django.utils.timezone import now | from django.utils.timezone import now | ||||||
| from drf_spectacular.utils import extend_schema | from drf_spectacular.utils import extend_schema | ||||||
|  | from gunicorn import version_info as gunicorn_version | ||||||
| from rest_framework.fields import SerializerMethodField | from rest_framework.fields import SerializerMethodField | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| from rest_framework.response import Response | from rest_framework.response import Response | ||||||
| from rest_framework.views import APIView | from rest_framework.views import APIView | ||||||
|  |  | ||||||
| from authentik import get_full_version |  | ||||||
| from authentik.core.api.utils import PassiveSerializer | from authentik.core.api.utils import PassiveSerializer | ||||||
| from authentik.enterprise.license import LicenseKey |  | ||||||
| from authentik.lib.config import CONFIG | from authentik.lib.config import CONFIG | ||||||
| from authentik.lib.utils.reflection import get_env | from authentik.lib.utils.reflection import get_env | ||||||
| from authentik.outposts.apps import MANAGED_OUTPOST | from authentik.outposts.apps import MANAGED_OUTPOST | ||||||
| @ -28,13 +25,11 @@ class RuntimeDict(TypedDict): | |||||||
|     """Runtime information""" |     """Runtime information""" | ||||||
|  |  | ||||||
|     python_version: str |     python_version: str | ||||||
|  |     gunicorn_version: str | ||||||
|     environment: str |     environment: str | ||||||
|     architecture: str |     architecture: str | ||||||
|     platform: str |     platform: str | ||||||
|     uname: str |     uname: str | ||||||
|     openssl_version: str |  | ||||||
|     openssl_fips_enabled: bool | None |  | ||||||
|     authentik_version: str |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class SystemInfoSerializer(PassiveSerializer): | class SystemInfoSerializer(PassiveSerializer): | ||||||
| @ -69,15 +64,11 @@ class SystemInfoSerializer(PassiveSerializer): | |||||||
|     def get_runtime(self, request: Request) -> RuntimeDict: |     def get_runtime(self, request: Request) -> RuntimeDict: | ||||||
|         """Get versions""" |         """Get versions""" | ||||||
|         return { |         return { | ||||||
|             "architecture": platform.machine(), |  | ||||||
|             "authentik_version": get_full_version(), |  | ||||||
|             "environment": get_env(), |  | ||||||
|             "openssl_fips_enabled": ( |  | ||||||
|                 backend._fips_enabled if LicenseKey.get_total().status().is_valid else None |  | ||||||
|             ), |  | ||||||
|             "openssl_version": OPENSSL_VERSION, |  | ||||||
|             "platform": platform.platform(), |  | ||||||
|             "python_version": python_version, |             "python_version": python_version, | ||||||
|  |             "gunicorn_version": ".".join(str(x) for x in gunicorn_version), | ||||||
|  |             "environment": get_env(), | ||||||
|  |             "architecture": platform.machine(), | ||||||
|  |             "platform": platform.platform(), | ||||||
|             "uname": " ".join(platform.uname()), |             "uname": " ".join(platform.uname()), | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  | |||||||
| @ -12,7 +12,6 @@ from rest_framework.views import APIView | |||||||
| from authentik import __version__, get_build_hash | from authentik import __version__, get_build_hash | ||||||
| from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version | from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version | ||||||
| from authentik.core.api.utils import PassiveSerializer | from authentik.core.api.utils import PassiveSerializer | ||||||
| from authentik.outposts.models import Outpost |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class VersionSerializer(PassiveSerializer): | class VersionSerializer(PassiveSerializer): | ||||||
| @ -23,7 +22,6 @@ class VersionSerializer(PassiveSerializer): | |||||||
|     version_latest_valid = SerializerMethodField() |     version_latest_valid = SerializerMethodField() | ||||||
|     build_hash = SerializerMethodField() |     build_hash = SerializerMethodField() | ||||||
|     outdated = SerializerMethodField() |     outdated = SerializerMethodField() | ||||||
|     outpost_outdated = SerializerMethodField() |  | ||||||
|  |  | ||||||
|     def get_build_hash(self, _) -> str: |     def get_build_hash(self, _) -> str: | ||||||
|         """Get build hash, if version is not latest or released""" |         """Get build hash, if version is not latest or released""" | ||||||
| @ -49,15 +47,6 @@ class VersionSerializer(PassiveSerializer): | |||||||
|         """Check if we're running the latest version""" |         """Check if we're running the latest version""" | ||||||
|         return parse(self.get_version_current(instance)) < parse(self.get_version_latest(instance)) |         return parse(self.get_version_current(instance)) < parse(self.get_version_latest(instance)) | ||||||
|  |  | ||||||
|     def get_outpost_outdated(self, _) -> bool: |  | ||||||
|         """Check if any outpost is outdated/has a version mismatch""" |  | ||||||
|         any_outdated = False |  | ||||||
|         for outpost in Outpost.objects.all(): |  | ||||||
|             for state in outpost.state: |  | ||||||
|                 if state.version_outdated: |  | ||||||
|                     any_outdated = True |  | ||||||
|         return any_outdated |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class VersionView(APIView): | class VersionView(APIView): | ||||||
|     """Get running and latest version.""" |     """Get running and latest version.""" | ||||||
|  | |||||||
| @ -1,33 +0,0 @@ | |||||||
| from rest_framework.permissions import IsAdminUser |  | ||||||
| from rest_framework.viewsets import ReadOnlyModelViewSet |  | ||||||
|  |  | ||||||
| from authentik.admin.models import VersionHistory |  | ||||||
| from authentik.core.api.utils import ModelSerializer |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class VersionHistorySerializer(ModelSerializer): |  | ||||||
|     """VersionHistory Serializer""" |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         model = VersionHistory |  | ||||||
|         fields = [ |  | ||||||
|             "id", |  | ||||||
|             "timestamp", |  | ||||||
|             "version", |  | ||||||
|             "build", |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class VersionHistoryViewSet(ReadOnlyModelViewSet): |  | ||||||
|     """VersionHistory Viewset""" |  | ||||||
|  |  | ||||||
|     queryset = VersionHistory.objects.all() |  | ||||||
|     serializer_class = VersionHistorySerializer |  | ||||||
|     permission_classes = [IsAdminUser] |  | ||||||
|     filterset_fields = [ |  | ||||||
|         "version", |  | ||||||
|         "build", |  | ||||||
|     ] |  | ||||||
|     search_fields = ["version", "build"] |  | ||||||
|     ordering = ["-timestamp"] |  | ||||||
|     pagination_class = None |  | ||||||
| @ -1,22 +0,0 @@ | |||||||
| """authentik admin models""" |  | ||||||
|  |  | ||||||
| from django.db import models |  | ||||||
| from django.utils.translation import gettext_lazy as _ |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class VersionHistory(models.Model): |  | ||||||
|     id = models.BigAutoField(primary_key=True) |  | ||||||
|     timestamp = models.DateTimeField() |  | ||||||
|     version = models.TextField() |  | ||||||
|     build = models.TextField() |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         managed = False |  | ||||||
|         db_table = "authentik_version_history" |  | ||||||
|         ordering = ("-timestamp",) |  | ||||||
|         verbose_name = _("Version history") |  | ||||||
|         verbose_name_plural = _("Version history") |  | ||||||
|         default_permissions = [] |  | ||||||
|  |  | ||||||
|     def __str__(self): |  | ||||||
|         return f"{self.version}.{self.build} ({self.timestamp})" |  | ||||||
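The unmanaged VersionHistory model above exists only on the version-20 side and simply reads from an existing authentik_version_history table; a minimal usage sketch, assuming that table is present and populated:

    from authentik.admin.models import VersionHistory

    # Meta.ordering = ("-timestamp",) puts the newest record first.
    latest = VersionHistory.objects.first()
    if latest:
        print(f"Upgraded to {latest.version} (build {latest.build}) at {latest.timestamp}")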
| @ -1,8 +1,10 @@ | |||||||
| """authentik admin tasks""" | """authentik admin tasks""" | ||||||
|  |  | ||||||
|  | import re | ||||||
|  |  | ||||||
| from django.core.cache import cache | from django.core.cache import cache | ||||||
|  | from django.core.validators import URLValidator | ||||||
| from django.db import DatabaseError, InternalError, ProgrammingError | from django.db import DatabaseError, InternalError, ProgrammingError | ||||||
| from django.utils.translation import gettext_lazy as _ |  | ||||||
| from packaging.version import parse | from packaging.version import parse | ||||||
| from requests import RequestException | from requests import RequestException | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
| @ -19,6 +21,8 @@ LOGGER = get_logger() | |||||||
| VERSION_NULL = "0.0.0" | VERSION_NULL = "0.0.0" | ||||||
| VERSION_CACHE_KEY = "authentik_latest_version" | VERSION_CACHE_KEY = "authentik_latest_version" | ||||||
| VERSION_CACHE_TIMEOUT = 8 * 60 * 60  # 8 hours | VERSION_CACHE_TIMEOUT = 8 * 60 * 60  # 8 hours | ||||||
|  | # Chop off the first ^ because we want to search the entire string | ||||||
|  | URL_FINDER = URLValidator.regex.pattern[1:] | ||||||
| LOCAL_VERSION = parse(__version__) | LOCAL_VERSION = parse(__version__) | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -74,16 +78,10 @@ def update_latest_version(self: SystemTask): | |||||||
|                 context__new_version=upstream_version, |                 context__new_version=upstream_version, | ||||||
|             ).exists(): |             ).exists(): | ||||||
|                 return |                 return | ||||||
|             Event.new( |             event_dict = {"new_version": upstream_version} | ||||||
|                 EventAction.UPDATE_AVAILABLE, |             if match := re.search(URL_FINDER, data.get("stable", {}).get("changelog", "")): | ||||||
|                 message=_( |                 event_dict["message"] = f"Changelog: {match.group()}" | ||||||
|                     "New version {version} available!".format( |             Event.new(EventAction.UPDATE_AVAILABLE, **event_dict).save() | ||||||
|                         version=upstream_version, |  | ||||||
|                     ) |  | ||||||
|                 ), |  | ||||||
|                 new_version=upstream_version, |  | ||||||
|                 changelog=data.get("stable", {}).get("changelog_url"), |  | ||||||
|             ).save() |  | ||||||
|     except (RequestException, IndexError) as exc: |     except (RequestException, IndexError) as exc: | ||||||
|         cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT) |         cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT) | ||||||
|         self.set_error(exc) |         self.set_error(exc) | ||||||
|  | |||||||
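On the benchmarks side, update_latest_version pulls the changelog URL out of free text with Django's URLValidator pattern; a self-contained sketch of that extraction, using the sample changelog string from RESPONSE_VALID in the test hunk that follows:

    import re

    from django.core.validators import URLValidator

    # Drop the leading ^ so the pattern can match a URL anywhere in the string.
    URL_FINDER = URLValidator.regex.pattern[1:]

    changelog = "See https://goauthentik.io/test"
    if match := re.search(URL_FINDER, changelog):
        print(f"Changelog: {match.group()}")  # Changelog: https://goauthentik.io/test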
| @ -17,7 +17,6 @@ RESPONSE_VALID = { | |||||||
|     "stable": { |     "stable": { | ||||||
|         "version": "99999999.9999999", |         "version": "99999999.9999999", | ||||||
|         "changelog": "See https://goauthentik.io/test", |         "changelog": "See https://goauthentik.io/test", | ||||||
|         "changelog_url": "https://goauthentik.io/test", |  | ||||||
|         "reason": "bugfix", |         "reason": "bugfix", | ||||||
|     }, |     }, | ||||||
| } | } | ||||||
| @ -36,7 +35,7 @@ class TestAdminTasks(TestCase): | |||||||
|                 Event.objects.filter( |                 Event.objects.filter( | ||||||
|                     action=EventAction.UPDATE_AVAILABLE, |                     action=EventAction.UPDATE_AVAILABLE, | ||||||
|                     context__new_version="99999999.9999999", |                     context__new_version="99999999.9999999", | ||||||
|                     context__message="New version 99999999.9999999 available!", |                     context__message="Changelog: https://goauthentik.io/test", | ||||||
|                 ).exists() |                 ).exists() | ||||||
|             ) |             ) | ||||||
|             # test that a consecutive check doesn't create a duplicate event |             # test that a consecutive check doesn't create a duplicate event | ||||||
| @ -46,7 +45,7 @@ class TestAdminTasks(TestCase): | |||||||
|                     Event.objects.filter( |                     Event.objects.filter( | ||||||
|                         action=EventAction.UPDATE_AVAILABLE, |                         action=EventAction.UPDATE_AVAILABLE, | ||||||
|                         context__new_version="99999999.9999999", |                         context__new_version="99999999.9999999", | ||||||
|                         context__message="New version 99999999.9999999 available!", |                         context__message="Changelog: https://goauthentik.io/test", | ||||||
|                     ) |                     ) | ||||||
|                 ), |                 ), | ||||||
|                 1, |                 1, | ||||||
|  | |||||||
| @ -6,7 +6,6 @@ from authentik.admin.api.meta import AppsViewSet, ModelViewSet | |||||||
| from authentik.admin.api.metrics import AdministrationMetricsViewSet | from authentik.admin.api.metrics import AdministrationMetricsViewSet | ||||||
| from authentik.admin.api.system import SystemView | from authentik.admin.api.system import SystemView | ||||||
| from authentik.admin.api.version import VersionView | from authentik.admin.api.version import VersionView | ||||||
| from authentik.admin.api.version_history import VersionHistoryViewSet |  | ||||||
| from authentik.admin.api.workers import WorkerView | from authentik.admin.api.workers import WorkerView | ||||||
|  |  | ||||||
| api_urlpatterns = [ | api_urlpatterns = [ | ||||||
| @ -18,7 +17,6 @@ api_urlpatterns = [ | |||||||
|         name="admin_metrics", |         name="admin_metrics", | ||||||
|     ), |     ), | ||||||
|     path("admin/version/", VersionView.as_view(), name="admin_version"), |     path("admin/version/", VersionView.as_view(), name="admin_version"), | ||||||
|     ("admin/version/history", VersionHistoryViewSet, "version_history"), |  | ||||||
|     path("admin/workers/", WorkerView.as_view(), name="admin_workers"), |     path("admin/workers/", WorkerView.as_view(), name="admin_workers"), | ||||||
|     path("admin/system/", SystemView.as_view(), name="admin_system"), |     path("admin/system/", SystemView.as_view(), name="admin_system"), | ||||||
| ] | ] | ||||||
|  | |||||||
| @ -1,13 +1,13 @@ | |||||||
| {% extends "base/skeleton.html" %} | {% extends "base/skeleton.html" %} | ||||||
|  |  | ||||||
| {% load authentik_core %} | {% load static %} | ||||||
|  |  | ||||||
| {% block title %} | {% block title %} | ||||||
| API Browser - {{ brand.branding_title }} | API Browser - {{ brand.branding_title }} | ||||||
| {% endblock %} | {% endblock %} | ||||||
|  |  | ||||||
| {% block head %} | {% block head %} | ||||||
| <script src="{% versioned_script 'dist/standalone/api-browser/index-%v.js' %}" type="module"></script> | <script src="{% static 'dist/standalone/api-browser/index.js' %}?version={{ version }}" type="module"></script> | ||||||
| <meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)"> | <meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)"> | ||||||
| <meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)"> | <meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)"> | ||||||
| {% endblock %} | {% endblock %} | ||||||
|  | |||||||
| @ -51,11 +51,9 @@ class BlueprintInstanceSerializer(ModelSerializer): | |||||||
|         context = self.instance.context if self.instance else {} |         context = self.instance.context if self.instance else {} | ||||||
|         valid, logs = Importer.from_string(content, context).validate() |         valid, logs = Importer.from_string(content, context).validate() | ||||||
|         if not valid: |         if not valid: | ||||||
|  |             text_logs = "\n".join([x["event"] for x in logs]) | ||||||
|             raise ValidationError( |             raise ValidationError( | ||||||
|                 [ |                 _("Failed to validate blueprint: {logs}".format_map({"logs": text_logs})) | ||||||
|                     _("Failed to validate blueprint"), |  | ||||||
|                     *[f"- {x.event}" for x in logs], |  | ||||||
|                 ] |  | ||||||
|             ) |             ) | ||||||
|         return content |         return content | ||||||
|  |  | ||||||
|  | |||||||
| @ -23,11 +23,9 @@ class Command(BaseCommand): | |||||||
|                 for blueprint_path in options.get("blueprints", []): |                 for blueprint_path in options.get("blueprints", []): | ||||||
|                     content = BlueprintInstance(path=blueprint_path).retrieve() |                     content = BlueprintInstance(path=blueprint_path).retrieve() | ||||||
|                     importer = Importer.from_string(content) |                     importer = Importer.from_string(content) | ||||||
|                     valid, logs = importer.validate() |                     valid, _ = importer.validate() | ||||||
|                     if not valid: |                     if not valid: | ||||||
|                         self.stderr.write("Blueprint invalid") |                         self.stderr.write("blueprint invalid") | ||||||
|                         for log in logs: |  | ||||||
|                             self.stderr.write(f"\t{log.logger}: {log.event}: {log.attributes}") |  | ||||||
|                         sys_exit(1) |                         sys_exit(1) | ||||||
|                     importer.apply() |                     importer.apply() | ||||||
|  |  | ||||||
|  | |||||||
| @ -113,19 +113,16 @@ class Command(BaseCommand): | |||||||
|             ) |             ) | ||||||
|             model_path = f"{model._meta.app_label}.{model._meta.model_name}" |             model_path = f"{model._meta.app_label}.{model._meta.model_name}" | ||||||
|             self.schema["properties"]["entries"]["items"]["oneOf"].append( |             self.schema["properties"]["entries"]["items"]["oneOf"].append( | ||||||
|                 self.template_entry(model_path, model, serializer) |                 self.template_entry(model_path, serializer) | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|     def template_entry(self, model_path: str, model: type[Model], serializer: Serializer) -> dict: |     def template_entry(self, model_path: str, serializer: Serializer) -> dict: | ||||||
|         """Template entry for a single model""" |         """Template entry for a single model""" | ||||||
|         model_schema = self.to_jsonschema(serializer) |         model_schema = self.to_jsonschema(serializer) | ||||||
|         model_schema["required"] = [] |         model_schema["required"] = [] | ||||||
|         def_name = f"model_{model_path}" |         def_name = f"model_{model_path}" | ||||||
|         def_path = f"#/$defs/{def_name}" |         def_path = f"#/$defs/{def_name}" | ||||||
|         self.schema["$defs"][def_name] = model_schema |         self.schema["$defs"][def_name] = model_schema | ||||||
|         def_name_perm = f"model_{model_path}_permissions" |  | ||||||
|         def_path_perm = f"#/$defs/{def_name_perm}" |  | ||||||
|         self.schema["$defs"][def_name_perm] = self.model_permissions(model) |  | ||||||
|         return { |         return { | ||||||
|             "type": "object", |             "type": "object", | ||||||
|             "required": ["model", "identifiers"], |             "required": ["model", "identifiers"], | ||||||
| @ -138,7 +135,6 @@ class Command(BaseCommand): | |||||||
|                     "default": "present", |                     "default": "present", | ||||||
|                 }, |                 }, | ||||||
|                 "conditions": {"type": "array", "items": {"type": "boolean"}}, |                 "conditions": {"type": "array", "items": {"type": "boolean"}}, | ||||||
|                 "permissions": {"$ref": def_path_perm}, |  | ||||||
|                 "attrs": {"$ref": def_path}, |                 "attrs": {"$ref": def_path}, | ||||||
|                 "identifiers": {"$ref": def_path}, |                 "identifiers": {"$ref": def_path}, | ||||||
|             }, |             }, | ||||||
| @ -189,20 +185,3 @@ class Command(BaseCommand): | |||||||
|         if required: |         if required: | ||||||
|             result["required"] = required |             result["required"] = required | ||||||
|         return result |         return result | ||||||
|  |  | ||||||
|     def model_permissions(self, model: type[Model]) -> dict: |  | ||||||
|         perms = [x[0] for x in model._meta.permissions] |  | ||||||
|         for action in model._meta.default_permissions: |  | ||||||
|             perms.append(f"{action}_{model._meta.model_name}") |  | ||||||
|         return { |  | ||||||
|             "type": "array", |  | ||||||
|             "items": { |  | ||||||
|                 "type": "object", |  | ||||||
|                 "required": ["permission"], |  | ||||||
|                 "properties": { |  | ||||||
|                     "permission": {"type": "string", "enum": perms}, |  | ||||||
|                     "user": {"type": "integer"}, |  | ||||||
|                     "role": {"type": "string"}, |  | ||||||
|                 }, |  | ||||||
|             }, |  | ||||||
|         } |  | ||||||
|  | |||||||
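For illustration, the model_permissions() helper added on the version-20 side emits a schema fragment of roughly this shape; the model and permission names below are hypothetical and not taken from the diff:

    # Approximate fragment for a hypothetical model "exampleapp.widget" that only
    # defines Django's default permissions (add/change/delete/view).
    permissions_fragment = {
        "type": "array",
        "items": {
            "type": "object",
            "required": ["permission"],
            "properties": {
                "permission": {
                    "type": "string",
                    "enum": ["add_widget", "change_widget", "delete_widget", "view_widget"],
                },
                "user": {"type": "integer"},
                "role": {"type": "string"},
            },
        },
    }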
| @ -29,7 +29,9 @@ def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path): | |||||||
|         if version != 1: |         if version != 1: | ||||||
|             return |             return | ||||||
|         blueprint_file.seek(0) |         blueprint_file.seek(0) | ||||||
|     instance = BlueprintInstance.objects.using(db_alias).filter(path=path).first() |     instance: BlueprintInstance = ( | ||||||
|  |         BlueprintInstance.objects.using(db_alias).filter(path=path).first() | ||||||
|  |     ) | ||||||
|     rel_path = path.relative_to(Path(CONFIG.get("blueprints_dir"))) |     rel_path = path.relative_to(Path(CONFIG.get("blueprints_dir"))) | ||||||
|     meta = None |     meta = None | ||||||
|     if metadata: |     if metadata: | ||||||
|  | |||||||
| @ -1,24 +0,0 @@ | |||||||
| version: 1 |  | ||||||
| entries: |  | ||||||
|   - model: authentik_core.user |  | ||||||
|     id: user |  | ||||||
|     identifiers: |  | ||||||
|       username: "%(id)s" |  | ||||||
|     attrs: |  | ||||||
|       name: "%(id)s" |  | ||||||
|   - model: authentik_rbac.role |  | ||||||
|     id: role |  | ||||||
|     identifiers: |  | ||||||
|       name: "%(id)s" |  | ||||||
|   - model: authentik_flows.flow |  | ||||||
|     identifiers: |  | ||||||
|       slug: "%(id)s" |  | ||||||
|     attrs: |  | ||||||
|       designation: authentication |  | ||||||
|       name: foo |  | ||||||
|       title: foo |  | ||||||
|     permissions: |  | ||||||
|       - permission: view_flow |  | ||||||
|         user: !KeyOf user |  | ||||||
|       - permission: view_flow |  | ||||||
|         role: !KeyOf role |  | ||||||
| @ -1,8 +0,0 @@ | |||||||
| version: 1 |  | ||||||
| entries: |  | ||||||
|   - model: authentik_rbac.role |  | ||||||
|     identifiers: |  | ||||||
|       name: "%(id)s" |  | ||||||
|     attrs: |  | ||||||
|       permissions: |  | ||||||
|         - authentik_blueprints.view_blueprintinstance |  | ||||||
| @ -1,9 +0,0 @@ | |||||||
| version: 1 |  | ||||||
| entries: |  | ||||||
|   - model: authentik_core.user |  | ||||||
|     identifiers: |  | ||||||
|       username: "%(id)s" |  | ||||||
|     attrs: |  | ||||||
|       name: "%(id)s" |  | ||||||
|       permissions: |  | ||||||
|         - authentik_blueprints.view_blueprintinstance |  | ||||||
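The removed fixtures above exercise object- and role-level permissions; as a rough illustration of the intended effect (not authentik's actual import code), django-guardian's assign_perm can grant the same per-object permission that the first removed fixture describes for a user and a role:

    from guardian.shortcuts import assign_perm, get_perms

    from authentik.core.models import User
    from authentik.flows.models import Flow
    from authentik.rbac.models import Role


    def grant_view_flow(user: User, role: Role, flow: Flow) -> None:
        # Per-object grants mirroring the two `permission: view_flow` entries
        # in the first deleted fixture above.
        assign_perm("view_flow", user, flow)
        assign_perm("view_flow", role.group, flow)
        assert get_perms(user, flow) == ["view_flow"]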
| @ -27,8 +27,7 @@ def blueprint_tester(file_name: Path) -> Callable: | |||||||
|         base = Path("blueprints/") |         base = Path("blueprints/") | ||||||
|         rel_path = Path(file_name).relative_to(base) |         rel_path = Path(file_name).relative_to(base) | ||||||
|         importer = Importer.from_string(BlueprintInstance(path=str(rel_path)).retrieve()) |         importer = Importer.from_string(BlueprintInstance(path=str(rel_path)).retrieve()) | ||||||
|         validation, logs = importer.validate() |         self.assertTrue(importer.validate()[0]) | ||||||
|         self.assertTrue(validation, logs) |  | ||||||
|         self.assertTrue(importer.apply()) |         self.assertTrue(importer.apply()) | ||||||
|  |  | ||||||
|     return tester |     return tester | ||||||
|  | |||||||
| @ -78,5 +78,5 @@ class TestBlueprintsV1API(APITestCase): | |||||||
|         self.assertEqual(res.status_code, 400) |         self.assertEqual(res.status_code, 400) | ||||||
|         self.assertJSONEqual( |         self.assertJSONEqual( | ||||||
|             res.content.decode(), |             res.content.decode(), | ||||||
|             {"content": ["Failed to validate blueprint", "- Invalid blueprint version"]}, |             {"content": ["Failed to validate blueprint: Invalid blueprint version"]}, | ||||||
|         ) |         ) | ||||||
|  | |||||||
| @ -1,57 +0,0 @@ | |||||||
| """Test blueprints v1""" |  | ||||||
|  |  | ||||||
| from django.test import TransactionTestCase |  | ||||||
| from guardian.shortcuts import get_perms |  | ||||||
|  |  | ||||||
| from authentik.blueprints.v1.importer import Importer |  | ||||||
| from authentik.core.models import User |  | ||||||
| from authentik.flows.models import Flow |  | ||||||
| from authentik.lib.generators import generate_id |  | ||||||
| from authentik.lib.tests.utils import load_fixture |  | ||||||
| from authentik.rbac.models import Role |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestBlueprintsV1RBAC(TransactionTestCase): |  | ||||||
|     """Test Blueprints rbac attribute""" |  | ||||||
|  |  | ||||||
|     def test_user_permission(self): |  | ||||||
|         """Test permissions""" |  | ||||||
|         uid = generate_id() |  | ||||||
|         import_yaml = load_fixture("fixtures/rbac_user.yaml", id=uid) |  | ||||||
|  |  | ||||||
|         importer = Importer.from_string(import_yaml) |  | ||||||
|         self.assertTrue(importer.validate()[0]) |  | ||||||
|         self.assertTrue(importer.apply()) |  | ||||||
|         user = User.objects.filter(username=uid).first() |  | ||||||
|         self.assertIsNotNone(user) |  | ||||||
|         self.assertTrue(user.has_perms(["authentik_blueprints.view_blueprintinstance"])) |  | ||||||
|  |  | ||||||
|     def test_role_permission(self): |  | ||||||
|         """Test permissions""" |  | ||||||
|         uid = generate_id() |  | ||||||
|         import_yaml = load_fixture("fixtures/rbac_role.yaml", id=uid) |  | ||||||
|  |  | ||||||
|         importer = Importer.from_string(import_yaml) |  | ||||||
|         self.assertTrue(importer.validate()[0]) |  | ||||||
|         self.assertTrue(importer.apply()) |  | ||||||
|         role = Role.objects.filter(name=uid).first() |  | ||||||
|         self.assertIsNotNone(role) |  | ||||||
|         self.assertEqual( |  | ||||||
|             list(role.group.permissions.all().values_list("codename", flat=True)), |  | ||||||
|             ["view_blueprintinstance"], |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_object_permission(self): |  | ||||||
|         """Test permissions""" |  | ||||||
|         uid = generate_id() |  | ||||||
|         import_yaml = load_fixture("fixtures/rbac_object.yaml", id=uid) |  | ||||||
|  |  | ||||||
|         importer = Importer.from_string(import_yaml) |  | ||||||
|         self.assertTrue(importer.validate()[0]) |  | ||||||
|         self.assertTrue(importer.apply()) |  | ||||||
|         flow = Flow.objects.filter(slug=uid).first() |  | ||||||
|         user = User.objects.filter(username=uid).first() |  | ||||||
|         role = Role.objects.filter(name=uid).first() |  | ||||||
|         self.assertIsNotNone(flow) |  | ||||||
|         self.assertEqual(get_perms(user, flow), ["view_flow"]) |  | ||||||
|         self.assertEqual(get_perms(role.group, flow), ["view_flow"]) |  | ||||||
| @ -1,7 +1,7 @@ | |||||||
| """transfer common classes""" | """transfer common classes""" | ||||||
|  |  | ||||||
| from collections import OrderedDict | from collections import OrderedDict | ||||||
| from collections.abc import Generator, Iterable, Mapping | from collections.abc import Iterable, Mapping | ||||||
| from copy import copy | from copy import copy | ||||||
| from dataclasses import asdict, dataclass, field, is_dataclass | from dataclasses import asdict, dataclass, field, is_dataclass | ||||||
| from enum import Enum | from enum import Enum | ||||||
| @ -58,15 +58,6 @@ class BlueprintEntryDesiredState(Enum): | |||||||
|     MUST_CREATED = "must_created" |     MUST_CREATED = "must_created" | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass |  | ||||||
| class BlueprintEntryPermission: |  | ||||||
|     """Describe object-level permissions""" |  | ||||||
|  |  | ||||||
|     permission: Union[str, "YAMLTag"] |  | ||||||
|     user: Union[int, "YAMLTag", None] = field(default=None) |  | ||||||
|     role: Union[str, "YAMLTag", None] = field(default=None) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass | @dataclass | ||||||
| class BlueprintEntry: | class BlueprintEntry: | ||||||
|     """Single entry of a blueprint""" |     """Single entry of a blueprint""" | ||||||
| @ -78,14 +69,13 @@ class BlueprintEntry: | |||||||
|     conditions: list[Any] = field(default_factory=list) |     conditions: list[Any] = field(default_factory=list) | ||||||
|     identifiers: dict[str, Any] = field(default_factory=dict) |     identifiers: dict[str, Any] = field(default_factory=dict) | ||||||
|     attrs: dict[str, Any] | None = field(default_factory=dict) |     attrs: dict[str, Any] | None = field(default_factory=dict) | ||||||
|     permissions: list[BlueprintEntryPermission] = field(default_factory=list) |  | ||||||
|  |  | ||||||
|     id: str | None = None |     id: str | None = None | ||||||
|  |  | ||||||
|     _state: BlueprintEntryState = field(default_factory=BlueprintEntryState) |     _state: BlueprintEntryState = field(default_factory=BlueprintEntryState) | ||||||
|  |  | ||||||
|     def __post_init__(self, *args, **kwargs) -> None: |     def __post_init__(self, *args, **kwargs) -> None: | ||||||
|         self.__tag_contexts: list[YAMLTagContext] = [] |         self.__tag_contexts: list["YAMLTagContext"] = [] | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     def from_model(model: SerializerModel, *extra_identifier_names: str) -> "BlueprintEntry": |     def from_model(model: SerializerModel, *extra_identifier_names: str) -> "BlueprintEntry": | ||||||
| @ -160,17 +150,6 @@ class BlueprintEntry: | |||||||
|         """Get the blueprint model, with yaml tags resolved if present""" |         """Get the blueprint model, with yaml tags resolved if present""" | ||||||
|         return str(self.tag_resolver(self.model, blueprint)) |         return str(self.tag_resolver(self.model, blueprint)) | ||||||
|  |  | ||||||
|     def get_permissions( |  | ||||||
|         self, blueprint: "Blueprint" |  | ||||||
|     ) -> Generator[BlueprintEntryPermission, None, None]: |  | ||||||
|         """Get permissions of this entry, with all yaml tags resolved""" |  | ||||||
|         for perm in self.permissions: |  | ||||||
|             yield BlueprintEntryPermission( |  | ||||||
|                 permission=self.tag_resolver(perm.permission, blueprint), |  | ||||||
|                 user=self.tag_resolver(perm.user, blueprint), |  | ||||||
|                 role=self.tag_resolver(perm.role, blueprint), |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|     def check_all_conditions_match(self, blueprint: "Blueprint") -> bool: |     def check_all_conditions_match(self, blueprint: "Blueprint") -> bool: | ||||||
|         """Check all conditions of this entry match (evaluate to True)""" |         """Check all conditions of this entry match (evaluate to True)""" | ||||||
|         return all(self.tag_resolver(self.conditions, blueprint)) |         return all(self.tag_resolver(self.conditions, blueprint)) | ||||||
| @ -328,10 +307,7 @@ class Find(YAMLTag): | |||||||
|         else: |         else: | ||||||
|             model_name = self.model_name |             model_name = self.model_name | ||||||
|  |  | ||||||
|         try: |  | ||||||
|         model_class = apps.get_model(*model_name.split(".")) |         model_class = apps.get_model(*model_name.split(".")) | ||||||
|         except LookupError as exc: |  | ||||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc |  | ||||||
|  |  | ||||||
|         query = Q() |         query = Q() | ||||||
|         for cond in self.conditions: |         for cond in self.conditions: | ||||||
|  | |||||||
| @ -16,7 +16,6 @@ from django.db.models.query_utils import Q | |||||||
| from django.db.transaction import atomic | from django.db.transaction import atomic | ||||||
| from django.db.utils import IntegrityError | from django.db.utils import IntegrityError | ||||||
| from guardian.models import UserObjectPermission | from guardian.models import UserObjectPermission | ||||||
| from guardian.shortcuts import assign_perm |  | ||||||
| from rest_framework.exceptions import ValidationError | from rest_framework.exceptions import ValidationError | ||||||
| from rest_framework.serializers import BaseSerializer, Serializer | from rest_framework.serializers import BaseSerializer, Serializer | ||||||
| from structlog.stdlib import BoundLogger, get_logger | from structlog.stdlib import BoundLogger, get_logger | ||||||
| @ -33,47 +32,31 @@ from authentik.blueprints.v1.common import ( | |||||||
| from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry | from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry | ||||||
| from authentik.core.models import ( | from authentik.core.models import ( | ||||||
|     AuthenticatedSession, |     AuthenticatedSession, | ||||||
|     GroupSourceConnection, |  | ||||||
|     PropertyMapping, |     PropertyMapping, | ||||||
|     Provider, |     Provider, | ||||||
|     Source, |     Source, | ||||||
|     User, |  | ||||||
|     UserSourceConnection, |     UserSourceConnection, | ||||||
| ) | ) | ||||||
| from authentik.enterprise.license import LicenseKey | from authentik.enterprise.license import LicenseKey | ||||||
| from authentik.enterprise.models import LicenseUsage | from authentik.enterprise.models import LicenseUsage | ||||||
| from authentik.enterprise.providers.google_workspace.models import ( |  | ||||||
|     GoogleWorkspaceProviderGroup, |  | ||||||
|     GoogleWorkspaceProviderUser, |  | ||||||
| ) |  | ||||||
| from authentik.enterprise.providers.microsoft_entra.models import ( |  | ||||||
|     MicrosoftEntraProviderGroup, |  | ||||||
|     MicrosoftEntraProviderUser, |  | ||||||
| ) |  | ||||||
| from authentik.enterprise.providers.rac.models import ConnectionToken | from authentik.enterprise.providers.rac.models import ConnectionToken | ||||||
| from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import ( |  | ||||||
|     EndpointDevice, |  | ||||||
|     EndpointDeviceConnection, |  | ||||||
| ) |  | ||||||
| from authentik.events.logs import LogEvent, capture_logs | from authentik.events.logs import LogEvent, capture_logs | ||||||
| from authentik.events.models import SystemTask | from authentik.events.models import SystemTask | ||||||
| from authentik.events.utils import cleanse_dict | from authentik.events.utils import cleanse_dict | ||||||
| from authentik.flows.models import FlowToken, Stage | from authentik.flows.models import FlowToken, Stage | ||||||
| from authentik.lib.models import SerializerModel | from authentik.lib.models import SerializerModel | ||||||
| from authentik.lib.sentry import SentryIgnoredException | from authentik.lib.sentry import SentryIgnoredException | ||||||
| from authentik.lib.utils.reflection import get_apps |  | ||||||
| from authentik.outposts.models import OutpostServiceConnection | from authentik.outposts.models import OutpostServiceConnection | ||||||
| from authentik.policies.models import Policy, PolicyBindingModel | from authentik.policies.models import Policy, PolicyBindingModel | ||||||
| from authentik.policies.reputation.models import Reputation | from authentik.policies.reputation.models import Reputation | ||||||
| from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken | from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken | ||||||
| from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser | from authentik.providers.scim.models import SCIMGroup, SCIMUser | ||||||
| from authentik.rbac.models import Role |  | ||||||
| from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser | from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser | ||||||
| from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType | from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType | ||||||
| from authentik.tenants.models import Tenant | from authentik.tenants.models import Tenant | ||||||
|  |  | ||||||
| # Context set when the serializer is created in a blueprint context | # Context set when the serializer is created in a blueprint context | ||||||
| # Update website/docs/customize/blueprints/v1/models.md when used | # Update website/developer-docs/blueprints/v1/models.md when used | ||||||
| SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry" | SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry" | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -96,7 +79,6 @@ def excluded_models() -> list[type[Model]]: | |||||||
|         Source, |         Source, | ||||||
|         PropertyMapping, |         PropertyMapping, | ||||||
|         UserSourceConnection, |         UserSourceConnection, | ||||||
|         GroupSourceConnection, |  | ||||||
|         Stage, |         Stage, | ||||||
|         OutpostServiceConnection, |         OutpostServiceConnection, | ||||||
|         Policy, |         Policy, | ||||||
| @ -104,11 +86,10 @@ def excluded_models() -> list[type[Model]]: | |||||||
|         # Classes that have other dependencies |         # Classes that have other dependencies | ||||||
|         AuthenticatedSession, |         AuthenticatedSession, | ||||||
|         # Classes which are only internally managed |         # Classes which are only internally managed | ||||||
|         # FIXME: these shouldn't need to be explicitly listed, but rather based off of a mixin |  | ||||||
|         FlowToken, |         FlowToken, | ||||||
|         LicenseUsage, |         LicenseUsage, | ||||||
|         SCIMProviderGroup, |         SCIMGroup, | ||||||
|         SCIMProviderUser, |         SCIMUser, | ||||||
|         Tenant, |         Tenant, | ||||||
|         SystemTask, |         SystemTask, | ||||||
|         ConnectionToken, |         ConnectionToken, | ||||||
| @ -119,12 +100,6 @@ def excluded_models() -> list[type[Model]]: | |||||||
|         WebAuthnDeviceType, |         WebAuthnDeviceType, | ||||||
|         SCIMSourceUser, |         SCIMSourceUser, | ||||||
|         SCIMSourceGroup, |         SCIMSourceGroup, | ||||||
|         GoogleWorkspaceProviderUser, |  | ||||||
|         GoogleWorkspaceProviderGroup, |  | ||||||
|         MicrosoftEntraProviderUser, |  | ||||||
|         MicrosoftEntraProviderGroup, |  | ||||||
|         EndpointDevice, |  | ||||||
|         EndpointDeviceConnection, |  | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -148,16 +123,6 @@ def transaction_rollback(): | |||||||
|         pass |         pass | ||||||
|  |  | ||||||
|  |  | ||||||
| def rbac_models() -> dict: |  | ||||||
|     models = {} |  | ||||||
|     for app in get_apps(): |  | ||||||
|         for model in app.get_models(): |  | ||||||
|             if not is_model_allowed(model): |  | ||||||
|                 continue |  | ||||||
|             models[model._meta.model_name] = app.label |  | ||||||
|     return models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Importer: | class Importer: | ||||||
|     """Import Blueprint from raw dict or YAML/JSON""" |     """Import Blueprint from raw dict or YAML/JSON""" | ||||||
|  |  | ||||||
| @ -176,10 +141,7 @@ class Importer: | |||||||
|  |  | ||||||
|     def default_context(self): |     def default_context(self): | ||||||
|         """Default context""" |         """Default context""" | ||||||
|         return { |         return {"goauthentik.io/enterprise/licensed": LicenseKey.get_total().is_valid()} | ||||||
|             "goauthentik.io/enterprise/licensed": LicenseKey.get_total().status().is_valid, |  | ||||||
|             "goauthentik.io/rbac/models": rbac_models(), |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     def from_string(yaml_input: str, context: dict | None = None) -> "Importer": |     def from_string(yaml_input: str, context: dict | None = None) -> "Importer": | ||||||
| @ -239,17 +201,14 @@ class Importer: | |||||||
|  |  | ||||||
|         return main_query | sub_query |         return main_query | sub_query | ||||||
|  |  | ||||||
|     def _validate_single(self, entry: BlueprintEntry) -> BaseSerializer | None:  # noqa: PLR0915 |     def _validate_single(self, entry: BlueprintEntry) -> BaseSerializer | None: | ||||||
|         """Validate a single entry""" |         """Validate a single entry""" | ||||||
|         if not entry.check_all_conditions_match(self._import): |         if not entry.check_all_conditions_match(self._import): | ||||||
|             self.logger.debug("One or more conditions of this entry are not fulfilled, skipping") |             self.logger.debug("One or more conditions of this entry are not fulfilled, skipping") | ||||||
|             return None |             return None | ||||||
|  |  | ||||||
|         model_app_label, model_name = entry.get_model(self._import).split(".") |         model_app_label, model_name = entry.get_model(self._import).split(".") | ||||||
|         try: |  | ||||||
|         model: type[SerializerModel] = registry.get_model(model_app_label, model_name) |         model: type[SerializerModel] = registry.get_model(model_app_label, model_name) | ||||||
|         except LookupError as exc: |  | ||||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc |  | ||||||
|         # Don't use isinstance since we don't want to check for inheritance |         # Don't use isinstance since we don't want to check for inheritance | ||||||
|         if not is_model_allowed(model): |         if not is_model_allowed(model): | ||||||
|             raise EntryInvalidError.from_entry(f"Model {model} not allowed", entry) |             raise EntryInvalidError.from_entry(f"Model {model} not allowed", entry) | ||||||
| @ -324,7 +283,10 @@ class Importer: | |||||||
|         try: |         try: | ||||||
|             full_data = self.__update_pks_for_attrs(entry.get_attrs(self._import)) |             full_data = self.__update_pks_for_attrs(entry.get_attrs(self._import)) | ||||||
|         except ValueError as exc: |         except ValueError as exc: | ||||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc |             raise EntryInvalidError.from_entry( | ||||||
|  |                 exc, | ||||||
|  |                 entry, | ||||||
|  |             ) from exc | ||||||
|         always_merger.merge(full_data, updated_identifiers) |         always_merger.merge(full_data, updated_identifiers) | ||||||
|         serializer_kwargs["data"] = full_data |         serializer_kwargs["data"] = full_data | ||||||
|  |  | ||||||
| @ -345,15 +307,6 @@ class Importer: | |||||||
|             ) from exc |             ) from exc | ||||||
|         return serializer |         return serializer | ||||||
|  |  | ||||||
|     def _apply_permissions(self, instance: Model, entry: BlueprintEntry): |  | ||||||
|         """Apply object-level permissions for an entry""" |  | ||||||
|         for perm in entry.get_permissions(self._import): |  | ||||||
|             if perm.user is not None: |  | ||||||
|                 assign_perm(perm.permission, User.objects.get(pk=perm.user), instance) |  | ||||||
|             if perm.role is not None: |  | ||||||
|                 role = Role.objects.get(pk=perm.role) |  | ||||||
|                 role.assign_permission(perm.permission, obj=instance) |  | ||||||
|  |  | ||||||
|     def apply(self) -> bool: |     def apply(self) -> bool: | ||||||
|         """Apply (create/update) models yaml, in database transaction""" |         """Apply (create/update) models yaml, in database transaction""" | ||||||
|         try: |         try: | ||||||
| @ -418,7 +371,6 @@ class Importer: | |||||||
|                 if "pk" in entry.identifiers: |                 if "pk" in entry.identifiers: | ||||||
|                     self.__pk_map[entry.identifiers["pk"]] = instance.pk |                     self.__pk_map[entry.identifiers["pk"]] = instance.pk | ||||||
|                 entry._state = BlueprintEntryState(instance) |                 entry._state = BlueprintEntryState(instance) | ||||||
|                 self._apply_permissions(instance, entry) |  | ||||||
|             elif state == BlueprintEntryDesiredState.ABSENT: |             elif state == BlueprintEntryDesiredState.ABSENT: | ||||||
|                 instance: Model | None = serializer.instance |                 instance: Model | None = serializer.instance | ||||||
|                 if instance.pk: |                 if instance.pk: | ||||||
| @ -435,7 +387,7 @@ class Importer: | |||||||
|         orig_import = deepcopy(self._import) |         orig_import = deepcopy(self._import) | ||||||
|         if self._import.version != 1: |         if self._import.version != 1: | ||||||
|             self.logger.warning("Invalid blueprint version") |             self.logger.warning("Invalid blueprint version") | ||||||
|             return False, [LogEvent("Invalid blueprint version", log_level="warning", logger=None)] |             return False, [{"event": "Invalid blueprint version"}] | ||||||
|         with ( |         with ( | ||||||
|             transaction_rollback(), |             transaction_rollback(), | ||||||
|             capture_logs() as logs, |             capture_logs() as logs, | ||||||
|  | |||||||
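
Taken together, the importer hunks above mostly change which models are excluded and how lookup and validation failures are reported. As a hedged usage sketch built only from the method names visible in this diff (`from_string`, `validate`, `apply`), a blueprint import roughly flows like this; the YAML string and context key are placeholders:

    from authentik.blueprints.v1.importer import Importer

    importer = Importer.from_string(blueprint_yaml, context={"custom": "value"})
    valid, logs = importer.validate()   # dry run inside a rolled-back transaction
    if valid:
        importer.apply()                # create/update the models for real
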
| @ -11,20 +11,21 @@ from rest_framework.filters import OrderingFilter, SearchFilter | |||||||
| from rest_framework.permissions import AllowAny | from rest_framework.permissions import AllowAny | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| from rest_framework.response import Response | from rest_framework.response import Response | ||||||
|  | from rest_framework.serializers import ModelSerializer | ||||||
| from rest_framework.validators import UniqueValidator | from rest_framework.validators import UniqueValidator | ||||||
| from rest_framework.viewsets import ModelViewSet | from rest_framework.viewsets import ModelViewSet | ||||||
|  |  | ||||||
| from authentik.api.authorization import SecretKeyFilter | from authentik.api.authorization import SecretKeyFilter | ||||||
| from authentik.brands.models import Brand | from authentik.brands.models import Brand | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| from authentik.core.api.utils import ModelSerializer, PassiveSerializer | from authentik.core.api.utils import PassiveSerializer | ||||||
| from authentik.tenants.utils import get_current_tenant | from authentik.tenants.utils import get_current_tenant | ||||||
|  |  | ||||||
|  |  | ||||||
| class FooterLinkSerializer(PassiveSerializer): | class FooterLinkSerializer(PassiveSerializer): | ||||||
|     """Links returned in Config API""" |     """Links returned in Config API""" | ||||||
|  |  | ||||||
|     href = CharField(read_only=True, allow_null=True) |     href = CharField(read_only=True) | ||||||
|     name = CharField(read_only=True) |     name = CharField(read_only=True) | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -55,7 +56,6 @@ class BrandSerializer(ModelSerializer): | |||||||
|             "flow_unenrollment", |             "flow_unenrollment", | ||||||
|             "flow_user_settings", |             "flow_user_settings", | ||||||
|             "flow_device_code", |             "flow_device_code", | ||||||
|             "default_application", |  | ||||||
|             "web_certificate", |             "web_certificate", | ||||||
|             "attributes", |             "attributes", | ||||||
|         ] |         ] | ||||||
|  | |||||||
| @ -9,6 +9,3 @@ class AuthentikBrandsConfig(AppConfig): | |||||||
|     name = "authentik.brands" |     name = "authentik.brands" | ||||||
|     label = "authentik_brands" |     label = "authentik_brands" | ||||||
|     verbose_name = "authentik Brands" |     verbose_name = "authentik Brands" | ||||||
|     mountpoints = { |  | ||||||
|         "authentik.brands.urls_root": "", |  | ||||||
|     } |  | ||||||
|  | |||||||
| @ -4,7 +4,7 @@ from collections.abc import Callable | |||||||
|  |  | ||||||
| from django.http.request import HttpRequest | from django.http.request import HttpRequest | ||||||
| from django.http.response import HttpResponse | from django.http.response import HttpResponse | ||||||
| from django.utils.translation import override | from django.utils.translation import activate | ||||||
|  |  | ||||||
| from authentik.brands.utils import get_brand_for_request | from authentik.brands.utils import get_brand_for_request | ||||||
|  |  | ||||||
| @ -18,12 +18,10 @@ class BrandMiddleware: | |||||||
|         self.get_response = get_response |         self.get_response = get_response | ||||||
|  |  | ||||||
|     def __call__(self, request: HttpRequest) -> HttpResponse: |     def __call__(self, request: HttpRequest) -> HttpResponse: | ||||||
|         locale_to_set = None |  | ||||||
|         if not hasattr(request, "brand"): |         if not hasattr(request, "brand"): | ||||||
|             brand = get_brand_for_request(request) |             brand = get_brand_for_request(request) | ||||||
|             request.brand = brand |             request.brand = brand | ||||||
|             locale = brand.default_locale |             locale = brand.default_locale | ||||||
|             if locale != "": |             if locale != "": | ||||||
|                 locale_to_set = locale |                 activate(locale) | ||||||
|         with override(locale_to_set): |  | ||||||
|         return self.get_response(request) |         return self.get_response(request) | ||||||
|  | |||||||
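
The middleware hunk above moves from `activate()` to `override()`: `django.utils.translation.override` is a context manager that activates the given language for the wrapped block and restores the previously active language on exit, so a brand's default locale cannot leak into the next request handled by the same worker. A small, hedged illustration of that behaviour (the language code is arbitrary):

    from django.utils.translation import override

    # Activate German only for this block; the previously active language is
    # restored automatically when the block exits.
    with override("de"):
        ...
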
| @ -1,21 +0,0 @@ | |||||||
| # Generated by Django 5.0.4 on 2024-04-18 18:56 |  | ||||||
|  |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_brands", "0005_tenantuuid_to_branduuid"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddIndex( |  | ||||||
|             model_name="brand", |  | ||||||
|             index=models.Index(fields=["domain"], name="authentik_b_domain_b9b24a_idx"), |  | ||||||
|         ), |  | ||||||
|         migrations.AddIndex( |  | ||||||
|             model_name="brand", |  | ||||||
|             index=models.Index(fields=["default"], name="authentik_b_default_3ccf12_idx"), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -1,26 +0,0 @@ | |||||||
| # Generated by Django 5.0.6 on 2024-07-04 20:32 |  | ||||||
|  |  | ||||||
| import django.db.models.deletion |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_brands", "0006_brand_authentik_b_domain_b9b24a_idx_and_more"), |  | ||||||
|         ("authentik_core", "0035_alter_group_options_and_more"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="brand", |  | ||||||
|             name="default_application", |  | ||||||
|             field=models.ForeignKey( |  | ||||||
|                 default=None, |  | ||||||
|                 help_text="When set, external users will be redirected to this application after authenticating.", |  | ||||||
|                 null=True, |  | ||||||
|                 on_delete=django.db.models.deletion.SET_DEFAULT, |  | ||||||
|                 to="authentik_core.application", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -3,7 +3,6 @@ | |||||||
| from uuid import uuid4 | from uuid import uuid4 | ||||||
|  |  | ||||||
| from django.db import models | from django.db import models | ||||||
| from django.http import HttpRequest |  | ||||||
| from django.utils.translation import gettext_lazy as _ | from django.utils.translation import gettext_lazy as _ | ||||||
| from rest_framework.serializers import Serializer | from rest_framework.serializers import Serializer | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
| @ -52,16 +51,6 @@ class Brand(SerializerModel): | |||||||
|         Flow, null=True, on_delete=models.SET_NULL, related_name="brand_device_code" |         Flow, null=True, on_delete=models.SET_NULL, related_name="brand_device_code" | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|     default_application = models.ForeignKey( |  | ||||||
|         "authentik_core.Application", |  | ||||||
|         null=True, |  | ||||||
|         default=None, |  | ||||||
|         on_delete=models.SET_DEFAULT, |  | ||||||
|         help_text=_( |  | ||||||
|             "When set, external users will be redirected to this application after authenticating." |  | ||||||
|         ), |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     web_certificate = models.ForeignKey( |     web_certificate = models.ForeignKey( | ||||||
|         CertificateKeyPair, |         CertificateKeyPair, | ||||||
|         null=True, |         null=True, | ||||||
| @ -95,17 +84,3 @@ class Brand(SerializerModel): | |||||||
|     class Meta: |     class Meta: | ||||||
|         verbose_name = _("Brand") |         verbose_name = _("Brand") | ||||||
|         verbose_name_plural = _("Brands") |         verbose_name_plural = _("Brands") | ||||||
|         indexes = [ |  | ||||||
|             models.Index(fields=["domain"]), |  | ||||||
|             models.Index(fields=["default"]), |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class WebfingerProvider(models.Model): |  | ||||||
|     """Provider which supports webfinger discovery""" |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         abstract = True |  | ||||||
|  |  | ||||||
|     def webfinger(self, resource: str, request: HttpRequest) -> dict: |  | ||||||
|         raise NotImplementedError() |  | ||||||
|  | |||||||
| @ -5,11 +5,7 @@ from rest_framework.test import APITestCase | |||||||
|  |  | ||||||
| from authentik.brands.api import Themes | from authentik.brands.api import Themes | ||||||
| from authentik.brands.models import Brand | from authentik.brands.models import Brand | ||||||
| from authentik.core.models import Application |  | ||||||
| from authentik.core.tests.utils import create_test_admin_user, create_test_brand | from authentik.core.tests.utils import create_test_admin_user, create_test_brand | ||||||
| from authentik.lib.generators import generate_id |  | ||||||
| from authentik.providers.oauth2.models import OAuth2Provider |  | ||||||
| from authentik.providers.saml.models import SAMLProvider |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestBrands(APITestCase): | class TestBrands(APITestCase): | ||||||
| @ -79,45 +75,3 @@ class TestBrands(APITestCase): | |||||||
|             reverse("authentik_api:brand-list"), data={"domain": "bar", "default": True} |             reverse("authentik_api:brand-list"), data={"domain": "bar", "default": True} | ||||||
|         ) |         ) | ||||||
|         self.assertEqual(response.status_code, 400) |         self.assertEqual(response.status_code, 400) | ||||||
|  |  | ||||||
|     def test_webfinger_no_app(self): |  | ||||||
|         """Test Webfinger""" |  | ||||||
|         create_test_brand() |  | ||||||
|         self.assertJSONEqual( |  | ||||||
|             self.client.get(reverse("authentik_brands:webfinger")).content.decode(), {} |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_webfinger_not_supported(self): |  | ||||||
|         """Test Webfinger""" |  | ||||||
|         brand = create_test_brand() |  | ||||||
|         provider = SAMLProvider.objects.create( |  | ||||||
|             name=generate_id(), |  | ||||||
|         ) |  | ||||||
|         app = Application.objects.create(name=generate_id(), slug=generate_id(), provider=provider) |  | ||||||
|         brand.default_application = app |  | ||||||
|         brand.save() |  | ||||||
|         self.assertJSONEqual( |  | ||||||
|             self.client.get(reverse("authentik_brands:webfinger")).content.decode(), {} |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_webfinger_oidc(self): |  | ||||||
|         """Test Webfinger""" |  | ||||||
|         brand = create_test_brand() |  | ||||||
|         provider = OAuth2Provider.objects.create( |  | ||||||
|             name=generate_id(), |  | ||||||
|         ) |  | ||||||
|         app = Application.objects.create(name=generate_id(), slug=generate_id(), provider=provider) |  | ||||||
|         brand.default_application = app |  | ||||||
|         brand.save() |  | ||||||
|         self.assertJSONEqual( |  | ||||||
|             self.client.get(reverse("authentik_brands:webfinger")).content.decode(), |  | ||||||
|             { |  | ||||||
|                 "links": [ |  | ||||||
|                     { |  | ||||||
|                         "href": f"http://testserver/application/o/{app.slug}/", |  | ||||||
|                         "rel": "http://openid.net/specs/connect/1.0/issuer", |  | ||||||
|                     } |  | ||||||
|                 ], |  | ||||||
|                 "subject": None, |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  | |||||||
| @ -1,9 +0,0 @@ | |||||||
| """authentik brand root URLs""" |  | ||||||
|  |  | ||||||
| from django.urls import path |  | ||||||
|  |  | ||||||
| from authentik.brands.views.webfinger import WebFingerView |  | ||||||
|  |  | ||||||
| urlpatterns = [ |  | ||||||
|     path(".well-known/webfinger", WebFingerView.as_view(), name="webfinger"), |  | ||||||
| ] |  | ||||||
| @ -5,7 +5,7 @@ from typing import Any | |||||||
| from django.db.models import F, Q | from django.db.models import F, Q | ||||||
| from django.db.models import Value as V | from django.db.models import Value as V | ||||||
| from django.http.request import HttpRequest | from django.http.request import HttpRequest | ||||||
| from sentry_sdk import get_current_span | from sentry_sdk.hub import Hub | ||||||
|  |  | ||||||
| from authentik import get_full_version | from authentik import get_full_version | ||||||
| from authentik.brands.models import Brand | from authentik.brands.models import Brand | ||||||
| @ -33,7 +33,7 @@ def context_processor(request: HttpRequest) -> dict[str, Any]: | |||||||
|     brand = getattr(request, "brand", DEFAULT_BRAND) |     brand = getattr(request, "brand", DEFAULT_BRAND) | ||||||
|     tenant = getattr(request, "tenant", Tenant()) |     tenant = getattr(request, "tenant", Tenant()) | ||||||
|     trace = "" |     trace = "" | ||||||
|     span = get_current_span() |     span = Hub.current.scope.span | ||||||
|     if span: |     if span: | ||||||
|         trace = span.to_traceparent() |         trace = span.to_traceparent() | ||||||
|     return { |     return { | ||||||
|  | |||||||
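
The context-processor hunk above swaps the deprecated `Hub.current.scope.span` access for `sentry_sdk.get_current_span()`, the supported way to read the active span in sentry-sdk 2.x. A hedged sketch of the pattern (no other Sentry configuration shown):

    from sentry_sdk import get_current_span

    trace = ""
    span = get_current_span()
    if span is not None:
        # String that ties this request to the active Sentry trace,
        # as used for the `trace` template variable above.
        trace = span.to_traceparent()
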
| @ -1,29 +0,0 @@ | |||||||
| from typing import Any |  | ||||||
|  |  | ||||||
| from django.http import HttpRequest, HttpResponse, JsonResponse |  | ||||||
| from django.views import View |  | ||||||
|  |  | ||||||
| from authentik.brands.models import Brand, WebfingerProvider |  | ||||||
| from authentik.core.models import Application |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class WebFingerView(View): |  | ||||||
|     """Webfinger endpoint""" |  | ||||||
|  |  | ||||||
|     def get(self, request: HttpRequest) -> HttpResponse: |  | ||||||
|         brand: Brand = request.brand |  | ||||||
|         if not brand.default_application: |  | ||||||
|             return JsonResponse({}) |  | ||||||
|         application: Application = brand.default_application |  | ||||||
|         provider = application.get_provider() |  | ||||||
|         if not provider or not isinstance(provider, WebfingerProvider): |  | ||||||
|             return JsonResponse({}) |  | ||||||
|         webfinger_data = provider.webfinger(request.GET.get("resource"), request) |  | ||||||
|         return JsonResponse(webfinger_data) |  | ||||||
|  |  | ||||||
|     def dispatch(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse: |  | ||||||
|         response = super().dispatch(request, *args, **kwargs) |  | ||||||
|         # RFC7033 spec |  | ||||||
|         response["Access-Control-Allow-Origin"] = "*" |  | ||||||
|         response["Content-Type"] = "application/jrd+json" |  | ||||||
|         return response |  | ||||||
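
The `WebFingerView` shown above follows RFC 7033: it answers on `/.well-known/webfinger`, sets `Content-Type: application/jrd+json`, and delegates the payload to the brand's default application provider when that provider implements `WebfingerProvider`. A hedged client-side sketch (host and account are placeholders; the `rel` value matches the OIDC issuer link asserted in the brand tests above):

    import requests

    resp = requests.get(
        "https://auth.example.com/.well-known/webfinger",
        params={"resource": "acct:alice@example.com"},
    )
    resp.raise_for_status()
    jrd = resp.json()  # JRD document, served as application/jrd+json
    issuer = next(
        link["href"]
        for link in jrd.get("links", [])
        if link.get("rel") == "http://openid.net/specs/connect/1.0/issuer"
    )
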
| @ -17,6 +17,7 @@ from rest_framework.fields import CharField, ReadOnlyField, SerializerMethodFiel | |||||||
| from rest_framework.parsers import MultiPartParser | from rest_framework.parsers import MultiPartParser | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| from rest_framework.response import Response | from rest_framework.response import Response | ||||||
|  | from rest_framework.serializers import ModelSerializer | ||||||
| from rest_framework.viewsets import ModelViewSet | from rest_framework.viewsets import ModelViewSet | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| @ -25,7 +26,6 @@ from authentik.api.pagination import Pagination | |||||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | ||||||
| from authentik.core.api.providers import ProviderSerializer | from authentik.core.api.providers import ProviderSerializer | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| from authentik.core.api.utils import ModelSerializer |  | ||||||
| from authentik.core.models import Application, User | from authentik.core.models import Application, User | ||||||
| from authentik.events.logs import LogEventSerializer, capture_logs | from authentik.events.logs import LogEventSerializer, capture_logs | ||||||
| from authentik.events.models import EventAction | from authentik.events.models import EventAction | ||||||
| @ -103,12 +103,7 @@ class ApplicationSerializer(ModelSerializer): | |||||||
| class ApplicationViewSet(UsedByMixin, ModelViewSet): | class ApplicationViewSet(UsedByMixin, ModelViewSet): | ||||||
|     """Application Viewset""" |     """Application Viewset""" | ||||||
|  |  | ||||||
|     queryset = ( |     queryset = Application.objects.all().prefetch_related("provider") | ||||||
|         Application.objects.all() |  | ||||||
|         .with_provider() |  | ||||||
|         .prefetch_related("policies") |  | ||||||
|         .prefetch_related("backchannel_providers") |  | ||||||
|     ) |  | ||||||
|     serializer_class = ApplicationSerializer |     serializer_class = ApplicationSerializer | ||||||
|     search_fields = [ |     search_fields = [ | ||||||
|         "name", |         "name", | ||||||
| @ -152,15 +147,6 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet): | |||||||
|                 applications.append(application) |                 applications.append(application) | ||||||
|         return applications |         return applications | ||||||
|  |  | ||||||
|     def _filter_applications_with_launch_url( |  | ||||||
|         self, pagined_apps: Iterator[Application] |  | ||||||
|     ) -> list[Application]: |  | ||||||
|         applications = [] |  | ||||||
|         for app in pagined_apps: |  | ||||||
|             if app.get_launch_url(): |  | ||||||
|                 applications.append(app) |  | ||||||
|         return applications |  | ||||||
|  |  | ||||||
|     @extend_schema( |     @extend_schema( | ||||||
|         parameters=[ |         parameters=[ | ||||||
|             OpenApiParameter( |             OpenApiParameter( | ||||||
| @ -218,11 +204,6 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet): | |||||||
|                 location=OpenApiParameter.QUERY, |                 location=OpenApiParameter.QUERY, | ||||||
|                 type=OpenApiTypes.INT, |                 type=OpenApiTypes.INT, | ||||||
|             ), |             ), | ||||||
|             OpenApiParameter( |  | ||||||
|                 name="only_with_launch_url", |  | ||||||
|                 location=OpenApiParameter.QUERY, |  | ||||||
|                 type=OpenApiTypes.BOOL, |  | ||||||
|             ), |  | ||||||
|         ] |         ] | ||||||
|     ) |     ) | ||||||
|     def list(self, request: Request) -> Response: |     def list(self, request: Request) -> Response: | ||||||
| @ -235,10 +216,6 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet): | |||||||
|         if superuser_full_list and request.user.is_superuser: |         if superuser_full_list and request.user.is_superuser: | ||||||
|             return super().list(request) |             return super().list(request) | ||||||
|  |  | ||||||
|         only_with_launch_url = str( |  | ||||||
|             request.query_params.get("only_with_launch_url", "false") |  | ||||||
|         ).lower() |  | ||||||
|  |  | ||||||
|         queryset = self._filter_queryset_for_list(self.get_queryset()) |         queryset = self._filter_queryset_for_list(self.get_queryset()) | ||||||
|         paginator: Pagination = self.paginator |         paginator: Pagination = self.paginator | ||||||
|         paginated_apps = paginator.paginate_queryset(queryset, request) |         paginated_apps = paginator.paginate_queryset(queryset, request) | ||||||
| @ -274,10 +251,6 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet): | |||||||
|                     allowed_applications, |                     allowed_applications, | ||||||
|                     timeout=86400, |                     timeout=86400, | ||||||
|                 ) |                 ) | ||||||
|  |  | ||||||
|         if only_with_launch_url == "true": |  | ||||||
|             allowed_applications = self._filter_applications_with_launch_url(allowed_applications) |  | ||||||
|  |  | ||||||
|         serializer = self.get_serializer(allowed_applications, many=True) |         serializer = self.get_serializer(allowed_applications, many=True) | ||||||
|         return self.get_paginated_response(serializer.data) |         return self.get_paginated_response(serializer.data) | ||||||
|  |  | ||||||
|  | |||||||
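
The applications hunk above adds an `only_with_launch_url` query parameter that, when true, drops applications without a resolvable launch URL from the list response. A hedged example of calling it (host, token placeholder, and the exact API path are assumptions about a typical authentik deployment, not taken from this diff):

    import requests

    resp = requests.get(
        "https://auth.example.com/api/v3/core/applications/",
        params={"only_with_launch_url": "true"},
        headers={"Authorization": "Bearer <api-token>"},
    )
    resp.raise_for_status()
    apps_with_launch_url = resp.json()["results"]
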
| @ -8,12 +8,12 @@ from rest_framework import mixins | |||||||
| from rest_framework.fields import SerializerMethodField | from rest_framework.fields import SerializerMethodField | ||||||
| from rest_framework.filters import OrderingFilter, SearchFilter | from rest_framework.filters import OrderingFilter, SearchFilter | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
|  | from rest_framework.serializers import ModelSerializer | ||||||
| from rest_framework.viewsets import GenericViewSet | from rest_framework.viewsets import GenericViewSet | ||||||
| from ua_parser import user_agent_parser | from ua_parser import user_agent_parser | ||||||
|  |  | ||||||
| from authentik.api.authorization import OwnerSuperuserPermissions | from authentik.api.authorization import OwnerSuperuserPermissions | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| from authentik.core.api.utils import ModelSerializer |  | ||||||
| from authentik.core.models import AuthenticatedSession | from authentik.core.models import AuthenticatedSession | ||||||
| from authentik.events.context_processors.asn import ASN_CONTEXT_PROCESSOR, ASNDict | from authentik.events.context_processors.asn import ASN_CONTEXT_PROCESSOR, ASNDict | ||||||
| from authentik.events.context_processors.geoip import GEOIP_CONTEXT_PROCESSOR, GeoIPDict | from authentik.events.context_processors.geoip import GEOIP_CONTEXT_PROCESSOR, GeoIPDict | ||||||
|  | |||||||
| @ -1,55 +1,30 @@ | |||||||
| """Authenticator Devices API Views""" | """Authenticator Devices API Views""" | ||||||
|  |  | ||||||
| from django.utils.translation import gettext_lazy as _ |  | ||||||
| from drf_spectacular.types import OpenApiTypes | from drf_spectacular.types import OpenApiTypes | ||||||
| from drf_spectacular.utils import OpenApiParameter, extend_schema | from drf_spectacular.utils import OpenApiParameter, extend_schema | ||||||
| from rest_framework.fields import ( | from rest_framework.fields import BooleanField, CharField, IntegerField, SerializerMethodField | ||||||
|     BooleanField, | from rest_framework.permissions import IsAdminUser, IsAuthenticated | ||||||
|     CharField, |  | ||||||
|     DateTimeField, |  | ||||||
|     SerializerMethodField, |  | ||||||
| ) |  | ||||||
| from rest_framework.permissions import IsAuthenticated |  | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| from rest_framework.response import Response | from rest_framework.response import Response | ||||||
| from rest_framework.viewsets import ViewSet | from rest_framework.viewsets import ViewSet | ||||||
|  |  | ||||||
| from authentik.core.api.utils import MetaNameSerializer | from authentik.core.api.utils import MetaNameSerializer | ||||||
| from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice |  | ||||||
| from authentik.rbac.decorators import permission_required |  | ||||||
| from authentik.stages.authenticator import device_classes, devices_for_user | from authentik.stages.authenticator import device_classes, devices_for_user | ||||||
| from authentik.stages.authenticator.models import Device | from authentik.stages.authenticator.models import Device | ||||||
| from authentik.stages.authenticator_webauthn.models import WebAuthnDevice |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class DeviceSerializer(MetaNameSerializer): | class DeviceSerializer(MetaNameSerializer): | ||||||
|     """Serializer for Duo authenticator devices""" |     """Serializer for Duo authenticator devices""" | ||||||
|  |  | ||||||
|     pk = CharField() |     pk = IntegerField() | ||||||
|     name = CharField() |     name = CharField() | ||||||
|     type = SerializerMethodField() |     type = SerializerMethodField() | ||||||
|     confirmed = BooleanField() |     confirmed = BooleanField() | ||||||
|     created = DateTimeField(read_only=True) |  | ||||||
|     last_updated = DateTimeField(read_only=True) |  | ||||||
|     last_used = DateTimeField(read_only=True, allow_null=True) |  | ||||||
|     extra_description = SerializerMethodField() |  | ||||||
|  |  | ||||||
|     def get_type(self, instance: Device) -> str: |     def get_type(self, instance: Device) -> str: | ||||||
|         """Get type of device""" |         """Get type of device""" | ||||||
|         return instance._meta.label |         return instance._meta.label | ||||||
|  |  | ||||||
|     def get_extra_description(self, instance: Device) -> str: |  | ||||||
|         """Get extra description""" |  | ||||||
|         if isinstance(instance, WebAuthnDevice): |  | ||||||
|             return ( |  | ||||||
|                 instance.device_type.description |  | ||||||
|                 if instance.device_type |  | ||||||
|                 else _("Extra description not available") |  | ||||||
|             ) |  | ||||||
|         if isinstance(instance, EndpointDevice): |  | ||||||
|             return instance.data.get("deviceSignals", {}).get("deviceModel") |  | ||||||
|         return "" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class DeviceViewSet(ViewSet): | class DeviceViewSet(ViewSet): | ||||||
|     """Viewset for authenticator devices""" |     """Viewset for authenticator devices""" | ||||||
| @ -68,7 +43,7 @@ class AdminDeviceViewSet(ViewSet): | |||||||
|     """Viewset for authenticator devices""" |     """Viewset for authenticator devices""" | ||||||
|  |  | ||||||
|     serializer_class = DeviceSerializer |     serializer_class = DeviceSerializer | ||||||
|     permission_classes = [] |     permission_classes = [IsAdminUser] | ||||||
|  |  | ||||||
|     def get_devices(self, **kwargs): |     def get_devices(self, **kwargs): | ||||||
|         """Get all devices in all child classes""" |         """Get all devices in all child classes""" | ||||||
| @ -86,10 +61,6 @@ class AdminDeviceViewSet(ViewSet): | |||||||
|         ], |         ], | ||||||
|         responses={200: DeviceSerializer(many=True)}, |         responses={200: DeviceSerializer(many=True)}, | ||||||
|     ) |     ) | ||||||
|     @permission_required( |  | ||||||
|         None, |  | ||||||
|         [f"{model._meta.app_label}.view_{model._meta.model_name}" for model in device_classes()], |  | ||||||
|     ) |  | ||||||
|     def list(self, request: Request) -> Response: |     def list(self, request: Request) -> Response: | ||||||
|         """Get all devices for current user""" |         """Get all devices for current user""" | ||||||
|         kwargs = {} |         kwargs = {} | ||||||
|  | |||||||
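
In the admin device viewset above, the blanket `IsAdminUser` permission is replaced by `permission_required` with one `view_*` permission per device model, built from `device_classes()`. The codename pattern is Django's default per-model view permission; a small hedged sketch of how such a string is derived (the concrete model is just one example from this diff):

    from authentik.stages.authenticator_webauthn.models import WebAuthnDevice

    perm = f"{WebAuthnDevice._meta.app_label}.view_{WebAuthnDevice._meta.model_name}"
    # -> "<app_label>.view_webauthndevice"
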
| @ -2,7 +2,6 @@ | |||||||
|  |  | ||||||
| from json import loads | from json import loads | ||||||
|  |  | ||||||
| from django.db.models import Prefetch |  | ||||||
| from django.http import Http404 | from django.http import Http404 | ||||||
| from django_filters.filters import CharFilter, ModelMultipleChoiceFilter | from django_filters.filters import CharFilter, ModelMultipleChoiceFilter | ||||||
| from django_filters.filterset import FilterSet | from django_filters.filterset import FilterSet | ||||||
| @ -17,12 +16,11 @@ from rest_framework.decorators import action | |||||||
| from rest_framework.fields import CharField, IntegerField, SerializerMethodField | from rest_framework.fields import CharField, IntegerField, SerializerMethodField | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| from rest_framework.response import Response | from rest_framework.response import Response | ||||||
| from rest_framework.serializers import ListSerializer, ValidationError | from rest_framework.serializers import ListSerializer, ModelSerializer, ValidationError | ||||||
| from rest_framework.validators import UniqueValidator |  | ||||||
| from rest_framework.viewsets import ModelViewSet | from rest_framework.viewsets import ModelViewSet | ||||||
|  |  | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer | from authentik.core.api.utils import JSONDictField, PassiveSerializer | ||||||
| from authentik.core.models import Group, User | from authentik.core.models import Group, User | ||||||
| from authentik.rbac.api.roles import RoleSerializer | from authentik.rbac.api.roles import RoleSerializer | ||||||
| from authentik.rbac.decorators import permission_required | from authentik.rbac.decorators import permission_required | ||||||
| @ -102,10 +100,7 @@ class GroupSerializer(ModelSerializer): | |||||||
|         extra_kwargs = { |         extra_kwargs = { | ||||||
|             "users": { |             "users": { | ||||||
|                 "default": list, |                 "default": list, | ||||||
|             }, |             } | ||||||
|             # TODO: This field isn't unique on the database which is hard to backport |  | ||||||
|             # hence we just validate the uniqueness here |  | ||||||
|             "name": {"validators": [UniqueValidator(Group.objects.all())]}, |  | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -159,24 +154,12 @@ class GroupViewSet(UsedByMixin, ModelViewSet): | |||||||
|  |  | ||||||
|         pk = IntegerField(required=True) |         pk = IntegerField(required=True) | ||||||
|  |  | ||||||
|     queryset = Group.objects.none() |     queryset = Group.objects.all().select_related("parent").prefetch_related("users") | ||||||
|     serializer_class = GroupSerializer |     serializer_class = GroupSerializer | ||||||
|     search_fields = ["name", "is_superuser"] |     search_fields = ["name", "is_superuser"] | ||||||
|     filterset_class = GroupFilter |     filterset_class = GroupFilter | ||||||
|     ordering = ["name"] |     ordering = ["name"] | ||||||
|  |  | ||||||
|     def get_queryset(self): |  | ||||||
|         base_qs = Group.objects.all().select_related("parent").prefetch_related("roles") |  | ||||||
|  |  | ||||||
|         if self.serializer_class(context={"request": self.request})._should_include_users: |  | ||||||
|             base_qs = base_qs.prefetch_related("users") |  | ||||||
|         else: |  | ||||||
|             base_qs = base_qs.prefetch_related( |  | ||||||
|                 Prefetch("users", queryset=User.objects.all().only("id")) |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         return base_qs |  | ||||||
|  |  | ||||||
|     @extend_schema( |     @extend_schema( | ||||||
|         parameters=[ |         parameters=[ | ||||||
|             OpenApiParameter("include_users", bool, default=True), |             OpenApiParameter("include_users", bool, default=True), | ||||||
| @ -185,14 +168,6 @@ class GroupViewSet(UsedByMixin, ModelViewSet): | |||||||
|     def list(self, request, *args, **kwargs): |     def list(self, request, *args, **kwargs): | ||||||
|         return super().list(request, *args, **kwargs) |         return super().list(request, *args, **kwargs) | ||||||
|  |  | ||||||
|     @extend_schema( |  | ||||||
|         parameters=[ |  | ||||||
|             OpenApiParameter("include_users", bool, default=True), |  | ||||||
|         ] |  | ||||||
|     ) |  | ||||||
|     def retrieve(self, request, *args, **kwargs): |  | ||||||
|         return super().retrieve(request, *args, **kwargs) |  | ||||||
|  |  | ||||||
|     @permission_required("authentik_core.add_user_to_group") |     @permission_required("authentik_core.add_user_to_group") | ||||||
|     @extend_schema( |     @extend_schema( | ||||||
|         request=UserAccountSerializer, |         request=UserAccountSerializer, | ||||||
|  | |||||||
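
In the group viewset above, the newer side always prefetches `users`, but when the serializer will not include user details it restricts the prefetch to the primary key column via `Prefetch(..., only("id"))`, which keeps the membership list cheap to serialize. A hedged restatement of that pattern outside the viewset (the `include_users` flag stands in for `_should_include_users`):

    from django.db.models import Prefetch

    from authentik.core.models import Group, User

    def groups_queryset(include_users: bool):
        qs = Group.objects.all().select_related("parent").prefetch_related("roles")
        if include_users:
            return qs.prefetch_related("users")
        # Only primary keys are needed, so limit the columns fetched for users.
        return qs.prefetch_related(Prefetch("users", queryset=User.objects.only("id")))
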
| @ -1,79 +0,0 @@ | |||||||
| """API Utilities""" |  | ||||||
|  |  | ||||||
| from drf_spectacular.utils import extend_schema |  | ||||||
| from rest_framework.decorators import action |  | ||||||
| from rest_framework.fields import ( |  | ||||||
|     BooleanField, |  | ||||||
|     CharField, |  | ||||||
| ) |  | ||||||
| from rest_framework.request import Request |  | ||||||
| from rest_framework.response import Response |  | ||||||
|  |  | ||||||
| from authentik.core.api.utils import PassiveSerializer |  | ||||||
| from authentik.enterprise.apps import EnterpriseConfig |  | ||||||
| from authentik.lib.utils.reflection import all_subclasses |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TypeCreateSerializer(PassiveSerializer): |  | ||||||
|     """Types of an object that can be created""" |  | ||||||
|  |  | ||||||
|     name = CharField(required=True) |  | ||||||
|     description = CharField(required=True) |  | ||||||
|     component = CharField(required=True) |  | ||||||
|     model_name = CharField(required=True) |  | ||||||
|  |  | ||||||
|     icon_url = CharField(required=False) |  | ||||||
|     requires_enterprise = BooleanField(default=False) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class CreatableType: |  | ||||||
|     """Class to inherit from to mark a model as creatable, even if the model itself is marked |  | ||||||
|     as abstract""" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class NonCreatableType: |  | ||||||
|     """Class to inherit from to mark a model as non-creatable even if it is not abstract""" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TypesMixin: |  | ||||||
|     """Mixin which adds an API endpoint to list all possible types that can be created""" |  | ||||||
|  |  | ||||||
|     @extend_schema(responses={200: TypeCreateSerializer(many=True)}) |  | ||||||
|     @action(detail=False, pagination_class=None, filter_backends=[]) |  | ||||||
|     def types(self, request: Request, additional: list[dict] | None = None) -> Response: |  | ||||||
|         """Get all creatable types""" |  | ||||||
|         data = [] |  | ||||||
|         for subclass in all_subclasses(self.queryset.model): |  | ||||||
|             instance = None |  | ||||||
|             if subclass._meta.abstract: |  | ||||||
|                 if not issubclass(subclass, CreatableType): |  | ||||||
|                     continue |  | ||||||
|                 # Circumvent the django protection for not being able to instantiate |  | ||||||
|                 # abstract models. We need a model instance to access .component |  | ||||||
|                 # and further down .icon_url |  | ||||||
|                 instance = subclass.__new__(subclass) |  | ||||||
|                 # Django re-sets abstract = False so we need to override that |  | ||||||
|                 instance.Meta.abstract = True |  | ||||||
|             else: |  | ||||||
|                 if issubclass(subclass, NonCreatableType): |  | ||||||
|                     continue |  | ||||||
|                 instance = subclass() |  | ||||||
|             try: |  | ||||||
|                 data.append( |  | ||||||
|                     { |  | ||||||
|                         "name": subclass._meta.verbose_name, |  | ||||||
|                         "description": subclass.__doc__, |  | ||||||
|                         "component": instance.component, |  | ||||||
|                         "model_name": subclass._meta.model_name, |  | ||||||
|                         "icon_url": getattr(instance, "icon_url", None), |  | ||||||
|                         "requires_enterprise": isinstance( |  | ||||||
|                             subclass._meta.app_config, EnterpriseConfig |  | ||||||
|                         ), |  | ||||||
|                     } |  | ||||||
|                 ) |  | ||||||
|             except NotImplementedError: |  | ||||||
|                 continue |  | ||||||
|         if additional: |  | ||||||
|             data.extend(additional) |  | ||||||
|         data = sorted(data, key=lambda x: x["name"]) |  | ||||||
|         return Response(TypeCreateSerializer(data, many=True).data) |  | ||||||
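
The new `object_types.py` module above factors the repeated "creatable types" endpoint into `TypesMixin`, so a viewset gains the `types` action simply by listing the mixin before its base classes, as the property-mapping viewset in the next file does. A hedged sketch (the model and serializer are placeholders):

    from rest_framework.viewsets import ModelViewSet

    from authentik.core.api.object_types import TypesMixin

    class ExampleViewSet(TypesMixin, ModelViewSet):
        queryset = ExampleModel.objects.all()      # placeholder model
        serializer_class = ExampleSerializer       # placeholder serializer
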
| @ -2,38 +2,25 @@ | |||||||
| 
 | 
 | ||||||
| from json import dumps | from json import dumps | ||||||
|  |  | ||||||
| from django_filters.filters import AllValuesMultipleFilter, BooleanFilter |  | ||||||
| from django_filters.filterset import FilterSet |  | ||||||
| from drf_spectacular.types import OpenApiTypes | from drf_spectacular.types import OpenApiTypes | ||||||
| from drf_spectacular.utils import ( | from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema | ||||||
|     OpenApiParameter, |  | ||||||
|     OpenApiResponse, |  | ||||||
|     extend_schema, |  | ||||||
|     extend_schema_field, |  | ||||||
| ) |  | ||||||
| from guardian.shortcuts import get_objects_for_user | from guardian.shortcuts import get_objects_for_user | ||||||
| from rest_framework import mixins | from rest_framework import mixins | ||||||
| from rest_framework.decorators import action | from rest_framework.decorators import action | ||||||
| from rest_framework.exceptions import PermissionDenied | from rest_framework.exceptions import PermissionDenied | ||||||
| from rest_framework.fields import BooleanField, CharField, SerializerMethodField | from rest_framework.fields import BooleanField, CharField | ||||||
| from rest_framework.relations import PrimaryKeyRelatedField |  | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| from rest_framework.response import Response | from rest_framework.response import Response | ||||||
|  | from rest_framework.serializers import ModelSerializer, SerializerMethodField | ||||||
| from rest_framework.viewsets import GenericViewSet | from rest_framework.viewsets import GenericViewSet | ||||||
|  |  | ||||||
| from authentik.blueprints.api import ManagedSerializer | from authentik.blueprints.api import ManagedSerializer | ||||||
| from authentik.core.api.object_types import TypesMixin |  | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| from authentik.core.api.utils import ( | from authentik.core.api.utils import MetaNameSerializer, PassiveSerializer, TypeCreateSerializer | ||||||
|     MetaNameSerializer, |  | ||||||
|     ModelSerializer, |  | ||||||
|     PassiveSerializer, |  | ||||||
| ) |  | ||||||
| from authentik.core.expression.evaluator import PropertyMappingEvaluator | from authentik.core.expression.evaluator import PropertyMappingEvaluator | ||||||
| from authentik.core.expression.exceptions import PropertyMappingExpressionException | from authentik.core.models import PropertyMapping | ||||||
| from authentik.core.models import Group, PropertyMapping, User |  | ||||||
| from authentik.events.utils import sanitize_item | from authentik.events.utils import sanitize_item | ||||||
| from authentik.lib.utils.errors import exception_to_string | from authentik.lib.utils.reflection import all_subclasses | ||||||
| from authentik.policies.api.exec import PolicyTestSerializer | from authentik.policies.api.exec import PolicyTestSerializer | ||||||
| from authentik.rbac.decorators import permission_required | from authentik.rbac.decorators import permission_required | ||||||
|  |  | ||||||
| @ -76,20 +63,7 @@ class PropertyMappingSerializer(ManagedSerializer, ModelSerializer, MetaNameSeri | |||||||
|         ] |         ] | ||||||
|  |  | ||||||
|  |  | ||||||
| class PropertyMappingFilterSet(FilterSet): |  | ||||||
|     """Filter for PropertyMapping""" |  | ||||||
|  |  | ||||||
|     managed = extend_schema_field(OpenApiTypes.STR)(AllValuesMultipleFilter(field_name="managed")) |  | ||||||
|  |  | ||||||
|     managed__isnull = BooleanFilter(field_name="managed", lookup_expr="isnull") |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         model = PropertyMapping |  | ||||||
|         fields = ["name", "managed"] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class PropertyMappingViewSet( | class PropertyMappingViewSet( | ||||||
|     TypesMixin, |  | ||||||
|     mixins.RetrieveModelMixin, |     mixins.RetrieveModelMixin, | ||||||
|     mixins.DestroyModelMixin, |     mixins.DestroyModelMixin, | ||||||
|     UsedByMixin, |     UsedByMixin, | ||||||
| @ -98,23 +72,37 @@ class PropertyMappingViewSet( | |||||||
| ): | ): | ||||||
|     """PropertyMapping Viewset""" |     """PropertyMapping Viewset""" | ||||||
|  |  | ||||||
|     class PropertyMappingTestSerializer(PolicyTestSerializer): |     queryset = PropertyMapping.objects.none() | ||||||
|         """Test property mapping execution for a user/group with context""" |  | ||||||
|  |  | ||||||
|         user = PrimaryKeyRelatedField(queryset=User.objects.all(), required=False, allow_null=True) |  | ||||||
|         group = PrimaryKeyRelatedField( |  | ||||||
|             queryset=Group.objects.all(), required=False, allow_null=True |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     queryset = PropertyMapping.objects.select_subclasses() |  | ||||||
|     serializer_class = PropertyMappingSerializer |     serializer_class = PropertyMappingSerializer | ||||||
|     filterset_class = PropertyMappingFilterSet |     search_fields = [ | ||||||
|  |         "name", | ||||||
|  |     ] | ||||||
|  |     filterset_fields = {"managed": ["isnull"]} | ||||||
|     ordering = ["name"] |     ordering = ["name"] | ||||||
|     search_fields = ["name"] |  | ||||||
|  |     def get_queryset(self):  # pragma: no cover | ||||||
|  |         return PropertyMapping.objects.select_subclasses() | ||||||
|  |  | ||||||
|  |     @extend_schema(responses={200: TypeCreateSerializer(many=True)}) | ||||||
|  |     @action(detail=False, pagination_class=None, filter_backends=[]) | ||||||
|  |     def types(self, request: Request) -> Response: | ||||||
|  |         """Get all creatable property-mapping types""" | ||||||
|  |         data = [] | ||||||
|  |         for subclass in all_subclasses(self.queryset.model): | ||||||
|  |             subclass: PropertyMapping | ||||||
|  |             data.append( | ||||||
|  |                 { | ||||||
|  |                     "name": subclass._meta.verbose_name, | ||||||
|  |                     "description": subclass.__doc__, | ||||||
|  |                     "component": subclass().component, | ||||||
|  |                     "model_name": subclass._meta.model_name, | ||||||
|  |                 } | ||||||
|  |             ) | ||||||
|  |         return Response(TypeCreateSerializer(data, many=True).data) | ||||||
| 
 | 
 | ||||||
|     @permission_required("authentik_core.view_propertymapping") |     @permission_required("authentik_core.view_propertymapping") | ||||||
|     @extend_schema( |     @extend_schema( | ||||||
|         request=PropertyMappingTestSerializer(), |         request=PolicyTestSerializer(), | ||||||
|         responses={ |         responses={ | ||||||
|             200: PropertyMappingTestResultSerializer, |             200: PropertyMappingTestResultSerializer, | ||||||
|             400: OpenApiResponse(description="Invalid parameters"), |             400: OpenApiResponse(description="Invalid parameters"), | ||||||
| @ -132,47 +120,34 @@ class PropertyMappingViewSet( | |||||||
|         """Test Property Mapping""" |         """Test Property Mapping""" | ||||||
|         _mapping: PropertyMapping = self.get_object() |         _mapping: PropertyMapping = self.get_object() | ||||||
|         # Use `get_subclass` to get correct class and correct `.evaluate` implementation |         # Use `get_subclass` to get correct class and correct `.evaluate` implementation | ||||||
|         mapping: PropertyMapping = PropertyMapping.objects.get_subclass(pk=_mapping.pk) |         mapping = PropertyMapping.objects.get_subclass(pk=_mapping.pk) | ||||||
|         # FIXME: when we separate policy mappings between ones for sources |         # FIXME: when we separate policy mappings between ones for sources | ||||||
|         # and ones for providers, we need to make the user field optional for the source mapping |         # and ones for providers, we need to make the user field optional for the source mapping | ||||||
|         test_params = self.PropertyMappingTestSerializer(data=request.data) |         test_params = PolicyTestSerializer(data=request.data) | ||||||
|         if not test_params.is_valid(): |         if not test_params.is_valid(): | ||||||
|             return Response(test_params.errors, status=400) |             return Response(test_params.errors, status=400) | ||||||
| 
 | 
 | ||||||
|         format_result = str(request.GET.get("format_result", "false")).lower() == "true" |         format_result = str(request.GET.get("format_result", "false")).lower() == "true" | ||||||
| 
 | 
 | ||||||
|         context: dict = test_params.validated_data.get("context", {}) |  | ||||||
|         context.setdefault("user", None) |  | ||||||
| 
 |  | ||||||
|         if user := test_params.validated_data.get("user"): |  | ||||||
|             # User permission check, only allow mapping testing for users that are readable |         # User permission check, only allow mapping testing for users that are readable | ||||||
|             users = get_objects_for_user(request.user, "authentik_core.view_user").filter( |         users = get_objects_for_user(request.user, "authentik_core.view_user").filter( | ||||||
|                 pk=user.pk |             pk=test_params.validated_data["user"].pk | ||||||
|             ) |         ) | ||||||
|             if not users.exists(): |         if not users.exists(): | ||||||
|                 raise PermissionDenied() |             raise PermissionDenied() | ||||||
|             context["user"] = user |  | ||||||
|         if group := test_params.validated_data.get("group"): |  | ||||||
|             # Group permission check, only allow mapping testing for groups that are readable |  | ||||||
|             groups = get_objects_for_user(request.user, "authentik_core.view_group").filter( |  | ||||||
|                 pk=group.pk |  | ||||||
|             ) |  | ||||||
|             if not groups.exists(): |  | ||||||
|                 raise PermissionDenied() |  | ||||||
|             context["group"] = group |  | ||||||
|         context["request"] = self.request |  | ||||||
| 
 | 
 | ||||||
|         response_data = {"successful": True, "result": ""} |         response_data = {"successful": True, "result": ""} | ||||||
|         try: |         try: | ||||||
|             result = mapping.evaluate(dry_run=True, **context) |             result = mapping.evaluate( | ||||||
|  |                 users.first(), | ||||||
|  |                 self.request, | ||||||
|  |                 **test_params.validated_data.get("context", {}), | ||||||
|  |             ) | ||||||
|             response_data["result"] = dumps( |             response_data["result"] = dumps( | ||||||
|                 sanitize_item(result), indent=(4 if format_result else None) |                 sanitize_item(result), indent=(4 if format_result else None) | ||||||
|             ) |             ) | ||||||
|         except PropertyMappingExpressionException as exc: |  | ||||||
|             response_data["result"] = exception_to_string(exc.exc) |  | ||||||
|             response_data["successful"] = False |  | ||||||
|         except Exception as exc: |         except Exception as exc: | ||||||
|             response_data["result"] = exception_to_string(exc) |             response_data["result"] = str(exc) | ||||||
|             response_data["successful"] = False |             response_data["successful"] = False | ||||||
|         response = PropertyMappingTestResultSerializer(response_data) |         response = PropertyMappingTestResultSerializer(response_data) | ||||||
|         return Response(response.data) |         return Response(response.data) | ||||||
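Both sides of the hunk above expose the same mapping test endpoint, differing mainly in which serializer validates the request. A minimal client-side sketch of calling it over HTTP follows; the instance URL, token, mapping UUID, and user pk are placeholders, and the exact path and payload keys follow authentik's usual routing conventions rather than anything stated in this diff.

```python
import requests

AUTHENTIK_URL = "https://authentik.example.com"        # placeholder instance
API_TOKEN = "REPLACE_WITH_API_TOKEN"                   # placeholder API token
MAPPING_UUID = "00000000-0000-0000-0000-000000000000"  # placeholder mapping pk

response = requests.post(
    f"{AUTHENTIK_URL}/api/v3/propertymappings/all/{MAPPING_UUID}/test/",
    headers={"Authorization": f"Bearer {API_TOKEN}"},
    params={"format_result": "true"},     # pretty-print the evaluated result
    json={
        "user": 1,                        # pk of a user the caller may view
        "context": {"example": "value"},  # extra variables for the expression
    },
    timeout=10,
)
response.raise_for_status()
print(response.json())  # e.g. {"result": "...", "successful": true}
```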
| @ -5,14 +5,20 @@ from django.db.models.query import Q | |||||||
| from django.utils.translation import gettext_lazy as _ | from django.utils.translation import gettext_lazy as _ | ||||||
| from django_filters.filters import BooleanFilter | from django_filters.filters import BooleanFilter | ||||||
| from django_filters.filterset import FilterSet | from django_filters.filterset import FilterSet | ||||||
|  | from drf_spectacular.utils import extend_schema | ||||||
| from rest_framework import mixins | from rest_framework import mixins | ||||||
| from rest_framework.fields import ReadOnlyField, SerializerMethodField | from rest_framework.decorators import action | ||||||
|  | from rest_framework.fields import ReadOnlyField | ||||||
|  | from rest_framework.request import Request | ||||||
|  | from rest_framework.response import Response | ||||||
|  | from rest_framework.serializers import ModelSerializer, SerializerMethodField | ||||||
| from rest_framework.viewsets import GenericViewSet | from rest_framework.viewsets import GenericViewSet | ||||||
|  |  | ||||||
| from authentik.core.api.object_types import TypesMixin |  | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| from authentik.core.api.utils import MetaNameSerializer, ModelSerializer | from authentik.core.api.utils import MetaNameSerializer, TypeCreateSerializer | ||||||
| from authentik.core.models import Provider | from authentik.core.models import Provider | ||||||
|  | from authentik.enterprise.apps import EnterpriseConfig | ||||||
|  | from authentik.lib.utils.reflection import all_subclasses | ||||||
|  |  | ||||||
|  |  | ||||||
| class ProviderSerializer(ModelSerializer, MetaNameSerializer): | class ProviderSerializer(ModelSerializer, MetaNameSerializer): | ||||||
| @ -38,7 +44,6 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer): | |||||||
|             "name", |             "name", | ||||||
|             "authentication_flow", |             "authentication_flow", | ||||||
|             "authorization_flow", |             "authorization_flow", | ||||||
|             "invalidation_flow", |  | ||||||
|             "property_mappings", |             "property_mappings", | ||||||
|             "component", |             "component", | ||||||
|             "assigned_application_slug", |             "assigned_application_slug", | ||||||
| @ -51,7 +56,6 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer): | |||||||
|         ] |         ] | ||||||
|         extra_kwargs = { |         extra_kwargs = { | ||||||
|             "authorization_flow": {"required": True, "allow_null": False}, |             "authorization_flow": {"required": True, "allow_null": False}, | ||||||
|             "invalidation_flow": {"required": True, "allow_null": False}, |  | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -59,12 +63,8 @@ class ProviderFilter(FilterSet): | |||||||
|     """Filter for providers""" |     """Filter for providers""" | ||||||
|  |  | ||||||
|     application__isnull = BooleanFilter(method="filter_application__isnull") |     application__isnull = BooleanFilter(method="filter_application__isnull") | ||||||
|     backchannel = BooleanFilter( |     backchannel_only = BooleanFilter( | ||||||
|         method="filter_backchannel", |         method="filter_backchannel_only", | ||||||
|         label=_( |  | ||||||
|             "When not set all providers are returned. When set to true, only backchannel " |  | ||||||
|             "providers are returned. When set to false, backchannel providers are excluded" |  | ||||||
|         ), |  | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|     def filter_application__isnull(self, queryset: QuerySet, name, value): |     def filter_application__isnull(self, queryset: QuerySet, name, value): | ||||||
| @ -75,14 +75,12 @@ class ProviderFilter(FilterSet): | |||||||
|             | Q(application__isnull=value) |             | Q(application__isnull=value) | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     def filter_backchannel(self, queryset: QuerySet, name, value): |     def filter_backchannel_only(self, queryset: QuerySet, name, value): | ||||||
|         """By default all providers are returned. When set to true, only backchannel providers are |         """Only return backchannel providers""" | ||||||
|         returned. When set to false, backchannel providers are excluded""" |  | ||||||
|         return queryset.filter(is_backchannel=value) |         return queryset.filter(is_backchannel=value) | ||||||
|  |  | ||||||
|  |  | ||||||
| class ProviderViewSet( | class ProviderViewSet( | ||||||
|     TypesMixin, |  | ||||||
|     mixins.RetrieveModelMixin, |     mixins.RetrieveModelMixin, | ||||||
|     mixins.DestroyModelMixin, |     mixins.DestroyModelMixin, | ||||||
|     UsedByMixin, |     UsedByMixin, | ||||||
| @ -101,3 +99,31 @@ class ProviderViewSet( | |||||||
|  |  | ||||||
|     def get_queryset(self):  # pragma: no cover |     def get_queryset(self):  # pragma: no cover | ||||||
|         return Provider.objects.select_subclasses() |         return Provider.objects.select_subclasses() | ||||||
|  |  | ||||||
|  |     @extend_schema(responses={200: TypeCreateSerializer(many=True)}) | ||||||
|  |     @action(detail=False, pagination_class=None, filter_backends=[]) | ||||||
|  |     def types(self, request: Request) -> Response: | ||||||
|  |         """Get all creatable provider types""" | ||||||
|  |         data = [] | ||||||
|  |         for subclass in all_subclasses(self.queryset.model): | ||||||
|  |             subclass: Provider | ||||||
|  |             if subclass._meta.abstract: | ||||||
|  |                 continue | ||||||
|  |             data.append( | ||||||
|  |                 { | ||||||
|  |                     "name": subclass._meta.verbose_name, | ||||||
|  |                     "description": subclass.__doc__, | ||||||
|  |                     "component": subclass().component, | ||||||
|  |                     "model_name": subclass._meta.model_name, | ||||||
|  |                     "requires_enterprise": isinstance(subclass._meta.app_config, EnterpriseConfig), | ||||||
|  |                 } | ||||||
|  |             ) | ||||||
|  |         data.append( | ||||||
|  |             { | ||||||
|  |                 "name": _("SAML Provider from Metadata"), | ||||||
|  |                 "description": _("Create a SAML Provider by importing its Metadata."), | ||||||
|  |                 "component": "ak-provider-saml-import-form", | ||||||
|  |                 "model_name": "", | ||||||
|  |             } | ||||||
|  |         ) | ||||||
|  |         return Response(TypeCreateSerializer(data, many=True).data) | ||||||
|  | |||||||
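The `types` action above assembles its response by walking every subclass of the base model and introspecting its metadata. A stripped-down sketch of that discovery pattern with plain classes follows; `all_subclasses` here is a simplified stand-in for `authentik.lib.utils.reflection.all_subclasses`, and the provider classes are illustrative.

```python
def all_subclasses(cls):
    """Recursively yield every direct and indirect subclass of cls."""
    for subclass in cls.__subclasses__():
        yield subclass
        yield from all_subclasses(subclass)


class Provider:
    """Base provider (stand-in for the Django model)."""


class OAuth2Provider(Provider):
    """OAuth2/OpenID Connect provider."""


class SAMLProvider(Provider):
    """SAML provider."""


types = [
    {"name": sub.__name__, "description": sub.__doc__}
    for sub in all_subclasses(Provider)
]
print(types)  # one entry per concrete subclass
```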
| @ -11,15 +11,15 @@ from rest_framework.filters import OrderingFilter, SearchFilter | |||||||
| from rest_framework.parsers import MultiPartParser | from rest_framework.parsers import MultiPartParser | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| from rest_framework.response import Response | from rest_framework.response import Response | ||||||
|  | from rest_framework.serializers import ModelSerializer | ||||||
| from rest_framework.viewsets import GenericViewSet | from rest_framework.viewsets import GenericViewSet | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions | from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions | ||||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | ||||||
| from authentik.core.api.object_types import TypesMixin |  | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| from authentik.core.api.utils import MetaNameSerializer, ModelSerializer | from authentik.core.api.utils import MetaNameSerializer, TypeCreateSerializer | ||||||
| from authentik.core.models import GroupSourceConnection, Source, UserSourceConnection | from authentik.core.models import Source, UserSourceConnection | ||||||
| from authentik.core.types import UserSettingSerializer | from authentik.core.types import UserSettingSerializer | ||||||
| from authentik.lib.utils.file import ( | from authentik.lib.utils.file import ( | ||||||
|     FilePathSerializer, |     FilePathSerializer, | ||||||
| @ -27,6 +27,7 @@ from authentik.lib.utils.file import ( | |||||||
|     set_file, |     set_file, | ||||||
|     set_file_url, |     set_file_url, | ||||||
| ) | ) | ||||||
|  | from authentik.lib.utils.reflection import all_subclasses | ||||||
| from authentik.policies.engine import PolicyEngine | from authentik.policies.engine import PolicyEngine | ||||||
| from authentik.rbac.decorators import permission_required | from authentik.rbac.decorators import permission_required | ||||||
|  |  | ||||||
| @ -60,8 +61,6 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer): | |||||||
|             "enabled", |             "enabled", | ||||||
|             "authentication_flow", |             "authentication_flow", | ||||||
|             "enrollment_flow", |             "enrollment_flow", | ||||||
|             "user_property_mappings", |  | ||||||
|             "group_property_mappings", |  | ||||||
|             "component", |             "component", | ||||||
|             "verbose_name", |             "verbose_name", | ||||||
|             "verbose_name_plural", |             "verbose_name_plural", | ||||||
| @ -75,7 +74,6 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer): | |||||||
|  |  | ||||||
|  |  | ||||||
| class SourceViewSet( | class SourceViewSet( | ||||||
|     TypesMixin, |  | ||||||
|     mixins.RetrieveModelMixin, |     mixins.RetrieveModelMixin, | ||||||
|     mixins.DestroyModelMixin, |     mixins.DestroyModelMixin, | ||||||
|     UsedByMixin, |     UsedByMixin, | ||||||
| @ -134,6 +132,30 @@ class SourceViewSet( | |||||||
|         source: Source = self.get_object() |         source: Source = self.get_object() | ||||||
|         return set_file_url(request, source, "icon") |         return set_file_url(request, source, "icon") | ||||||
|  |  | ||||||
|  |     @extend_schema(responses={200: TypeCreateSerializer(many=True)}) | ||||||
|  |     @action(detail=False, pagination_class=None, filter_backends=[]) | ||||||
|  |     def types(self, request: Request) -> Response: | ||||||
|  |         """Get all creatable source types""" | ||||||
|  |         data = [] | ||||||
|  |         for subclass in all_subclasses(self.queryset.model): | ||||||
|  |             subclass: Source | ||||||
|  |             component = "" | ||||||
|  |             if len(subclass.__subclasses__()) > 0: | ||||||
|  |                 continue | ||||||
|  |             if subclass._meta.abstract: | ||||||
|  |                 component = subclass.__bases__[0]().component | ||||||
|  |             else: | ||||||
|  |                 component = subclass().component | ||||||
|  |             data.append( | ||||||
|  |                 { | ||||||
|  |                     "name": subclass._meta.verbose_name, | ||||||
|  |                     "description": subclass.__doc__, | ||||||
|  |                     "component": component, | ||||||
|  |                     "model_name": subclass._meta.model_name, | ||||||
|  |                 } | ||||||
|  |             ) | ||||||
|  |         return Response(TypeCreateSerializer(data, many=True).data) | ||||||
|  |  | ||||||
|     @extend_schema(responses={200: UserSettingSerializer(many=True)}) |     @extend_schema(responses={200: UserSettingSerializer(many=True)}) | ||||||
|     @action(detail=False, pagination_class=None, filter_backends=[]) |     @action(detail=False, pagination_class=None, filter_backends=[]) | ||||||
|     def user_settings(self, request: Request) -> Response: |     def user_settings(self, request: Request) -> Response: | ||||||
| @ -190,47 +212,6 @@ class UserSourceConnectionViewSet( | |||||||
|     queryset = UserSourceConnection.objects.all() |     queryset = UserSourceConnection.objects.all() | ||||||
|     serializer_class = UserSourceConnectionSerializer |     serializer_class = UserSourceConnectionSerializer | ||||||
|     permission_classes = [OwnerSuperuserPermissions] |     permission_classes = [OwnerSuperuserPermissions] | ||||||
|     filterset_fields = ["user", "source__slug"] |     filterset_fields = ["user"] | ||||||
|     search_fields = ["source__slug"] |  | ||||||
|     filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter] |     filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter] | ||||||
|     ordering = ["source__slug", "pk"] |     ordering = ["pk"] | ||||||
|  |  | ||||||
|  |  | ||||||
| class GroupSourceConnectionSerializer(SourceSerializer): |  | ||||||
|     """Group Source Connection Serializer""" |  | ||||||
|  |  | ||||||
|     source = SourceSerializer(read_only=True) |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         model = GroupSourceConnection |  | ||||||
|         fields = [ |  | ||||||
|             "pk", |  | ||||||
|             "group", |  | ||||||
|             "source", |  | ||||||
|             "identifier", |  | ||||||
|             "created", |  | ||||||
|         ] |  | ||||||
|         extra_kwargs = { |  | ||||||
|             "group": {"read_only": True}, |  | ||||||
|             "identifier": {"read_only": True}, |  | ||||||
|             "created": {"read_only": True}, |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class GroupSourceConnectionViewSet( |  | ||||||
|     mixins.RetrieveModelMixin, |  | ||||||
|     mixins.UpdateModelMixin, |  | ||||||
|     mixins.DestroyModelMixin, |  | ||||||
|     UsedByMixin, |  | ||||||
|     mixins.ListModelMixin, |  | ||||||
|     GenericViewSet, |  | ||||||
| ): |  | ||||||
|     """Group-source connection Viewset""" |  | ||||||
|  |  | ||||||
|     queryset = GroupSourceConnection.objects.all() |  | ||||||
|     serializer_class = GroupSourceConnectionSerializer |  | ||||||
|     permission_classes = [OwnerSuperuserPermissions] |  | ||||||
|     filterset_fields = ["group", "source__slug"] |  | ||||||
|     search_fields = ["source__slug"] |  | ||||||
|     filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter] |  | ||||||
|     ordering = ["source__slug", "pk"] |  | ||||||
|  | |||||||
| @ -2,7 +2,6 @@ | |||||||
|  |  | ||||||
| from typing import Any | from typing import Any | ||||||
|  |  | ||||||
| from django.utils.timezone import now |  | ||||||
| from django_filters.rest_framework import DjangoFilterBackend | from django_filters.rest_framework import DjangoFilterBackend | ||||||
| from drf_spectacular.utils import OpenApiResponse, extend_schema, inline_serializer | from drf_spectacular.utils import OpenApiResponse, extend_schema, inline_serializer | ||||||
| from guardian.shortcuts import assign_perm, get_anonymous_user | from guardian.shortcuts import assign_perm, get_anonymous_user | ||||||
| @ -12,6 +11,7 @@ from rest_framework.fields import CharField | |||||||
| from rest_framework.filters import OrderingFilter, SearchFilter | from rest_framework.filters import OrderingFilter, SearchFilter | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| from rest_framework.response import Response | from rest_framework.response import Response | ||||||
|  | from rest_framework.serializers import ModelSerializer | ||||||
| from rest_framework.viewsets import ModelViewSet | from rest_framework.viewsets import ModelViewSet | ||||||
|  |  | ||||||
| from authentik.api.authorization import OwnerSuperuserPermissions | from authentik.api.authorization import OwnerSuperuserPermissions | ||||||
| @ -19,7 +19,7 @@ from authentik.blueprints.api import ManagedSerializer | |||||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| from authentik.core.api.users import UserSerializer | from authentik.core.api.users import UserSerializer | ||||||
| from authentik.core.api.utils import ModelSerializer, PassiveSerializer | from authentik.core.api.utils import PassiveSerializer | ||||||
| from authentik.core.models import ( | from authentik.core.models import ( | ||||||
|     USER_ATTRIBUTE_TOKEN_EXPIRING, |     USER_ATTRIBUTE_TOKEN_EXPIRING, | ||||||
|     USER_ATTRIBUTE_TOKEN_MAXIMUM_LIFETIME, |     USER_ATTRIBUTE_TOKEN_MAXIMUM_LIFETIME, | ||||||
| @ -27,6 +27,7 @@ from authentik.core.models import ( | |||||||
|     TokenIntents, |     TokenIntents, | ||||||
|     User, |     User, | ||||||
|     default_token_duration, |     default_token_duration, | ||||||
|  |     token_expires_from_timedelta, | ||||||
| ) | ) | ||||||
| from authentik.events.models import Event, EventAction | from authentik.events.models import Event, EventAction | ||||||
| from authentik.events.utils import model_to_dict | from authentik.events.utils import model_to_dict | ||||||
| @ -44,13 +45,6 @@ class TokenSerializer(ManagedSerializer, ModelSerializer): | |||||||
|         if SERIALIZER_CONTEXT_BLUEPRINT in self.context: |         if SERIALIZER_CONTEXT_BLUEPRINT in self.context: | ||||||
|             self.fields["key"] = CharField(required=False) |             self.fields["key"] = CharField(required=False) | ||||||
|  |  | ||||||
|     def validate_user(self, user: User): |  | ||||||
|         """Ensure user of token cannot be changed""" |  | ||||||
|         if self.instance and self.instance.user_id: |  | ||||||
|             if user.pk != self.instance.user_id: |  | ||||||
|                 raise ValidationError("User cannot be changed") |  | ||||||
|         return user |  | ||||||
|  |  | ||||||
|     def validate(self, attrs: dict[Any, str]) -> dict[Any, str]: |     def validate(self, attrs: dict[Any, str]) -> dict[Any, str]: | ||||||
|         """Ensure only API or App password tokens are created.""" |         """Ensure only API or App password tokens are created.""" | ||||||
|         request: Request = self.context.get("request") |         request: Request = self.context.get("request") | ||||||
| @ -74,17 +68,15 @@ class TokenSerializer(ManagedSerializer, ModelSerializer): | |||||||
|             max_token_lifetime_dt = default_token_duration() |             max_token_lifetime_dt = default_token_duration() | ||||||
|             if max_token_lifetime is not None: |             if max_token_lifetime is not None: | ||||||
|                 try: |                 try: | ||||||
|                     max_token_lifetime_dt = now() + timedelta_from_string(max_token_lifetime) |                     max_token_lifetime_dt = timedelta_from_string(max_token_lifetime) | ||||||
|                 except ValueError: |                 except ValueError: | ||||||
|                     pass |                     max_token_lifetime_dt = default_token_duration() | ||||||
|  |  | ||||||
|             if "expires" in attrs and attrs.get("expires") > max_token_lifetime_dt: |             if "expires" in attrs and attrs.get("expires") > token_expires_from_timedelta( | ||||||
|  |                 max_token_lifetime_dt | ||||||
|  |             ): | ||||||
|                 raise ValidationError( |                 raise ValidationError( | ||||||
|                     { |                     {"expires": f"Token expires exceeds maximum lifetime ({max_token_lifetime})."} | ||||||
|                         "expires": ( |  | ||||||
|                             f"Token expires exceeds maximum lifetime ({max_token_lifetime_dt} UTC)." |  | ||||||
|                         ) |  | ||||||
|                     } |  | ||||||
|                 ) |                 ) | ||||||
|         elif attrs.get("intent") == TokenIntents.INTENT_API: |         elif attrs.get("intent") == TokenIntents.INTENT_API: | ||||||
|             # For API tokens, expires cannot be overridden |             # For API tokens, expires cannot be overridden | ||||||
|  | |||||||
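The two sides of this hunk differ only in how the configured maximum lifetime is turned into a cut-off for the requested `expires` value (adding the parsed lifetime to `now()` versus delegating to a helper). A standalone sketch of that comparison follows, with a toy parser standing in for authentik's `timedelta_from_string`; the `hours=30` syntax and the attribute source are assumptions for the demo.

```python
from datetime import datetime, timedelta, timezone


def timedelta_from_string(spec: str) -> timedelta:
    """Toy parser: 'hours=30' -> timedelta(hours=30).
    The real helper accepts a richer syntax; this handles one key=value pair."""
    unit, _, amount = spec.partition("=")
    return timedelta(**{unit: int(amount)})


max_token_lifetime = "hours=30"  # e.g. read from a user attribute
max_expiry = datetime.now(timezone.utc) + timedelta_from_string(max_token_lifetime)

requested_expiry = datetime.now(timezone.utc) + timedelta(days=7)
if requested_expiry > max_expiry:
    print(f"rejected: token expires exceeds maximum lifetime ({max_expiry} UTC)")
```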
| @ -14,7 +14,6 @@ from rest_framework.request import Request | |||||||
| from rest_framework.response import Response | from rest_framework.response import Response | ||||||
|  |  | ||||||
| from authentik.core.api.utils import PassiveSerializer | from authentik.core.api.utils import PassiveSerializer | ||||||
| from authentik.rbac.filters import ObjectFilter |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class DeleteAction(Enum): | class DeleteAction(Enum): | ||||||
| @ -40,12 +39,12 @@ def get_delete_action(manager: Manager) -> str: | |||||||
|     """Get the delete action from the Foreign key, falls back to cascade""" |     """Get the delete action from the Foreign key, falls back to cascade""" | ||||||
|     if hasattr(manager, "field"): |     if hasattr(manager, "field"): | ||||||
|         if manager.field.remote_field.on_delete.__name__ == SET_NULL.__name__: |         if manager.field.remote_field.on_delete.__name__ == SET_NULL.__name__: | ||||||
|             return DeleteAction.SET_NULL.value |             return DeleteAction.SET_NULL.name | ||||||
|         if manager.field.remote_field.on_delete.__name__ == SET_DEFAULT.__name__: |         if manager.field.remote_field.on_delete.__name__ == SET_DEFAULT.__name__: | ||||||
|             return DeleteAction.SET_DEFAULT.value |             return DeleteAction.SET_DEFAULT.name | ||||||
|     if hasattr(manager, "source_field"): |     if hasattr(manager, "source_field"): | ||||||
|         return DeleteAction.CASCADE_MANY.value |         return DeleteAction.CASCADE_MANY.name | ||||||
|     return DeleteAction.CASCADE.value |     return DeleteAction.CASCADE.name | ||||||
|  |  | ||||||
|  |  | ||||||
| class UsedByMixin: | class UsedByMixin: | ||||||
| @ -54,7 +53,7 @@ class UsedByMixin: | |||||||
|     @extend_schema( |     @extend_schema( | ||||||
|         responses={200: UsedBySerializer(many=True)}, |         responses={200: UsedBySerializer(many=True)}, | ||||||
|     ) |     ) | ||||||
|     @action(detail=True, pagination_class=None, filter_backends=[ObjectFilter]) |     @action(detail=True, pagination_class=None, filter_backends=[]) | ||||||
|     def used_by(self, request: Request, *args, **kwargs) -> Response: |     def used_by(self, request: Request, *args, **kwargs) -> Response: | ||||||
|         """Get a list of all objects that use this object""" |         """Get a list of all objects that use this object""" | ||||||
|         model: Model = self.get_object() |         model: Model = self.get_object() | ||||||
|  | |||||||
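The only change in `get_delete_action` above is whether the enum member's `.name` or `.value` is returned, which decides the exact strings the `used_by` API emits. A small illustration of the difference follows; the member values here are invented for the demo and are not taken from the diff.

```python
from enum import Enum


class DeleteAction(Enum):
    """Reduced stand-in for the enum above; the values are illustrative."""

    CASCADE = "cascade"
    SET_NULL = "set_null"


print(DeleteAction.SET_NULL.name)   # "SET_NULL"  (the member's identifier)
print(DeleteAction.SET_NULL.value)  # "set_null"  (the value it was assigned)
```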
| @ -5,7 +5,6 @@ from json import loads | |||||||
| from typing import Any | from typing import Any | ||||||
|  |  | ||||||
| from django.contrib.auth import update_session_auth_hash | from django.contrib.auth import update_session_auth_hash | ||||||
| from django.contrib.auth.models import Permission |  | ||||||
| from django.contrib.sessions.backends.cache import KEY_PREFIX | from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||||
| from django.core.cache import cache | from django.core.cache import cache | ||||||
| from django.db.models.functions import ExtractHour | from django.db.models.functions import ExtractHour | ||||||
| @ -34,21 +33,16 @@ from drf_spectacular.utils import ( | |||||||
| ) | ) | ||||||
| from guardian.shortcuts import get_objects_for_user | from guardian.shortcuts import get_objects_for_user | ||||||
| from rest_framework.decorators import action | from rest_framework.decorators import action | ||||||
| from rest_framework.exceptions import ValidationError | from rest_framework.fields import CharField, IntegerField, ListField, SerializerMethodField | ||||||
| from rest_framework.fields import ( |  | ||||||
|     BooleanField, |  | ||||||
|     CharField, |  | ||||||
|     ChoiceField, |  | ||||||
|     DateTimeField, |  | ||||||
|     IntegerField, |  | ||||||
|     ListField, |  | ||||||
|     SerializerMethodField, |  | ||||||
| ) |  | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| from rest_framework.response import Response | from rest_framework.response import Response | ||||||
| from rest_framework.serializers import ( | from rest_framework.serializers import ( | ||||||
|  |     BooleanField, | ||||||
|  |     DateTimeField, | ||||||
|     ListSerializer, |     ListSerializer, | ||||||
|  |     ModelSerializer, | ||||||
|     PrimaryKeyRelatedField, |     PrimaryKeyRelatedField, | ||||||
|  |     ValidationError, | ||||||
| ) | ) | ||||||
| from rest_framework.validators import UniqueValidator | from rest_framework.validators import UniqueValidator | ||||||
| from rest_framework.viewsets import ModelViewSet | from rest_framework.viewsets import ModelViewSet | ||||||
| @ -58,12 +52,7 @@ from authentik.admin.api.metrics import CoordinateSerializer | |||||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | ||||||
| from authentik.brands.models import Brand | from authentik.brands.models import Brand | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| from authentik.core.api.utils import ( | from authentik.core.api.utils import JSONDictField, LinkSerializer, PassiveSerializer | ||||||
|     JSONDictField, |  | ||||||
|     LinkSerializer, |  | ||||||
|     ModelSerializer, |  | ||||||
|     PassiveSerializer, |  | ||||||
| ) |  | ||||||
| from authentik.core.middleware import ( | from authentik.core.middleware import ( | ||||||
|     SESSION_KEY_IMPERSONATE_ORIGINAL_USER, |     SESSION_KEY_IMPERSONATE_ORIGINAL_USER, | ||||||
|     SESSION_KEY_IMPERSONATE_USER, |     SESSION_KEY_IMPERSONATE_USER, | ||||||
| @ -85,7 +74,6 @@ from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner | |||||||
| from authentik.flows.views.executor import QS_KEY_TOKEN | from authentik.flows.views.executor import QS_KEY_TOKEN | ||||||
| from authentik.lib.avatars import get_avatar | from authentik.lib.avatars import get_avatar | ||||||
| from authentik.rbac.decorators import permission_required | from authentik.rbac.decorators import permission_required | ||||||
| from authentik.rbac.models import get_permission_choices |  | ||||||
| from authentik.stages.email.models import EmailStage | from authentik.stages.email.models import EmailStage | ||||||
| from authentik.stages.email.tasks import send_mails | from authentik.stages.email.tasks import send_mails | ||||||
| from authentik.stages.email.utils import TemplateEmailMessage | from authentik.stages.email.utils import TemplateEmailMessage | ||||||
| @ -149,19 +137,12 @@ class UserSerializer(ModelSerializer): | |||||||
|         super().__init__(*args, **kwargs) |         super().__init__(*args, **kwargs) | ||||||
|         if SERIALIZER_CONTEXT_BLUEPRINT in self.context: |         if SERIALIZER_CONTEXT_BLUEPRINT in self.context: | ||||||
|             self.fields["password"] = CharField(required=False, allow_null=True) |             self.fields["password"] = CharField(required=False, allow_null=True) | ||||||
|             self.fields["permissions"] = ListField( |  | ||||||
|                 required=False, child=ChoiceField(choices=get_permission_choices()) |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|     def create(self, validated_data: dict) -> User: |     def create(self, validated_data: dict) -> User: | ||||||
|         """If this serializer is used in the blueprint context, we allow for |         """If this serializer is used in the blueprint context, we allow for | ||||||
|         directly setting a password. However, this should be done via the `set_password` |         directly setting a password. However, this should be done via the `set_password` | ||||||
|         method instead of setting it directly as rest_framework does.""" |         method instead of setting it directly as rest_framework does.""" | ||||||
|         password = validated_data.pop("password", None) |         password = validated_data.pop("password", None) | ||||||
|         permissions = Permission.objects.filter( |  | ||||||
|             codename__in=[x.split(".")[1] for x in validated_data.pop("permissions", [])] |  | ||||||
|         ) |  | ||||||
|         validated_data["user_permissions"] = permissions |  | ||||||
|         instance: User = super().create(validated_data) |         instance: User = super().create(validated_data) | ||||||
|         self._set_password(instance, password) |         self._set_password(instance, password) | ||||||
|         return instance |         return instance | ||||||
| @ -170,10 +151,6 @@ class UserSerializer(ModelSerializer): | |||||||
|         """Same as `create` above, set the password directly if we're in a blueprint |         """Same as `create` above, set the password directly if we're in a blueprint | ||||||
|         context""" |         context""" | ||||||
|         password = validated_data.pop("password", None) |         password = validated_data.pop("password", None) | ||||||
|         permissions = Permission.objects.filter( |  | ||||||
|             codename__in=[x.split(".")[1] for x in validated_data.pop("permissions", [])] |  | ||||||
|         ) |  | ||||||
|         validated_data["user_permissions"] = permissions |  | ||||||
|         instance = super().update(instance, validated_data) |         instance = super().update(instance, validated_data) | ||||||
|         self._set_password(instance, password) |         self._set_password(instance, password) | ||||||
|         return instance |         return instance | ||||||
| @ -430,11 +407,8 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|     search_fields = ["username", "name", "is_active", "email", "uuid"] |     search_fields = ["username", "name", "is_active", "email", "uuid"] | ||||||
|     filterset_class = UsersFilter |     filterset_class = UsersFilter | ||||||
|  |  | ||||||
|     def get_queryset(self): |     def get_queryset(self):  # pragma: no cover | ||||||
|         base_qs = User.objects.all().exclude_anonymous() |         return User.objects.all().exclude_anonymous().prefetch_related("ak_groups") | ||||||
|         if self.serializer_class(context={"request": self.request})._should_include_groups: |  | ||||||
|             base_qs = base_qs.prefetch_related("ak_groups") |  | ||||||
|         return base_qs |  | ||||||
|  |  | ||||||
|     @extend_schema( |     @extend_schema( | ||||||
|         parameters=[ |         parameters=[ | ||||||
| @ -678,13 +652,10 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|         if not request.tenant.impersonation: |         if not request.tenant.impersonation: | ||||||
|             LOGGER.debug("User attempted to impersonate", user=request.user) |             LOGGER.debug("User attempted to impersonate", user=request.user) | ||||||
|             return Response(status=401) |             return Response(status=401) | ||||||
|         user_to_be = self.get_object() |         if not request.user.has_perm("impersonate"): | ||||||
|         # Check both object-level perms and global perms |  | ||||||
|         if not request.user.has_perm( |  | ||||||
|             "authentik_core.impersonate", user_to_be |  | ||||||
|         ) and not request.user.has_perm("authentik_core.impersonate"): |  | ||||||
|             LOGGER.debug("User attempted to impersonate without permissions", user=request.user) |             LOGGER.debug("User attempted to impersonate without permissions", user=request.user) | ||||||
|             return Response(status=401) |             return Response(status=401) | ||||||
|  |         user_to_be = self.get_object() | ||||||
|         if user_to_be.pk == self.request.user.pk: |         if user_to_be.pk == self.request.user.pk: | ||||||
|             LOGGER.debug("User attempted to impersonate themselves", user=request.user) |             LOGGER.debug("User attempted to impersonate themselves", user=request.user) | ||||||
|             return Response(status=401) |             return Response(status=401) | ||||||
|  | |||||||
| @ -6,19 +6,8 @@ from django.db.models import Model | |||||||
| from drf_spectacular.extensions import OpenApiSerializerFieldExtension | from drf_spectacular.extensions import OpenApiSerializerFieldExtension | ||||||
| from drf_spectacular.plumbing import build_basic_type | from drf_spectacular.plumbing import build_basic_type | ||||||
| from drf_spectacular.types import OpenApiTypes | from drf_spectacular.types import OpenApiTypes | ||||||
| from rest_framework.fields import ( | from rest_framework.fields import BooleanField, CharField, IntegerField, JSONField | ||||||
|     CharField, | from rest_framework.serializers import Serializer, SerializerMethodField, ValidationError | ||||||
|     IntegerField, |  | ||||||
|     JSONField, |  | ||||||
|     SerializerMethodField, |  | ||||||
| ) |  | ||||||
| from rest_framework.serializers import ModelSerializer as BaseModelSerializer |  | ||||||
| from rest_framework.serializers import ( |  | ||||||
|     Serializer, |  | ||||||
|     ValidationError, |  | ||||||
|     model_meta, |  | ||||||
|     raise_errors_on_nested_writes, |  | ||||||
| ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def is_dict(value: Any): | def is_dict(value: Any): | ||||||
| @ -28,39 +17,6 @@ def is_dict(value: Any): | |||||||
|     raise ValidationError("Value must be a dictionary, and not have any duplicate keys.") |     raise ValidationError("Value must be a dictionary, and not have any duplicate keys.") | ||||||
|  |  | ||||||
|  |  | ||||||
| class ModelSerializer(BaseModelSerializer): |  | ||||||
|  |  | ||||||
|     def update(self, instance: Model, validated_data): |  | ||||||
|         raise_errors_on_nested_writes("update", self, validated_data) |  | ||||||
|         info = model_meta.get_field_info(instance) |  | ||||||
|  |  | ||||||
|         # Simply set each attribute on the instance, and then save it. |  | ||||||
|         # Note that unlike `.create()` we don't need to treat many-to-many |  | ||||||
|         # relationships as being a special case. During updates we already |  | ||||||
|         # have an instance pk for the relationships to be associated with. |  | ||||||
|         m2m_fields = [] |  | ||||||
|         for attr, value in validated_data.items(): |  | ||||||
|             if attr in info.relations and info.relations[attr].to_many: |  | ||||||
|                 m2m_fields.append((attr, value)) |  | ||||||
|             else: |  | ||||||
|                 setattr(instance, attr, value) |  | ||||||
|  |  | ||||||
|         instance.save() |  | ||||||
|  |  | ||||||
|         # Note that many-to-many fields are set after updating instance. |  | ||||||
|         # Setting m2m fields triggers signals which could potentially change |  | ||||||
|         # updated instance and we do not want it to collide with .update() |  | ||||||
|         for attr, value in m2m_fields: |  | ||||||
|             field = getattr(instance, attr) |  | ||||||
|             # We can't check for inheritance here as m2m managers are generated dynamically |  | ||||||
|             if field.__class__.__name__ == "RelatedManager": |  | ||||||
|                 field.set(value, bulk=False) |  | ||||||
|             else: |  | ||||||
|                 field.set(value) |  | ||||||
|  |  | ||||||
|         return instance |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class JSONDictField(JSONField): | class JSONDictField(JSONField): | ||||||
|     """JSON Field which only allows dictionaries""" |     """JSON Field which only allows dictionaries""" | ||||||
|  |  | ||||||
| @ -112,6 +68,16 @@ class MetaNameSerializer(PassiveSerializer): | |||||||
|         return f"{obj._meta.app_label}.{obj._meta.model_name}" |         return f"{obj._meta.app_label}.{obj._meta.model_name}" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class TypeCreateSerializer(PassiveSerializer): | ||||||
|  |     """Types of an object that can be created""" | ||||||
|  |  | ||||||
|  |     name = CharField(required=True) | ||||||
|  |     description = CharField(required=True) | ||||||
|  |     component = CharField(required=True) | ||||||
|  |     model_name = CharField(required=True) | ||||||
|  |     requires_enterprise = BooleanField(default=False) | ||||||
|  |  | ||||||
|  |  | ||||||
| class CacheSerializer(PassiveSerializer): | class CacheSerializer(PassiveSerializer): | ||||||
|     """Generic cache stats for an object""" |     """Generic cache stats for an object""" | ||||||
|  |  | ||||||
|  | |||||||
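`TypeCreateSerializer` above is a plain read-only serializer that the various `types` actions feed with hand-built dicts. A standalone sketch of that usage follows; it assumes a working DRF environment (for example an authentik or Django shell), and the example entry values are invented.

```python
from rest_framework import serializers


class TypeCreateSerializer(serializers.Serializer):
    """Standalone mirror of the serializer above, for illustration only."""

    name = serializers.CharField()
    description = serializers.CharField()
    component = serializers.CharField()
    model_name = serializers.CharField()
    requires_enterprise = serializers.BooleanField(default=False)


entries = [
    {
        "name": "OAuth2/OpenID Provider",       # invented example values
        "description": "Example provider type",
        "component": "ak-provider-oauth2-form",
        "model_name": "oauth2provider",
        "requires_enterprise": False,
    }
]
print(TypeCreateSerializer(entries, many=True).data)
```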
| @ -31,9 +31,8 @@ class InbuiltBackend(ModelBackend): | |||||||
|         # Since we can't directly pass other variables to signals, and we want to log the method |         # Since we can't directly pass other variables to signals, and we want to log the method | ||||||
|         # and the token used, we assume we're running in a flow and set a variable in the context |         # and the token used, we assume we're running in a flow and set a variable in the context | ||||||
|         flow_plan: FlowPlan = request.session.get(SESSION_KEY_PLAN, FlowPlan("")) |         flow_plan: FlowPlan = request.session.get(SESSION_KEY_PLAN, FlowPlan("")) | ||||||
|         flow_plan.context.setdefault(PLAN_CONTEXT_METHOD, method) |         flow_plan.context[PLAN_CONTEXT_METHOD] = method | ||||||
|         flow_plan.context.setdefault(PLAN_CONTEXT_METHOD_ARGS, {}) |         flow_plan.context[PLAN_CONTEXT_METHOD_ARGS] = cleanse_dict(sanitize_dict(kwargs)) | ||||||
|         flow_plan.context[PLAN_CONTEXT_METHOD_ARGS].update(cleanse_dict(sanitize_dict(kwargs))) |  | ||||||
|         request.session[SESSION_KEY_PLAN] = flow_plan |         request.session[SESSION_KEY_PLAN] = flow_plan | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
							
								
								
									
authentik/core/exceptions.py (Normal file, 7 lines)
							| @ -0,0 +1,7 @@ | |||||||
|  | """authentik core exceptions""" | ||||||
|  |  | ||||||
|  | from authentik.lib.sentry import SentryIgnoredException | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class PropertyMappingExpressionException(SentryIgnoredException): | ||||||
|  |     """Error when a PropertyMapping Exception expression could not be parsed or evaluated.""" | ||||||
| @ -1,13 +1,11 @@ | |||||||
| """Property Mapping Evaluator""" | """Property Mapping Evaluator""" | ||||||
|  |  | ||||||
| from types import CodeType |  | ||||||
| from typing import Any | from typing import Any | ||||||
|  |  | ||||||
| from django.db.models import Model | from django.db.models import Model | ||||||
| from django.http import HttpRequest | from django.http import HttpRequest | ||||||
| from prometheus_client import Histogram | from prometheus_client import Histogram | ||||||
|  |  | ||||||
| from authentik.core.expression.exceptions import SkipObjectException |  | ||||||
| from authentik.core.models import User | from authentik.core.models import User | ||||||
| from authentik.events.models import Event, EventAction | from authentik.events.models import Event, EventAction | ||||||
| from authentik.lib.expression.evaluator import BaseEvaluator | from authentik.lib.expression.evaluator import BaseEvaluator | ||||||
| @ -25,8 +23,6 @@ class PropertyMappingEvaluator(BaseEvaluator): | |||||||
|     """Custom Evaluator that adds some different context variables.""" |     """Custom Evaluator that adds some different context variables.""" | ||||||
|  |  | ||||||
|     dry_run: bool |     dry_run: bool | ||||||
|     model: Model |  | ||||||
|     _compiled: CodeType | None = None |  | ||||||
|  |  | ||||||
|     def __init__( |     def __init__( | ||||||
|         self, |         self, | ||||||
| @ -36,32 +32,22 @@ class PropertyMappingEvaluator(BaseEvaluator): | |||||||
|         dry_run: bool | None = False, |         dry_run: bool | None = False, | ||||||
|         **kwargs, |         **kwargs, | ||||||
|     ): |     ): | ||||||
|         self.model = model |  | ||||||
|         if hasattr(model, "name"): |         if hasattr(model, "name"): | ||||||
|             _filename = model.name |             _filename = model.name | ||||||
|         else: |         else: | ||||||
|             _filename = str(model) |             _filename = str(model) | ||||||
|         super().__init__(filename=_filename) |         super().__init__(filename=_filename) | ||||||
|         self.dry_run = dry_run |  | ||||||
|         self.set_context(user, request, **kwargs) |  | ||||||
|  |  | ||||||
|     def set_context( |  | ||||||
|         self, |  | ||||||
|         user: User | None = None, |  | ||||||
|         request: HttpRequest | None = None, |  | ||||||
|         **kwargs, |  | ||||||
|     ): |  | ||||||
|         req = PolicyRequest(user=User()) |         req = PolicyRequest(user=User()) | ||||||
|         req.obj = self.model |         req.obj = model | ||||||
|         if user: |         if user: | ||||||
|             req.user = user |             req.user = user | ||||||
|             self._context["user"] = user |             self._context["user"] = user | ||||||
|         if request: |         if request: | ||||||
|             req.http_request = request |             req.http_request = request | ||||||
|         req.context.update(**kwargs) |  | ||||||
|         self._context["request"] = req |         self._context["request"] = req | ||||||
|  |         req.context.update(**kwargs) | ||||||
|         self._context.update(**kwargs) |         self._context.update(**kwargs) | ||||||
|         self._globals["SkipObject"] = SkipObjectException |         self.dry_run = dry_run | ||||||
|  |  | ||||||
|     def handle_error(self, exc: Exception, expression_source: str): |     def handle_error(self, exc: Exception, expression_source: str): | ||||||
|         """Exception Handler""" |         """Exception Handler""" | ||||||
| @ -76,19 +62,10 @@ class PropertyMappingEvaluator(BaseEvaluator): | |||||||
|         ) |         ) | ||||||
|         if "request" in self._context: |         if "request" in self._context: | ||||||
|             req: PolicyRequest = self._context["request"] |             req: PolicyRequest = self._context["request"] | ||||||
|             if req.http_request: |  | ||||||
|                 event.from_http(req.http_request, req.user) |             event.from_http(req.http_request, req.user) | ||||||
|                 return |             return | ||||||
|             elif req.user: |  | ||||||
|                 event.set_user(req.user) |  | ||||||
|         event.save() |         event.save() | ||||||
|  |  | ||||||
|     def evaluate(self, *args, **kwargs) -> Any: |     def evaluate(self, *args, **kwargs) -> Any: | ||||||
|         with PROPERTY_MAPPING_TIME.labels(mapping_name=self._filename).time(): |         with PROPERTY_MAPPING_TIME.labels(mapping_name=self._filename).time(): | ||||||
|             return super().evaluate(*args, **kwargs) |             return super().evaluate(*args, **kwargs) | ||||||
|  |  | ||||||
|     def compile(self, expression: str | None = None) -> Any: |  | ||||||
|         if not self._compiled: |  | ||||||
|             compiled = super().compile(expression or self.model.expression) |  | ||||||
|             self._compiled = compiled |  | ||||||
|         return self._compiled |  | ||||||
|  | |||||||
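One side of the evaluator hunk above adds a cached `compile()` so a mapping's expression is only compiled once per evaluator instance. A minimal sketch of that memoisation using Python's built-in `compile()` follows; the class and names are invented for the demo.

```python
class CachedExpression:
    """Compile an expression string once and reuse the code object afterwards."""

    def __init__(self, expression: str, filename: str = "<mapping>"):
        self.expression = expression
        self.filename = filename
        self._compiled = None

    def compile(self):
        # Only compile on the first call; later calls return the cached object.
        if self._compiled is None:
            self._compiled = compile(self.expression, self.filename, "exec")
        return self._compiled


expr = CachedExpression("result = 1 + 1")
assert expr.compile() is expr.compile()  # second call reuses the cached code object
```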
| @ -1,19 +0,0 @@ | |||||||
| """authentik core exceptions""" |  | ||||||
|  |  | ||||||
| from authentik.lib.expression.exceptions import ControlFlowException |  | ||||||
| from authentik.lib.sentry import SentryIgnoredException |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class PropertyMappingExpressionException(SentryIgnoredException): |  | ||||||
|     """Error when a PropertyMapping Exception expression could not be parsed or evaluated.""" |  | ||||||
|  |  | ||||||
|     def __init__(self, exc: Exception, mapping) -> None: |  | ||||||
|         super().__init__() |  | ||||||
|         self.exc = exc |  | ||||||
|         self.mapping = mapping |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class SkipObjectException(ControlFlowException): |  | ||||||
|     """Exception which can be raised in a property mapping to skip syncing an object. |  | ||||||
|     Only applies to Property mappings which sync objects, and not on mappings which transitively |  | ||||||
|     apply to a single user""" |  | ||||||
| @ -1,32 +0,0 @@ | |||||||
| """Change user type""" |  | ||||||
|  |  | ||||||
| from authentik.core.models import User, UserTypes |  | ||||||
| from authentik.tenants.management import TenantCommand |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Command(TenantCommand): |  | ||||||
|     """Change user type""" |  | ||||||
|  |  | ||||||
|     def add_arguments(self, parser): |  | ||||||
|         parser.add_argument("--type", type=str, required=True) |  | ||||||
|         parser.add_argument("--all", action="store_true", default=False) |  | ||||||
|         parser.add_argument("usernames", nargs="*", type=str) |  | ||||||
|  |  | ||||||
|     def handle_per_tenant(self, **options): |  | ||||||
|         print(options) |  | ||||||
|         new_type = UserTypes(options["type"]) |  | ||||||
|         qs = ( |  | ||||||
|             User.objects.exclude_anonymous() |  | ||||||
|             .exclude(type=UserTypes.SERVICE_ACCOUNT) |  | ||||||
|             .exclude(type=UserTypes.INTERNAL_SERVICE_ACCOUNT) |  | ||||||
|         ) |  | ||||||
|         if options["usernames"] and options["all"]: |  | ||||||
|             self.stderr.write("--all and usernames specified, only one can be specified") |  | ||||||
|             return |  | ||||||
|         if not options["usernames"] and not options["all"]: |  | ||||||
|             self.stderr.write("--all or usernames must be specified") |  | ||||||
|             return |  | ||||||
|         if options["usernames"] and not options["all"]: |  | ||||||
|             qs = qs.filter(username__in=options["usernames"]) |  | ||||||
|         updated = qs.update(type=new_type) |  | ||||||
|         self.stdout.write(f"Updated {updated} users.") |  | ||||||
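The `change_user_type` management command shown above is normally driven from the CLI. A hypothetical invocation through Django's `call_command` follows, assuming a configured authentik environment; the usernames and the `external` type value are placeholders, not taken from this diff.

```python
from django.core.management import call_command

# Roughly equivalent to: ak change_user_type --type external alice bob
call_command("change_user_type", "alice", "bob", type="external")
```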
| @ -4,7 +4,6 @@ import code | |||||||
| import platform | import platform | ||||||
| import sys | import sys | ||||||
| import traceback | import traceback | ||||||
| from pprint import pprint |  | ||||||
|  |  | ||||||
| from django.apps import apps | from django.apps import apps | ||||||
| from django.core.management.base import BaseCommand | from django.core.management.base import BaseCommand | ||||||
| @ -35,9 +34,7 @@ class Command(BaseCommand): | |||||||
|  |  | ||||||
|     def get_namespace(self): |     def get_namespace(self): | ||||||
|         """Prepare namespace with all models""" |         """Prepare namespace with all models""" | ||||||
|         namespace = { |         namespace = {} | ||||||
|             "pprint": pprint, |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|         # Gather Django models and constants from each app |         # Gather Django models and constants from each app | ||||||
|         for app in apps.get_app_configs(): |         for app in apps.get_app_configs(): | ||||||
|  | |||||||
| @ -5,7 +5,7 @@ from contextvars import ContextVar | |||||||
| from uuid import uuid4 | from uuid import uuid4 | ||||||
|  |  | ||||||
| from django.http import HttpRequest, HttpResponse | from django.http import HttpRequest, HttpResponse | ||||||
| from django.utils.translation import override | from django.utils.translation import activate | ||||||
| from sentry_sdk.api import set_tag | from sentry_sdk.api import set_tag | ||||||
| from structlog.contextvars import STRUCTLOG_KEY_PREFIX | from structlog.contextvars import STRUCTLOG_KEY_PREFIX | ||||||
|  |  | ||||||
| @ -31,18 +31,16 @@ class ImpersonateMiddleware: | |||||||
|     def __call__(self, request: HttpRequest) -> HttpResponse: |     def __call__(self, request: HttpRequest) -> HttpResponse: | ||||||
|         # No permission checks are done here, they need to be checked before |         # No permission checks are done here, they need to be checked before | ||||||
|         # SESSION_KEY_IMPERSONATE_USER is set. |         # SESSION_KEY_IMPERSONATE_USER is set. | ||||||
|         locale_to_set = None |  | ||||||
|         if request.user.is_authenticated: |         if request.user.is_authenticated: | ||||||
|             locale = request.user.locale(request) |             locale = request.user.locale(request) | ||||||
|             if locale != "": |             if locale != "": | ||||||
|                 locale_to_set = locale |                 activate(locale) | ||||||
|  |  | ||||||
|         if SESSION_KEY_IMPERSONATE_USER in request.session: |         if SESSION_KEY_IMPERSONATE_USER in request.session: | ||||||
|             request.user = request.session[SESSION_KEY_IMPERSONATE_USER] |             request.user = request.session[SESSION_KEY_IMPERSONATE_USER] | ||||||
|             # Ensure that the user is active, otherwise nothing will work |             # Ensure that the user is active, otherwise nothing will work | ||||||
|             request.user.is_active = True |             request.user.is_active = True | ||||||
|  |  | ||||||
|         with override(locale_to_set): |  | ||||||
|             return self.get_response(request) |         return self.get_response(request) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
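The middleware hunk above swaps between `activate()` and `override()` from `django.utils.translation`; the practical difference is scope, since `override()` restores the previous locale when its block exits while `activate()` changes it for the whole thread. A short illustration follows, meant to be run inside a configured Django project.

```python
from django.utils import translation

translation.activate("de")            # switches the active locale for the whole thread
print(translation.get_language())     # "de", and it stays that way

with translation.override("fr"):      # switches the locale only inside the block
    print(translation.get_language())  # "fr"
print(translation.get_language())     # back to "de" automatically
```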
| @ -7,13 +7,11 @@ from django.db.backends.base.schema import BaseDatabaseSchemaEditor | |||||||
|  |  | ||||||
|  |  | ||||||
| def backport_is_backchannel(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | def backport_is_backchannel(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||||
|     db_alias = schema_editor.connection.alias |     from authentik.core.models import BackchannelProvider | ||||||
|     from authentik.providers.ldap.models import LDAPProvider |  | ||||||
|     from authentik.providers.scim.models import SCIMProvider |  | ||||||
|  |  | ||||||
|     for model in [LDAPProvider, SCIMProvider]: |     for model in BackchannelProvider.__subclasses__(): | ||||||
|         try: |         try: | ||||||
|             for obj in model.objects.using(db_alias).only("is_backchannel"): |             for obj in model.objects.only("is_backchannel"): | ||||||
|                 obj.is_backchannel = True |                 obj.is_backchannel = True | ||||||
|                 obj.save() |                 obj.save() | ||||||
|         except (DatabaseError, InternalError, ProgrammingError): |         except (DatabaseError, InternalError, ProgrammingError): | ||||||
|  | |||||||
| @ -1,43 +0,0 @@ | |||||||
| # Generated by Django 5.0.2 on 2024-02-29 11:05 |  | ||||||
|  |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_core", "0035_alter_group_options_and_more"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="source", |  | ||||||
|             name="group_property_mappings", |  | ||||||
|             field=models.ManyToManyField( |  | ||||||
|                 blank=True, |  | ||||||
|                 default=None, |  | ||||||
|                 related_name="source_grouppropertymappings_set", |  | ||||||
|                 to="authentik_core.propertymapping", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="source", |  | ||||||
|             name="user_property_mappings", |  | ||||||
|             field=models.ManyToManyField( |  | ||||||
|                 blank=True, |  | ||||||
|                 default=None, |  | ||||||
|                 related_name="source_userpropertymappings_set", |  | ||||||
|                 to="authentik_core.propertymapping", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AlterField( |  | ||||||
|             model_name="source", |  | ||||||
|             name="property_mappings", |  | ||||||
|             field=models.ManyToManyField( |  | ||||||
|                 blank=True, |  | ||||||
|                 default=None, |  | ||||||
|                 related_name="source_set", |  | ||||||
|                 to="authentik_core.propertymapping", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -1,18 +0,0 @@ | |||||||
| # Generated by Django 5.0.2 on 2024-02-29 11:21 |  | ||||||
|  |  | ||||||
| from django.db import migrations |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_sources_ldap", "0005_remove_ldappropertymapping_object_field_and_more"), |  | ||||||
|         ("authentik_core", "0036_source_group_property_mappings_and_more"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.RemoveField( |  | ||||||
|             model_name="source", |  | ||||||
|             name="property_mappings", |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -1,19 +0,0 @@ | |||||||
| # Generated by Django 5.0.7 on 2024-07-22 13:32 |  | ||||||
|  |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_core", "0037_remove_source_property_mappings"), |  | ||||||
|         ("authentik_flows", "0027_auto_20231028_1424"), |  | ||||||
|         ("authentik_policies", "0011_policybinding_failure_result_and_more"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddIndex( |  | ||||||
|             model_name="source", |  | ||||||
|             index=models.Index(fields=["enabled"], name="authentik_c_enabled_d72365_idx"), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -1,67 +0,0 @@ | |||||||
| # Generated by Django 5.0.7 on 2024-08-01 18:52 |  | ||||||
|  |  | ||||||
| import django.db.models.deletion |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_core", "0038_source_authentik_c_enabled_d72365_idx"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="source", |  | ||||||
|             name="group_matching_mode", |  | ||||||
|             field=models.TextField( |  | ||||||
|                 choices=[ |  | ||||||
|                     ("identifier", "Use the source-specific identifier"), |  | ||||||
|                     ( |  | ||||||
|                         "name_link", |  | ||||||
|                         "Link to a group with identical name. Can have security implications when a group name is used with another source.", |  | ||||||
|                     ), |  | ||||||
|                     ( |  | ||||||
|                         "name_deny", |  | ||||||
|                         "Use the group name, but deny enrollment when the name already exists.", |  | ||||||
|                     ), |  | ||||||
|                 ], |  | ||||||
|                 default="identifier", |  | ||||||
|                 help_text="How the source determines if an existing group should be used or a new group created.", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AlterField( |  | ||||||
|             model_name="group", |  | ||||||
|             name="name", |  | ||||||
|             field=models.TextField(verbose_name="name"), |  | ||||||
|         ), |  | ||||||
|         migrations.CreateModel( |  | ||||||
|             name="GroupSourceConnection", |  | ||||||
|             fields=[ |  | ||||||
|                 ( |  | ||||||
|                     "id", |  | ||||||
|                     models.AutoField( |  | ||||||
|                         auto_created=True, primary_key=True, serialize=False, verbose_name="ID" |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ("created", models.DateTimeField(auto_now_add=True)), |  | ||||||
|                 ("last_updated", models.DateTimeField(auto_now=True)), |  | ||||||
|                 ("identifier", models.TextField()), |  | ||||||
|                 ( |  | ||||||
|                     "group", |  | ||||||
|                     models.ForeignKey( |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, to="authentik_core.group" |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ( |  | ||||||
|                     "source", |  | ||||||
|                     models.ForeignKey( |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, to="authentik_core.source" |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|             ], |  | ||||||
|             options={ |  | ||||||
|                 "unique_together": {("group", "source")}, |  | ||||||
|             }, |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -1,55 +0,0 @@ | |||||||
| # Generated by Django 5.0.9 on 2024-10-02 11:35 |  | ||||||
|  |  | ||||||
| import django.db.models.deletion |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
| from django.apps.registry import Apps |  | ||||||
| from django.db import migrations, models |  | ||||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def migrate_invalidation_flow_default(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): |  | ||||||
|     from authentik.flows.models import FlowDesignation, FlowAuthenticationRequirement |  | ||||||
|  |  | ||||||
|     db_alias = schema_editor.connection.alias |  | ||||||
|  |  | ||||||
|     Flow = apps.get_model("authentik_flows", "Flow") |  | ||||||
|     Provider = apps.get_model("authentik_core", "Provider") |  | ||||||
|  |  | ||||||
|     # So this flow is managed via a blueprint, but we're in a migration so we don't want to rely on that |  | ||||||
|     # since the blueprint is just an empty flow we can just create it here |  | ||||||
|     # and let it be managed by the blueprint later |  | ||||||
|     flow, _ = Flow.objects.using(db_alias).update_or_create( |  | ||||||
|         slug="default-provider-invalidation-flow", |  | ||||||
|         defaults={ |  | ||||||
|             "name": "Logged out of application", |  | ||||||
|             "title": "You've logged out of %(app)s.", |  | ||||||
|             "authentication": FlowAuthenticationRequirement.NONE, |  | ||||||
|             "designation": FlowDesignation.INVALIDATION, |  | ||||||
|         }, |  | ||||||
|     ) |  | ||||||
|     Provider.objects.using(db_alias).filter(invalidation_flow=None).update(invalidation_flow=flow) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_core", "0039_source_group_matching_mode_alter_group_name_and_more"), |  | ||||||
|         ("authentik_flows", "0027_auto_20231028_1424"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="provider", |  | ||||||
|             name="invalidation_flow", |  | ||||||
|             field=models.ForeignKey( |  | ||||||
|                 default=None, |  | ||||||
|                 help_text="Flow used ending the session from a provider.", |  | ||||||
|                 null=True, |  | ||||||
|                 on_delete=django.db.models.deletion.SET_DEFAULT, |  | ||||||
|                 related_name="provider_invalidation", |  | ||||||
|                 to="authentik_flows.flow", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.RunPython(migrate_invalidation_flow_default), |  | ||||||
|     ] |  | ||||||
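Aside: the deleted migration above pairs a schema change with a `RunPython` data step. For context, a minimal sketch of the same pattern with placeholder app and model names: the historical model is fetched via `apps.get_model`, and queries are pinned to `schema_editor.connection.alias` so multi-database setups behave.

```python
# Minimal RunPython sketch with placeholder names; not an authentik migration.
from django.db import migrations


def forwards(apps, schema_editor):
    db_alias = schema_editor.connection.alias
    Flow = apps.get_model("example_app", "Flow")  # historical model state, not the live import
    Flow.objects.using(db_alias).update_or_create(
        slug="example-flow",
        defaults={"name": "Example flow"},
    )


class Migration(migrations.Migration):
    dependencies = [("example_app", "0001_initial")]

    operations = [
        migrations.RunPython(forwards, migrations.RunPython.noop),
    ]
```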
| @ -1,6 +1,6 @@ | |||||||
| """authentik core models""" | """authentik core models""" | ||||||
|  |  | ||||||
| from datetime import datetime | from datetime import datetime, timedelta | ||||||
| from hashlib import sha256 | from hashlib import sha256 | ||||||
| from typing import Any, Optional, Self | from typing import Any, Optional, Self | ||||||
| from uuid import uuid4 | from uuid import uuid4 | ||||||
| @ -11,12 +11,10 @@ from django.contrib.auth.models import AbstractUser | |||||||
| from django.contrib.auth.models import UserManager as DjangoUserManager | from django.contrib.auth.models import UserManager as DjangoUserManager | ||||||
| from django.db import models | from django.db import models | ||||||
| from django.db.models import Q, QuerySet, options | from django.db.models import Q, QuerySet, options | ||||||
| from django.db.models.constants import LOOKUP_SEP |  | ||||||
| from django.http import HttpRequest | from django.http import HttpRequest | ||||||
| from django.utils.functional import SimpleLazyObject, cached_property | from django.utils.functional import SimpleLazyObject, cached_property | ||||||
| from django.utils.timezone import now | from django.utils.timezone import now | ||||||
| from django.utils.translation import gettext_lazy as _ | from django.utils.translation import gettext_lazy as _ | ||||||
| from django_cte import CTEQuerySet, With |  | ||||||
| from guardian.conf import settings | from guardian.conf import settings | ||||||
| from guardian.mixins import GuardianUserMixin | from guardian.mixins import GuardianUserMixin | ||||||
| from model_utils.managers import InheritanceManager | from model_utils.managers import InheritanceManager | ||||||
| @ -24,12 +22,10 @@ from rest_framework.serializers import Serializer | |||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| from authentik.blueprints.models import ManagedModel | from authentik.blueprints.models import ManagedModel | ||||||
| from authentik.core.expression.exceptions import PropertyMappingExpressionException | from authentik.core.exceptions import PropertyMappingExpressionException | ||||||
| from authentik.core.types import UILoginButton, UserSettingSerializer | from authentik.core.types import UILoginButton, UserSettingSerializer | ||||||
| from authentik.lib.avatars import get_avatar | from authentik.lib.avatars import get_avatar | ||||||
| from authentik.lib.expression.exceptions import ControlFlowException |  | ||||||
| from authentik.lib.generators import generate_id | from authentik.lib.generators import generate_id | ||||||
| from authentik.lib.merge import MERGE_LIST_UNIQUE |  | ||||||
| from authentik.lib.models import ( | from authentik.lib.models import ( | ||||||
|     CreatedUpdatedModel, |     CreatedUpdatedModel, | ||||||
|     DomainlessFormattedURLValidator, |     DomainlessFormattedURLValidator, | ||||||
| @ -58,10 +54,11 @@ options.DEFAULT_NAMES = options.DEFAULT_NAMES + ( | |||||||
|     # used_by API that allows models to specify if they shadow an object |     # used_by API that allows models to specify if they shadow an object | ||||||
|     # for example the proxy provider which is built on top of an oauth provider |     # for example the proxy provider which is built on top of an oauth provider | ||||||
|     "authentik_used_by_shadows", |     "authentik_used_by_shadows", | ||||||
|  |     # List fields for which changes are not logged (due to them having dedicated objects) | ||||||
|  |     # for example user's password and last_login | ||||||
|  |     "authentik_signals_ignored_fields", | ||||||
| ) | ) | ||||||
|  |  | ||||||
| GROUP_RECURSION_LIMIT = 20 |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def default_token_duration() -> datetime: | def default_token_duration() -> datetime: | ||||||
|     """Default duration a Token is valid""" |     """Default duration a Token is valid""" | ||||||
| @ -74,6 +71,11 @@ def default_token_duration() -> datetime: | |||||||
|     return now() + timedelta_from_string(token_duration) |     return now() + timedelta_from_string(token_duration) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def token_expires_from_timedelta(dt: timedelta) -> datetime: | ||||||
|  |     """Return a `datetime.datetime` object with the duration of the Token""" | ||||||
|  |     return now() + dt | ||||||
|  |  | ||||||
|  |  | ||||||
| def default_token_key() -> str: | def default_token_key() -> str: | ||||||
|     """Default token key""" |     """Default token key""" | ||||||
|     current_tenant = get_current_tenant() |     current_tenant = get_current_tenant() | ||||||
| @ -102,78 +104,12 @@ class UserTypes(models.TextChoices): | |||||||
|     INTERNAL_SERVICE_ACCOUNT = "internal_service_account" |     INTERNAL_SERVICE_ACCOUNT = "internal_service_account" | ||||||
|  |  | ||||||
|  |  | ||||||
| class AttributesMixin(models.Model): | class Group(SerializerModel): | ||||||
|     """Adds an attributes property to a model""" |  | ||||||
|  |  | ||||||
|     attributes = models.JSONField(default=dict, blank=True) |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         abstract = True |  | ||||||
|  |  | ||||||
|     def update_attributes(self, properties: dict[str, Any]): |  | ||||||
|         """Update fields and attributes, but correctly by merging dicts""" |  | ||||||
|         for key, value in properties.items(): |  | ||||||
|             if key == "attributes": |  | ||||||
|                 continue |  | ||||||
|             setattr(self, key, value) |  | ||||||
|         final_attributes = {} |  | ||||||
|         MERGE_LIST_UNIQUE.merge(final_attributes, self.attributes) |  | ||||||
|         MERGE_LIST_UNIQUE.merge(final_attributes, properties.get("attributes", {})) |  | ||||||
|         self.attributes = final_attributes |  | ||||||
|         self.save() |  | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def update_or_create_attributes( |  | ||||||
|         cls, query: dict[str, Any], properties: dict[str, Any] |  | ||||||
|     ) -> tuple[models.Model, bool]: |  | ||||||
|         """Same as django's update_or_create but correctly updates attributes by merging dicts""" |  | ||||||
|         instance = cls.objects.filter(**query).first() |  | ||||||
|         if not instance: |  | ||||||
|             return cls.objects.create(**properties), True |  | ||||||
|         instance.update_attributes(properties) |  | ||||||
|         return instance, False |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class GroupQuerySet(CTEQuerySet): |  | ||||||
|     def with_children_recursive(self): |  | ||||||
|         """Recursively get all groups that have the current queryset as parents |  | ||||||
|         or are indirectly related.""" |  | ||||||
|  |  | ||||||
|         def make_cte(cte): |  | ||||||
|             """Build the query that ends up in WITH RECURSIVE""" |  | ||||||
|             # Start from self, aka the current query |  | ||||||
|             # Add a depth attribute to limit the recursion |  | ||||||
|             return self.annotate( |  | ||||||
|                 relative_depth=models.Value(0, output_field=models.IntegerField()) |  | ||||||
|             ).union( |  | ||||||
|                 # Here is the recursive part of the query. cte refers to the previous iteration |  | ||||||
|                 # Only select groups for which the parent is part of the previous iteration |  | ||||||
|                 # and increase the depth |  | ||||||
|                 # Finally, limit the depth |  | ||||||
|                 cte.join(Group, group_uuid=cte.col.parent_id) |  | ||||||
|                 .annotate( |  | ||||||
|                     relative_depth=models.ExpressionWrapper( |  | ||||||
|                         cte.col.relative_depth |  | ||||||
|                         + models.Value(1, output_field=models.IntegerField()), |  | ||||||
|                         output_field=models.IntegerField(), |  | ||||||
|                     ) |  | ||||||
|                 ) |  | ||||||
|                 .filter(relative_depth__lt=GROUP_RECURSION_LIMIT), |  | ||||||
|                 all=True, |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         # Build the recursive query, see above |  | ||||||
|         cte = With.recursive(make_cte) |  | ||||||
|         # Return the result, as a usable queryset for Group. |  | ||||||
|         return cte.join(Group, group_uuid=cte.col.group_uuid).with_cte(cte) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Group(SerializerModel, AttributesMixin): |  | ||||||
|     """Group model which supports a basic hierarchy and has attributes""" |     """Group model which supports a basic hierarchy and has attributes""" | ||||||
|  |  | ||||||
|     group_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4) |     group_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4) | ||||||
|  |  | ||||||
|     name = models.TextField(_("name")) |     name = models.CharField(_("name"), max_length=80) | ||||||
|     is_superuser = models.BooleanField( |     is_superuser = models.BooleanField( | ||||||
|         default=False, help_text=_("Users added to this group will be superusers.") |         default=False, help_text=_("Users added to this group will be superusers.") | ||||||
|     ) |     ) | ||||||
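Aside: `AttributesMixin.update_or_create_attributes` (left-hand side of the hunk above) updates plain fields normally but deep-merges the `attributes` JSON so that nested keys from the existing object and from the incoming properties are both kept. A rough, dependency-free sketch of that merge idea; authentik itself uses its `MERGE_LIST_UNIQUE` helper, which is not reproduced here.

```python
# Simplified illustration of merging an existing attributes dict with incoming
# values instead of overwriting it; not authentik's MERGE_LIST_UNIQUE implementation.
from typing import Any


def merge_attributes(existing: dict[str, Any], incoming: dict[str, Any]) -> dict[str, Any]:
    merged = dict(existing)
    for key, value in incoming.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = merge_attributes(merged[key], value)  # recurse into nested dicts
        else:
            merged[key] = value
    return merged


print(merge_attributes({"a": {"x": 1}}, {"a": {"y": 2}, "b": 3}))
# {'a': {'x': 1, 'y': 2}, 'b': 3}
```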
| @ -188,26 +124,7 @@ class Group(SerializerModel, AttributesMixin): | |||||||
|         on_delete=models.SET_NULL, |         on_delete=models.SET_NULL, | ||||||
|         related_name="children", |         related_name="children", | ||||||
|     ) |     ) | ||||||
|  |     attributes = models.JSONField(default=dict, blank=True) | ||||||
|     objects = GroupQuerySet.as_manager() |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         unique_together = ( |  | ||||||
|             ( |  | ||||||
|                 "name", |  | ||||||
|                 "parent", |  | ||||||
|             ), |  | ||||||
|         ) |  | ||||||
|         indexes = [models.Index(fields=["name"])] |  | ||||||
|         verbose_name = _("Group") |  | ||||||
|         verbose_name_plural = _("Groups") |  | ||||||
|         permissions = [ |  | ||||||
|             ("add_user_to_group", _("Add user to group")), |  | ||||||
|             ("remove_user_from_group", _("Remove user from group")), |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|     def __str__(self): |  | ||||||
|         return f"Group {self.name}" |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def serializer(self) -> Serializer: |     def serializer(self) -> Serializer: | ||||||
| @ -227,11 +144,54 @@ class Group(SerializerModel, AttributesMixin): | |||||||
|         return user.all_groups().filter(group_uuid=self.group_uuid).exists() |         return user.all_groups().filter(group_uuid=self.group_uuid).exists() | ||||||
|  |  | ||||||
|     def children_recursive(self: Self | QuerySet["Group"]) -> QuerySet["Group"]: |     def children_recursive(self: Self | QuerySet["Group"]) -> QuerySet["Group"]: | ||||||
|         """Compatibility layer for Group.objects.with_children_recursive()""" |         """Recursively get all groups that have this as parent or are indirectly related""" | ||||||
|         qs = self |         direct_groups = [] | ||||||
|         if not isinstance(self, QuerySet): |         if isinstance(self, QuerySet): | ||||||
|             qs = Group.objects.filter(group_uuid=self.group_uuid) |             direct_groups = list(x for x in self.all().values_list("pk", flat=True).iterator()) | ||||||
|         return qs.with_children_recursive() |         else: | ||||||
|  |             direct_groups = [self.pk] | ||||||
|  |         if len(direct_groups) < 1: | ||||||
|  |             return Group.objects.none() | ||||||
|  |         query = """ | ||||||
|  |         WITH RECURSIVE parents AS ( | ||||||
|  |             SELECT authentik_core_group.*, 0 AS relative_depth | ||||||
|  |             FROM authentik_core_group | ||||||
|  |             WHERE authentik_core_group.group_uuid = ANY(%s) | ||||||
|  |  | ||||||
|  |             UNION ALL | ||||||
|  |  | ||||||
|  |             SELECT authentik_core_group.*, parents.relative_depth + 1 | ||||||
|  |             FROM authentik_core_group, parents | ||||||
|  |             WHERE ( | ||||||
|  |                 authentik_core_group.group_uuid = parents.parent_id and | ||||||
|  |                 parents.relative_depth < 20 | ||||||
|  |             ) | ||||||
|  |         ) | ||||||
|  |         SELECT group_uuid | ||||||
|  |         FROM parents | ||||||
|  |         GROUP BY group_uuid, name | ||||||
|  |         ORDER BY name; | ||||||
|  |         """ | ||||||
|  |         group_pks = [group.pk for group in Group.objects.raw(query, [direct_groups]).iterator()] | ||||||
|  |         return Group.objects.filter(pk__in=group_pks) | ||||||
|  |  | ||||||
|  |     def __str__(self): | ||||||
|  |         return f"Group {self.name}" | ||||||
|  |  | ||||||
|  |     class Meta: | ||||||
|  |         unique_together = ( | ||||||
|  |             ( | ||||||
|  |                 "name", | ||||||
|  |                 "parent", | ||||||
|  |             ), | ||||||
|  |         ) | ||||||
|  |         indexes = [models.Index(fields=["name"])] | ||||||
|  |         verbose_name = _("Group") | ||||||
|  |         verbose_name_plural = _("Groups") | ||||||
|  |         permissions = [ | ||||||
|  |             ("add_user_to_group", _("Add user to group")), | ||||||
|  |             ("remove_user_from_group", _("Remove user from group")), | ||||||
|  |         ] | ||||||
|  |  | ||||||
|  |  | ||||||
| class UserQuerySet(models.QuerySet): | class UserQuerySet(models.QuerySet): | ||||||
| @ -258,7 +218,7 @@ class UserManager(DjangoUserManager): | |||||||
|         return self.get_queryset().exclude_anonymous() |         return self.get_queryset().exclude_anonymous() | ||||||
|  |  | ||||||
|  |  | ||||||
| class User(SerializerModel, GuardianUserMixin, AttributesMixin, AbstractUser): | class User(SerializerModel, GuardianUserMixin, AbstractUser): | ||||||
|     """authentik User model, based on django's contrib auth user model.""" |     """authentik User model, based on django's contrib auth user model.""" | ||||||
|  |  | ||||||
|     uuid = models.UUIDField(default=uuid4, editable=False, unique=True) |     uuid = models.UUIDField(default=uuid4, editable=False, unique=True) | ||||||
| @ -270,38 +230,20 @@ class User(SerializerModel, GuardianUserMixin, AttributesMixin, AbstractUser): | |||||||
|     ak_groups = models.ManyToManyField("Group", related_name="users") |     ak_groups = models.ManyToManyField("Group", related_name="users") | ||||||
|     password_change_date = models.DateTimeField(auto_now_add=True) |     password_change_date = models.DateTimeField(auto_now_add=True) | ||||||
|  |  | ||||||
|  |     attributes = models.JSONField(default=dict, blank=True) | ||||||
|  |  | ||||||
|     objects = UserManager() |     objects = UserManager() | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         verbose_name = _("User") |  | ||||||
|         verbose_name_plural = _("Users") |  | ||||||
|         permissions = [ |  | ||||||
|             ("reset_user_password", _("Reset Password")), |  | ||||||
|             ("impersonate", _("Can impersonate other users")), |  | ||||||
|             ("assign_user_permissions", _("Can assign permissions to users")), |  | ||||||
|             ("unassign_user_permissions", _("Can unassign permissions from users")), |  | ||||||
|             ("preview_user", _("Can preview user data sent to providers")), |  | ||||||
|             ("view_user_applications", _("View applications the user has access to")), |  | ||||||
|         ] |  | ||||||
|         indexes = [ |  | ||||||
|             models.Index(fields=["last_login"]), |  | ||||||
|             models.Index(fields=["password_change_date"]), |  | ||||||
|             models.Index(fields=["uuid"]), |  | ||||||
|             models.Index(fields=["path"]), |  | ||||||
|             models.Index(fields=["type"]), |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|     def __str__(self): |  | ||||||
|         return self.username |  | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     def default_path() -> str: |     def default_path() -> str: | ||||||
|         """Get the default user path""" |         """Get the default user path""" | ||||||
|         return User._meta.get_field("path").default |         return User._meta.get_field("path").default | ||||||
|  |  | ||||||
|     def all_groups(self) -> QuerySet[Group]: |     def all_groups(self) -> QuerySet[Group]: | ||||||
|         """Recursively get all groups this user is a member of.""" |         """Recursively get all groups this user is a member of. | ||||||
|         return self.ak_groups.all().with_children_recursive() |         At least one query is done to get the direct groups of the user; with nested | ||||||
|  |         groups, at most 3 queries are done.""" | ||||||
|  |         return Group.children_recursive(self.ak_groups.all()) | ||||||
|  |  | ||||||
|     def group_attributes(self, request: HttpRequest | None = None) -> dict[str, Any]: |     def group_attributes(self, request: HttpRequest | None = None) -> dict[str, Any]: | ||||||
|         """Get a dictionary containing the attributes from all groups the user belongs to, |         """Get a dictionary containing the attributes from all groups the user belongs to, | ||||||
| @ -330,13 +272,11 @@ class User(SerializerModel, GuardianUserMixin, AttributesMixin, AbstractUser): | |||||||
|         """superuser == staff user""" |         """superuser == staff user""" | ||||||
|         return self.is_superuser  # type: ignore |         return self.is_superuser  # type: ignore | ||||||
|  |  | ||||||
|     def set_password(self, raw_password, signal=True, sender=None): |     def set_password(self, raw_password, signal=True): | ||||||
|         if self.pk and signal: |         if self.pk and signal: | ||||||
|             from authentik.core.signals import password_changed |             from authentik.core.signals import password_changed | ||||||
|  |  | ||||||
|             if not sender: |             password_changed.send(sender=self, user=self, password=raw_password) | ||||||
|                 sender = self |  | ||||||
|             password_changed.send(sender=sender, user=self, password=raw_password) |  | ||||||
|         self.password_change_date = now() |         self.password_change_date = now() | ||||||
|         return super().set_password(raw_password) |         return super().set_password(raw_password) | ||||||
|  |  | ||||||
| @ -377,6 +317,33 @@ class User(SerializerModel, GuardianUserMixin, AttributesMixin, AbstractUser): | |||||||
|         """Get avatar, depending on authentik.avatar setting""" |         """Get avatar, depending on authentik.avatar setting""" | ||||||
|         return get_avatar(self) |         return get_avatar(self) | ||||||
|  |  | ||||||
|  |     class Meta: | ||||||
|  |         verbose_name = _("User") | ||||||
|  |         verbose_name_plural = _("Users") | ||||||
|  |         permissions = [ | ||||||
|  |             ("reset_user_password", _("Reset Password")), | ||||||
|  |             ("impersonate", _("Can impersonate other users")), | ||||||
|  |             ("assign_user_permissions", _("Can assign permissions to users")), | ||||||
|  |             ("unassign_user_permissions", _("Can unassign permissions from users")), | ||||||
|  |             ("preview_user", _("Can preview user data sent to providers")), | ||||||
|  |             ("view_user_applications", _("View applications the user has access to")), | ||||||
|  |         ] | ||||||
|  |         indexes = [ | ||||||
|  |             models.Index(fields=["last_login"]), | ||||||
|  |             models.Index(fields=["password_change_date"]), | ||||||
|  |             models.Index(fields=["uuid"]), | ||||||
|  |             models.Index(fields=["path"]), | ||||||
|  |             models.Index(fields=["type"]), | ||||||
|  |         ] | ||||||
|  |         authentik_signals_ignored_fields = [ | ||||||
|  |             # Logged by the events `password_set` | ||||||
|  |             # the `password_set` action/signal doesn't currently convey which user | ||||||
|  |             # initiated the password change, so for now we'll log two actions | ||||||
|  |             # ("password", "password_change_date"), | ||||||
|  |             # Logged by `login` | ||||||
|  |             ("last_login",), | ||||||
|  |         ] | ||||||
|  |  | ||||||
|  |  | ||||||
| class Provider(SerializerModel): | class Provider(SerializerModel): | ||||||
|     """Application-independent Provider instance. For example SAML2 Remote, OAuth2 Application""" |     """Application-independent Provider instance. For example SAML2 Remote, OAuth2 Application""" | ||||||
| @ -393,23 +360,14 @@ class Provider(SerializerModel): | |||||||
|         ), |         ), | ||||||
|         related_name="provider_authentication", |         related_name="provider_authentication", | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|     authorization_flow = models.ForeignKey( |     authorization_flow = models.ForeignKey( | ||||||
|         "authentik_flows.Flow", |         "authentik_flows.Flow", | ||||||
|         # Set to cascade even though null is allowed, since most providers |  | ||||||
|         # still require an authorization flow set |  | ||||||
|         on_delete=models.CASCADE, |         on_delete=models.CASCADE, | ||||||
|         null=True, |         null=True, | ||||||
|         help_text=_("Flow used when authorizing this provider."), |         help_text=_("Flow used when authorizing this provider."), | ||||||
|         related_name="provider_authorization", |         related_name="provider_authorization", | ||||||
|     ) |     ) | ||||||
|     invalidation_flow = models.ForeignKey( |  | ||||||
|         "authentik_flows.Flow", |  | ||||||
|         on_delete=models.SET_DEFAULT, |  | ||||||
|         default=None, |  | ||||||
|         null=True, |  | ||||||
|         help_text=_("Flow used ending the session from a provider."), |  | ||||||
|         related_name="provider_invalidation", |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     property_mappings = models.ManyToManyField("PropertyMapping", default=None, blank=True) |     property_mappings = models.ManyToManyField("PropertyMapping", default=None, blank=True) | ||||||
|  |  | ||||||
| @ -435,10 +393,6 @@ class Provider(SerializerModel): | |||||||
|         Can return None for providers that are not URL-based""" |         Can return None for providers that are not URL-based""" | ||||||
|         return None |         return None | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def icon_url(self) -> str | None: |  | ||||||
|         return None |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def component(self) -> str: |     def component(self) -> str: | ||||||
|         """Return component used to edit this object""" |         """Return component used to edit this object""" | ||||||
| @ -473,14 +427,6 @@ class BackchannelProvider(Provider): | |||||||
|         abstract = True |         abstract = True | ||||||
|  |  | ||||||
|  |  | ||||||
| class ApplicationQuerySet(QuerySet): |  | ||||||
|     def with_provider(self) -> "QuerySet[Application]": |  | ||||||
|         qs = self.select_related("provider") |  | ||||||
|         for subclass in Provider.objects.get_queryset()._get_subclasses_recurse(Provider): |  | ||||||
|             qs = qs.select_related(f"provider__{subclass}") |  | ||||||
|         return qs |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Application(SerializerModel, PolicyBindingModel): | class Application(SerializerModel, PolicyBindingModel): | ||||||
|     """Every Application which uses authentik for authentication/identification/authorization |     """Every Application which uses authentik for authentication/identification/authorization | ||||||
|     needs an Application record. Other authentication types can subclass this Model to |     needs an Application record. Other authentication types can subclass this Model to | ||||||
| @ -512,8 +458,6 @@ class Application(SerializerModel, PolicyBindingModel): | |||||||
|     meta_description = models.TextField(default="", blank=True) |     meta_description = models.TextField(default="", blank=True) | ||||||
|     meta_publisher = models.TextField(default="", blank=True) |     meta_publisher = models.TextField(default="", blank=True) | ||||||
|  |  | ||||||
|     objects = ApplicationQuerySet.as_manager() |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def serializer(self) -> Serializer: |     def serializer(self) -> Serializer: | ||||||
|         from authentik.core.api.applications import ApplicationSerializer |         from authentik.core.api.applications import ApplicationSerializer | ||||||
| @ -550,28 +494,16 @@ class Application(SerializerModel, PolicyBindingModel): | |||||||
|         return url |         return url | ||||||
|  |  | ||||||
|     def get_provider(self) -> Provider | None: |     def get_provider(self) -> Provider | None: | ||||||
|         """Get casted provider instance. Needs Application queryset with_provider""" |         """Get casted provider instance""" | ||||||
|         if not self.provider: |         if not self.provider: | ||||||
|             return None |             return None | ||||||
|  |         # if the Application class has been cached, self.provider is set | ||||||
|         candidates = [] |         # but doing a direct query lookup will fail. | ||||||
|         base_class = Provider |         # In that case, just return None | ||||||
|         for subclass in base_class.objects.get_queryset()._get_subclasses_recurse(base_class): |  | ||||||
|             parent = self.provider |  | ||||||
|             for level in subclass.split(LOOKUP_SEP): |  | ||||||
|                 try: |         try: | ||||||
|                     parent = getattr(parent, level) |             return Provider.objects.get_subclass(pk=self.provider.pk) | ||||||
|                 except AttributeError: |         except Provider.DoesNotExist: | ||||||
|                     break |  | ||||||
|             if parent in candidates: |  | ||||||
|                 continue |  | ||||||
|             idx = subclass.count(LOOKUP_SEP) |  | ||||||
|             if type(parent) is not base_class: |  | ||||||
|                 idx += 1 |  | ||||||
|             candidates.insert(idx, parent) |  | ||||||
|         if not candidates: |  | ||||||
|             return None |             return None | ||||||
|         return candidates[-1] |  | ||||||
|  |  | ||||||
|     def __str__(self): |     def __str__(self): | ||||||
|         return str(self.name) |         return str(self.name) | ||||||
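Aside: the right-hand `get_provider` relies on django-model-utils' `InheritanceManager` to return the concrete subclass instance directly, while the left-hand variant walks the reverse one-to-one descriptors for every known `Provider` subclass. As a reminder of the manager-based pattern, a sketch with made-up models, assuming django-model-utils is installed and a Django project is configured:

```python
# Sketch with placeholder models; assumes django-model-utils inside a configured Django project.
from django.db import models
from model_utils.managers import InheritanceManager


class Vehicle(models.Model):
    objects = InheritanceManager()

    class Meta:
        app_label = "example"


class Car(Vehicle):
    class Meta:
        app_label = "example"


def concrete(pk: int) -> Vehicle:
    # get_subclass() fetches the row and returns it cast to its concrete subclass,
    # e.g. a Car instance rather than a plain Vehicle.
    return Vehicle.objects.get_subclass(pk=pk)
```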
| @ -601,19 +533,6 @@ class SourceUserMatchingModes(models.TextChoices): | |||||||
|     ) |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
| class SourceGroupMatchingModes(models.TextChoices): |  | ||||||
|     """Different modes a source can handle new/returning groups""" |  | ||||||
|  |  | ||||||
|     IDENTIFIER = "identifier", _("Use the source-specific identifier") |  | ||||||
|     NAME_LINK = "name_link", _( |  | ||||||
|         "Link to a group with identical name. Can have security implications " |  | ||||||
|         "when a group name is used with another source." |  | ||||||
|     ) |  | ||||||
|     NAME_DENY = "name_deny", _( |  | ||||||
|         "Use the group name, but deny enrollment when the name already exists." |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Source(ManagedModel, SerializerModel, PolicyBindingModel): | class Source(ManagedModel, SerializerModel, PolicyBindingModel): | ||||||
|     """Base Authentication source, i.e. an OAuth Provider, SAML Remote or LDAP Server""" |     """Base Authentication source, i.e. an OAuth Provider, SAML Remote or LDAP Server""" | ||||||
|  |  | ||||||
| @ -623,12 +542,7 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | |||||||
|     user_path_template = models.TextField(default="goauthentik.io/sources/%(slug)s") |     user_path_template = models.TextField(default="goauthentik.io/sources/%(slug)s") | ||||||
|  |  | ||||||
|     enabled = models.BooleanField(default=True) |     enabled = models.BooleanField(default=True) | ||||||
|     user_property_mappings = models.ManyToManyField( |     property_mappings = models.ManyToManyField("PropertyMapping", default=None, blank=True) | ||||||
|         "PropertyMapping", default=None, blank=True, related_name="source_userpropertymappings_set" |  | ||||||
|     ) |  | ||||||
|     group_property_mappings = models.ManyToManyField( |  | ||||||
|         "PropertyMapping", default=None, blank=True, related_name="source_grouppropertymappings_set" |  | ||||||
|     ) |  | ||||||
|     icon = models.FileField( |     icon = models.FileField( | ||||||
|         upload_to="source-icons/", |         upload_to="source-icons/", | ||||||
|         default=None, |         default=None, | ||||||
| @ -663,14 +577,6 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | |||||||
|             "a new user enrolled." |             "a new user enrolled." | ||||||
|         ), |         ), | ||||||
|     ) |     ) | ||||||
|     group_matching_mode = models.TextField( |  | ||||||
|         choices=SourceGroupMatchingModes.choices, |  | ||||||
|         default=SourceGroupMatchingModes.IDENTIFIER, |  | ||||||
|         help_text=_( |  | ||||||
|             "How the source determines if an existing group should be used or " |  | ||||||
|             "a new group created." |  | ||||||
|         ), |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     objects = InheritanceManager() |     objects = InheritanceManager() | ||||||
|  |  | ||||||
| @ -700,11 +606,6 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | |||||||
|         """Return component used to edit this object""" |         """Return component used to edit this object""" | ||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def property_mapping_type(self) -> "type[PropertyMapping]": |  | ||||||
|         """Return property mapping type used by this object""" |  | ||||||
|         raise NotImplementedError |  | ||||||
|  |  | ||||||
|     def ui_login_button(self, request: HttpRequest) -> UILoginButton | None: |     def ui_login_button(self, request: HttpRequest) -> UILoginButton | None: | ||||||
|         """If source uses a http-based flow, return UI Information about the login |         """If source uses a http-based flow, return UI Information about the login | ||||||
|         button. If source doesn't use http-based flow, return None.""" |         button. If source doesn't use http-based flow, return None.""" | ||||||
| @ -715,14 +616,6 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | |||||||
|         user settings are available, or UserSettingSerializer.""" |         user settings are available, or UserSettingSerializer.""" | ||||||
|         return None |         return None | ||||||
|  |  | ||||||
|     def get_base_user_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]: |  | ||||||
|         """Get base properties for a user to build final properties upon.""" |  | ||||||
|         raise NotImplementedError |  | ||||||
|  |  | ||||||
|     def get_base_group_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]: |  | ||||||
|         """Get base properties for a group to build final properties upon.""" |  | ||||||
|         raise NotImplementedError |  | ||||||
|  |  | ||||||
|     def __str__(self): |     def __str__(self): | ||||||
|         return str(self.name) |         return str(self.name) | ||||||
|  |  | ||||||
| @ -738,11 +631,6 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | |||||||
|                     "name", |                     "name", | ||||||
|                 ] |                 ] | ||||||
|             ), |             ), | ||||||
|             models.Index( |  | ||||||
|                 fields=[ |  | ||||||
|                     "enabled", |  | ||||||
|                 ] |  | ||||||
|             ), |  | ||||||
|         ] |         ] | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -760,33 +648,12 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel): | |||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
|  |  | ||||||
|     def __str__(self) -> str: |     def __str__(self) -> str: | ||||||
|         return f"User-source connection (user={self.user_id}, source={self.source_id})" |         return f"User-source connection (user={self.user.username}, source={self.source.slug})" | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|         unique_together = (("user", "source"),) |         unique_together = (("user", "source"),) | ||||||
|  |  | ||||||
|  |  | ||||||
| class GroupSourceConnection(SerializerModel, CreatedUpdatedModel): |  | ||||||
|     """Connection between Group and Source.""" |  | ||||||
|  |  | ||||||
|     group = models.ForeignKey(Group, on_delete=models.CASCADE) |  | ||||||
|     source = models.ForeignKey(Source, on_delete=models.CASCADE) |  | ||||||
|     identifier = models.TextField() |  | ||||||
|  |  | ||||||
|     objects = InheritanceManager() |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def serializer(self) -> type[Serializer]: |  | ||||||
|         """Get serializer for this model""" |  | ||||||
|         raise NotImplementedError |  | ||||||
|  |  | ||||||
|     def __str__(self) -> str: |  | ||||||
|         return f"Group-source connection (group={self.group_id}, source={self.source_id})" |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         unique_together = (("group", "source"),) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ExpiringModel(models.Model): | class ExpiringModel(models.Model): | ||||||
|     """Base Model which can expire, and is automatically cleaned up.""" |     """Base Model which can expire, and is automatically cleaned up.""" | ||||||
|  |  | ||||||
| @ -916,10 +783,8 @@ class PropertyMapping(SerializerModel, ManagedModel): | |||||||
|         evaluator = PropertyMappingEvaluator(self, user, request, **kwargs) |         evaluator = PropertyMappingEvaluator(self, user, request, **kwargs) | ||||||
|         try: |         try: | ||||||
|             return evaluator.evaluate(self.expression) |             return evaluator.evaluate(self.expression) | ||||||
|         except ControlFlowException as exc: |  | ||||||
|             raise exc |  | ||||||
|         except Exception as exc: |         except Exception as exc: | ||||||
|             raise PropertyMappingExpressionException(exc, self) from exc |             raise PropertyMappingExpressionException(exc) from exc | ||||||
|  |  | ||||||
|     def __str__(self): |     def __str__(self): | ||||||
|         return f"Property Mapping {self.name}" |         return f"Property Mapping {self.name}" | ||||||
|  | |||||||
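Aside: both sides of the models diff resolve nested group membership with a recursive query, the left through a django-cte `GroupQuerySet` and the right through a raw `WITH RECURSIVE` statement, each capped at a depth of 20. Stripped of the ORM and SQL plumbing, the underlying idea is a bounded breadth-first walk over the parent/child relation; a pure-Python sketch:

```python
# Pure-Python sketch of the "walk children up to a fixed depth" idea that both the
# django-cte queryset and the raw WITH RECURSIVE query implement in SQL.
GROUP_RECURSION_LIMIT = 20  # mirrors the cap used in the diff


def with_children_recursive(start_ids: set[str], children_of: dict[str, set[str]]) -> set[str]:
    seen = set(start_ids)
    frontier = set(start_ids)
    for _ in range(GROUP_RECURSION_LIMIT):
        nxt: set[str] = set()
        for group_id in frontier:
            nxt |= children_of.get(group_id, set()) - seen
        if not nxt:
            break
        seen |= nxt
        frontier = nxt
    return seen


print(with_children_recursive({"root"}, {"root": {"a"}, "a": {"b"}}))  # {'root', 'a', 'b'}
```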
| @ -52,8 +52,6 @@ def user_logged_in_session(sender, request: HttpRequest, user: User, **_): | |||||||
| @receiver(user_logged_out) | @receiver(user_logged_out) | ||||||
| def user_logged_out_session(sender, request: HttpRequest, user: User, **_): | def user_logged_out_session(sender, request: HttpRequest, user: User, **_): | ||||||
|     """Delete AuthenticatedSession if it exists""" |     """Delete AuthenticatedSession if it exists""" | ||||||
|     if not request.session or not request.session.session_key: |  | ||||||
|         return |  | ||||||
|     AuthenticatedSession.objects.filter(session_key=request.session.session_key).delete() |     AuthenticatedSession.objects.filter(session_key=request.session.session_key).delete() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,28 +1,19 @@ | |||||||
| """Source decision helper""" | """Source decision helper""" | ||||||
|  |  | ||||||
|  | from enum import Enum | ||||||
| from typing import Any | from typing import Any | ||||||
|  |  | ||||||
| from django.contrib import messages | from django.contrib import messages | ||||||
| from django.db import IntegrityError, transaction | from django.db import IntegrityError | ||||||
|  | from django.db.models.query_utils import Q | ||||||
| from django.http import HttpRequest, HttpResponse | from django.http import HttpRequest, HttpResponse | ||||||
| from django.shortcuts import redirect | from django.shortcuts import redirect | ||||||
| from django.urls import reverse | from django.urls import reverse | ||||||
| from django.utils.translation import gettext as _ | from django.utils.translation import gettext as _ | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| from authentik.core.models import ( | from authentik.core.models import Source, SourceUserMatchingModes, User, UserSourceConnection | ||||||
|     Group, | from authentik.core.sources.stage import PLAN_CONTEXT_SOURCES_CONNECTION, PostUserEnrollmentStage | ||||||
|     GroupSourceConnection, |  | ||||||
|     Source, |  | ||||||
|     User, |  | ||||||
|     UserSourceConnection, |  | ||||||
| ) |  | ||||||
| from authentik.core.sources.mapper import SourceMapper |  | ||||||
| from authentik.core.sources.matcher import Action, SourceMatcher |  | ||||||
| from authentik.core.sources.stage import ( |  | ||||||
|     PLAN_CONTEXT_SOURCES_CONNECTION, |  | ||||||
|     PostSourceStage, |  | ||||||
| ) |  | ||||||
| from authentik.events.models import Event, EventAction | from authentik.events.models import Event, EventAction | ||||||
| from authentik.flows.exceptions import FlowNonApplicableException | from authentik.flows.exceptions import FlowNonApplicableException | ||||||
| from authentik.flows.models import Flow, FlowToken, Stage, in_memory_stage | from authentik.flows.models import Flow, FlowToken, Stage, in_memory_stage | ||||||
| @ -45,10 +36,17 @@ from authentik.stages.password.stage import PLAN_CONTEXT_AUTHENTICATION_BACKEND | |||||||
| from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT | from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT | ||||||
| from authentik.stages.user_write.stage import PLAN_CONTEXT_USER_PATH | from authentik.stages.user_write.stage import PLAN_CONTEXT_USER_PATH | ||||||
|  |  | ||||||
| LOGGER = get_logger() |  | ||||||
|  |  | ||||||
| SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token"  # nosec | SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token"  # nosec | ||||||
| PLAN_CONTEXT_SOURCE_GROUPS = "source_groups" |  | ||||||
|  |  | ||||||
|  | class Action(Enum): | ||||||
|  |     """Actions that can be decided based on the request | ||||||
|  |     and source settings""" | ||||||
|  |  | ||||||
|  |     LINK = "link" | ||||||
|  |     AUTH = "auth" | ||||||
|  |     ENROLL = "enroll" | ||||||
|  |     DENY = "deny" | ||||||
|  |  | ||||||
|  |  | ||||||
| class MessageStage(StageView): | class MessageStage(StageView): | ||||||
| @ -72,79 +70,99 @@ class SourceFlowManager: | |||||||
|     or deny the request.""" |     or deny the request.""" | ||||||
|  |  | ||||||
|     source: Source |     source: Source | ||||||
|     mapper: SourceMapper |  | ||||||
|     matcher: SourceMatcher |  | ||||||
|     request: HttpRequest |     request: HttpRequest | ||||||
|  |  | ||||||
|     identifier: str |     identifier: str | ||||||
|  |  | ||||||
|     user_connection_type: type[UserSourceConnection] = UserSourceConnection |     connection_type: type[UserSourceConnection] = UserSourceConnection | ||||||
|     group_connection_type: type[GroupSourceConnection] = GroupSourceConnection |  | ||||||
|  |  | ||||||
|     user_info: dict[str, Any] |     enroll_info: dict[str, Any] | ||||||
|     policy_context: dict[str, Any] |     policy_context: dict[str, Any] | ||||||
|     user_properties: dict[str, Any | dict[str, Any]] |  | ||||||
|     groups_properties: dict[str, dict[str, Any | dict[str, Any]]] |  | ||||||
|  |  | ||||||
|     def __init__( |     def __init__( | ||||||
|         self, |         self, | ||||||
|         source: Source, |         source: Source, | ||||||
|         request: HttpRequest, |         request: HttpRequest, | ||||||
|         identifier: str, |         identifier: str, | ||||||
|         user_info: dict[str, Any], |         enroll_info: dict[str, Any], | ||||||
|         policy_context: dict[str, Any], |  | ||||||
|     ) -> None: |     ) -> None: | ||||||
|         self.source = source |         self.source = source | ||||||
|         self.mapper = SourceMapper(self.source) |  | ||||||
|         self.matcher = SourceMatcher( |  | ||||||
|             self.source, self.user_connection_type, self.group_connection_type |  | ||||||
|         ) |  | ||||||
|         self.request = request |         self.request = request | ||||||
|         self.identifier = identifier |         self.identifier = identifier | ||||||
|         self.user_info = user_info |         self.enroll_info = enroll_info | ||||||
|         self._logger = get_logger().bind(source=source, identifier=identifier) |         self._logger = get_logger().bind(source=source, identifier=identifier) | ||||||
|         self.policy_context = policy_context |         self.policy_context = {} | ||||||
|  |  | ||||||
|         self.user_properties = self.mapper.build_object_properties( |  | ||||||
|             object_type=User, request=request, user=None, **self.user_info |  | ||||||
|         ) |  | ||||||
|         self.groups_properties = { |  | ||||||
|             group_id: self.mapper.build_object_properties( |  | ||||||
|                 object_type=Group, |  | ||||||
|                 request=request, |  | ||||||
|                 user=None, |  | ||||||
|                 group_id=group_id, |  | ||||||
|                 **self.user_info, |  | ||||||
|             ) |  | ||||||
|             for group_id in self.user_properties.setdefault("groups", []) |  | ||||||
|         } |  | ||||||
|         del self.user_properties["groups"] |  | ||||||
|  |  | ||||||
|     def get_action(self, **kwargs) -> tuple[Action, UserSourceConnection | None]:  # noqa: PLR0911 |     def get_action(self, **kwargs) -> tuple[Action, UserSourceConnection | None]:  # noqa: PLR0911 | ||||||
|         """decide which action should be taken""" |         """decide which action should be taken""" | ||||||
|  |         new_connection = self.connection_type(source=self.source, identifier=self.identifier) | ||||||
|         # When request is authenticated, always link |         # When request is authenticated, always link | ||||||
|         if self.request.user.is_authenticated: |         if self.request.user.is_authenticated: | ||||||
|             new_connection = self.user_connection_type( |  | ||||||
|                 source=self.source, identifier=self.identifier |  | ||||||
|             ) |  | ||||||
|             new_connection.user = self.request.user |             new_connection.user = self.request.user | ||||||
|             new_connection = self.update_user_connection(new_connection, **kwargs) |             new_connection = self.update_connection(new_connection, **kwargs) | ||||||
|             if existing := self.user_connection_type.objects.filter( |  | ||||||
|                 source=self.source, identifier=self.identifier |             new_connection.save() | ||||||
|             ).first(): |  | ||||||
|                 existing = self.update_user_connection(existing) |  | ||||||
|                 return Action.AUTH, existing |  | ||||||
|             return Action.LINK, new_connection |             return Action.LINK, new_connection | ||||||
|  |  | ||||||
|         action, connection = self.matcher.get_user_action(self.identifier, self.user_properties) |         existing_connections = self.connection_type.objects.filter( | ||||||
|         if connection: |             source=self.source, identifier=self.identifier | ||||||
|             connection = self.update_user_connection(connection, **kwargs) |         ) | ||||||
|         return action, connection |         if existing_connections.exists(): | ||||||
|  |             connection = existing_connections.first() | ||||||
|  |             return Action.AUTH, self.update_connection(connection, **kwargs) | ||||||
|  |         # No connection exists, but we match on identifier, so enroll | ||||||
|  |         if self.source.user_matching_mode == SourceUserMatchingModes.IDENTIFIER: | ||||||
|  |             # We don't save the connection here because it doesn't have a user assigned yet | ||||||
|  |             return Action.ENROLL, self.update_connection(new_connection, **kwargs) | ||||||
|  |  | ||||||
|     def update_user_connection( |         # Check for existing users with matching attributes | ||||||
|  |         query = Q() | ||||||
|  |         # Either query existing user based on email or username | ||||||
|  |         if self.source.user_matching_mode in [ | ||||||
|  |             SourceUserMatchingModes.EMAIL_LINK, | ||||||
|  |             SourceUserMatchingModes.EMAIL_DENY, | ||||||
|  |         ]: | ||||||
|  |             if not self.enroll_info.get("email", None): | ||||||
|  |                 self._logger.warning("Refusing to use none email", source=self.source) | ||||||
|  |                 return Action.DENY, None | ||||||
|  |             query = Q(email__exact=self.enroll_info.get("email", None)) | ||||||
|  |         if self.source.user_matching_mode in [ | ||||||
|  |             SourceUserMatchingModes.USERNAME_LINK, | ||||||
|  |             SourceUserMatchingModes.USERNAME_DENY, | ||||||
|  |         ]: | ||||||
|  |             if not self.enroll_info.get("username", None): | ||||||
|  |                 self._logger.warning("Refusing to use none username", source=self.source) | ||||||
|  |                 return Action.DENY, None | ||||||
|  |             query = Q(username__exact=self.enroll_info.get("username", None)) | ||||||
|  |         self._logger.debug("trying to link with existing user", query=query) | ||||||
|  |         matching_users = User.objects.filter(query) | ||||||
|  |         # No matching users, always enroll | ||||||
|  |         if not matching_users.exists(): | ||||||
|  |             self._logger.debug("no matching users found, enrolling") | ||||||
|  |             return Action.ENROLL, self.update_connection(new_connection, **kwargs) | ||||||
|  |  | ||||||
|  |         user = matching_users.first() | ||||||
|  |         if self.source.user_matching_mode in [ | ||||||
|  |             SourceUserMatchingModes.EMAIL_LINK, | ||||||
|  |             SourceUserMatchingModes.USERNAME_LINK, | ||||||
|  |         ]: | ||||||
|  |             new_connection.user = user | ||||||
|  |             new_connection = self.update_connection(new_connection, **kwargs) | ||||||
|  |             new_connection.save() | ||||||
|  |             return Action.LINK, new_connection | ||||||
|  |         if self.source.user_matching_mode in [ | ||||||
|  |             SourceUserMatchingModes.EMAIL_DENY, | ||||||
|  |             SourceUserMatchingModes.USERNAME_DENY, | ||||||
|  |         ]: | ||||||
|  |             self._logger.info("denying source because user exists", user=user) | ||||||
|  |             return Action.DENY, None | ||||||
|  |         # Should never get here as default enroll case is returned above. | ||||||
|  |         return Action.DENY, None  # pragma: no cover | ||||||
|  |  | ||||||
|  |     def update_connection( | ||||||
|         self, connection: UserSourceConnection, **kwargs |         self, connection: UserSourceConnection, **kwargs | ||||||
|     ) -> UserSourceConnection:  # pragma: no cover |     ) -> UserSourceConnection:  # pragma: no cover | ||||||
|         """Optionally make changes to the user connection after it is looked up/created.""" |         """Optionally make changes to the connection after it is looked up/created.""" | ||||||
|         return connection |         return connection | ||||||
|  |  | ||||||
|     def get_flow(self, **kwargs) -> HttpResponse: |     def get_flow(self, **kwargs) -> HttpResponse: | ||||||
| @ -191,40 +209,38 @@ class SourceFlowManager: | |||||||
|  |  | ||||||
|     def get_stages_to_append(self, flow: Flow) -> list[Stage]: |     def get_stages_to_append(self, flow: Flow) -> list[Stage]: | ||||||
|         """Hook to override stages which are appended to the flow""" |         """Hook to override stages which are appended to the flow""" | ||||||
|  |         if not self.source.enrollment_flow: | ||||||
|  |             return [] | ||||||
|  |         if flow.slug == self.source.enrollment_flow.slug: | ||||||
|             return [ |             return [ | ||||||
|             in_memory_stage(PostSourceStage), |                 in_memory_stage(PostUserEnrollmentStage), | ||||||
|             ] |             ] | ||||||
|  |         return [] | ||||||
|  |  | ||||||
|     def _prepare_flow( |     def _prepare_flow( | ||||||
|         self, |         self, | ||||||
|         flow: Flow | None, |         flow: Flow, | ||||||
|         connection: UserSourceConnection, |         connection: UserSourceConnection, | ||||||
|         stages: list[StageView] | None = None, |         stages: list[StageView] | None = None, | ||||||
|         **flow_context, |         **kwargs, | ||||||
|     ) -> HttpResponse: |     ) -> HttpResponse: | ||||||
|         """Prepare Authentication Plan, redirect user FlowExecutor""" |         """Prepare Authentication Plan, redirect user FlowExecutor""" | ||||||
|         # Ensure redirect is carried through when user was trying to |         kwargs.update( | ||||||
|         # authorize application |  | ||||||
|         final_redirect = self.request.session.get(SESSION_KEY_GET, {}).get( |  | ||||||
|             NEXT_ARG_NAME, "authentik_core:if-user" |  | ||||||
|         ) |  | ||||||
|         flow_context.update( |  | ||||||
|             { |             { | ||||||
|                 # Since we authenticate the user by their token, they have no backend set |                 # Since we authenticate the user by their token, they have no backend set | ||||||
|                 PLAN_CONTEXT_AUTHENTICATION_BACKEND: BACKEND_INBUILT, |                 PLAN_CONTEXT_AUTHENTICATION_BACKEND: BACKEND_INBUILT, | ||||||
|                 PLAN_CONTEXT_SSO: True, |                 PLAN_CONTEXT_SSO: True, | ||||||
|                 PLAN_CONTEXT_SOURCE: self.source, |                 PLAN_CONTEXT_SOURCE: self.source, | ||||||
|                 PLAN_CONTEXT_SOURCES_CONNECTION: connection, |                 PLAN_CONTEXT_SOURCES_CONNECTION: connection, | ||||||
|                 PLAN_CONTEXT_SOURCE_GROUPS: self.groups_properties, |  | ||||||
|             } |             } | ||||||
|         ) |         ) | ||||||
|         flow_context.update(self.policy_context) |         kwargs.update(self.policy_context) | ||||||
|         if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session: |         if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session: | ||||||
|             token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN) |             token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN) | ||||||
|             self._logger.info("Replacing source flow with overridden flow", flow=token.flow.slug) |             self._logger.info("Replacing source flow with overridden flow", flow=token.flow.slug) | ||||||
|             plan = token.plan |             plan = token.plan | ||||||
|             plan.context[PLAN_CONTEXT_IS_RESTORED] = token |             plan.context[PLAN_CONTEXT_IS_RESTORED] = token | ||||||
|             plan.context.update(flow_context) |             plan.context.update(kwargs) | ||||||
|             for stage in self.get_stages_to_append(flow): |             for stage in self.get_stages_to_append(flow): | ||||||
|                 plan.append_stage(stage) |                 plan.append_stage(stage) | ||||||
|             if stages: |             if stages: | ||||||
| @ -243,8 +259,8 @@ class SourceFlowManager: | |||||||
|         final_redirect = self.request.session.get(SESSION_KEY_GET, {}).get( |         final_redirect = self.request.session.get(SESSION_KEY_GET, {}).get( | ||||||
|             NEXT_ARG_NAME, "authentik_core:if-user" |             NEXT_ARG_NAME, "authentik_core:if-user" | ||||||
|         ) |         ) | ||||||
|         if PLAN_CONTEXT_REDIRECT not in flow_context: |         if PLAN_CONTEXT_REDIRECT not in kwargs: | ||||||
|             flow_context[PLAN_CONTEXT_REDIRECT] = final_redirect |             kwargs[PLAN_CONTEXT_REDIRECT] = final_redirect | ||||||
|  |  | ||||||
|         if not flow: |         if not flow: | ||||||
|             return bad_request_message( |             return bad_request_message( | ||||||
| @ -253,15 +269,9 @@ class SourceFlowManager: | |||||||
|             ) |             ) | ||||||
|         # We run the Flow planner here so we can pass the Pending user in the context |         # We run the Flow planner here so we can pass the Pending user in the context | ||||||
|         planner = FlowPlanner(flow) |         planner = FlowPlanner(flow) | ||||||
|         # We append some stages so the initial flow we get might be empty |         plan = planner.plan(self.request, kwargs) | ||||||
|         planner.allow_empty_flows = True |  | ||||||
|         planner.use_cache = False |  | ||||||
|         plan = planner.plan(self.request, flow_context) |  | ||||||
|         for stage in self.get_stages_to_append(flow): |         for stage in self.get_stages_to_append(flow): | ||||||
|             plan.append_stage(stage) |             plan.append_stage(stage) | ||||||
|         plan.append_stage( |  | ||||||
|             in_memory_stage(GroupUpdateStage, group_connection_type=self.group_connection_type) |  | ||||||
|         ) |  | ||||||
|         if stages: |         if stages: | ||||||
|             for stage in stages: |             for stage in stages: | ||||||
|                 plan.append_stage(stage) |                 plan.append_stage(stage) | ||||||
| @ -277,6 +287,7 @@ class SourceFlowManager: | |||||||
|         connection: UserSourceConnection, |         connection: UserSourceConnection, | ||||||
|     ) -> HttpResponse: |     ) -> HttpResponse: | ||||||
|         """Login user and redirect.""" |         """Login user and redirect.""" | ||||||
|  |         flow_kwargs = {PLAN_CONTEXT_PENDING_USER: connection.user} | ||||||
|         return self._prepare_flow( |         return self._prepare_flow( | ||||||
|             self.source.authentication_flow, |             self.source.authentication_flow, | ||||||
|             connection, |             connection, | ||||||
| @ -290,11 +301,7 @@ class SourceFlowManager: | |||||||
|                     ), |                     ), | ||||||
|                 ) |                 ) | ||||||
|             ], |             ], | ||||||
|             **{ |             **flow_kwargs, | ||||||
|                 PLAN_CONTEXT_PENDING_USER: connection.user, |  | ||||||
|                 PLAN_CONTEXT_PROMPT: delete_none_values(self.user_properties), |  | ||||||
|                 PLAN_CONTEXT_USER_PATH: self.source.get_user_path(), |  | ||||||
|             }, |  | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     def handle_existing_link( |     def handle_existing_link( | ||||||
| @ -306,9 +313,7 @@ class SourceFlowManager: | |||||||
|         # When request isn't authenticated we jump straight to auth |         # When request isn't authenticated we jump straight to auth | ||||||
|         if not self.request.user.is_authenticated: |         if not self.request.user.is_authenticated: | ||||||
|             return self.handle_auth(connection) |             return self.handle_auth(connection) | ||||||
|         if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session: |         # Connection has already been saved | ||||||
|             return self._prepare_flow(None, connection) |  | ||||||
|         connection.save() |  | ||||||
|         Event.new( |         Event.new( | ||||||
|             EventAction.SOURCE_LINKED, |             EventAction.SOURCE_LINKED, | ||||||
|             message="Linked Source", |             message="Linked Source", | ||||||
| @ -322,7 +327,7 @@ class SourceFlowManager: | |||||||
|             reverse( |             reverse( | ||||||
|                 "authentik_core:if-user", |                 "authentik_core:if-user", | ||||||
|             ) |             ) | ||||||
|             + "#/settings;page-sources" |             + f"#/settings;page-{self.source.slug}" | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     def handle_enroll( |     def handle_enroll( | ||||||
| @ -351,66 +356,7 @@ class SourceFlowManager: | |||||||
|                 ) |                 ) | ||||||
|             ], |             ], | ||||||
|             **{ |             **{ | ||||||
|                 PLAN_CONTEXT_PROMPT: delete_none_values(self.user_properties), |                 PLAN_CONTEXT_PROMPT: delete_none_values(self.enroll_info), | ||||||
|                 PLAN_CONTEXT_USER_PATH: self.source.get_user_path(), |                 PLAN_CONTEXT_USER_PATH: self.source.get_user_path(), | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|  |  | ||||||
| class GroupUpdateStage(StageView): |  | ||||||
|     """Dynamically injected stage which updates the user after enrollment/authentication.""" |  | ||||||
|  |  | ||||||
|     def handle_group( |  | ||||||
|         self, group_id: str, group_properties: dict[str, Any | dict[str, Any]] |  | ||||||
|     ) -> Group | None: |  | ||||||
|         action, connection = self.matcher.get_group_action(group_id, group_properties) |  | ||||||
|         if action == Action.ENROLL: |  | ||||||
|             group = Group.objects.create(**group_properties) |  | ||||||
|             connection.group = group |  | ||||||
|             connection.save() |  | ||||||
|             return group |  | ||||||
|         elif action in (Action.LINK, Action.AUTH): |  | ||||||
|             group = connection.group |  | ||||||
|             group.update_attributes(group_properties) |  | ||||||
|             connection.save() |  | ||||||
|             return group |  | ||||||
|  |  | ||||||
|         return None |  | ||||||
|  |  | ||||||
|     def handle_groups(self) -> bool: |  | ||||||
|         self.source: Source = self.executor.plan.context[PLAN_CONTEXT_SOURCE] |  | ||||||
|         self.user: User = self.executor.plan.context[PLAN_CONTEXT_PENDING_USER] |  | ||||||
|         self.group_connection_type: GroupSourceConnection = ( |  | ||||||
|             self.executor.current_stage.group_connection_type |  | ||||||
|         ) |  | ||||||
|         self.matcher = SourceMatcher(self.source, None, self.group_connection_type) |  | ||||||
|  |  | ||||||
|         raw_groups: dict[str, dict[str, Any | dict[str, Any]]] = self.executor.plan.context[ |  | ||||||
|             PLAN_CONTEXT_SOURCE_GROUPS |  | ||||||
|         ] |  | ||||||
|         groups: list[Group] = [] |  | ||||||
|  |  | ||||||
|         for group_id, group_properties in raw_groups.items(): |  | ||||||
|             group = self.handle_group(group_id, group_properties) |  | ||||||
|             if not group: |  | ||||||
|                 return False |  | ||||||
|             groups.append(group) |  | ||||||
|  |  | ||||||
|         with transaction.atomic(): |  | ||||||
|             self.user.ak_groups.remove( |  | ||||||
|                 *self.user.ak_groups.filter(groupsourceconnection__source=self.source) |  | ||||||
|             ) |  | ||||||
|             self.user.ak_groups.add(*groups) |  | ||||||
|  |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
|     def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse: |  | ||||||
|         """Stage used after the user has been enrolled to sync their groups from source data""" |  | ||||||
|         if self.handle_groups(): |  | ||||||
|             return self.executor.stage_ok() |  | ||||||
|         else: |  | ||||||
|             return self.executor.stage_invalid("Failed to update groups. Please try again later.") |  | ||||||
|  |  | ||||||
|     def post(self, request: HttpRequest) -> HttpResponse: |  | ||||||
|         """Wrapper for post requests""" |  | ||||||
|         return self.get(request) |  | ||||||
|  | |||||||
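The GroupUpdateStage in the left-hand column reconciles a user's group membership against what the source reported: inside a transaction it removes the groups previously linked through this source, then adds the freshly resolved set, leaving manually assigned groups untouched. A minimal standalone sketch of that reconciliation, using plain sets instead of Django querysets; reconcile_source_groups and the variable names are illustrative, not authentik APIs:

def reconcile_source_groups(
    user_groups: set[str],
    previously_from_source: set[str],
    resolved_from_source: set[str],
) -> set[str]:
    """Drop groups earlier synced from this source, then add the newly resolved set.

    Groups obtained elsewhere (manual assignment, other sources) are kept, mirroring
    the remove()/add() pair inside GroupUpdateStage.handle_groups.
    """
    kept = user_groups - previously_from_source
    return kept | resolved_from_source


if __name__ == "__main__":
    current = {"admins", "ldap-old", "manually-added"}
    from_source_before = {"ldap-old"}
    from_source_now = {"ldap-new", "admins"}
    print(sorted(reconcile_source_groups(current, from_source_before, from_source_now)))
    # ['admins', 'ldap-new', 'manually-added']

Doing the removal and addition in one atomic step means a failed sync never leaves the user with only part of the source-derived groups.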
| @ -1,103 +0,0 @@ | |||||||
| from typing import Any |  | ||||||
|  |  | ||||||
| from django.http import HttpRequest |  | ||||||
| from structlog.stdlib import get_logger |  | ||||||
|  |  | ||||||
| from authentik.core.expression.exceptions import PropertyMappingExpressionException |  | ||||||
| from authentik.core.models import Group, PropertyMapping, Source, User |  | ||||||
| from authentik.events.models import Event, EventAction |  | ||||||
| from authentik.lib.merge import MERGE_LIST_UNIQUE |  | ||||||
| from authentik.lib.sync.mapper import PropertyMappingManager |  | ||||||
| from authentik.policies.utils import delete_none_values |  | ||||||
|  |  | ||||||
| LOGGER = get_logger() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class SourceMapper: |  | ||||||
|     def __init__(self, source: Source): |  | ||||||
|         self.source = source |  | ||||||
|  |  | ||||||
|     def get_manager( |  | ||||||
|         self, object_type: type[User | Group], context_keys: list[str] |  | ||||||
|     ) -> PropertyMappingManager: |  | ||||||
|         """Get property mapping manager for this source.""" |  | ||||||
|  |  | ||||||
|         qs = PropertyMapping.objects.none() |  | ||||||
|         if object_type == User: |  | ||||||
|             qs = self.source.user_property_mappings.all().select_subclasses() |  | ||||||
|         elif object_type == Group: |  | ||||||
|             qs = self.source.group_property_mappings.all().select_subclasses() |  | ||||||
|         qs = qs.order_by("name") |  | ||||||
|         return PropertyMappingManager( |  | ||||||
|             qs, |  | ||||||
|             self.source.property_mapping_type, |  | ||||||
|             ["source", "properties"] + context_keys, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def get_base_properties( |  | ||||||
|         self, object_type: type[User | Group], **kwargs |  | ||||||
|     ) -> dict[str, Any | dict[str, Any]]: |  | ||||||
|         """Get base properties for a user or a group to build final properties upon.""" |  | ||||||
|         if object_type == User: |  | ||||||
|             properties = self.source.get_base_user_properties(**kwargs) |  | ||||||
|             properties.setdefault("path", self.source.get_user_path()) |  | ||||||
|             return properties |  | ||||||
|         if object_type == Group: |  | ||||||
|             return self.source.get_base_group_properties(**kwargs) |  | ||||||
|         return {} |  | ||||||
|  |  | ||||||
|     def build_object_properties( |  | ||||||
|         self, |  | ||||||
|         object_type: type[User | Group], |  | ||||||
|         manager: "PropertyMappingManager | None" = None, |  | ||||||
|         user: User | None = None, |  | ||||||
|         request: HttpRequest | None = None, |  | ||||||
|         **kwargs, |  | ||||||
|     ) -> dict[str, Any | dict[str, Any]]: |  | ||||||
|         """Build a user or group properties from the source configured property mappings.""" |  | ||||||
|  |  | ||||||
|         properties = self.get_base_properties(object_type, **kwargs) |  | ||||||
|         if "attributes" not in properties: |  | ||||||
|             properties["attributes"] = {} |  | ||||||
|  |  | ||||||
|         if not manager: |  | ||||||
|             manager = self.get_manager(object_type, list(kwargs.keys())) |  | ||||||
|         evaluations = manager.iter_eval( |  | ||||||
|             user=user, |  | ||||||
|             request=request, |  | ||||||
|             return_mapping=True, |  | ||||||
|             source=self.source, |  | ||||||
|             properties=properties, |  | ||||||
|             **kwargs, |  | ||||||
|         ) |  | ||||||
|         while True: |  | ||||||
|             try: |  | ||||||
|                 value, mapping = next(evaluations) |  | ||||||
|             except StopIteration: |  | ||||||
|                 break |  | ||||||
|             except PropertyMappingExpressionException as exc: |  | ||||||
|                 Event.new( |  | ||||||
|                     EventAction.CONFIGURATION_ERROR, |  | ||||||
|                     message=f"Failed to evaluate property mapping: '{exc.mapping.name}'", |  | ||||||
|                     source=self, |  | ||||||
|                     mapping=exc.mapping, |  | ||||||
|                 ).save() |  | ||||||
|                 LOGGER.warning( |  | ||||||
|                     "Mapping failed to evaluate", |  | ||||||
|                     exc=exc, |  | ||||||
|                     source=self, |  | ||||||
|                     mapping=exc.mapping, |  | ||||||
|                 ) |  | ||||||
|                 raise exc |  | ||||||
|  |  | ||||||
|             if not value or not isinstance(value, dict): |  | ||||||
|                 LOGGER.debug( |  | ||||||
|                     "Mapping evaluated to None or is not a dict. Skipping", |  | ||||||
|                     source=self, |  | ||||||
|                     mapping=mapping, |  | ||||||
|                 ) |  | ||||||
|                 continue |  | ||||||
|  |  | ||||||
|             MERGE_LIST_UNIQUE.merge(properties, value) |  | ||||||
|  |  | ||||||
|         return delete_none_values(properties) |  | ||||||
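The build_object_properties loop on the left evaluates every configured property mapping, skips results that are empty or not dictionaries, deep-merges the rest into the base properties, and finally strips keys mapped to None. A rough sketch of that flow with an in-memory merge; deep_merge_unique only approximates the MERGE_LIST_UNIQUE strategy, and both helper names are illustrative rather than authentik APIs:

from typing import Any


def deep_merge_unique(base: dict[str, Any], extra: dict[str, Any]) -> dict[str, Any]:
    """Recursively merge extra into base; lists are extended without duplicates."""
    for key, value in extra.items():
        if isinstance(value, dict) and isinstance(base.get(key), dict):
            deep_merge_unique(base[key], value)
        elif isinstance(value, list) and isinstance(base.get(key), list):
            base[key].extend(item for item in value if item not in base[key])
        else:
            base[key] = value
    return base


def build_properties(base: dict[str, Any], mapping_results: list[Any]) -> dict[str, Any]:
    """Mimic the evaluation loop: ignore non-dict results, merge the rest, drop None values."""
    properties = dict(base)
    properties.setdefault("attributes", {})
    for result in mapping_results:
        if not result or not isinstance(result, dict):
            continue  # mirrors the "evaluated to None or is not a dict" debug branch
        deep_merge_unique(properties, result)
    return {key: value for key, value in properties.items() if value is not None}


if __name__ == "__main__":
    base = {"username": "jdoe", "path": "users/sources/ldap"}
    results = [{"attributes": {"groups": ["a"]}}, None, {"attributes": {"groups": ["a", "b"]}, "name": None}]
    print(build_properties(base, results))
    # {'username': 'jdoe', 'path': 'users/sources/ldap', 'attributes': {'groups': ['a', 'b']}}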
| @ -1,152 +0,0 @@ | |||||||
| """Source user and group matching""" |  | ||||||
|  |  | ||||||
| from dataclasses import dataclass |  | ||||||
| from enum import Enum |  | ||||||
| from typing import Any |  | ||||||
|  |  | ||||||
| from django.db.models import Q |  | ||||||
| from structlog import get_logger |  | ||||||
|  |  | ||||||
| from authentik.core.models import ( |  | ||||||
|     Group, |  | ||||||
|     GroupSourceConnection, |  | ||||||
|     Source, |  | ||||||
|     SourceGroupMatchingModes, |  | ||||||
|     SourceUserMatchingModes, |  | ||||||
|     User, |  | ||||||
|     UserSourceConnection, |  | ||||||
| ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Action(Enum): |  | ||||||
|     """Actions that can be decided based on the request and source settings""" |  | ||||||
|  |  | ||||||
|     LINK = "link" |  | ||||||
|     AUTH = "auth" |  | ||||||
|     ENROLL = "enroll" |  | ||||||
|     DENY = "deny" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass |  | ||||||
| class MatchableProperty: |  | ||||||
|     property: str |  | ||||||
|     link_mode: SourceUserMatchingModes | SourceGroupMatchingModes |  | ||||||
|     deny_mode: SourceUserMatchingModes | SourceGroupMatchingModes |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class SourceMatcher: |  | ||||||
|     def __init__( |  | ||||||
|         self, |  | ||||||
|         source: Source, |  | ||||||
|         user_connection_type: type[UserSourceConnection], |  | ||||||
|         group_connection_type: type[GroupSourceConnection], |  | ||||||
|     ): |  | ||||||
|         self.source = source |  | ||||||
|         self.user_connection_type = user_connection_type |  | ||||||
|         self.group_connection_type = group_connection_type |  | ||||||
|         self._logger = get_logger().bind(source=self.source) |  | ||||||
|  |  | ||||||
|     def get_action( |  | ||||||
|         self, |  | ||||||
|         object_type: type[User | Group], |  | ||||||
|         matchable_properties: list[MatchableProperty], |  | ||||||
|         identifier: str, |  | ||||||
|         properties: dict[str, Any | dict[str, Any]], |  | ||||||
|     ) -> tuple[Action, UserSourceConnection | GroupSourceConnection | None]: |  | ||||||
|         connection_type = None |  | ||||||
|         matching_mode = None |  | ||||||
|         identifier_matching_mode = None |  | ||||||
|         if object_type == User: |  | ||||||
|             connection_type = self.user_connection_type |  | ||||||
|             matching_mode = self.source.user_matching_mode |  | ||||||
|             identifier_matching_mode = SourceUserMatchingModes.IDENTIFIER |  | ||||||
|         if object_type == Group: |  | ||||||
|             connection_type = self.group_connection_type |  | ||||||
|             matching_mode = self.source.group_matching_mode |  | ||||||
|             identifier_matching_mode = SourceGroupMatchingModes.IDENTIFIER |  | ||||||
|         if not connection_type or not matching_mode or not identifier_matching_mode: |  | ||||||
|             return Action.DENY, None |  | ||||||
|  |  | ||||||
|         new_connection = connection_type(source=self.source, identifier=identifier) |  | ||||||
|  |  | ||||||
|         existing_connections = connection_type.objects.filter( |  | ||||||
|             source=self.source, identifier=identifier |  | ||||||
|         ) |  | ||||||
|         if existing_connections.exists(): |  | ||||||
|             return Action.AUTH, existing_connections.first() |  | ||||||
|         # No connection exists, but we match on identifier, so enroll |  | ||||||
|         if matching_mode == identifier_matching_mode: |  | ||||||
|             # We don't save the connection here cause it doesn't have a user/group assigned yet |  | ||||||
|             return Action.ENROLL, new_connection |  | ||||||
|  |  | ||||||
|         # Check for existing users with matching attributes |  | ||||||
|         query = Q() |  | ||||||
|         for matchable_property in matchable_properties: |  | ||||||
|             property = matchable_property.property |  | ||||||
|             if matching_mode in [matchable_property.link_mode, matchable_property.deny_mode]: |  | ||||||
|                 if not properties.get(property, None): |  | ||||||
|                     self._logger.warning( |  | ||||||
|                         "Refusing to use none property", identifier=identifier, property=property |  | ||||||
|                     ) |  | ||||||
|                     return Action.DENY, None |  | ||||||
|                 query_args = { |  | ||||||
|                     f"{property}__exact": properties[property], |  | ||||||
|                 } |  | ||||||
|                 query = Q(**query_args) |  | ||||||
|         self._logger.debug( |  | ||||||
|             "Trying to link with existing object", query=query, identifier=identifier |  | ||||||
|         ) |  | ||||||
|         matching_objects = object_type.objects.filter(query) |  | ||||||
|         # No matching objects, always enroll |  | ||||||
|         if not matching_objects.exists(): |  | ||||||
|             self._logger.debug("No matching objects found, enrolling") |  | ||||||
|             return Action.ENROLL, new_connection |  | ||||||
|  |  | ||||||
|         obj = matching_objects.first() |  | ||||||
|         if matching_mode in [mp.link_mode for mp in matchable_properties]: |  | ||||||
|             attr = None |  | ||||||
|             if object_type == User: |  | ||||||
|                 attr = "user" |  | ||||||
|             if object_type == Group: |  | ||||||
|                 attr = "group" |  | ||||||
|             setattr(new_connection, attr, obj) |  | ||||||
|             return Action.LINK, new_connection |  | ||||||
|         if matching_mode in [mp.deny_mode for mp in matchable_properties]: |  | ||||||
|             self._logger.info("Denying source because object exists", obj=obj) |  | ||||||
|             return Action.DENY, None |  | ||||||
|  |  | ||||||
|         # Should never get here as default enroll case is returned above. |  | ||||||
|         return Action.DENY, None  # pragma: no cover |  | ||||||
|  |  | ||||||
|     def get_user_action( |  | ||||||
|         self, identifier: str, properties: dict[str, Any | dict[str, Any]] |  | ||||||
|     ) -> tuple[Action, UserSourceConnection | None]: |  | ||||||
|         return self.get_action( |  | ||||||
|             User, |  | ||||||
|             [ |  | ||||||
|                 MatchableProperty( |  | ||||||
|                     "username", |  | ||||||
|                     SourceUserMatchingModes.USERNAME_LINK, |  | ||||||
|                     SourceUserMatchingModes.USERNAME_DENY, |  | ||||||
|                 ), |  | ||||||
|                 MatchableProperty( |  | ||||||
|                     "email", SourceUserMatchingModes.EMAIL_LINK, SourceUserMatchingModes.EMAIL_DENY |  | ||||||
|                 ), |  | ||||||
|             ], |  | ||||||
|             identifier, |  | ||||||
|             properties, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def get_group_action( |  | ||||||
|         self, identifier: str, properties: dict[str, Any | dict[str, Any]] |  | ||||||
|     ) -> tuple[Action, GroupSourceConnection | None]: |  | ||||||
|         return self.get_action( |  | ||||||
|             Group, |  | ||||||
|             [ |  | ||||||
|                 MatchableProperty( |  | ||||||
|                     "name", SourceGroupMatchingModes.NAME_LINK, SourceGroupMatchingModes.NAME_DENY |  | ||||||
|                 ), |  | ||||||
|             ], |  | ||||||
|             identifier, |  | ||||||
|             properties, |  | ||||||
|         ) |  | ||||||
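SourceMatcher.get_action above boils down to a small decision tree: an existing connection authenticates, identifier-only matching enrolls immediately, a missing match property denies, and otherwise the configured link/deny mode decides what happens when an object with the same email, username, or name already exists. A condensed, runnable sketch of that tree with an in-memory store; FakeStore, decide, and the mode strings are illustrative stand-ins for the ORM lookups and SourceUserMatchingModes values:

from dataclasses import dataclass, field
from enum import Enum


class Action(Enum):
    LINK = "link"
    AUTH = "auth"
    ENROLL = "enroll"
    DENY = "deny"


@dataclass
class FakeStore:
    """Stand-in for the connection and user lookups the real matcher performs."""
    connections: dict[str, str] = field(default_factory=dict)  # identifier -> username
    users: dict[str, dict] = field(default_factory=dict)       # username -> properties


def decide(store: FakeStore, mode: str, identifier: str, properties: dict) -> tuple[Action, str | None]:
    if identifier in store.connections:      # a connection already exists -> authenticate
        return Action.AUTH, store.connections[identifier]
    if mode == "identifier":                 # trust the identifier alone -> enroll
        return Action.ENROLL, None
    prop = "email" if mode.startswith("email") else "username"
    if not properties.get(prop):             # refuse to match on an empty property
        return Action.DENY, None
    for username, existing in store.users.items():
        if existing.get(prop) == properties[prop]:
            return (Action.LINK, username) if mode.endswith("link") else (Action.DENY, None)
    return Action.ENROLL, None               # nothing matched -> enroll a new object


if __name__ == "__main__":
    store = FakeStore(users={"jdoe": {"email": "jdoe@example.com"}})
    print(decide(store, "email_link", "oidc|123", {"email": "jdoe@example.com"}))  # (Action.LINK, 'jdoe')
    print(decide(store, "email_deny", "oidc|456", {"email": "jdoe@example.com"}))  # (Action.DENY, None)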
| @ -10,7 +10,7 @@ from authentik.flows.stage import StageView | |||||||
| PLAN_CONTEXT_SOURCES_CONNECTION = "goauthentik.io/sources/connection" | PLAN_CONTEXT_SOURCES_CONNECTION = "goauthentik.io/sources/connection" | ||||||
|  |  | ||||||
|  |  | ||||||
| class PostSourceStage(StageView): | class PostUserEnrollmentStage(StageView): | ||||||
|     """Dynamically injected stage which saves the Connection after |     """Dynamically injected stage which saves the Connection after | ||||||
|     the user has been enrolled.""" |     the user has been enrolled.""" | ||||||
|  |  | ||||||
| @ -21,9 +21,7 @@ class PostSourceStage(StageView): | |||||||
|         ] |         ] | ||||||
|         user: User = self.executor.plan.context[PLAN_CONTEXT_PENDING_USER] |         user: User = self.executor.plan.context[PLAN_CONTEXT_PENDING_USER] | ||||||
|         connection.user = user |         connection.user = user | ||||||
|         linked = connection.pk is None |  | ||||||
|         connection.save() |         connection.save() | ||||||
|         if linked: |  | ||||||
|         Event.new( |         Event.new( | ||||||
|             EventAction.SOURCE_LINKED, |             EventAction.SOURCE_LINKED, | ||||||
|             message="Linked Source", |             message="Linked Source", | ||||||
|  | |||||||
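In the left-hand version of PostSourceStage, `linked = connection.pk is None` is read before save() so the SOURCE_LINKED event is only emitted when the connection was created in this flow, not on every subsequent login. A small sketch of that first-save pattern; the Connection class, finalize helper, and emit_event callback are illustrative, not authentik code:

from dataclasses import dataclass
from itertools import count

_pk_counter = count(1)


@dataclass
class Connection:
    """Illustrative stand-in for UserSourceConnection."""
    user: str | None = None
    pk: int | None = None

    def save(self) -> None:
        if self.pk is None:  # the first save assigns a primary key, like a Django INSERT
            self.pk = next(_pk_counter)


def finalize(connection: Connection, user: str, emit_event) -> None:
    connection.user = user
    newly_linked = connection.pk is None  # True only if the connection was never saved
    connection.save()
    if newly_linked:
        emit_event("source_linked", user=user)


if __name__ == "__main__":
    events: list[tuple] = []
    conn = Connection()
    finalize(conn, "jdoe", lambda name, **kw: events.append((name, kw)))
    finalize(conn, "jdoe", lambda name, **kw: events.append((name, kw)))
    print(events)  # only one source_linked event despite two runs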
| @ -2,9 +2,7 @@ | |||||||
|  |  | ||||||
| from datetime import datetime, timedelta | from datetime import datetime, timedelta | ||||||
|  |  | ||||||
| from django.conf import ImproperlyConfigured |  | ||||||
| from django.contrib.sessions.backends.cache import KEY_PREFIX | from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||||
| from django.contrib.sessions.backends.db import SessionStore as DBSessionStore |  | ||||||
| from django.core.cache import cache | from django.core.cache import cache | ||||||
| from django.utils.timezone import now | from django.utils.timezone import now | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
| @ -17,7 +15,6 @@ from authentik.core.models import ( | |||||||
|     User, |     User, | ||||||
| ) | ) | ||||||
| from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task | from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task | ||||||
| from authentik.lib.config import CONFIG |  | ||||||
| from authentik.root.celery import CELERY_APP | from authentik.root.celery import CELERY_APP | ||||||
|  |  | ||||||
| LOGGER = get_logger() | LOGGER = get_logger() | ||||||
| @ -42,8 +39,6 @@ def clean_expired_models(self: SystemTask): | |||||||
|     amount = 0 |     amount = 0 | ||||||
|  |  | ||||||
|     for session in AuthenticatedSession.objects.all(): |     for session in AuthenticatedSession.objects.all(): | ||||||
|         match CONFIG.get("session_storage", "cache"): |  | ||||||
|             case "cache": |  | ||||||
|         cache_key = f"{KEY_PREFIX}{session.session_key}" |         cache_key = f"{KEY_PREFIX}{session.session_key}" | ||||||
|         value = None |         value = None | ||||||
|         try: |         try: | ||||||
| @ -54,19 +49,6 @@ def clean_expired_models(self: SystemTask): | |||||||
|         if not value: |         if not value: | ||||||
|             session.delete() |             session.delete() | ||||||
|             amount += 1 |             amount += 1 | ||||||
|             case "db": |  | ||||||
|                 if not ( |  | ||||||
|                     DBSessionStore.get_model_class() |  | ||||||
|                     .objects.filter(session_key=session.session_key, expire_date__gt=now()) |  | ||||||
|                     .exists() |  | ||||||
|                 ): |  | ||||||
|                     session.delete() |  | ||||||
|                     amount += 1 |  | ||||||
|             case _: |  | ||||||
|                 # Should never happen, as we check for other values in authentik/root/settings.py |  | ||||||
|                 raise ImproperlyConfigured( |  | ||||||
|                     "Invalid session_storage setting, allowed values are db and cache" |  | ||||||
|                 ) |  | ||||||
|     LOGGER.debug("Expired sessions", model=AuthenticatedSession, amount=amount) |     LOGGER.debug("Expired sessions", model=AuthenticatedSession, amount=amount) | ||||||
|  |  | ||||||
|     messages.append(f"Expired {amount} {AuthenticatedSession._meta.verbose_name_plural}") |     messages.append(f"Expired {amount} {AuthenticatedSession._meta.verbose_name_plural}") | ||||||
|  | |||||||
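The left-hand clean_expired_models branches on the configured session_storage: cache-backed sessions count as expired when their cache key has vanished, db-backed sessions when no unexpired row remains, and any other value raises a configuration error. A pure-Python sketch of that dispatch, using a dict as the cache and a set of live database keys; Storage, clean_expired_sessions, and the "session:" prefix are illustrative stand-ins for CONFIG, KEY_PREFIX, and the Django session backends:

from enum import Enum


class Storage(Enum):
    CACHE = "cache"
    DB = "db"


def clean_expired_sessions(
    tracked_keys: list[str],
    storage: Storage,
    cache: dict[str, object],
    live_db_keys: set[str],
) -> int:
    """Drop tracked sessions whose backing store no longer knows about them."""
    removed = 0
    for key in list(tracked_keys):
        match storage:
            case Storage.CACHE:
                alive = f"session:{key}" in cache
            case Storage.DB:
                alive = key in live_db_keys
            case _:
                # mirrors the ImproperlyConfigured guard in the real task
                raise ValueError("session storage must be 'cache' or 'db'")
        if not alive:
            tracked_keys.remove(key)
            removed += 1
    return removed


if __name__ == "__main__":
    tracked = ["a", "b", "c"]
    print(clean_expired_sessions(tracked, Storage.CACHE, {"session:a": object()}, set()))  # 2
    print(tracked)  # ['a']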
| @ -10,7 +10,7 @@ | |||||||
|         versionSubdomain: "{{ version_subdomain }}", |         versionSubdomain: "{{ version_subdomain }}", | ||||||
|         build: "{{ build }}", |         build: "{{ build }}", | ||||||
|     }; |     }; | ||||||
|     window.addEventListener("DOMContentLoaded", function () { |     window.addEventListener("DOMContentLoaded", () => { | ||||||
|         {% for message in messages %} |         {% for message in messages %} | ||||||
|         window.dispatchEvent( |         window.dispatchEvent( | ||||||
|             new CustomEvent("ak-message", { |             new CustomEvent("ak-message", { | ||||||
|  | |||||||
| @ -1,10 +1,9 @@ | |||||||
| {% load static %} | {% load static %} | ||||||
| {% load i18n %} | {% load i18n %} | ||||||
| {% load authentik_core %} |  | ||||||
|  |  | ||||||
| <!DOCTYPE html> | <!DOCTYPE html> | ||||||
|  |  | ||||||
| <html> | <html lang="en"> | ||||||
|     <head> |     <head> | ||||||
|         <meta charset="UTF-8"> |         <meta charset="UTF-8"> | ||||||
|         <meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1"> |         <meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1"> | ||||||
| @ -15,8 +14,8 @@ | |||||||
|         {% endblock %} |         {% endblock %} | ||||||
|         <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}"> |         <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}"> | ||||||
|         <link rel="stylesheet" type="text/css" href="{% static 'dist/custom.css' %}" data-inject> |         <link rel="stylesheet" type="text/css" href="{% static 'dist/custom.css' %}" data-inject> | ||||||
|         <script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script> |         <script src="{% static 'dist/poly.js' %}?version={{ version }}" type="module"></script> | ||||||
|         <script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script> |         <script src="{% static 'dist/standalone/loading/index.js' %}?version={{ version }}" type="module"></script> | ||||||
|         {% block head %} |         {% block head %} | ||||||
|         {% endblock %} |         {% endblock %} | ||||||
|         <meta name="sentry-trace" content="{{ sentry_trace }}" /> |         <meta name="sentry-trace" content="{{ sentry_trace }}" /> | ||||||
|  | |||||||
| @ -1,9 +1,9 @@ | |||||||
| {% extends "base/skeleton.html" %} | {% extends "base/skeleton.html" %} | ||||||
|  |  | ||||||
| {% load authentik_core %} | {% load static %} | ||||||
|  |  | ||||||
| {% block head %} | {% block head %} | ||||||
| <script src="{% versioned_script 'dist/admin/AdminInterface-%v.js' %}" type="module"></script> | <script src="{% static 'dist/admin/AdminInterface.js' %}?version={{ version }}" type="module"></script> | ||||||
| <meta name="theme-color" content="#18191a" media="(prefers-color-scheme: dark)"> | <meta name="theme-color" content="#18191a" media="(prefers-color-scheme: dark)"> | ||||||
| <meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)"> | <meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)"> | ||||||
| {% include "base/header_js.html" %} | {% include "base/header_js.html" %} | ||||||
|  | |||||||
43 authentik/core/templates/if/end_session.html Normal file
| @ -0,0 +1,43 @@ | |||||||
|  | {% extends 'login/base_full.html' %} | ||||||
|  |  | ||||||
|  | {% load static %} | ||||||
|  | {% load i18n %} | ||||||
|  |  | ||||||
|  | {% block title %} | ||||||
|  | {% trans 'End session' %} - {{ brand.branding_title }} | ||||||
|  | {% endblock %} | ||||||
|  |  | ||||||
|  | {% block card_title %} | ||||||
|  | {% blocktrans with application=application.name %} | ||||||
|  | You've logged out of {{ application }}. | ||||||
|  | {% endblocktrans %} | ||||||
|  | {% endblock %} | ||||||
|  |  | ||||||
|  | {% block card %} | ||||||
|  | <form method="POST" class="pf-c-form"> | ||||||
|  |     <p> | ||||||
|  |         {% blocktrans with application=application.name branding_title=brand.branding_title %} | ||||||
|  |             You've logged out of {{ application }}. You can go back to the overview to launch another application, or log out of your {{ branding_title }} account. | ||||||
|  |         {% endblocktrans %} | ||||||
|  |     </p> | ||||||
|  |  | ||||||
|  |     <a id="ak-back-home" href="{% url 'authentik_core:root-redirect' %}" class="pf-c-button pf-m-primary"> | ||||||
|  |         {% trans 'Go back to overview' %} | ||||||
|  |     </a> | ||||||
|  |  | ||||||
|  |     <a id="logout" href="{% url 'authentik_flows:default-invalidation' %}" class="pf-c-button pf-m-secondary"> | ||||||
|  |         {% blocktrans with branding_title=brand.branding_title %} | ||||||
|  |             Log out of {{ branding_title }} | ||||||
|  |         {% endblocktrans %} | ||||||
|  |     </a> | ||||||
|  |  | ||||||
|  |     {% if application.get_launch_url %} | ||||||
|  |     <a href="{{ application.get_launch_url }}" class="pf-c-button pf-m-secondary"> | ||||||
|  |         {% blocktrans with application=application.name %} | ||||||
|  |             Log back into {{ application }} | ||||||
|  |         {% endblocktrans %} | ||||||
|  |     </a> | ||||||
|  |     {% endif %} | ||||||
|  |  | ||||||
|  | </form> | ||||||
|  | {% endblock %} | ||||||
| @ -1,7 +1,6 @@ | |||||||
| {% extends "base/skeleton.html" %} | {% extends "base/skeleton.html" %} | ||||||
| 
 | 
 | ||||||
| {% load static %} | {% load static %} | ||||||
| {% load authentik_core %} |  | ||||||
| 
 | 
 | ||||||
| {% block head_before %} | {% block head_before %} | ||||||
| {{ block.super }} | {{ block.super }} | ||||||
| @ -18,7 +17,7 @@ window.authentik.flow = { | |||||||
| {% endblock %} | {% endblock %} | ||||||
| 
 | 
 | ||||||
| {% block head %} | {% block head %} | ||||||
| <script src="{% versioned_script 'dist/flow/FlowInterface-%v.js' %}" type="module"></script> | <script src="{% static 'dist/flow/FlowInterface.js' %}?version={{ version }}" type="module"></script> | ||||||
| <style> | <style> | ||||||
| :root { | :root { | ||||||
|     --ak-flow-background: url("{{ flow.background_url }}"); |     --ak-flow-background: url("{{ flow.background_url }}"); | ||||||
| @ -1,9 +1,9 @@ | |||||||
| {% extends "base/skeleton.html" %} | {% extends "base/skeleton.html" %} | ||||||
|  |  | ||||||
| {% load authentik_core %} | {% load static %} | ||||||
|  |  | ||||||
| {% block head %} | {% block head %} | ||||||
| <script src="{% versioned_script 'dist/user/UserInterface-%v.js' %}" type="module"></script> | <script src="{% static 'dist/user/UserInterface.js' %}?version={{ version }}" type="module"></script> | ||||||
| <meta name="theme-color" content="#1c1e21" media="(prefers-color-scheme: light)"> | <meta name="theme-color" content="#1c1e21" media="(prefers-color-scheme: light)"> | ||||||
| <meta name="theme-color" content="#1c1e21" media="(prefers-color-scheme: dark)"> | <meta name="theme-color" content="#1c1e21" media="(prefers-color-scheme: dark)"> | ||||||
| {% include "base/header_js.html" %} | {% include "base/header_js.html" %} | ||||||
|  | |||||||
| @ -71,9 +71,9 @@ | |||||||
|                 </li> |                 </li> | ||||||
|                 {% endfor %} |                 {% endfor %} | ||||||
|                 <li> |                 <li> | ||||||
|                     <span> |                     <a href="https://goauthentik.io?utm_source=authentik"> | ||||||
|                         {% trans 'Powered by authentik' %} |                         {% trans 'Powered by authentik' %} | ||||||
|                     </span> |                     </a> | ||||||
|                 </li> |                 </li> | ||||||
|             </ul> |             </ul> | ||||||
|         </footer> |         </footer> | ||||||
|  | |||||||
Some files were not shown because too many files have changed in this diff.