Compare commits

5 Commits

version/20 ... web/testin
| Author | SHA1 | Date |
|---|---|---|
| | f0256a0535 | |
| | 142a985914 | |
| | a8531d498a | |
| | f8cb4e880b | |
| | 3ced637db3 | |
| @ -1,5 +1,5 @@ | ||||
| [bumpversion] | ||||
| current_version = 2024.8.6 | ||||
| current_version = 2024.6.1 | ||||
| tag = True | ||||
| commit = True | ||||
| parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? | ||||
|  | ||||
| @ -29,15 +29,9 @@ outputs: | ||||
|   imageTags: | ||||
|     description: "Docker image tags" | ||||
|     value: ${{ steps.ev.outputs.imageTags }} | ||||
|   attestImageNames: | ||||
|     description: "Docker image names used for attestation" | ||||
|     value: ${{ steps.ev.outputs.attestImageNames }} | ||||
|   imageMainTag: | ||||
|     description: "Docker image main tag" | ||||
|     value: ${{ steps.ev.outputs.imageMainTag }} | ||||
|   imageMainName: | ||||
|     description: "Docker image main name" | ||||
|     value: ${{ steps.ev.outputs.imageMainName }} | ||||
|  | ||||
| runs: | ||||
|   using: "composite" | ||||
|  | ||||
| @ -7,7 +7,7 @@ from time import time | ||||
| parser = configparser.ConfigParser() | ||||
| parser.read(".bumpversion.cfg") | ||||
|  | ||||
| should_build = str(len(os.environ.get("DOCKER_USERNAME", "")) > 0).lower() | ||||
| should_build = str(os.environ.get("DOCKER_USERNAME", None) is not None).lower() | ||||
|  | ||||
| branch_name = os.environ["GITHUB_REF"] | ||||
| if os.environ.get("GITHUB_HEAD_REF", "") != "": | ||||
| @ -50,25 +50,13 @@ else: | ||||
|             f"{name}:gh-{safe_branch_name}-{int(time())}-{sha[:7]}{suffix}",  # Use by FluxCD | ||||
|         ] | ||||
|  | ||||
| image_main_tag = image_tags[0].split(":")[-1] | ||||
|  | ||||
|  | ||||
| def get_attest_image_names(image_with_tags: list[str]): | ||||
|     """Attestation only for GHCR""" | ||||
|     image_tags = [] | ||||
|     for image_name in set(name.split(":")[0] for name in image_with_tags): | ||||
|         if not image_name.startswith("ghcr.io"): | ||||
|             continue | ||||
|         image_tags.append(image_name) | ||||
|     return ",".join(set(image_tags)) | ||||
|  | ||||
| image_main_tag = image_tags[0] | ||||
| image_tags_rendered = ",".join(image_tags) | ||||
|  | ||||
| with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output: | ||||
|     print(f"shouldBuild={should_build}", file=_output) | ||||
|     print(f"sha={sha}", file=_output) | ||||
|     print(f"version={version}", file=_output) | ||||
|     print(f"prerelease={prerelease}", file=_output) | ||||
|     print(f"imageTags={','.join(image_tags)}", file=_output) | ||||
|     print(f"attestImageNames={get_attest_image_names(image_tags)}", file=_output) | ||||
|     print(f"imageTags={image_tags_rendered}", file=_output) | ||||
|     print(f"imageMainTag={image_main_tag}", file=_output) | ||||
|     print(f"imageMainName={image_tags[0]}", file=_output) | ||||
|  | ||||

.github/dependabot.yml (4 changes, vendored)

| @ -58,10 +58,6 @@ updates: | ||||
|         patterns: | ||||
|           - "@rollup/*" | ||||
|           - "rollup-*" | ||||
|       swc: | ||||
|         patterns: | ||||
|           - "@swc/*" | ||||
|           - "swc-*" | ||||
|       wdio: | ||||
|         patterns: | ||||
|           - "@wdio/*" | ||||
|  | ||||

.github/workflows/api-ts-publish.yml (4 changes, vendored)

| @ -35,8 +35,8 @@ jobs: | ||||
|         run: | | ||||
|           export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'` | ||||
|           npm i @goauthentik/api@$VERSION | ||||
|       - name: Upgrade /web/packages/sfe | ||||
|         working-directory: web/packages/sfe | ||||
|       - name: Upgrade /web/sfe | ||||
|         working-directory: web/sfe | ||||
|         run: | | ||||
|           export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'` | ||||
|           npm i @goauthentik/api@$VERSION | ||||
|  | ||||

.github/workflows/ci-main.yml (18 changes, vendored)

| @ -213,16 +213,13 @@ jobs: | ||||
|     permissions: | ||||
|       # Needed to upload contianer images to ghcr.io | ||||
|       packages: write | ||||
|       # Needed for attestation | ||||
|       id-token: write | ||||
|       attestations: write | ||||
|     timeout-minutes: 120 | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           ref: ${{ github.event.pull_request.head.sha }} | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.2.0 | ||||
|         uses: docker/setup-qemu-action@v3.1.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
| @ -244,7 +241,6 @@ jobs: | ||||
|         run: make gen-client-ts | ||||
|       - name: Build Docker Image | ||||
|         uses: docker/build-push-action@v6 | ||||
|         id: push | ||||
|         with: | ||||
|           context: . | ||||
|           secrets: | | ||||
| @ -255,15 +251,8 @@ jobs: | ||||
|           build-args: | | ||||
|             GIT_BUILD_HASH=${{ steps.ev.outputs.sha }} | ||||
|           cache-from: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache | ||||
|           cache-to: ${{ steps.ev.outputs.shouldBuild == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache,mode=max' || '' }} | ||||
|           cache-to: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache,mode=max | ||||
|           platforms: linux/${{ matrix.arch }} | ||||
|       - uses: actions/attest-build-provenance@v1 | ||||
|         id: attest | ||||
|         if: ${{ steps.ev.outputs.shouldBuild == 'true' }} | ||||
|         with: | ||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} | ||||
|           subject-digest: ${{ steps.push.outputs.digest }} | ||||
|           push-to-registry: true | ||||
|   pr-comment: | ||||
|     needs: | ||||
|       - build | ||||
| @ -285,7 +274,6 @@ jobs: | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/dev-server | ||||
|       - name: Comment on PR | ||||
|         if: ${{ steps.ev.outputs.shouldBuild == 'true' }} | ||||
|         uses: ./.github/actions/comment-pr-instructions | ||||
|         with: | ||||
|           tag: ${{ steps.ev.outputs.imageMainTag }} | ||||
|           tag: gh-${{ steps.ev.outputs.imageMainTag }} | ||||
|  | ||||

.github/workflows/ci-outpost.yml (17 changes, vendored)

| @ -31,7 +31,7 @@ jobs: | ||||
|       - name: golangci-lint | ||||
|         uses: golangci/golangci-lint-action@v6 | ||||
|         with: | ||||
|           version: latest | ||||
|           version: v1.54.2 | ||||
|           args: --timeout 5000s --verbose | ||||
|           skip-cache: true | ||||
|   test-unittest: | ||||
| @ -71,15 +71,12 @@ jobs: | ||||
|     permissions: | ||||
|       # Needed to upload contianer images to ghcr.io | ||||
|       packages: write | ||||
|       # Needed for attestation | ||||
|       id-token: write | ||||
|       attestations: write | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           ref: ${{ github.event.pull_request.head.sha }} | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.2.0 | ||||
|         uses: docker/setup-qemu-action@v3.1.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
| @ -99,7 +96,6 @@ jobs: | ||||
|       - name: Generate API | ||||
|         run: make gen-client-go | ||||
|       - name: Build Docker Image | ||||
|         id: push | ||||
|         uses: docker/build-push-action@v6 | ||||
|         with: | ||||
|           tags: ${{ steps.ev.outputs.imageTags }} | ||||
| @ -110,14 +106,7 @@ jobs: | ||||
|           platforms: linux/amd64,linux/arm64 | ||||
|           context: . | ||||
|           cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache | ||||
|           cache-to: ${{ steps.ev.outputs.shouldBuild == 'true' && format('type=registry,ref=ghcr.io/goauthentik/dev-{0}:buildcache,mode=max', matrix.type) || '' }} | ||||
|       - uses: actions/attest-build-provenance@v1 | ||||
|         id: attest | ||||
|         if: ${{ steps.ev.outputs.shouldBuild == 'true' }} | ||||
|         with: | ||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} | ||||
|           subject-digest: ${{ steps.push.outputs.digest }} | ||||
|           push-to-registry: true | ||||
|           cache-to: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache,mode=max | ||||
|   build-binary: | ||||
|     timeout-minutes: 120 | ||||
|     needs: | ||||
|  | ||||

.github/workflows/ci-web.yml (9 changes, vendored)

| @ -28,8 +28,15 @@ jobs: | ||||
|         include: | ||||
|           - command: tsc | ||||
|             project: web | ||||
|             extra_setup: | | ||||
|               cd sfe/ && npm ci | ||||
|           - command: lit-analyse | ||||
|             project: web | ||||
|             extra_setup: | | ||||
|               # lit-analyse doesn't understand path rewrites, so make it | ||||
|               # belive it's an actual module | ||||
|               cd node_modules/@goauthentik | ||||
|               ln -s ../../src/ web | ||||
|         exclude: | ||||
|           - command: lint:lockfile | ||||
|             project: tests/wdio | ||||
| @ -92,4 +99,4 @@ jobs: | ||||
|         run: make gen-client-ts | ||||
|       - name: test | ||||
|         working-directory: web/ | ||||
|         run: npm run test || exit 0 | ||||
|         run: npm run test | ||||
|  | ||||

.github/workflows/release-publish.yml (32 changes, vendored)

| @ -11,13 +11,10 @@ jobs: | ||||
|     permissions: | ||||
|       # Needed to upload contianer images to ghcr.io | ||||
|       packages: write | ||||
|       # Needed for attestation | ||||
|       id-token: write | ||||
|       attestations: write | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.2.0 | ||||
|         uses: docker/setup-qemu-action@v3.1.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
| @ -44,31 +41,19 @@ jobs: | ||||
|           mkdir -p ./gen-go-api | ||||
|       - name: Build Docker Image | ||||
|         uses: docker/build-push-action@v6 | ||||
|         id: push | ||||
|         with: | ||||
|           context: . | ||||
|           push: true | ||||
|           secrets: | | ||||
|             GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }} | ||||
|             GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }} | ||||
|           build-args: | | ||||
|             VERSION=${{ github.ref }} | ||||
|           tags: ${{ steps.ev.outputs.imageTags }} | ||||
|           platforms: linux/amd64,linux/arm64 | ||||
|       - uses: actions/attest-build-provenance@v1 | ||||
|         id: attest | ||||
|         with: | ||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} | ||||
|           subject-digest: ${{ steps.push.outputs.digest }} | ||||
|           push-to-registry: true | ||||
|   build-outpost: | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
|       # Needed to upload contianer images to ghcr.io | ||||
|       packages: write | ||||
|       # Needed for attestation | ||||
|       id-token: write | ||||
|       attestations: write | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
| @ -83,7 +68,7 @@ jobs: | ||||
|         with: | ||||
|           go-version-file: "go.mod" | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.2.0 | ||||
|         uses: docker/setup-qemu-action@v3.1.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
| @ -110,21 +95,12 @@ jobs: | ||||
|           password: ${{ secrets.GITHUB_TOKEN }} | ||||
|       - name: Build Docker Image | ||||
|         uses: docker/build-push-action@v6 | ||||
|         id: push | ||||
|         with: | ||||
|           push: true | ||||
|           build-args: | | ||||
|             VERSION=${{ github.ref }} | ||||
|           tags: ${{ steps.ev.outputs.imageTags }} | ||||
|           file: ${{ matrix.type }}.Dockerfile | ||||
|           platforms: linux/amd64,linux/arm64 | ||||
|           context: . | ||||
|       - uses: actions/attest-build-provenance@v1 | ||||
|         id: attest | ||||
|         with: | ||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} | ||||
|           subject-digest: ${{ steps.push.outputs.digest }} | ||||
|           push-to-registry: true | ||||
|   build-outpost-binary: | ||||
|     timeout-minutes: 120 | ||||
|     runs-on: ubuntu-latest | ||||
| @ -202,8 +178,8 @@ jobs: | ||||
|           image-name: ghcr.io/goauthentik/server | ||||
|       - name: Get static files from docker image | ||||
|         run: | | ||||
|           docker pull ${{ steps.ev.outputs.imageMainName }} | ||||
|           container=$(docker container create ${{ steps.ev.outputs.imageMainName }}) | ||||
|           docker pull ${{ steps.ev.outputs.imageMainTag }} | ||||
|           container=$(docker container create ${{ steps.ev.outputs.imageMainTag }}) | ||||
|           docker cp ${container}:web/ . | ||||
|       - name: Create a Sentry.io release | ||||
|         uses: getsentry/action-release@v1 | ||||
|  | ||||

.vscode/extensions.json (2 changes, vendored)

| @ -16,6 +16,6 @@ | ||||
|         "ms-python.black-formatter", | ||||
|         "redhat.vscode-yaml", | ||||
|         "Tobermory.es6-string-html", | ||||
|         "unifiedjs.vscode-mdx" | ||||
|         "unifiedjs.vscode-mdx", | ||||
|     ] | ||||
| } | ||||
|  | ||||

.vscode/launch.json (2 changes, vendored)

| @ -22,6 +22,6 @@ | ||||
|             }, | ||||
|             "justMyCode": true, | ||||
|             "django": true | ||||
|         } | ||||
|         }, | ||||
|     ] | ||||
| } | ||||
|  | ||||

.vscode/settings.json (21 changes, vendored)

| @ -18,21 +18,20 @@ | ||||
|         "sso", | ||||
|         "totp", | ||||
|         "traefik", | ||||
|         "webauthn" | ||||
|         "webauthn", | ||||
|     ], | ||||
|     "todo-tree.tree.showCountsInTree": true, | ||||
|     "todo-tree.tree.showBadges": true, | ||||
|     "yaml.customTags": [ | ||||
|         "!Condition sequence", | ||||
|         "!Context scalar", | ||||
|         "!Enumerate sequence", | ||||
|         "!Env scalar", | ||||
|         "!Find sequence", | ||||
|         "!Format sequence", | ||||
|         "!If sequence", | ||||
|         "!Index scalar", | ||||
|         "!KeyOf scalar", | ||||
|         "!Value scalar" | ||||
|         "!Context scalar", | ||||
|         "!Context sequence", | ||||
|         "!Format sequence", | ||||
|         "!Condition sequence", | ||||
|         "!Env sequence", | ||||
|         "!Env scalar", | ||||
|         "!If sequence" | ||||
|     ], | ||||
|     "typescript.preferences.importModuleSpecifier": "non-relative", | ||||
|     "typescript.preferences.importModuleSpecifierEnding": "index", | ||||
| @ -49,7 +48,9 @@ | ||||
|             "ignoreCase": false | ||||
|         } | ||||
|     ], | ||||
|     "go.testFlags": ["-count=1"], | ||||
|     "go.testFlags": [ | ||||
|         "-count=1" | ||||
|     ], | ||||
|     "github-actions.workflows.pinned.workflows": [ | ||||
|         ".github/workflows/ci-main.yml" | ||||
|     ] | ||||
|  | ||||

.vscode/tasks.json (62 changes, vendored)

| @ -2,67 +2,85 @@ | ||||
|     "version": "2.0.0", | ||||
|     "tasks": [ | ||||
|         { | ||||
|             "label": "authentik/core: make", | ||||
|             "label": "authentik[core]: format & test", | ||||
|             "command": "poetry", | ||||
|             "args": ["run", "make", "lint-fix", "lint"], | ||||
|             "presentation": { | ||||
|                 "panel": "new" | ||||
|             }, | ||||
|             "group": "test" | ||||
|             "args": [ | ||||
|                 "run", | ||||
|                 "make" | ||||
|             ], | ||||
|             "group": "build", | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/core: run", | ||||
|             "label": "authentik[core]: run", | ||||
|             "command": "poetry", | ||||
|             "args": ["run", "ak", "server"], | ||||
|             "args": [ | ||||
|                 "run", | ||||
|                 "make", | ||||
|                 "run", | ||||
|             ], | ||||
|             "group": "build", | ||||
|             "presentation": { | ||||
|                 "panel": "dedicated", | ||||
|                 "group": "running" | ||||
|             } | ||||
|             }, | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/web: make", | ||||
|             "label": "authentik[web]: format", | ||||
|             "command": "make", | ||||
|             "args": ["web"], | ||||
|             "group": "build" | ||||
|             "group": "build", | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/web: watch", | ||||
|             "label": "authentik[web]: watch", | ||||
|             "command": "make", | ||||
|             "args": ["web-watch"], | ||||
|             "group": "build", | ||||
|             "presentation": { | ||||
|                 "panel": "dedicated", | ||||
|                 "group": "running" | ||||
|             } | ||||
|             }, | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik: install", | ||||
|             "command": "make", | ||||
|             "args": ["install", "-j4"], | ||||
|             "group": "build" | ||||
|             "args": ["install"], | ||||
|             "group": "build", | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/website: make", | ||||
|             "label": "authentik: i18n-extract", | ||||
|             "command": "poetry", | ||||
|             "args": [ | ||||
|                 "run", | ||||
|                 "make", | ||||
|                 "i18n-extract" | ||||
|             ], | ||||
|             "group": "build", | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik[website]: format", | ||||
|             "command": "make", | ||||
|             "args": ["website"], | ||||
|             "group": "build" | ||||
|             "group": "build", | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/website: watch", | ||||
|             "label": "authentik[website]: watch", | ||||
|             "command": "make", | ||||
|             "args": ["website-watch"], | ||||
|             "group": "build", | ||||
|             "presentation": { | ||||
|                 "panel": "dedicated", | ||||
|                 "group": "running" | ||||
|             } | ||||
|             }, | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/api: generate", | ||||
|             "label": "authentik[api]: generate", | ||||
|             "command": "poetry", | ||||
|             "args": ["run", "make", "gen"], | ||||
|             "args": [ | ||||
|                 "run", | ||||
|                 "make", | ||||
|                 "gen" | ||||
|             ], | ||||
|             "group": "build" | ||||
|         } | ||||
|         }, | ||||
|     ] | ||||
| } | ||||
|  | ||||

Dockerfile (36 changes)

| @ -1,7 +1,7 @@ | ||||
| # syntax=docker/dockerfile:1 | ||||
|  | ||||
| # Stage 1: Build website | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS website-builder | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/node:22 as website-builder | ||||
|  | ||||
| ENV NODE_ENV=production | ||||
|  | ||||
| @ -20,7 +20,7 @@ COPY ./SECURITY.md /work/ | ||||
| RUN npm run build-bundled | ||||
|  | ||||
| # Stage 2: Build webui | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS web-builder | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/node:22 as web-builder | ||||
|  | ||||
| ARG GIT_BUILD_HASH | ||||
| ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ||||
| @ -30,9 +30,12 @@ WORKDIR /work/web | ||||
|  | ||||
| RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \ | ||||
|     --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \ | ||||
|     --mount=type=bind,target=/work/web/packages/sfe/package.json,src=./web/packages/sfe/package.json \ | ||||
|     --mount=type=bind,target=/work/web/sfe/package.json,src=./web/sfe/package.json \ | ||||
|     --mount=type=bind,target=/work/web/sfe/package-lock.json,src=./web/sfe/package-lock.json \ | ||||
|     --mount=type=bind,target=/work/web/scripts,src=./web/scripts \ | ||||
|     --mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \ | ||||
|     npm ci --include=dev && \ | ||||
|     cd sfe && \ | ||||
|     npm ci --include=dev | ||||
|  | ||||
| COPY ./package.json /work | ||||
| @ -40,10 +43,12 @@ COPY ./web /work/web/ | ||||
| COPY ./website /work/website/ | ||||
| COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api | ||||
|  | ||||
| RUN npm run build | ||||
| RUN npm run build && \ | ||||
|     cd sfe && \ | ||||
|     npm run build | ||||
|  | ||||
| # Stage 3: Build go proxy | ||||
| FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/oss/go/microsoft/golang:1.23-fips-bookworm AS go-builder | ||||
| FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/oss/go/microsoft/golang:1.22-fips-bookworm AS go-builder | ||||
|  | ||||
| ARG TARGETOS | ||||
| ARG TARGETARCH | ||||
| @ -80,7 +85,7 @@ RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \ | ||||
|     go build -o /go/authentik ./cmd/server | ||||
|  | ||||
| # Stage 4: MaxMind GeoIP | ||||
| FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.0.1 AS geoip | ||||
| FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.0.1 as geoip | ||||
|  | ||||
| ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN" | ||||
| ENV GEOIPUPDATE_VERBOSE="1" | ||||
| @ -94,10 +99,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ | ||||
|     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" | ||||
|  | ||||
| # Stage 5: Python dependencies | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.5-slim-bookworm-fips-full AS python-deps | ||||
|  | ||||
| ARG TARGETARCH | ||||
| ARG TARGETVARIANT | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.3-slim-bookworm-fips-full AS python-deps | ||||
|  | ||||
| WORKDIR /ak-root/poetry | ||||
|  | ||||
| @ -124,17 +126,17 @@ RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \ | ||||
|     pip install --force-reinstall /wheels/*" | ||||
|  | ||||
| # Stage 6: Run | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.5-slim-bookworm-fips-full AS final-image | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.3-slim-bookworm-fips-full AS final-image | ||||
|  | ||||
| ARG VERSION | ||||
| ARG GIT_BUILD_HASH | ||||
| ARG VERSION | ||||
| ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ||||
|  | ||||
| LABEL org.opencontainers.image.url=https://goauthentik.io | ||||
| LABEL org.opencontainers.image.description="goauthentik.io Main server image, see https://goauthentik.io for more info." | ||||
| LABEL org.opencontainers.image.source=https://github.com/goauthentik/authentik | ||||
| LABEL org.opencontainers.image.version=${VERSION} | ||||
| LABEL org.opencontainers.image.revision=${GIT_BUILD_HASH} | ||||
| LABEL org.opencontainers.image.url https://goauthentik.io | ||||
| LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info. | ||||
| LABEL org.opencontainers.image.source https://github.com/goauthentik/authentik | ||||
| LABEL org.opencontainers.image.version ${VERSION} | ||||
| LABEL org.opencontainers.image.revision ${GIT_BUILD_HASH} | ||||
|  | ||||
| WORKDIR / | ||||
|  | ||||
|  | ||||

Makefile (5 changes)

| @ -43,7 +43,7 @@ help:  ## Show this help | ||||
| 		sort | ||||
| 	@echo "" | ||||
|  | ||||
| go-test: | ||||
| test-go: | ||||
| 	go test -timeout 0 -v -race -cover ./... | ||||
|  | ||||
| test-docker:  ## Run all tests in a docker-compose | ||||
| @ -210,9 +210,6 @@ web: web-lint-fix web-lint web-check-compile  ## Automatically fix formatting is | ||||
| web-install:  ## Install the necessary libraries to build the Authentik UI | ||||
| 	cd web && npm ci | ||||
|  | ||||
| web-test: ## Run tests for the Authentik UI | ||||
| 	cd web && npm run test | ||||
|  | ||||
| web-watch:  ## Build and watch the Authentik UI for changes, updating automatically | ||||
| 	rm -rf web/dist/ | ||||
| 	mkdir web/dist/ | ||||
|  | ||||
| @ -15,9 +15,7 @@ | ||||
|  | ||||
| ## What is authentik? | ||||
|  | ||||
| authentik is an open-source Identity Provider that emphasizes flexibility and versatility, with support for a wide set of protocols. | ||||
|  | ||||
| Our [enterprise offer](https://goauthentik.io/pricing) can also be used as a self-hosted replacement for large-scale deployments of Okta/Auth0, Entra ID, Ping Identity, or other legacy IdPs for employees and B2B2C use. | ||||
| authentik is an open-source Identity Provider that emphasizes flexibility and versatility. It can be seamlessly integrated into existing environments to support new protocols. authentik is also a great solution for implementing sign-up, recovery, and other similar features in your application, saving you the hassle of dealing with them. | ||||
|  | ||||
| ## Installation | ||||
|  | ||||
|  | ||||
| @ -2,7 +2,7 @@ | ||||
|  | ||||
| from os import environ | ||||
|  | ||||
| __version__ = "2024.8.6" | ||||
| __version__ = "2024.6.1" | ||||
| ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -73,7 +73,7 @@ class SystemInfoSerializer(PassiveSerializer): | ||||
|             "authentik_version": get_full_version(), | ||||
|             "environment": get_env(), | ||||
|             "openssl_fips_enabled": ( | ||||
|                 backend._fips_enabled if LicenseKey.get_total().status().is_valid else None | ||||
|                 backend._fips_enabled if LicenseKey.get_total().is_valid() else None | ||||
|             ), | ||||
|             "openssl_version": OPENSSL_VERSION, | ||||
|             "platform": platform.platform(), | ||||
|  | ||||
| @ -12,7 +12,6 @@ from rest_framework.views import APIView | ||||
| from authentik import __version__, get_build_hash | ||||
| from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version | ||||
| from authentik.core.api.utils import PassiveSerializer | ||||
| from authentik.outposts.models import Outpost | ||||
|  | ||||
|  | ||||
| class VersionSerializer(PassiveSerializer): | ||||
| @ -23,7 +22,6 @@ class VersionSerializer(PassiveSerializer): | ||||
|     version_latest_valid = SerializerMethodField() | ||||
|     build_hash = SerializerMethodField() | ||||
|     outdated = SerializerMethodField() | ||||
|     outpost_outdated = SerializerMethodField() | ||||
|  | ||||
|     def get_build_hash(self, _) -> str: | ||||
|         """Get build hash, if version is not latest or released""" | ||||
| @ -49,15 +47,6 @@ class VersionSerializer(PassiveSerializer): | ||||
|         """Check if we're running the latest version""" | ||||
|         return parse(self.get_version_current(instance)) < parse(self.get_version_latest(instance)) | ||||
|  | ||||
|     def get_outpost_outdated(self, _) -> bool: | ||||
|         """Check if any outpost is outdated/has a version mismatch""" | ||||
|         any_outdated = False | ||||
|         for outpost in Outpost.objects.all(): | ||||
|             for state in outpost.state: | ||||
|                 if state.version_outdated: | ||||
|                     any_outdated = True | ||||
|         return any_outdated | ||||
|  | ||||
|  | ||||
| class VersionView(APIView): | ||||
|     """Get running and latest version.""" | ||||
|  | ||||
| @ -51,11 +51,9 @@ class BlueprintInstanceSerializer(ModelSerializer): | ||||
|         context = self.instance.context if self.instance else {} | ||||
|         valid, logs = Importer.from_string(content, context).validate() | ||||
|         if not valid: | ||||
|             text_logs = "\n".join([x["event"] for x in logs]) | ||||
|             raise ValidationError( | ||||
|                 [ | ||||
|                     _("Failed to validate blueprint"), | ||||
|                     *[f"- {x.event}" for x in logs], | ||||
|                 ] | ||||
|                 _("Failed to validate blueprint: {logs}".format_map({"logs": text_logs})) | ||||
|             ) | ||||
|         return content | ||||
|  | ||||
|  | ||||
| @ -23,11 +23,9 @@ class Command(BaseCommand): | ||||
|                 for blueprint_path in options.get("blueprints", []): | ||||
|                     content = BlueprintInstance(path=blueprint_path).retrieve() | ||||
|                     importer = Importer.from_string(content) | ||||
|                     valid, logs = importer.validate() | ||||
|                     valid, _ = importer.validate() | ||||
|                     if not valid: | ||||
|                         self.stderr.write("Blueprint invalid") | ||||
|                         for log in logs: | ||||
|                             self.stderr.write(f"\t{log.logger}: {log.event}: {log.attributes}") | ||||
|                         self.stderr.write("blueprint invalid") | ||||
|                         sys_exit(1) | ||||
|                     importer.apply() | ||||
|  | ||||
|  | ||||
| @ -113,19 +113,16 @@ class Command(BaseCommand): | ||||
|             ) | ||||
|             model_path = f"{model._meta.app_label}.{model._meta.model_name}" | ||||
|             self.schema["properties"]["entries"]["items"]["oneOf"].append( | ||||
|                 self.template_entry(model_path, model, serializer) | ||||
|                 self.template_entry(model_path, serializer) | ||||
|             ) | ||||
|  | ||||
|     def template_entry(self, model_path: str, model: type[Model], serializer: Serializer) -> dict: | ||||
|     def template_entry(self, model_path: str, serializer: Serializer) -> dict: | ||||
|         """Template entry for a single model""" | ||||
|         model_schema = self.to_jsonschema(serializer) | ||||
|         model_schema["required"] = [] | ||||
|         def_name = f"model_{model_path}" | ||||
|         def_path = f"#/$defs/{def_name}" | ||||
|         self.schema["$defs"][def_name] = model_schema | ||||
|         def_name_perm = f"model_{model_path}_permissions" | ||||
|         def_path_perm = f"#/$defs/{def_name_perm}" | ||||
|         self.schema["$defs"][def_name_perm] = self.model_permissions(model) | ||||
|         return { | ||||
|             "type": "object", | ||||
|             "required": ["model", "identifiers"], | ||||
| @ -138,7 +135,6 @@ class Command(BaseCommand): | ||||
|                     "default": "present", | ||||
|                 }, | ||||
|                 "conditions": {"type": "array", "items": {"type": "boolean"}}, | ||||
|                 "permissions": {"$ref": def_path_perm}, | ||||
|                 "attrs": {"$ref": def_path}, | ||||
|                 "identifiers": {"$ref": def_path}, | ||||
|             }, | ||||
| @ -189,20 +185,3 @@ class Command(BaseCommand): | ||||
|         if required: | ||||
|             result["required"] = required | ||||
|         return result | ||||
|  | ||||
|     def model_permissions(self, model: type[Model]) -> dict: | ||||
|         perms = [x[0] for x in model._meta.permissions] | ||||
|         for action in model._meta.default_permissions: | ||||
|             perms.append(f"{action}_{model._meta.model_name}") | ||||
|         return { | ||||
|             "type": "array", | ||||
|             "items": { | ||||
|                 "type": "object", | ||||
|                 "required": ["permission"], | ||||
|                 "properties": { | ||||
|                     "permission": {"type": "string", "enum": perms}, | ||||
|                     "user": {"type": "integer"}, | ||||
|                     "role": {"type": "string"}, | ||||
|                 }, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
| @ -1,24 +0,0 @@ | ||||
| version: 1 | ||||
| entries: | ||||
|   - model: authentik_core.user | ||||
|     id: user | ||||
|     identifiers: | ||||
|       username: "%(id)s" | ||||
|     attrs: | ||||
|       name: "%(id)s" | ||||
|   - model: authentik_rbac.role | ||||
|     id: role | ||||
|     identifiers: | ||||
|       name: "%(id)s" | ||||
|   - model: authentik_flows.flow | ||||
|     identifiers: | ||||
|       slug: "%(id)s" | ||||
|     attrs: | ||||
|       designation: authentication | ||||
|       name: foo | ||||
|       title: foo | ||||
|     permissions: | ||||
|       - permission: view_flow | ||||
|         user: !KeyOf user | ||||
|       - permission: view_flow | ||||
|         role: !KeyOf role | ||||
| @ -1,8 +0,0 @@ | ||||
| version: 1 | ||||
| entries: | ||||
|   - model: authentik_rbac.role | ||||
|     identifiers: | ||||
|       name: "%(id)s" | ||||
|     attrs: | ||||
|       permissions: | ||||
|         - authentik_blueprints.view_blueprintinstance | ||||
| @ -1,9 +0,0 @@ | ||||
| version: 1 | ||||
| entries: | ||||
|   - model: authentik_core.user | ||||
|     identifiers: | ||||
|       username: "%(id)s" | ||||
|     attrs: | ||||
|       name: "%(id)s" | ||||
|       permissions: | ||||
|         - authentik_blueprints.view_blueprintinstance | ||||
| @ -78,5 +78,5 @@ class TestBlueprintsV1API(APITestCase): | ||||
|         self.assertEqual(res.status_code, 400) | ||||
|         self.assertJSONEqual( | ||||
|             res.content.decode(), | ||||
|             {"content": ["Failed to validate blueprint", "- Invalid blueprint version"]}, | ||||
|             {"content": ["Failed to validate blueprint: Invalid blueprint version"]}, | ||||
|         ) | ||||
|  | ||||
| @ -1,57 +0,0 @@ | ||||
| """Test blueprints v1""" | ||||
|  | ||||
| from django.test import TransactionTestCase | ||||
| from guardian.shortcuts import get_perms | ||||
|  | ||||
| from authentik.blueprints.v1.importer import Importer | ||||
| from authentik.core.models import User | ||||
| from authentik.flows.models import Flow | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.lib.tests.utils import load_fixture | ||||
| from authentik.rbac.models import Role | ||||
|  | ||||
|  | ||||
| class TestBlueprintsV1RBAC(TransactionTestCase): | ||||
|     """Test Blueprints rbac attribute""" | ||||
|  | ||||
|     def test_user_permission(self): | ||||
|         """Test permissions""" | ||||
|         uid = generate_id() | ||||
|         import_yaml = load_fixture("fixtures/rbac_user.yaml", id=uid) | ||||
|  | ||||
|         importer = Importer.from_string(import_yaml) | ||||
|         self.assertTrue(importer.validate()[0]) | ||||
|         self.assertTrue(importer.apply()) | ||||
|         user = User.objects.filter(username=uid).first() | ||||
|         self.assertIsNotNone(user) | ||||
|         self.assertTrue(user.has_perms(["authentik_blueprints.view_blueprintinstance"])) | ||||
|  | ||||
|     def test_role_permission(self): | ||||
|         """Test permissions""" | ||||
|         uid = generate_id() | ||||
|         import_yaml = load_fixture("fixtures/rbac_role.yaml", id=uid) | ||||
|  | ||||
|         importer = Importer.from_string(import_yaml) | ||||
|         self.assertTrue(importer.validate()[0]) | ||||
|         self.assertTrue(importer.apply()) | ||||
|         role = Role.objects.filter(name=uid).first() | ||||
|         self.assertIsNotNone(role) | ||||
|         self.assertEqual( | ||||
|             list(role.group.permissions.all().values_list("codename", flat=True)), | ||||
|             ["view_blueprintinstance"], | ||||
|         ) | ||||
|  | ||||
|     def test_object_permission(self): | ||||
|         """Test permissions""" | ||||
|         uid = generate_id() | ||||
|         import_yaml = load_fixture("fixtures/rbac_object.yaml", id=uid) | ||||
|  | ||||
|         importer = Importer.from_string(import_yaml) | ||||
|         self.assertTrue(importer.validate()[0]) | ||||
|         self.assertTrue(importer.apply()) | ||||
|         flow = Flow.objects.filter(slug=uid).first() | ||||
|         user = User.objects.filter(username=uid).first() | ||||
|         role = Role.objects.filter(name=uid).first() | ||||
|         self.assertIsNotNone(flow) | ||||
|         self.assertEqual(get_perms(user, flow), ["view_flow"]) | ||||
|         self.assertEqual(get_perms(role.group, flow), ["view_flow"]) | ||||
| @ -1,7 +1,7 @@ | ||||
| """transfer common classes""" | ||||
|  | ||||
| from collections import OrderedDict | ||||
| from collections.abc import Generator, Iterable, Mapping | ||||
| from collections.abc import Iterable, Mapping | ||||
| from copy import copy | ||||
| from dataclasses import asdict, dataclass, field, is_dataclass | ||||
| from enum import Enum | ||||
| @ -58,15 +58,6 @@ class BlueprintEntryDesiredState(Enum): | ||||
|     MUST_CREATED = "must_created" | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
| class BlueprintEntryPermission: | ||||
|     """Describe object-level permissions""" | ||||
|  | ||||
|     permission: Union[str, "YAMLTag"] | ||||
|     user: Union[int, "YAMLTag", None] = field(default=None) | ||||
|     role: Union[str, "YAMLTag", None] = field(default=None) | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
| class BlueprintEntry: | ||||
|     """Single entry of a blueprint""" | ||||
| @ -78,7 +69,6 @@ class BlueprintEntry: | ||||
|     conditions: list[Any] = field(default_factory=list) | ||||
|     identifiers: dict[str, Any] = field(default_factory=dict) | ||||
|     attrs: dict[str, Any] | None = field(default_factory=dict) | ||||
|     permissions: list[BlueprintEntryPermission] = field(default_factory=list) | ||||
|  | ||||
|     id: str | None = None | ||||
|  | ||||
| @ -160,17 +150,6 @@ class BlueprintEntry: | ||||
|         """Get the blueprint model, with yaml tags resolved if present""" | ||||
|         return str(self.tag_resolver(self.model, blueprint)) | ||||
|  | ||||
|     def get_permissions( | ||||
|         self, blueprint: "Blueprint" | ||||
|     ) -> Generator[BlueprintEntryPermission, None, None]: | ||||
|         """Get permissions of this entry, with all yaml tags resolved""" | ||||
|         for perm in self.permissions: | ||||
|             yield BlueprintEntryPermission( | ||||
|                 permission=self.tag_resolver(perm.permission, blueprint), | ||||
|                 user=self.tag_resolver(perm.user, blueprint), | ||||
|                 role=self.tag_resolver(perm.role, blueprint), | ||||
|             ) | ||||
|  | ||||
|     def check_all_conditions_match(self, blueprint: "Blueprint") -> bool: | ||||
|         """Check all conditions of this entry match (evaluate to True)""" | ||||
|         return all(self.tag_resolver(self.conditions, blueprint)) | ||||
| @ -328,10 +307,7 @@ class Find(YAMLTag): | ||||
|         else: | ||||
|             model_name = self.model_name | ||||
|  | ||||
|         try: | ||||
|         model_class = apps.get_model(*model_name.split(".")) | ||||
|         except LookupError as exc: | ||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc | ||||
|  | ||||
|         query = Q() | ||||
|         for cond in self.conditions: | ||||
|  | ||||
| @ -16,7 +16,6 @@ from django.db.models.query_utils import Q | ||||
| from django.db.transaction import atomic | ||||
| from django.db.utils import IntegrityError | ||||
| from guardian.models import UserObjectPermission | ||||
| from guardian.shortcuts import assign_perm | ||||
| from rest_framework.exceptions import ValidationError | ||||
| from rest_framework.serializers import BaseSerializer, Serializer | ||||
| from structlog.stdlib import BoundLogger, get_logger | ||||
| @ -33,11 +32,9 @@ from authentik.blueprints.v1.common import ( | ||||
| from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry | ||||
| from authentik.core.models import ( | ||||
|     AuthenticatedSession, | ||||
|     GroupSourceConnection, | ||||
|     PropertyMapping, | ||||
|     Provider, | ||||
|     Source, | ||||
|     User, | ||||
|     UserSourceConnection, | ||||
| ) | ||||
| from authentik.enterprise.license import LicenseKey | ||||
| @ -57,13 +54,11 @@ from authentik.events.utils import cleanse_dict | ||||
| from authentik.flows.models import FlowToken, Stage | ||||
| from authentik.lib.models import SerializerModel | ||||
| from authentik.lib.sentry import SentryIgnoredException | ||||
| from authentik.lib.utils.reflection import get_apps | ||||
| from authentik.outposts.models import OutpostServiceConnection | ||||
| from authentik.policies.models import Policy, PolicyBindingModel | ||||
| from authentik.policies.reputation.models import Reputation | ||||
| from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken | ||||
| from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser | ||||
| from authentik.rbac.models import Role | ||||
| from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser | ||||
| from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType | ||||
| from authentik.tenants.models import Tenant | ||||
| @ -92,7 +87,6 @@ def excluded_models() -> list[type[Model]]: | ||||
|         Source, | ||||
|         PropertyMapping, | ||||
|         UserSourceConnection, | ||||
|         GroupSourceConnection, | ||||
|         Stage, | ||||
|         OutpostServiceConnection, | ||||
|         Policy, | ||||
| @ -142,16 +136,6 @@ def transaction_rollback(): | ||||
|         pass | ||||
|  | ||||
|  | ||||
| def rbac_models() -> dict: | ||||
|     models = {} | ||||
|     for app in get_apps(): | ||||
|         for model in app.get_models(): | ||||
|             if not is_model_allowed(model): | ||||
|                 continue | ||||
|             models[model._meta.model_name] = app.label | ||||
|     return models | ||||
|  | ||||
|  | ||||
| class Importer: | ||||
|     """Import Blueprint from raw dict or YAML/JSON""" | ||||
|  | ||||
| @ -170,10 +154,7 @@ class Importer: | ||||
|  | ||||
|     def default_context(self): | ||||
|         """Default context""" | ||||
|         return { | ||||
|             "goauthentik.io/enterprise/licensed": LicenseKey.get_total().status().is_valid, | ||||
|             "goauthentik.io/rbac/models": rbac_models(), | ||||
|         } | ||||
|         return {"goauthentik.io/enterprise/licensed": LicenseKey.get_total().is_valid()} | ||||
|  | ||||
|     @staticmethod | ||||
|     def from_string(yaml_input: str, context: dict | None = None) -> "Importer": | ||||
| @ -233,17 +214,14 @@ class Importer: | ||||
|  | ||||
|         return main_query | sub_query | ||||
|  | ||||
|     def _validate_single(self, entry: BlueprintEntry) -> BaseSerializer | None:  # noqa: PLR0915 | ||||
|     def _validate_single(self, entry: BlueprintEntry) -> BaseSerializer | None: | ||||
|         """Validate a single entry""" | ||||
|         if not entry.check_all_conditions_match(self._import): | ||||
|             self.logger.debug("One or more conditions of this entry are not fulfilled, skipping") | ||||
|             return None | ||||
|  | ||||
|         model_app_label, model_name = entry.get_model(self._import).split(".") | ||||
|         try: | ||||
|         model: type[SerializerModel] = registry.get_model(model_app_label, model_name) | ||||
|         except LookupError as exc: | ||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc | ||||
|         # Don't use isinstance since we don't want to check for inheritance | ||||
|         if not is_model_allowed(model): | ||||
|             raise EntryInvalidError.from_entry(f"Model {model} not allowed", entry) | ||||
| @ -318,7 +296,10 @@ class Importer: | ||||
|         try: | ||||
|             full_data = self.__update_pks_for_attrs(entry.get_attrs(self._import)) | ||||
|         except ValueError as exc: | ||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc | ||||
|             raise EntryInvalidError.from_entry( | ||||
|                 exc, | ||||
|                 entry, | ||||
|             ) from exc | ||||
|         always_merger.merge(full_data, updated_identifiers) | ||||
|         serializer_kwargs["data"] = full_data | ||||
|  | ||||
| @ -339,15 +320,6 @@ class Importer: | ||||
|             ) from exc | ||||
|         return serializer | ||||
|  | ||||
|     def _apply_permissions(self, instance: Model, entry: BlueprintEntry): | ||||
|         """Apply object-level permissions for an entry""" | ||||
|         for perm in entry.get_permissions(self._import): | ||||
|             if perm.user is not None: | ||||
|                 assign_perm(perm.permission, User.objects.get(pk=perm.user), instance) | ||||
|             if perm.role is not None: | ||||
|                 role = Role.objects.get(pk=perm.role) | ||||
|                 role.assign_permission(perm.permission, obj=instance) | ||||
|  | ||||
|     def apply(self) -> bool: | ||||
|         """Apply (create/update) models yaml, in database transaction""" | ||||
|         try: | ||||
| @ -412,7 +384,6 @@ class Importer: | ||||
|                 if "pk" in entry.identifiers: | ||||
|                     self.__pk_map[entry.identifiers["pk"]] = instance.pk | ||||
|                 entry._state = BlueprintEntryState(instance) | ||||
|                 self._apply_permissions(instance, entry) | ||||
|             elif state == BlueprintEntryDesiredState.ABSENT: | ||||
|                 instance: Model | None = serializer.instance | ||||
|                 if instance.pk: | ||||
| @ -429,7 +400,7 @@ class Importer: | ||||
|         orig_import = deepcopy(self._import) | ||||
|         if self._import.version != 1: | ||||
|             self.logger.warning("Invalid blueprint version") | ||||
|             return False, [LogEvent("Invalid blueprint version", log_level="warning", logger=None)] | ||||
|             return False, [{"event": "Invalid blueprint version"}] | ||||
|         with ( | ||||
|             transaction_rollback(), | ||||
|             capture_logs() as logs, | ||||
|  | ||||
| @ -55,7 +55,6 @@ class BrandSerializer(ModelSerializer): | ||||
|             "flow_unenrollment", | ||||
|             "flow_user_settings", | ||||
|             "flow_device_code", | ||||
|             "default_application", | ||||
|             "web_certificate", | ||||
|             "attributes", | ||||
|         ] | ||||
|  | ||||
| @ -9,6 +9,3 @@ class AuthentikBrandsConfig(AppConfig): | ||||
|     name = "authentik.brands" | ||||
|     label = "authentik_brands" | ||||
|     verbose_name = "authentik Brands" | ||||
|     mountpoints = { | ||||
|         "authentik.brands.urls_root": "", | ||||
|     } | ||||
|  | ||||
| @ -1,26 +0,0 @@ | ||||
| # Generated by Django 5.0.6 on 2024-07-04 20:32 | ||||
|  | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_brands", "0006_brand_authentik_b_domain_b9b24a_idx_and_more"), | ||||
|         ("authentik_core", "0035_alter_group_options_and_more"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="brand", | ||||
|             name="default_application", | ||||
|             field=models.ForeignKey( | ||||
|                 default=None, | ||||
|                 help_text="When set, external users will be redirected to this application after authenticating.", | ||||
|                 null=True, | ||||
|                 on_delete=django.db.models.deletion.SET_DEFAULT, | ||||
|                 to="authentik_core.application", | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @ -3,7 +3,6 @@ | ||||
| from uuid import uuid4 | ||||
|  | ||||
| from django.db import models | ||||
| from django.http import HttpRequest | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from rest_framework.serializers import Serializer | ||||
| from structlog.stdlib import get_logger | ||||
| @ -52,16 +51,6 @@ class Brand(SerializerModel): | ||||
|         Flow, null=True, on_delete=models.SET_NULL, related_name="brand_device_code" | ||||
|     ) | ||||
|  | ||||
|     default_application = models.ForeignKey( | ||||
|         "authentik_core.Application", | ||||
|         null=True, | ||||
|         default=None, | ||||
|         on_delete=models.SET_DEFAULT, | ||||
|         help_text=_( | ||||
|             "When set, external users will be redirected to this application after authenticating." | ||||
|         ), | ||||
|     ) | ||||
|  | ||||
|     web_certificate = models.ForeignKey( | ||||
|         CertificateKeyPair, | ||||
|         null=True, | ||||
| @ -99,13 +88,3 @@ class Brand(SerializerModel): | ||||
|             models.Index(fields=["domain"]), | ||||
|             models.Index(fields=["default"]), | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class WebfingerProvider(models.Model): | ||||
|     """Provider which supports webfinger discovery""" | ||||
|  | ||||
|     class Meta: | ||||
|         abstract = True | ||||
|  | ||||
|     def webfinger(self, resource: str, request: HttpRequest) -> dict: | ||||
|         raise NotImplementedError() | ||||
|  | ||||
| @ -5,11 +5,7 @@ from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.brands.api import Themes | ||||
| from authentik.brands.models import Brand | ||||
| from authentik.core.models import Application | ||||
| from authentik.core.tests.utils import create_test_admin_user, create_test_brand | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.providers.oauth2.models import OAuth2Provider | ||||
| from authentik.providers.saml.models import SAMLProvider | ||||
|  | ||||
|  | ||||
| class TestBrands(APITestCase): | ||||
| @ -79,45 +75,3 @@ class TestBrands(APITestCase): | ||||
|             reverse("authentik_api:brand-list"), data={"domain": "bar", "default": True} | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 400) | ||||
|  | ||||
|     def test_webfinger_no_app(self): | ||||
|         """Test Webfinger""" | ||||
|         create_test_brand() | ||||
|         self.assertJSONEqual( | ||||
|             self.client.get(reverse("authentik_brands:webfinger")).content.decode(), {} | ||||
|         ) | ||||
|  | ||||
|     def test_webfinger_not_supported(self): | ||||
|         """Test Webfinger""" | ||||
|         brand = create_test_brand() | ||||
|         provider = SAMLProvider.objects.create( | ||||
|             name=generate_id(), | ||||
|         ) | ||||
|         app = Application.objects.create(name=generate_id(), slug=generate_id(), provider=provider) | ||||
|         brand.default_application = app | ||||
|         brand.save() | ||||
|         self.assertJSONEqual( | ||||
|             self.client.get(reverse("authentik_brands:webfinger")).content.decode(), {} | ||||
|         ) | ||||
|  | ||||
|     def test_webfinger_oidc(self): | ||||
|         """Test Webfinger""" | ||||
|         brand = create_test_brand() | ||||
|         provider = OAuth2Provider.objects.create( | ||||
|             name=generate_id(), | ||||
|         ) | ||||
|         app = Application.objects.create(name=generate_id(), slug=generate_id(), provider=provider) | ||||
|         brand.default_application = app | ||||
|         brand.save() | ||||
|         self.assertJSONEqual( | ||||
|             self.client.get(reverse("authentik_brands:webfinger")).content.decode(), | ||||
|             { | ||||
|                 "links": [ | ||||
|                     { | ||||
|                         "href": f"http://testserver/application/o/{app.slug}/", | ||||
|                         "rel": "http://openid.net/specs/connect/1.0/issuer", | ||||
|                     } | ||||
|                 ], | ||||
|                 "subject": None, | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
| @ -1,9 +0,0 @@ | ||||
| """authentik brand root URLs""" | ||||
|  | ||||
| from django.urls import path | ||||
|  | ||||
| from authentik.brands.views.webfinger import WebFingerView | ||||
|  | ||||
| urlpatterns = [ | ||||
|     path(".well-known/webfinger", WebFingerView.as_view(), name="webfinger"), | ||||
| ] | ||||
| @ -5,7 +5,7 @@ from typing import Any | ||||
| from django.db.models import F, Q | ||||
| from django.db.models import Value as V | ||||
| from django.http.request import HttpRequest | ||||
| from sentry_sdk import get_current_span | ||||
| from sentry_sdk.hub import Hub | ||||
|  | ||||
| from authentik import get_full_version | ||||
| from authentik.brands.models import Brand | ||||
| @ -33,7 +33,7 @@ def context_processor(request: HttpRequest) -> dict[str, Any]: | ||||
|     brand = getattr(request, "brand", DEFAULT_BRAND) | ||||
|     tenant = getattr(request, "tenant", Tenant()) | ||||
|     trace = "" | ||||
|     span = get_current_span() | ||||
|     span = Hub.current.scope.span | ||||
|     if span: | ||||
|         trace = span.to_traceparent() | ||||
|     return { | ||||
|  | ||||
| @ -1,29 +0,0 @@ | ||||
| from typing import Any | ||||
|  | ||||
| from django.http import HttpRequest, HttpResponse, JsonResponse | ||||
| from django.views import View | ||||
|  | ||||
| from authentik.brands.models import Brand, WebfingerProvider | ||||
| from authentik.core.models import Application | ||||
|  | ||||
|  | ||||
| class WebFingerView(View): | ||||
|     """Webfinger endpoint""" | ||||
|  | ||||
|     def get(self, request: HttpRequest) -> HttpResponse: | ||||
|         brand: Brand = request.brand | ||||
|         if not brand.default_application: | ||||
|             return JsonResponse({}) | ||||
|         application: Application = brand.default_application | ||||
|         provider = application.get_provider() | ||||
|         if not provider or not isinstance(provider, WebfingerProvider): | ||||
|             return JsonResponse({}) | ||||
|         webfinger_data = provider.webfinger(request.GET.get("resource"), request) | ||||
|         return JsonResponse(webfinger_data) | ||||
|  | ||||
|     def dispatch(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse: | ||||
|         response = super().dispatch(request, *args, **kwargs) | ||||
|         # RFC7033 spec | ||||
|         response["Access-Control-Allow-Origin"] = "*" | ||||
|         response["Content-Type"] = "application/jrd+json" | ||||
|         return response | ||||
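The view above implements the RFC 7033 endpoint: it resolves the current brand's default application, asks that application's provider for the WebFinger document, and always responds with application/jrd+json plus a permissive CORS header. A hedged client-side sketch; the hostname and account are placeholders:

    import requests

    resp = requests.get(
        "https://authentik.example.com/.well-known/webfinger",
        params={"resource": "acct:user@authentik.example.com"},
        timeout=5,
    )
    print(resp.headers.get("Content-Type"))  # application/jrd+json
    # Empty object unless the brand's default application has a WebFinger-capable provider
    print(resp.json())
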
| @ -103,12 +103,7 @@ class ApplicationSerializer(ModelSerializer): | ||||
| class ApplicationViewSet(UsedByMixin, ModelViewSet): | ||||
|     """Application Viewset""" | ||||
|  | ||||
|     queryset = ( | ||||
|         Application.objects.all() | ||||
|         .with_provider() | ||||
|         .prefetch_related("policies") | ||||
|         .prefetch_related("backchannel_providers") | ||||
|     ) | ||||
|     queryset = Application.objects.all().prefetch_related("provider").prefetch_related("policies") | ||||
|     serializer_class = ApplicationSerializer | ||||
|     search_fields = [ | ||||
|         "name", | ||||
| @ -152,15 +147,6 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet): | ||||
|                 applications.append(application) | ||||
|         return applications | ||||
|  | ||||
|     def _filter_applications_with_launch_url( | ||||
|         self, paginated_apps: Iterator[Application] | ||||
|     ) -> list[Application]: | ||||
|         applications = [] | ||||
|         for app in paginated_apps: | ||||
|             if app.get_launch_url(): | ||||
|                 applications.append(app) | ||||
|         return applications | ||||
|  | ||||
|     @extend_schema( | ||||
|         parameters=[ | ||||
|             OpenApiParameter( | ||||
| @ -218,11 +204,6 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet): | ||||
|                 location=OpenApiParameter.QUERY, | ||||
|                 type=OpenApiTypes.INT, | ||||
|             ), | ||||
|             OpenApiParameter( | ||||
|                 name="only_with_launch_url", | ||||
|                 location=OpenApiParameter.QUERY, | ||||
|                 type=OpenApiTypes.BOOL, | ||||
|             ), | ||||
|         ] | ||||
|     ) | ||||
|     def list(self, request: Request) -> Response: | ||||
| @ -235,10 +216,6 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet): | ||||
|         if superuser_full_list and request.user.is_superuser: | ||||
|             return super().list(request) | ||||
|  | ||||
|         only_with_launch_url = str( | ||||
|             request.query_params.get("only_with_launch_url", "false") | ||||
|         ).lower() | ||||
|  | ||||
|         queryset = self._filter_queryset_for_list(self.get_queryset()) | ||||
|         paginator: Pagination = self.paginator | ||||
|         paginated_apps = paginator.paginate_queryset(queryset, request) | ||||
| @ -274,10 +251,6 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet): | ||||
|                     allowed_applications, | ||||
|                     timeout=86400, | ||||
|                 ) | ||||
|  | ||||
|         if only_with_launch_url == "true": | ||||
|             allowed_applications = self._filter_applications_with_launch_url(allowed_applications) | ||||
|  | ||||
|         serializer = self.get_serializer(allowed_applications, many=True) | ||||
|         return self.get_paginated_response(serializer.data) | ||||
|  | ||||
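The only_with_launch_url parameter documented in the schema above post-filters the already policy-checked page of applications. A hypothetical call against the list endpoint; the URL prefix and the authenticated test client are assumptions:

    # Return only applications that expose a launch URL.
    response = client.get(
        "/api/v3/core/applications/",
        {"only_with_launch_url": "true"},
    )
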
|  | ||||
| @ -2,13 +2,7 @@ | ||||
|  | ||||
| from drf_spectacular.types import OpenApiTypes | ||||
| from drf_spectacular.utils import OpenApiParameter, extend_schema | ||||
| from rest_framework.fields import ( | ||||
|     BooleanField, | ||||
|     CharField, | ||||
|     DateTimeField, | ||||
|     IntegerField, | ||||
|     SerializerMethodField, | ||||
| ) | ||||
| from rest_framework.fields import BooleanField, CharField, IntegerField, SerializerMethodField | ||||
| from rest_framework.permissions import IsAdminUser, IsAuthenticated | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| @ -26,9 +20,6 @@ class DeviceSerializer(MetaNameSerializer): | ||||
|     name = CharField() | ||||
|     type = SerializerMethodField() | ||||
|     confirmed = BooleanField() | ||||
|     created = DateTimeField(read_only=True) | ||||
|     last_updated = DateTimeField(read_only=True) | ||||
|     last_used = DateTimeField(read_only=True, allow_null=True) | ||||
|  | ||||
|     def get_type(self, instance: Device) -> str: | ||||
|         """Get type of device""" | ||||
|  | ||||
| @ -2,15 +2,8 @@ | ||||
|  | ||||
| from json import dumps | ||||
|  | ||||
| from django_filters.filters import AllValuesMultipleFilter, BooleanFilter | ||||
| from django_filters.filterset import FilterSet | ||||
| from drf_spectacular.types import OpenApiTypes | ||||
| from drf_spectacular.utils import ( | ||||
|     OpenApiParameter, | ||||
|     OpenApiResponse, | ||||
|     extend_schema, | ||||
|     extend_schema_field, | ||||
| ) | ||||
| from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema | ||||
| from guardian.shortcuts import get_objects_for_user | ||||
| from rest_framework import mixins | ||||
| from rest_framework.decorators import action | ||||
| @ -30,10 +23,8 @@ from authentik.core.api.utils import ( | ||||
|     PassiveSerializer, | ||||
| ) | ||||
| from authentik.core.expression.evaluator import PropertyMappingEvaluator | ||||
| from authentik.core.expression.exceptions import PropertyMappingExpressionException | ||||
| from authentik.core.models import Group, PropertyMapping, User | ||||
| from authentik.events.utils import sanitize_item | ||||
| from authentik.lib.utils.errors import exception_to_string | ||||
| from authentik.policies.api.exec import PolicyTestSerializer | ||||
| from authentik.rbac.decorators import permission_required | ||||
|  | ||||
| @ -76,18 +67,6 @@ class PropertyMappingSerializer(ManagedSerializer, ModelSerializer, MetaNameSeri | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class PropertyMappingFilterSet(FilterSet): | ||||
|     """Filter for PropertyMapping""" | ||||
|  | ||||
|     managed = extend_schema_field(OpenApiTypes.STR)(AllValuesMultipleFilter(field_name="managed")) | ||||
|  | ||||
|     managed__isnull = BooleanFilter(field_name="managed", lookup_expr="isnull") | ||||
|  | ||||
|     class Meta: | ||||
|         model = PropertyMapping | ||||
|         fields = ["name", "managed"] | ||||
|  | ||||
|  | ||||
| class PropertyMappingViewSet( | ||||
|     TypesMixin, | ||||
|     mixins.RetrieveModelMixin, | ||||
| @ -108,9 +87,11 @@ class PropertyMappingViewSet( | ||||
|  | ||||
|     queryset = PropertyMapping.objects.select_subclasses() | ||||
|     serializer_class = PropertyMappingSerializer | ||||
|     filterset_class = PropertyMappingFilterSet | ||||
|     search_fields = [ | ||||
|         "name", | ||||
|     ] | ||||
|     filterset_fields = {"managed": ["isnull"]} | ||||
|     ordering = ["name"] | ||||
|     search_fields = ["name"] | ||||
|  | ||||
|     @permission_required("authentik_core.view_propertymapping") | ||||
|     @extend_schema( | ||||
| @ -164,15 +145,12 @@ class PropertyMappingViewSet( | ||||
|  | ||||
|         response_data = {"successful": True, "result": ""} | ||||
|         try: | ||||
|             result = mapping.evaluate(dry_run=True, **context) | ||||
|             result = mapping.evaluate(**context) | ||||
|             response_data["result"] = dumps( | ||||
|                 sanitize_item(result), indent=(4 if format_result else None) | ||||
|             ) | ||||
|         except PropertyMappingExpressionException as exc: | ||||
|             response_data["result"] = exception_to_string(exc.exc) | ||||
|             response_data["successful"] = False | ||||
|         except Exception as exc: | ||||
|             response_data["result"] = exception_to_string(exc) | ||||
|             response_data["result"] = str(exc) | ||||
|             response_data["successful"] = False | ||||
|         response = PropertyMappingTestResultSerializer(response_data) | ||||
|         return Response(response.data) | ||||
|  | ||||
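The PropertyMappingFilterSet shown earlier in this file keeps the old managed__isnull behaviour and additionally allows filtering on exact managed identifiers. A hedged usage sketch; the URL prefix, the managed identifier, and the authenticated client are assumptions:

    # Only user-created mappings, i.e. without a managed marker:
    client.get("/api/v3/propertymappings/all/", {"managed__isnull": "true"})
    # Only mappings shipped under a specific managed identifier:
    client.get("/api/v3/propertymappings/all/", {"managed": "goauthentik.io/example"})
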
| @ -19,7 +19,7 @@ from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | ||||
| from authentik.core.api.object_types import TypesMixin | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import MetaNameSerializer, ModelSerializer | ||||
| from authentik.core.models import GroupSourceConnection, Source, UserSourceConnection | ||||
| from authentik.core.models import Source, UserSourceConnection | ||||
| from authentik.core.types import UserSettingSerializer | ||||
| from authentik.lib.utils.file import ( | ||||
|     FilePathSerializer, | ||||
| @ -60,8 +60,6 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer): | ||||
|             "enabled", | ||||
|             "authentication_flow", | ||||
|             "enrollment_flow", | ||||
|             "user_property_mappings", | ||||
|             "group_property_mappings", | ||||
|             "component", | ||||
|             "verbose_name", | ||||
|             "verbose_name_plural", | ||||
| @ -190,47 +188,6 @@ class UserSourceConnectionViewSet( | ||||
|     queryset = UserSourceConnection.objects.all() | ||||
|     serializer_class = UserSourceConnectionSerializer | ||||
|     permission_classes = [OwnerSuperuserPermissions] | ||||
|     filterset_fields = ["user", "source__slug"] | ||||
|     search_fields = ["source__slug"] | ||||
|     filterset_fields = ["user"] | ||||
|     filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter] | ||||
|     ordering = ["source__slug", "pk"] | ||||
|  | ||||
|  | ||||
| class GroupSourceConnectionSerializer(SourceSerializer): | ||||
|     """Group Source Connection Serializer""" | ||||
|  | ||||
|     source = SourceSerializer(read_only=True) | ||||
|  | ||||
|     class Meta: | ||||
|         model = GroupSourceConnection | ||||
|         fields = [ | ||||
|             "pk", | ||||
|             "group", | ||||
|             "source", | ||||
|             "identifier", | ||||
|             "created", | ||||
|         ] | ||||
|         extra_kwargs = { | ||||
|             "group": {"read_only": True}, | ||||
|             "identifier": {"read_only": True}, | ||||
|             "created": {"read_only": True}, | ||||
|         } | ||||
|  | ||||
|  | ||||
| class GroupSourceConnectionViewSet( | ||||
|     mixins.RetrieveModelMixin, | ||||
|     mixins.UpdateModelMixin, | ||||
|     mixins.DestroyModelMixin, | ||||
|     UsedByMixin, | ||||
|     mixins.ListModelMixin, | ||||
|     GenericViewSet, | ||||
| ): | ||||
|     """Group-source connection Viewset""" | ||||
|  | ||||
|     queryset = GroupSourceConnection.objects.all() | ||||
|     serializer_class = GroupSourceConnectionSerializer | ||||
|     permission_classes = [OwnerSuperuserPermissions] | ||||
|     filterset_fields = ["group", "source__slug"] | ||||
|     search_fields = ["source__slug"] | ||||
|     filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter] | ||||
|     ordering = ["source__slug", "pk"] | ||||
|     ordering = ["pk"] | ||||
|  | ||||
| @ -14,7 +14,6 @@ from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
|  | ||||
| from authentik.core.api.utils import PassiveSerializer | ||||
| from authentik.rbac.filters import ObjectFilter | ||||
|  | ||||
|  | ||||
| class DeleteAction(Enum): | ||||
| @ -54,7 +53,7 @@ class UsedByMixin: | ||||
|     @extend_schema( | ||||
|         responses={200: UsedBySerializer(many=True)}, | ||||
|     ) | ||||
|     @action(detail=True, pagination_class=None, filter_backends=[ObjectFilter]) | ||||
|     @action(detail=True, pagination_class=None, filter_backends=[]) | ||||
|     def used_by(self, request: Request, *args, **kwargs) -> Response: | ||||
|         """Get a list of all objects that use this object""" | ||||
|         model: Model = self.get_object() | ||||
|  | ||||
| @ -5,7 +5,6 @@ from json import loads | ||||
| from typing import Any | ||||
|  | ||||
| from django.contrib.auth import update_session_auth_hash | ||||
| from django.contrib.auth.models import Permission | ||||
| from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||
| from django.core.cache import cache | ||||
| from django.db.models.functions import ExtractHour | ||||
| @ -34,21 +33,15 @@ from drf_spectacular.utils import ( | ||||
| ) | ||||
| from guardian.shortcuts import get_objects_for_user | ||||
| from rest_framework.decorators import action | ||||
| from rest_framework.exceptions import ValidationError | ||||
| from rest_framework.fields import ( | ||||
|     BooleanField, | ||||
|     CharField, | ||||
|     ChoiceField, | ||||
|     DateTimeField, | ||||
|     IntegerField, | ||||
|     ListField, | ||||
|     SerializerMethodField, | ||||
| ) | ||||
| from rest_framework.fields import CharField, IntegerField, ListField, SerializerMethodField | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.serializers import ( | ||||
|     BooleanField, | ||||
|     DateTimeField, | ||||
|     ListSerializer, | ||||
|     PrimaryKeyRelatedField, | ||||
|     ValidationError, | ||||
| ) | ||||
| from rest_framework.validators import UniqueValidator | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
| @ -85,7 +78,6 @@ from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner | ||||
| from authentik.flows.views.executor import QS_KEY_TOKEN | ||||
| from authentik.lib.avatars import get_avatar | ||||
| from authentik.rbac.decorators import permission_required | ||||
| from authentik.rbac.models import get_permission_choices | ||||
| from authentik.stages.email.models import EmailStage | ||||
| from authentik.stages.email.tasks import send_mails | ||||
| from authentik.stages.email.utils import TemplateEmailMessage | ||||
| @ -149,19 +141,12 @@ class UserSerializer(ModelSerializer): | ||||
|         super().__init__(*args, **kwargs) | ||||
|         if SERIALIZER_CONTEXT_BLUEPRINT in self.context: | ||||
|             self.fields["password"] = CharField(required=False, allow_null=True) | ||||
|             self.fields["permissions"] = ListField( | ||||
|                 required=False, child=ChoiceField(choices=get_permission_choices()) | ||||
|             ) | ||||
|  | ||||
|     def create(self, validated_data: dict) -> User: | ||||
|         """If this serializer is used in the blueprint context, we allow for | ||||
|         directly setting a password. However, this should be done via the `set_password` | ||||
|         method instead of directly setting it like rest_framework.""" | ||||
|         password = validated_data.pop("password", None) | ||||
|         permissions = Permission.objects.filter( | ||||
|             codename__in=[x.split(".")[1] for x in validated_data.pop("permissions", [])] | ||||
|         ) | ||||
|         validated_data["user_permissions"] = permissions | ||||
|         instance: User = super().create(validated_data) | ||||
|         self._set_password(instance, password) | ||||
|         return instance | ||||
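In the blueprint context the newer serializer also accepts a permissions list; judging by the split in create() above, each entry is an "app_label.codename" string that gets resolved to a global permission. A hedged sketch of the payload shape; the permission names are examples only:

    data = {
        "username": "deploy-bot",
        "name": "Deploy Bot",
        "permissions": [
            "authentik_core.view_application",
            "authentik_flows.view_flow",
        ],
    }
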
| @ -170,10 +155,6 @@ class UserSerializer(ModelSerializer): | ||||
|         """Same as `create` above, set the password directly if we're in a blueprint | ||||
|         context""" | ||||
|         password = validated_data.pop("password", None) | ||||
|         permissions = Permission.objects.filter( | ||||
|             codename__in=[x.split(".")[1] for x in validated_data.pop("permissions", [])] | ||||
|         ) | ||||
|         validated_data["user_permissions"] = permissions | ||||
|         instance = super().update(instance, validated_data) | ||||
|         self._set_password(instance, password) | ||||
|         return instance | ||||
| @ -678,13 +659,10 @@ class UserViewSet(UsedByMixin, ModelViewSet): | ||||
|         if not request.tenant.impersonation: | ||||
|             LOGGER.debug("User attempted to impersonate", user=request.user) | ||||
|             return Response(status=401) | ||||
|         user_to_be = self.get_object() | ||||
|         # Check both object-level perms and global perms | ||||
|         if not request.user.has_perm( | ||||
|             "authentik_core.impersonate", user_to_be | ||||
|         ) and not request.user.has_perm("authentik_core.impersonate"): | ||||
|         if not request.user.has_perm("impersonate"): | ||||
|             LOGGER.debug("User attempted to impersonate without permissions", user=request.user) | ||||
|             return Response(status=401) | ||||
|         user_to_be = self.get_object() | ||||
|         if user_to_be.pk == self.request.user.pk: | ||||
|             LOGGER.debug("User attempted to impersonate themselves", user=request.user) | ||||
|             return Response(status=401) | ||||
|  | ||||
| @ -1,32 +0,0 @@ | ||||
| """Change user type""" | ||||
|  | ||||
| from authentik.core.models import User, UserTypes | ||||
| from authentik.tenants.management import TenantCommand | ||||
|  | ||||
|  | ||||
| class Command(TenantCommand): | ||||
|     """Change user type""" | ||||
|  | ||||
|     def add_arguments(self, parser): | ||||
|         parser.add_argument("--type", type=str, required=True) | ||||
|         parser.add_argument("--all", action="store_true", default=False) | ||||
|         parser.add_argument("usernames", nargs="*", type=str) | ||||
|  | ||||
|     def handle_per_tenant(self, **options): | ||||
|         print(options) | ||||
|         new_type = UserTypes(options["type"]) | ||||
|         qs = ( | ||||
|             User.objects.exclude_anonymous() | ||||
|             .exclude(type=UserTypes.SERVICE_ACCOUNT) | ||||
|             .exclude(type=UserTypes.INTERNAL_SERVICE_ACCOUNT) | ||||
|         ) | ||||
|         if options["usernames"] and options["all"]: | ||||
|             self.stderr.write("--all and usernames specified, only one can be specified") | ||||
|             return | ||||
|         if not options["usernames"] and not options["all"]: | ||||
|             self.stderr.write("--all or usernames must be specified") | ||||
|             return | ||||
|         if options["usernames"] and not options["all"]: | ||||
|             qs = qs.filter(username__in=options["usernames"]) | ||||
|         updated = qs.update(type=new_type) | ||||
|         self.stdout.write(f"Updated {updated} users.") | ||||
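Since the removed command above is a TenantCommand, it can also be driven from code via Django's call_command. A hypothetical invocation; the command name is derived from the module filename, which this diff does not show, and the type value assumes UserTypes.INTERNAL == "internal":

    from django.core.management import call_command

    # --all and explicit usernames are mutually exclusive, as enforced above.
    call_command("change_user_type", "alice", "bob", type="internal")
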
| @ -1,43 +0,0 @@ | ||||
| # Generated by Django 5.0.2 on 2024-02-29 11:05 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_core", "0035_alter_group_options_and_more"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="source", | ||||
|             name="group_property_mappings", | ||||
|             field=models.ManyToManyField( | ||||
|                 blank=True, | ||||
|                 default=None, | ||||
|                 related_name="source_grouppropertymappings_set", | ||||
|                 to="authentik_core.propertymapping", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name="source", | ||||
|             name="user_property_mappings", | ||||
|             field=models.ManyToManyField( | ||||
|                 blank=True, | ||||
|                 default=None, | ||||
|                 related_name="source_userpropertymappings_set", | ||||
|                 to="authentik_core.propertymapping", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="source", | ||||
|             name="property_mappings", | ||||
|             field=models.ManyToManyField( | ||||
|                 blank=True, | ||||
|                 default=None, | ||||
|                 related_name="source_set", | ||||
|                 to="authentik_core.propertymapping", | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @ -1,18 +0,0 @@ | ||||
| # Generated by Django 5.0.2 on 2024-02-29 11:21 | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_sources_ldap", "0005_remove_ldappropertymapping_object_field_and_more"), | ||||
|         ("authentik_core", "0036_source_group_property_mappings_and_more"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name="source", | ||||
|             name="property_mappings", | ||||
|         ), | ||||
|     ] | ||||
| @ -1,19 +0,0 @@ | ||||
| # Generated by Django 5.0.7 on 2024-07-22 13:32 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_core", "0037_remove_source_property_mappings"), | ||||
|         ("authentik_flows", "0027_auto_20231028_1424"), | ||||
|         ("authentik_policies", "0011_policybinding_failure_result_and_more"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddIndex( | ||||
|             model_name="source", | ||||
|             index=models.Index(fields=["enabled"], name="authentik_c_enabled_d72365_idx"), | ||||
|         ), | ||||
|     ] | ||||
| @ -1,67 +0,0 @@ | ||||
| # Generated by Django 5.0.7 on 2024-08-01 18:52 | ||||
|  | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_core", "0038_source_authentik_c_enabled_d72365_idx"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="source", | ||||
|             name="group_matching_mode", | ||||
|             field=models.TextField( | ||||
|                 choices=[ | ||||
|                     ("identifier", "Use the source-specific identifier"), | ||||
|                     ( | ||||
|                         "name_link", | ||||
|                         "Link to a group with identical name. Can have security implications when a group name is used with another source.", | ||||
|                     ), | ||||
|                     ( | ||||
|                         "name_deny", | ||||
|                         "Use the group name, but deny enrollment when the name already exists.", | ||||
|                     ), | ||||
|                 ], | ||||
|                 default="identifier", | ||||
|                 help_text="How the source determines if an existing group should be used or a new group created.", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="group", | ||||
|             name="name", | ||||
|             field=models.TextField(verbose_name="name"), | ||||
|         ), | ||||
|         migrations.CreateModel( | ||||
|             name="GroupSourceConnection", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         auto_created=True, primary_key=True, serialize=False, verbose_name="ID" | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("created", models.DateTimeField(auto_now_add=True)), | ||||
|                 ("last_updated", models.DateTimeField(auto_now=True)), | ||||
|                 ("identifier", models.TextField()), | ||||
|                 ( | ||||
|                     "group", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, to="authentik_core.group" | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "source", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, to="authentik_core.source" | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|             options={ | ||||
|                 "unique_together": {("group", "source")}, | ||||
|             }, | ||||
|         ), | ||||
|     ] | ||||
| @ -11,7 +11,6 @@ from django.contrib.auth.models import AbstractUser | ||||
| from django.contrib.auth.models import UserManager as DjangoUserManager | ||||
| from django.db import models | ||||
| from django.db.models import Q, QuerySet, options | ||||
| from django.db.models.constants import LOOKUP_SEP | ||||
| from django.http import HttpRequest | ||||
| from django.utils.functional import SimpleLazyObject, cached_property | ||||
| from django.utils.timezone import now | ||||
| @ -29,7 +28,6 @@ from authentik.core.types import UILoginButton, UserSettingSerializer | ||||
| from authentik.lib.avatars import get_avatar | ||||
| from authentik.lib.expression.exceptions import ControlFlowException | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.lib.merge import MERGE_LIST_UNIQUE | ||||
| from authentik.lib.models import ( | ||||
|     CreatedUpdatedModel, | ||||
|     DomainlessFormattedURLValidator, | ||||
| @ -102,38 +100,6 @@ class UserTypes(models.TextChoices): | ||||
|     INTERNAL_SERVICE_ACCOUNT = "internal_service_account" | ||||
|  | ||||
|  | ||||
| class AttributesMixin(models.Model): | ||||
|     """Adds an attributes property to a model""" | ||||
|  | ||||
|     attributes = models.JSONField(default=dict, blank=True) | ||||
|  | ||||
|     class Meta: | ||||
|         abstract = True | ||||
|  | ||||
|     def update_attributes(self, properties: dict[str, Any]): | ||||
|         """Update fields and attributes, merging the attributes dict instead of replacing it""" | ||||
|         for key, value in properties.items(): | ||||
|             if key == "attributes": | ||||
|                 continue | ||||
|             setattr(self, key, value) | ||||
|         final_attributes = {} | ||||
|         MERGE_LIST_UNIQUE.merge(final_attributes, self.attributes) | ||||
|         MERGE_LIST_UNIQUE.merge(final_attributes, properties.get("attributes", {})) | ||||
|         self.attributes = final_attributes | ||||
|         self.save() | ||||
|  | ||||
|     @classmethod | ||||
|     def update_or_create_attributes( | ||||
|         cls, query: dict[str, Any], properties: dict[str, Any] | ||||
|     ) -> tuple[models.Model, bool]: | ||||
|         """Same as django's update_or_create but correctly updates attributes by merging dicts""" | ||||
|         instance = cls.objects.filter(**query).first() | ||||
|         if not instance: | ||||
|             return cls.objects.create(**properties), True | ||||
|         instance.update_attributes(properties) | ||||
|         return instance, False | ||||
|  | ||||
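The mixin above exists so repeated syncs merge the JSON attributes field instead of overwriting it. A minimal sketch of the intended behaviour; it reuses the Group model from this file and assumes a configured database:

    group, created = Group.update_or_create_attributes(
        {"name": "engineering"},
        {"name": "engineering", "attributes": {"team": "platform"}},
    )
    # A later sync adds keys without dropping the existing ones:
    group.update_attributes({"attributes": {"on_call": "alice"}})
    assert group.attributes == {"team": "platform", "on_call": "alice"}
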
|  | ||||
| class GroupQuerySet(CTEQuerySet): | ||||
|     def with_children_recursive(self): | ||||
|         """Recursively get all groups that have the current queryset as parents | ||||
| @ -168,12 +134,12 @@ class GroupQuerySet(CTEQuerySet): | ||||
|         return cte.join(Group, group_uuid=cte.col.group_uuid).with_cte(cte) | ||||
|  | ||||
|  | ||||
| class Group(SerializerModel, AttributesMixin): | ||||
| class Group(SerializerModel): | ||||
|     """Group model which supports a basic hierarchy and has attributes""" | ||||
|  | ||||
|     group_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4) | ||||
|  | ||||
|     name = models.TextField(_("name")) | ||||
|     name = models.CharField(_("name"), max_length=80) | ||||
|     is_superuser = models.BooleanField( | ||||
|         default=False, help_text=_("Users added to this group will be superusers.") | ||||
|     ) | ||||
| @ -188,27 +154,10 @@ class Group(SerializerModel, AttributesMixin): | ||||
|         on_delete=models.SET_NULL, | ||||
|         related_name="children", | ||||
|     ) | ||||
|     attributes = models.JSONField(default=dict, blank=True) | ||||
|  | ||||
|     objects = GroupQuerySet.as_manager() | ||||
|  | ||||
|     class Meta: | ||||
|         unique_together = ( | ||||
|             ( | ||||
|                 "name", | ||||
|                 "parent", | ||||
|             ), | ||||
|         ) | ||||
|         indexes = [models.Index(fields=["name"])] | ||||
|         verbose_name = _("Group") | ||||
|         verbose_name_plural = _("Groups") | ||||
|         permissions = [ | ||||
|             ("add_user_to_group", _("Add user to group")), | ||||
|             ("remove_user_from_group", _("Remove user from group")), | ||||
|         ] | ||||
|  | ||||
|     def __str__(self): | ||||
|         return f"Group {self.name}" | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> Serializer: | ||||
|         from authentik.core.api.groups import GroupSerializer | ||||
| @ -233,6 +182,24 @@ class Group(SerializerModel, AttributesMixin): | ||||
|             qs = Group.objects.filter(group_uuid=self.group_uuid) | ||||
|         return qs.with_children_recursive() | ||||
|  | ||||
|     def __str__(self): | ||||
|         return f"Group {self.name}" | ||||
|  | ||||
|     class Meta: | ||||
|         unique_together = ( | ||||
|             ( | ||||
|                 "name", | ||||
|                 "parent", | ||||
|             ), | ||||
|         ) | ||||
|         indexes = [models.Index(fields=["name"])] | ||||
|         verbose_name = _("Group") | ||||
|         verbose_name_plural = _("Groups") | ||||
|         permissions = [ | ||||
|             ("add_user_to_group", _("Add user to group")), | ||||
|             ("remove_user_from_group", _("Remove user from group")), | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class UserQuerySet(models.QuerySet): | ||||
|     """User queryset""" | ||||
| @ -258,7 +225,7 @@ class UserManager(DjangoUserManager): | ||||
|         return self.get_queryset().exclude_anonymous() | ||||
|  | ||||
|  | ||||
| class User(SerializerModel, GuardianUserMixin, AttributesMixin, AbstractUser): | ||||
| class User(SerializerModel, GuardianUserMixin, AbstractUser): | ||||
|     """authentik User model, based on django's contrib auth user model.""" | ||||
|  | ||||
|     uuid = models.UUIDField(default=uuid4, editable=False, unique=True) | ||||
| @ -270,30 +237,10 @@ class User(SerializerModel, GuardianUserMixin, AttributesMixin, AbstractUser): | ||||
|     ak_groups = models.ManyToManyField("Group", related_name="users") | ||||
|     password_change_date = models.DateTimeField(auto_now_add=True) | ||||
|  | ||||
|     attributes = models.JSONField(default=dict, blank=True) | ||||
|  | ||||
|     objects = UserManager() | ||||
|  | ||||
|     class Meta: | ||||
|         verbose_name = _("User") | ||||
|         verbose_name_plural = _("Users") | ||||
|         permissions = [ | ||||
|             ("reset_user_password", _("Reset Password")), | ||||
|             ("impersonate", _("Can impersonate other users")), | ||||
|             ("assign_user_permissions", _("Can assign permissions to users")), | ||||
|             ("unassign_user_permissions", _("Can unassign permissions from users")), | ||||
|             ("preview_user", _("Can preview user data sent to providers")), | ||||
|             ("view_user_applications", _("View applications the user has access to")), | ||||
|         ] | ||||
|         indexes = [ | ||||
|             models.Index(fields=["last_login"]), | ||||
|             models.Index(fields=["password_change_date"]), | ||||
|             models.Index(fields=["uuid"]), | ||||
|             models.Index(fields=["path"]), | ||||
|             models.Index(fields=["type"]), | ||||
|         ] | ||||
|  | ||||
|     def __str__(self): | ||||
|         return self.username | ||||
|  | ||||
|     @staticmethod | ||||
|     def default_path() -> str: | ||||
|         """Get the default user path""" | ||||
| @ -375,6 +322,25 @@ class User(SerializerModel, GuardianUserMixin, AttributesMixin, AbstractUser): | ||||
|         """Get avatar, depending on authentik.avatar setting""" | ||||
|         return get_avatar(self) | ||||
|  | ||||
|     class Meta: | ||||
|         verbose_name = _("User") | ||||
|         verbose_name_plural = _("Users") | ||||
|         permissions = [ | ||||
|             ("reset_user_password", _("Reset Password")), | ||||
|             ("impersonate", _("Can impersonate other users")), | ||||
|             ("assign_user_permissions", _("Can assign permissions to users")), | ||||
|             ("unassign_user_permissions", _("Can unassign permissions from users")), | ||||
|             ("preview_user", _("Can preview user data sent to providers")), | ||||
|             ("view_user_applications", _("View applications the user has access to")), | ||||
|         ] | ||||
|         indexes = [ | ||||
|             models.Index(fields=["last_login"]), | ||||
|             models.Index(fields=["password_change_date"]), | ||||
|             models.Index(fields=["uuid"]), | ||||
|             models.Index(fields=["path"]), | ||||
|             models.Index(fields=["type"]), | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class Provider(SerializerModel): | ||||
|     """Application-independent Provider instance. For example SAML2 Remote, OAuth2 Application""" | ||||
| @ -462,14 +428,6 @@ class BackchannelProvider(Provider): | ||||
|         abstract = True | ||||
|  | ||||
|  | ||||
| class ApplicationQuerySet(QuerySet): | ||||
|     def with_provider(self) -> "QuerySet[Application]": | ||||
|         qs = self.select_related("provider") | ||||
|         for subclass in Provider.objects.get_queryset()._get_subclasses_recurse(Provider): | ||||
|             qs = qs.select_related(f"provider__{subclass}") | ||||
|         return qs | ||||
|  | ||||
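with_provider() select_related()s every concrete Provider subclass relation up front, which is what the newer get_provider() further down relies on to cast the provider without issuing one extra query per application. A hedged usage sketch:

    # A single query fetches applications together with their concrete provider rows.
    for app in Application.objects.all().with_provider():
        provider = app.get_provider()  # e.g. an OAuth2Provider instance, or None
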
|  | ||||
| class Application(SerializerModel, PolicyBindingModel): | ||||
|     """Every Application which uses authentik for authentication/identification/authorization | ||||
|     needs an Application record. Other authentication types can subclass this Model to | ||||
| @ -501,8 +459,6 @@ class Application(SerializerModel, PolicyBindingModel): | ||||
|     meta_description = models.TextField(default="", blank=True) | ||||
|     meta_publisher = models.TextField(default="", blank=True) | ||||
|  | ||||
|     objects = ApplicationQuerySet.as_manager() | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> Serializer: | ||||
|         from authentik.core.api.applications import ApplicationSerializer | ||||
| @ -539,28 +495,16 @@ class Application(SerializerModel, PolicyBindingModel): | ||||
|         return url | ||||
|  | ||||
|     def get_provider(self) -> Provider | None: | ||||
|         """Get casted provider instance. Needs Application queryset with_provider""" | ||||
|         """Get casted provider instance""" | ||||
|         if not self.provider: | ||||
|             return None | ||||
|  | ||||
|         candidates = [] | ||||
|         base_class = Provider | ||||
|         for subclass in base_class.objects.get_queryset()._get_subclasses_recurse(base_class): | ||||
|             parent = self.provider | ||||
|             for level in subclass.split(LOOKUP_SEP): | ||||
|         # if the Application object has been cached, self.provider is set | ||||
|         # but doing a direct query lookup will fail. | ||||
|         # In that case, just return None | ||||
|         try: | ||||
|                     parent = getattr(parent, level) | ||||
|                 except AttributeError: | ||||
|                     break | ||||
|             if parent in candidates: | ||||
|                 continue | ||||
|             idx = subclass.count(LOOKUP_SEP) | ||||
|             if type(parent) is not base_class: | ||||
|                 idx += 1 | ||||
|             candidates.insert(idx, parent) | ||||
|         if not candidates: | ||||
|             return Provider.objects.get_subclass(pk=self.provider.pk) | ||||
|         except Provider.DoesNotExist: | ||||
|             return None | ||||
|         return candidates[-1] | ||||
|  | ||||
|     def __str__(self): | ||||
|         return str(self.name) | ||||
| @ -590,19 +534,6 @@ class SourceUserMatchingModes(models.TextChoices): | ||||
|     ) | ||||
|  | ||||
|  | ||||
| class SourceGroupMatchingModes(models.TextChoices): | ||||
|     """Different modes a source can handle new/returning groups""" | ||||
|  | ||||
|     IDENTIFIER = "identifier", _("Use the source-specific identifier") | ||||
|     NAME_LINK = "name_link", _( | ||||
|         "Link to a group with identical name. Can have security implications " | ||||
|         "when a group name is used with another source." | ||||
|     ) | ||||
|     NAME_DENY = "name_deny", _( | ||||
|         "Use the group name, but deny enrollment when the name already exists." | ||||
|     ) | ||||
|  | ||||
|  | ||||
| class Source(ManagedModel, SerializerModel, PolicyBindingModel): | ||||
|     """Base Authentication source, i.e. an OAuth Provider, SAML Remote or LDAP Server""" | ||||
|  | ||||
| @ -612,12 +543,7 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | ||||
|     user_path_template = models.TextField(default="goauthentik.io/sources/%(slug)s") | ||||
|  | ||||
|     enabled = models.BooleanField(default=True) | ||||
|     user_property_mappings = models.ManyToManyField( | ||||
|         "PropertyMapping", default=None, blank=True, related_name="source_userpropertymappings_set" | ||||
|     ) | ||||
|     group_property_mappings = models.ManyToManyField( | ||||
|         "PropertyMapping", default=None, blank=True, related_name="source_grouppropertymappings_set" | ||||
|     ) | ||||
|     property_mappings = models.ManyToManyField("PropertyMapping", default=None, blank=True) | ||||
|     icon = models.FileField( | ||||
|         upload_to="source-icons/", | ||||
|         default=None, | ||||
| @ -652,14 +578,6 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | ||||
|             "a new user enrolled." | ||||
|         ), | ||||
|     ) | ||||
|     group_matching_mode = models.TextField( | ||||
|         choices=SourceGroupMatchingModes.choices, | ||||
|         default=SourceGroupMatchingModes.IDENTIFIER, | ||||
|         help_text=_( | ||||
|             "How the source determines if an existing group should be used or " | ||||
|             "a new group created." | ||||
|         ), | ||||
|     ) | ||||
|  | ||||
|     objects = InheritanceManager() | ||||
|  | ||||
| @ -689,11 +607,6 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | ||||
|         """Return component used to edit this object""" | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     @property | ||||
|     def property_mapping_type(self) -> "type[PropertyMapping]": | ||||
|         """Return property mapping type used by this object""" | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     def ui_login_button(self, request: HttpRequest) -> UILoginButton | None: | ||||
|         """If the source uses an HTTP-based flow, return UI information about the login | ||||
|         button. If the source doesn't use an HTTP-based flow, return None.""" | ||||
| @ -704,14 +617,6 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | ||||
|         user settings are available, or UserSettingSerializer.""" | ||||
|         return None | ||||
|  | ||||
|     def get_base_user_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]: | ||||
|         """Get base properties for a user to build final properties upon.""" | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     def get_base_group_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]: | ||||
|         """Get base properties for a group to build final properties upon.""" | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     def __str__(self): | ||||
|         return str(self.name) | ||||
|  | ||||
| @ -727,11 +632,6 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | ||||
|                     "name", | ||||
|                 ] | ||||
|             ), | ||||
|             models.Index( | ||||
|                 fields=[ | ||||
|                     "enabled", | ||||
|                 ] | ||||
|             ), | ||||
|         ] | ||||
|  | ||||
|  | ||||
| @ -755,27 +655,6 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel): | ||||
|         unique_together = (("user", "source"),) | ||||
|  | ||||
|  | ||||
| class GroupSourceConnection(SerializerModel, CreatedUpdatedModel): | ||||
|     """Connection between Group and Source.""" | ||||
|  | ||||
|     group = models.ForeignKey(Group, on_delete=models.CASCADE) | ||||
|     source = models.ForeignKey(Source, on_delete=models.CASCADE) | ||||
|     identifier = models.TextField() | ||||
|  | ||||
|     objects = InheritanceManager() | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> type[Serializer]: | ||||
|         """Get serializer for this model""" | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         return f"Group-source connection (group={self.group_id}, source={self.source_id})" | ||||
|  | ||||
|     class Meta: | ||||
|         unique_together = (("group", "source"),) | ||||
|  | ||||
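GroupSourceConnection mirrors UserSourceConnection: it stores the source-specific identifier of a group so later logins and syncs can find the same authentik group again. A minimal sketch; the identifier value is a placeholder:

    connection, _ = GroupSourceConnection.objects.get_or_create(
        group=group,
        source=source,
        defaults={"identifier": "cn=engineering,ou=groups,dc=example,dc=com"},
    )
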
|  | ||||
| class ExpiringModel(models.Model): | ||||
|     """Base Model which can expire, and is automatically cleaned up.""" | ||||
|  | ||||
| @ -908,7 +787,7 @@ class PropertyMapping(SerializerModel, ManagedModel): | ||||
|         except ControlFlowException as exc: | ||||
|             raise exc | ||||
|         except Exception as exc: | ||||
|             raise PropertyMappingExpressionException(exc, self) from exc | ||||
|             raise PropertyMappingExpressionException(self, exc) from exc | ||||
|  | ||||
|     def __str__(self): | ||||
|         return f"Property Mapping {self.name}" | ||||
|  | ||||
| @ -52,8 +52,6 @@ def user_logged_in_session(sender, request: HttpRequest, user: User, **_): | ||||
| @receiver(user_logged_out) | ||||
| def user_logged_out_session(sender, request: HttpRequest, user: User, **_): | ||||
|     """Delete AuthenticatedSession if it exists""" | ||||
|     if not request.session or not request.session.session_key: | ||||
|         return | ||||
|     AuthenticatedSession.objects.filter(session_key=request.session.session_key).delete() | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -4,7 +4,7 @@ from enum import Enum | ||||
| from typing import Any | ||||
|  | ||||
| from django.contrib import messages | ||||
| from django.db import IntegrityError, transaction | ||||
| from django.db import IntegrityError | ||||
| from django.db.models.query_utils import Q | ||||
| from django.http import HttpRequest, HttpResponse | ||||
| from django.shortcuts import redirect | ||||
| @ -12,20 +12,8 @@ from django.urls import reverse | ||||
| from django.utils.translation import gettext as _ | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.core.models import ( | ||||
|     Group, | ||||
|     GroupSourceConnection, | ||||
|     Source, | ||||
|     SourceGroupMatchingModes, | ||||
|     SourceUserMatchingModes, | ||||
|     User, | ||||
|     UserSourceConnection, | ||||
| ) | ||||
| from authentik.core.sources.mapper import SourceMapper | ||||
| from authentik.core.sources.stage import ( | ||||
|     PLAN_CONTEXT_SOURCES_CONNECTION, | ||||
|     PostSourceStage, | ||||
| ) | ||||
| from authentik.core.models import Source, SourceUserMatchingModes, User, UserSourceConnection | ||||
| from authentik.core.sources.stage import PLAN_CONTEXT_SOURCES_CONNECTION, PostSourceStage | ||||
| from authentik.events.models import Event, EventAction | ||||
| from authentik.flows.exceptions import FlowNonApplicableException | ||||
| from authentik.flows.models import Flow, FlowToken, Stage, in_memory_stage | ||||
| @ -48,10 +36,7 @@ from authentik.stages.password.stage import PLAN_CONTEXT_AUTHENTICATION_BACKEND | ||||
| from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT | ||||
| from authentik.stages.user_write.stage import PLAN_CONTEXT_USER_PATH | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
| SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token"  # nosec | ||||
| PLAN_CONTEXT_SOURCE_GROUPS = "source_groups" | ||||
|  | ||||
|  | ||||
| class Action(Enum): | ||||
| @ -85,69 +70,48 @@ class SourceFlowManager: | ||||
|     or deny the request.""" | ||||
|  | ||||
|     source: Source | ||||
|     mapper: SourceMapper | ||||
|     request: HttpRequest | ||||
|  | ||||
|     identifier: str | ||||
|  | ||||
|     user_connection_type: type[UserSourceConnection] = UserSourceConnection | ||||
|     group_connection_type: type[GroupSourceConnection] = GroupSourceConnection | ||||
|     connection_type: type[UserSourceConnection] = UserSourceConnection | ||||
|  | ||||
|     user_info: dict[str, Any] | ||||
|     enroll_info: dict[str, Any] | ||||
|     policy_context: dict[str, Any] | ||||
|     user_properties: dict[str, Any | dict[str, Any]] | ||||
|     groups_properties: dict[str, dict[str, Any | dict[str, Any]]] | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         source: Source, | ||||
|         request: HttpRequest, | ||||
|         identifier: str, | ||||
|         user_info: dict[str, Any], | ||||
|         policy_context: dict[str, Any], | ||||
|         enroll_info: dict[str, Any], | ||||
|     ) -> None: | ||||
|         self.source = source | ||||
|         self.mapper = SourceMapper(self.source) | ||||
|         self.request = request | ||||
|         self.identifier = identifier | ||||
|         self.user_info = user_info | ||||
|         self.enroll_info = enroll_info | ||||
|         self._logger = get_logger().bind(source=source, identifier=identifier) | ||||
|         self.policy_context = policy_context | ||||
|  | ||||
|         self.user_properties = self.mapper.build_object_properties( | ||||
|             object_type=User, request=request, user=None, **self.user_info | ||||
|         ) | ||||
|         self.groups_properties = { | ||||
|             group_id: self.mapper.build_object_properties( | ||||
|                 object_type=Group, | ||||
|                 request=request, | ||||
|                 user=None, | ||||
|                 group_id=group_id, | ||||
|                 **self.user_info, | ||||
|             ) | ||||
|             for group_id in self.user_properties.setdefault("groups", []) | ||||
|         } | ||||
|         del self.user_properties["groups"] | ||||
|         self.policy_context = {} | ||||
|  | ||||
|     def get_action(self, **kwargs) -> tuple[Action, UserSourceConnection | None]:  # noqa: PLR0911 | ||||
|         """decide which action should be taken""" | ||||
|         new_connection = self.user_connection_type(source=self.source, identifier=self.identifier) | ||||
|         new_connection = self.connection_type(source=self.source, identifier=self.identifier) | ||||
|         # When request is authenticated, always link | ||||
|         if self.request.user.is_authenticated: | ||||
|             new_connection.user = self.request.user | ||||
|             new_connection = self.update_user_connection(new_connection, **kwargs) | ||||
|             new_connection = self.update_connection(new_connection, **kwargs) | ||||
|             return Action.LINK, new_connection | ||||
|  | ||||
|         existing_connections = self.user_connection_type.objects.filter( | ||||
|         existing_connections = self.connection_type.objects.filter( | ||||
|             source=self.source, identifier=self.identifier | ||||
|         ) | ||||
|         if existing_connections.exists(): | ||||
|             connection = existing_connections.first() | ||||
|             return Action.AUTH, self.update_user_connection(connection, **kwargs) | ||||
|             return Action.AUTH, self.update_connection(connection, **kwargs) | ||||
|         # No connection exists, but we match on identifier, so enroll | ||||
|         if self.source.user_matching_mode == SourceUserMatchingModes.IDENTIFIER: | ||||
|             # We don't save the connection here because it doesn't have a user assigned yet | ||||
|             return Action.ENROLL, self.update_user_connection(new_connection, **kwargs) | ||||
|             return Action.ENROLL, self.update_connection(new_connection, **kwargs) | ||||
|  | ||||
|         # Check for existing users with matching attributes | ||||
|         query = Q() | ||||
| @ -156,24 +120,24 @@ class SourceFlowManager: | ||||
|             SourceUserMatchingModes.EMAIL_LINK, | ||||
|             SourceUserMatchingModes.EMAIL_DENY, | ||||
|         ]: | ||||
|             if not self.user_properties.get("email", None): | ||||
|                 self._logger.warning("Refusing to use none email") | ||||
|             if not self.enroll_info.get("email", None): | ||||
|                 self._logger.warning("Refusing to use none email", source=self.source) | ||||
|                 return Action.DENY, None | ||||
|             query = Q(email__exact=self.user_properties.get("email", None)) | ||||
|             query = Q(email__exact=self.enroll_info.get("email", None)) | ||||
|         if self.source.user_matching_mode in [ | ||||
|             SourceUserMatchingModes.USERNAME_LINK, | ||||
|             SourceUserMatchingModes.USERNAME_DENY, | ||||
|         ]: | ||||
|             if not self.user_properties.get("username", None): | ||||
|                 self._logger.warning("Refusing to use none username") | ||||
|             if not self.enroll_info.get("username", None): | ||||
|                 self._logger.warning("Refusing to use none username", source=self.source) | ||||
|                 return Action.DENY, None | ||||
|             query = Q(username__exact=self.user_properties.get("username", None)) | ||||
|             query = Q(username__exact=self.enroll_info.get("username", None)) | ||||
|         self._logger.debug("trying to link with existing user", query=query) | ||||
|         matching_users = User.objects.filter(query) | ||||
|         # No matching users, always enroll | ||||
|         if not matching_users.exists(): | ||||
|             self._logger.debug("no matching users found, enrolling") | ||||
|             return Action.ENROLL, self.update_user_connection(new_connection, **kwargs) | ||||
|             return Action.ENROLL, self.update_connection(new_connection, **kwargs) | ||||
|  | ||||
|         user = matching_users.first() | ||||
|         if self.source.user_matching_mode in [ | ||||
| @ -181,7 +145,7 @@ class SourceFlowManager: | ||||
|             SourceUserMatchingModes.USERNAME_LINK, | ||||
|         ]: | ||||
|             new_connection.user = user | ||||
|             new_connection = self.update_user_connection(new_connection, **kwargs) | ||||
|             new_connection = self.update_connection(new_connection, **kwargs) | ||||
|             return Action.LINK, new_connection | ||||
|         if self.source.user_matching_mode in [ | ||||
|             SourceUserMatchingModes.EMAIL_DENY, | ||||
| @ -192,10 +156,10 @@ class SourceFlowManager: | ||||
|         # Should never get here as default enroll case is returned above. | ||||
|         return Action.DENY, None  # pragma: no cover | ||||
|  | ||||
|     def update_user_connection( | ||||
|     def update_connection( | ||||
|         self, connection: UserSourceConnection, **kwargs | ||||
|     ) -> UserSourceConnection:  # pragma: no cover | ||||
|         """Optionally make changes to the user connection after it is looked up/created.""" | ||||
|         """Optionally make changes to the connection after it is looked up/created.""" | ||||
|         return connection | ||||
|  | ||||
|     def get_flow(self, **kwargs) -> HttpResponse: | ||||
| @ -251,31 +215,25 @@ class SourceFlowManager: | ||||
|         flow: Flow | None, | ||||
|         connection: UserSourceConnection, | ||||
|         stages: list[StageView] | None = None, | ||||
|         **flow_context, | ||||
|         **kwargs, | ||||
|     ) -> HttpResponse: | ||||
|         """Prepare Authentication Plan, redirect user FlowExecutor""" | ||||
|         # Ensure redirect is carried through when user was trying to | ||||
|         # authorize application | ||||
|         final_redirect = self.request.session.get(SESSION_KEY_GET, {}).get( | ||||
|             NEXT_ARG_NAME, "authentik_core:if-user" | ||||
|         ) | ||||
|         flow_context.update( | ||||
|         kwargs.update( | ||||
|             { | ||||
|                 # Since we authenticate the user by their token, they have no backend set | ||||
|                 PLAN_CONTEXT_AUTHENTICATION_BACKEND: BACKEND_INBUILT, | ||||
|                 PLAN_CONTEXT_SSO: True, | ||||
|                 PLAN_CONTEXT_SOURCE: self.source, | ||||
|                 PLAN_CONTEXT_SOURCES_CONNECTION: connection, | ||||
|                 PLAN_CONTEXT_SOURCE_GROUPS: self.groups_properties, | ||||
|             } | ||||
|         ) | ||||
|         flow_context.update(self.policy_context) | ||||
|         kwargs.update(self.policy_context) | ||||
|         if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session: | ||||
|             token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN) | ||||
|             self._logger.info("Replacing source flow with overridden flow", flow=token.flow.slug) | ||||
|             plan = token.plan | ||||
|             plan.context[PLAN_CONTEXT_IS_RESTORED] = token | ||||
|             plan.context.update(flow_context) | ||||
|             plan.context.update(kwargs) | ||||
|             for stage in self.get_stages_to_append(flow): | ||||
|                 plan.append_stage(stage) | ||||
|             if stages: | ||||
| @ -294,8 +252,8 @@ class SourceFlowManager: | ||||
|         final_redirect = self.request.session.get(SESSION_KEY_GET, {}).get( | ||||
|             NEXT_ARG_NAME, "authentik_core:if-user" | ||||
|         ) | ||||
|         if PLAN_CONTEXT_REDIRECT not in flow_context: | ||||
|             flow_context[PLAN_CONTEXT_REDIRECT] = final_redirect | ||||
|         if PLAN_CONTEXT_REDIRECT not in kwargs: | ||||
|             kwargs[PLAN_CONTEXT_REDIRECT] = final_redirect | ||||
|  | ||||
|         if not flow: | ||||
|             return bad_request_message( | ||||
| @ -307,12 +265,9 @@ class SourceFlowManager: | ||||
|         # We append some stages so the initial flow we get might be empty | ||||
|         planner.allow_empty_flows = True | ||||
|         planner.use_cache = False | ||||
|         plan = planner.plan(self.request, flow_context) | ||||
|         plan = planner.plan(self.request, kwargs) | ||||
|         for stage in self.get_stages_to_append(flow): | ||||
|             plan.append_stage(stage) | ||||
|         plan.append_stage( | ||||
|             in_memory_stage(GroupUpdateStage, group_connection_type=self.group_connection_type) | ||||
|         ) | ||||
|         if stages: | ||||
|             for stage in stages: | ||||
|                 plan.append_stage(stage) | ||||
| @ -399,123 +354,7 @@ class SourceFlowManager: | ||||
|                 ) | ||||
|             ], | ||||
|             **{ | ||||
|                 PLAN_CONTEXT_PROMPT: delete_none_values(self.user_properties), | ||||
|                 PLAN_CONTEXT_PROMPT: delete_none_values(self.enroll_info), | ||||
|                 PLAN_CONTEXT_USER_PATH: self.source.get_user_path(), | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class GroupUpdateStage(StageView): | ||||
|     """Dynamically injected stage which updates the user after enrollment/authentication.""" | ||||
|  | ||||
|     def get_action( | ||||
|         self, group_id: str, group_properties: dict[str, Any | dict[str, Any]] | ||||
|     ) -> tuple[Action, GroupSourceConnection | None]: | ||||
|         """decide which action should be taken""" | ||||
|         new_connection = self.group_connection_type(source=self.source, identifier=group_id) | ||||
|  | ||||
|         existing_connections = self.group_connection_type.objects.filter( | ||||
|             source=self.source, identifier=group_id | ||||
|         ) | ||||
|         if existing_connections.exists(): | ||||
|             return Action.LINK, existing_connections.first() | ||||
|         # No connection exists, but we match on identifier, so enroll | ||||
|         if self.source.group_matching_mode == SourceGroupMatchingModes.IDENTIFIER: | ||||
|             # We don't save the connection here cause it doesn't have a user assigned yet | ||||
|             return Action.ENROLL, new_connection | ||||
|  | ||||
|         # Check for existing groups with matching attributes | ||||
|         query = Q() | ||||
|         if self.source.group_matching_mode in [ | ||||
|             SourceGroupMatchingModes.NAME_LINK, | ||||
|             SourceGroupMatchingModes.NAME_DENY, | ||||
|         ]: | ||||
|             if not group_properties.get("name", None): | ||||
|                 LOGGER.warning( | ||||
|                     "Refusing to use none group name", source=self.source, group_id=group_id | ||||
|                 ) | ||||
|                 return Action.DENY, None | ||||
|             query = Q(name__exact=group_properties.get("name")) | ||||
|         LOGGER.debug( | ||||
|             "trying to link with existing group", source=self.source, query=query, group_id=group_id | ||||
|         ) | ||||
|         matching_groups = Group.objects.filter(query) | ||||
|         # No matching groups, always enroll | ||||
|         if not matching_groups.exists(): | ||||
|             LOGGER.debug( | ||||
|                 "no matching groups found, enrolling", source=self.source, group_id=group_id | ||||
|             ) | ||||
|             return Action.ENROLL, new_connection | ||||
|  | ||||
|         group = matching_groups.first() | ||||
|         if self.source.group_matching_mode in [ | ||||
|             SourceGroupMatchingModes.NAME_LINK, | ||||
|         ]: | ||||
|             new_connection.group = group | ||||
|             return Action.LINK, new_connection | ||||
|         if self.source.group_matching_mode in [ | ||||
|             SourceGroupMatchingModes.NAME_DENY, | ||||
|         ]: | ||||
|             LOGGER.info( | ||||
|                 "denying source because group exists", | ||||
|                 source=self.source, | ||||
|                 group=group, | ||||
|                 group_id=group_id, | ||||
|             ) | ||||
|             return Action.DENY, None | ||||
|         # Should never get here as default enroll case is returned above. | ||||
|         return Action.DENY, None  # pragma: no cover | ||||
|  | ||||
|     def handle_group( | ||||
|         self, group_id: str, group_properties: dict[str, Any | dict[str, Any]] | ||||
|     ) -> Group | None: | ||||
|         action, connection = self.get_action(group_id, group_properties) | ||||
|         if action == Action.ENROLL: | ||||
|             group = Group.objects.create(**group_properties) | ||||
|             connection.group = group | ||||
|             connection.save() | ||||
|             return group | ||||
|         elif action == Action.LINK: | ||||
|             group = connection.group | ||||
|             group.update_attributes(group_properties) | ||||
|             connection.save() | ||||
|             return group | ||||
|  | ||||
|         return None | ||||
|  | ||||
|     def handle_groups(self) -> bool: | ||||
|         self.source: Source = self.executor.plan.context[PLAN_CONTEXT_SOURCE] | ||||
|         self.user: User = self.executor.plan.context[PLAN_CONTEXT_PENDING_USER] | ||||
|         self.group_connection_type: GroupSourceConnection = ( | ||||
|             self.executor.current_stage.group_connection_type | ||||
|         ) | ||||
|  | ||||
|         raw_groups: dict[str, dict[str, Any | dict[str, Any]]] = self.executor.plan.context[ | ||||
|             PLAN_CONTEXT_SOURCE_GROUPS | ||||
|         ] | ||||
|         groups: list[Group] = [] | ||||
|  | ||||
|         for group_id, group_properties in raw_groups.items(): | ||||
|             group = self.handle_group(group_id, group_properties) | ||||
|             if not group: | ||||
|                 return False | ||||
|             groups.append(group) | ||||
|  | ||||
|         with transaction.atomic(): | ||||
|             self.user.ak_groups.remove( | ||||
|                 *self.user.ak_groups.filter(groupsourceconnection__source=self.source) | ||||
|             ) | ||||
|             self.user.ak_groups.add(*groups) | ||||
|  | ||||
|         return True | ||||
|  | ||||
|     def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse: | ||||
|         """Stage used after the user has been enrolled to sync their groups from source data""" | ||||
|         if self.handle_groups(): | ||||
|             return self.executor.stage_ok() | ||||
|         else: | ||||
|             return self.executor.stage_invalid("Failed to update groups. Please try again later.") | ||||
|  | ||||
|     def post(self, request: HttpRequest) -> HttpResponse: | ||||
|         """Wrapper for post requests""" | ||||
|         return self.get(request) | ||||
|  | ||||
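The flow_manager.py hunks above differ in whether a GroupUpdateStage is appended to the flow plan and defined at module level at all. The net effect of the removed handle_groups() on a user's memberships is easiest to see as a set operation; the following is a minimal, framework-free sketch (the function and argument names are illustrative, not authentik APIs) that mirrors the test_group_updates case removed further down in this diff.

    # Simplified model of the removed GroupUpdateStage.handle_groups():
    # memberships previously linked to this source are dropped, the freshly
    # resolved source groups are added, and groups from other origins survive.
    def sync_source_groups(current: set[str], source_managed: set[str], resolved: set[str]) -> set[str]:
        kept = current - source_managed  # memberships not owned by this source survive
        return kept | resolved           # resolved source groups are (re-)added

    # Mirrors the "other group"/"old group"/"new group" expectations in test_group_updates.
    assert sync_source_groups(
        current={"other group", "old group"},
        source_managed={"old group", "new group"},
        resolved={"new group"},
    ) == {"other group", "new group"}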
| @ -1,103 +0,0 @@ | ||||
| from typing import Any | ||||
|  | ||||
| from django.http import HttpRequest | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.core.expression.exceptions import PropertyMappingExpressionException | ||||
| from authentik.core.models import Group, PropertyMapping, Source, User | ||||
| from authentik.events.models import Event, EventAction | ||||
| from authentik.lib.merge import MERGE_LIST_UNIQUE | ||||
| from authentik.lib.sync.mapper import PropertyMappingManager | ||||
| from authentik.policies.utils import delete_none_values | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| class SourceMapper: | ||||
|     def __init__(self, source: Source): | ||||
|         self.source = source | ||||
|  | ||||
|     def get_manager( | ||||
|         self, object_type: type[User | Group], context_keys: list[str] | ||||
|     ) -> PropertyMappingManager: | ||||
|         """Get property mapping manager for this source.""" | ||||
|  | ||||
|         qs = PropertyMapping.objects.none() | ||||
|         if object_type == User: | ||||
|             qs = self.source.user_property_mappings.all().select_subclasses() | ||||
|         elif object_type == Group: | ||||
|             qs = self.source.group_property_mappings.all().select_subclasses() | ||||
|         qs = qs.order_by("name") | ||||
|         return PropertyMappingManager( | ||||
|             qs, | ||||
|             self.source.property_mapping_type, | ||||
|             ["source", "properties"] + context_keys, | ||||
|         ) | ||||
|  | ||||
|     def get_base_properties( | ||||
|         self, object_type: type[User | Group], **kwargs | ||||
|     ) -> dict[str, Any | dict[str, Any]]: | ||||
|         """Get base properties for a user or a group to build final properties upon.""" | ||||
|         if object_type == User: | ||||
|             properties = self.source.get_base_user_properties(**kwargs) | ||||
|             properties.setdefault("path", self.source.get_user_path()) | ||||
|             return properties | ||||
|         if object_type == Group: | ||||
|             return self.source.get_base_group_properties(**kwargs) | ||||
|         return {} | ||||
|  | ||||
|     def build_object_properties( | ||||
|         self, | ||||
|         object_type: type[User | Group], | ||||
|         manager: "PropertyMappingManager | None" = None, | ||||
|         user: User | None = None, | ||||
|         request: HttpRequest | None = None, | ||||
|         **kwargs, | ||||
|     ) -> dict[str, Any | dict[str, Any]]: | ||||
|         """Build a user or group properties from the source configured property mappings.""" | ||||
|  | ||||
|         properties = self.get_base_properties(object_type, **kwargs) | ||||
|         if "attributes" not in properties: | ||||
|             properties["attributes"] = {} | ||||
|  | ||||
|         if not manager: | ||||
|             manager = self.get_manager(object_type, list(kwargs.keys())) | ||||
|         evaluations = manager.iter_eval( | ||||
|             user=user, | ||||
|             request=request, | ||||
|             return_mapping=True, | ||||
|             source=self.source, | ||||
|             properties=properties, | ||||
|             **kwargs, | ||||
|         ) | ||||
|         while True: | ||||
|             try: | ||||
|                 value, mapping = next(evaluations) | ||||
|             except StopIteration: | ||||
|                 break | ||||
|             except PropertyMappingExpressionException as exc: | ||||
|                 Event.new( | ||||
|                     EventAction.CONFIGURATION_ERROR, | ||||
|                     message=f"Failed to evaluate property mapping: '{exc.mapping.name}'", | ||||
|                     source=self, | ||||
|                     mapping=exc.mapping, | ||||
|                 ).save() | ||||
|                 LOGGER.warning( | ||||
|                     "Mapping failed to evaluate", | ||||
|                     exc=exc, | ||||
|                     source=self, | ||||
|                     mapping=exc.mapping, | ||||
|                 ) | ||||
|                 raise exc | ||||
|  | ||||
|             if not value or not isinstance(value, dict): | ||||
|                 LOGGER.debug( | ||||
|                     "Mapping evaluated to None or is not a dict. Skipping", | ||||
|                     source=self, | ||||
|                     mapping=mapping, | ||||
|                 ) | ||||
|                 continue | ||||
|  | ||||
|             MERGE_LIST_UNIQUE.merge(properties, value) | ||||
|  | ||||
|         return delete_none_values(properties) | ||||
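The deleted mapper.py above layers property-mapping results on top of the source's base properties and finally strips None values. Below is a simplified, framework-free sketch of that layering; it is not the authentik implementation (the real code evaluates mappings lazily and deep-merges with MERGE_LIST_UNIQUE), but it reproduces the observable behaviour.

    def build_properties(base: dict, mapping_results: list) -> dict:
        """Start from base properties, overlay each mapping result, drop None values."""
        props = dict(base)
        props.setdefault("attributes", {})
        for result in mapping_results:
            if not result or not isinstance(result, dict):
                continue  # mirrors the "evaluated to None or is not a dict" skip above
            props.update(result)  # shallow stand-in for MERGE_LIST_UNIQUE.merge()
        return {key: value for key, value in props.items() if value is not None}

    # Comparable to the removed test_build_properties expectation (minus "path"):
    print(build_properties(
        {"username": "test1", "email": "default@authentik"},
        [{"username": "test2", "email": None}],
    ))  # -> {'username': 'test2', 'attributes': {}}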
| @ -4,7 +4,7 @@ | ||||
|  | ||||
| <!DOCTYPE html> | ||||
|  | ||||
| <html> | ||||
| <html lang="en"> | ||||
|     <head> | ||||
|         <meta charset="UTF-8"> | ||||
|         <meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1"> | ||||
|  | ||||
| @ -9,12 +9,9 @@ from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import Application | ||||
| from authentik.core.tests.utils import create_test_admin_user, create_test_flow | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.policies.dummy.models import DummyPolicy | ||||
| from authentik.policies.models import PolicyBinding | ||||
| from authentik.providers.oauth2.models import OAuth2Provider, RedirectURI, RedirectURIMatchingMode | ||||
| from authentik.providers.proxy.models import ProxyProvider | ||||
| from authentik.providers.saml.models import SAMLProvider | ||||
| from authentik.providers.oauth2.models import OAuth2Provider | ||||
|  | ||||
|  | ||||
| class TestApplicationsAPI(APITestCase): | ||||
| @ -24,7 +21,7 @@ class TestApplicationsAPI(APITestCase): | ||||
|         self.user = create_test_admin_user() | ||||
|         self.provider = OAuth2Provider.objects.create( | ||||
|             name="test", | ||||
|             redirect_uris=[RedirectURI(RedirectURIMatchingMode.STRICT, "http://some-other-domain")], | ||||
|             redirect_uris="http://some-other-domain", | ||||
|             authorization_flow=create_test_flow(), | ||||
|         ) | ||||
|         self.allowed: Application = Application.objects.create( | ||||
| @ -225,31 +222,3 @@ class TestApplicationsAPI(APITestCase): | ||||
|                 ], | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_get_provider(self): | ||||
|         """Ensure that proxy providers (at the time of writing that is the only provider | ||||
|         that inherits from another proxy type (OAuth) instead of inheriting from the root | ||||
|         provider class) is correctly looked up and selected from the database""" | ||||
|         slug = generate_id() | ||||
|         provider = ProxyProvider.objects.create(name=generate_id()) | ||||
|         Application.objects.create( | ||||
|             name=generate_id(), | ||||
|             slug=slug, | ||||
|             provider=provider, | ||||
|         ) | ||||
|         self.assertEqual(Application.objects.get(slug=slug).get_provider(), provider) | ||||
|         self.assertEqual( | ||||
|             Application.objects.with_provider().get(slug=slug).get_provider(), provider | ||||
|         ) | ||||
|  | ||||
|         slug = generate_id() | ||||
|         provider = SAMLProvider.objects.create(name=generate_id()) | ||||
|         Application.objects.create( | ||||
|             name=generate_id(), | ||||
|             slug=slug, | ||||
|             provider=provider, | ||||
|         ) | ||||
|         self.assertEqual(Application.objects.get(slug=slug).get_provider(), provider) | ||||
|         self.assertEqual( | ||||
|             Application.objects.with_provider().get(slug=slug).get_provider(), provider | ||||
|         ) | ||||
|  | ||||
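The provider fixture near the top of this file's diff is where the redirect_uris representation changes: structured RedirectURI entries with a matching mode on one side, a plain string on the other. As a rough idea of what a strict versus a regex matching mode stands for, here is a simplified sketch; it is not authentik's validation code, and only the STRICT mode name is taken from the import shown above, the regex behaviour is an assumption.

    import re

    def redirect_allowed(candidate: str, registered: str, mode: str) -> bool:
        """Exact-match vs. regex-match checking of a redirect URI (illustrative only)."""
        if mode == "strict":
            return candidate == registered
        if mode == "regex":
            return re.fullmatch(registered, candidate) is not None
        raise ValueError(f"unknown matching mode: {mode!r}")

    assert redirect_allowed("http://some-other-domain", "http://some-other-domain", "strict")
    assert not redirect_allowed("http://some-other-domain/cb", "http://some-other-domain", "strict")
    assert redirect_allowed("http://some-other-domain/cb", r"http://some-other-domain/.*", "regex")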
| @ -3,10 +3,10 @@ | ||||
| from json import loads | ||||
|  | ||||
| from django.urls import reverse | ||||
| from guardian.shortcuts import assign_perm | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.tests.utils import create_test_admin_user, create_test_user | ||||
| from authentik.core.models import User | ||||
| from authentik.core.tests.utils import create_test_admin_user | ||||
| from authentik.tenants.utils import get_current_tenant | ||||
|  | ||||
|  | ||||
| @ -15,7 +15,7 @@ class TestImpersonation(APITestCase): | ||||
|  | ||||
|     def setUp(self) -> None: | ||||
|         super().setUp() | ||||
|         self.other_user = create_test_user() | ||||
|         self.other_user = User.objects.create(username="to-impersonate") | ||||
|         self.user = create_test_admin_user() | ||||
|  | ||||
|     def test_impersonate_simple(self): | ||||
| @ -44,46 +44,6 @@ class TestImpersonation(APITestCase): | ||||
|         self.assertEqual(response_body["user"]["username"], self.user.username) | ||||
|         self.assertNotIn("original", response_body) | ||||
|  | ||||
|     def test_impersonate_global(self): | ||||
|         """Test impersonation with global permissions""" | ||||
|         new_user = create_test_user() | ||||
|         assign_perm("authentik_core.impersonate", new_user) | ||||
|         assign_perm("authentik_core.view_user", new_user) | ||||
|         self.client.force_login(new_user) | ||||
|  | ||||
|         response = self.client.post( | ||||
|             reverse( | ||||
|                 "authentik_api:user-impersonate", | ||||
|                 kwargs={"pk": self.other_user.pk}, | ||||
|             ) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 201) | ||||
|  | ||||
|         response = self.client.get(reverse("authentik_api:user-me")) | ||||
|         response_body = loads(response.content.decode()) | ||||
|         self.assertEqual(response_body["user"]["username"], self.other_user.username) | ||||
|         self.assertEqual(response_body["original"]["username"], new_user.username) | ||||
|  | ||||
|     def test_impersonate_scoped(self): | ||||
|         """Test impersonation with scoped permissions""" | ||||
|         new_user = create_test_user() | ||||
|         assign_perm("authentik_core.impersonate", new_user, self.other_user) | ||||
|         assign_perm("authentik_core.view_user", new_user, self.other_user) | ||||
|         self.client.force_login(new_user) | ||||
|  | ||||
|         response = self.client.post( | ||||
|             reverse( | ||||
|                 "authentik_api:user-impersonate", | ||||
|                 kwargs={"pk": self.other_user.pk}, | ||||
|             ) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 201) | ||||
|  | ||||
|         response = self.client.get(reverse("authentik_api:user-me")) | ||||
|         response_body = loads(response.content.decode()) | ||||
|         self.assertEqual(response_body["user"]["username"], self.other_user.username) | ||||
|         self.assertEqual(response_body["original"]["username"], new_user.username) | ||||
|  | ||||
|     def test_impersonate_denied(self): | ||||
|         """test impersonation without permissions""" | ||||
|         self.client.force_login(self.other_user) | ||||
|  | ||||
| @ -38,9 +38,7 @@ class TestSourceFlowManager(TestCase): | ||||
|     def test_unauthenticated_enroll(self): | ||||
|         """Test un-authenticated user enrolling""" | ||||
|         request = get_request("/", user=AnonymousUser()) | ||||
|         flow_manager = OAuthSourceFlowManager( | ||||
|             self.source, request, self.identifier, {"info": {}}, {} | ||||
|         ) | ||||
|         flow_manager = OAuthSourceFlowManager(self.source, request, self.identifier, {}) | ||||
|         action, _ = flow_manager.get_action() | ||||
|         self.assertEqual(action, Action.ENROLL) | ||||
|         response = flow_manager.get_flow() | ||||
| @ -54,9 +52,7 @@ class TestSourceFlowManager(TestCase): | ||||
|             user=get_anonymous_user(), source=self.source, identifier=self.identifier | ||||
|         ) | ||||
|         request = get_request("/", user=AnonymousUser()) | ||||
|         flow_manager = OAuthSourceFlowManager( | ||||
|             self.source, request, self.identifier, {"info": {}}, {} | ||||
|         ) | ||||
|         flow_manager = OAuthSourceFlowManager(self.source, request, self.identifier, {}) | ||||
|         action, _ = flow_manager.get_action() | ||||
|         self.assertEqual(action, Action.AUTH) | ||||
|         response = flow_manager.get_flow() | ||||
| @ -68,9 +64,7 @@ class TestSourceFlowManager(TestCase): | ||||
|         """Test authenticated user linking""" | ||||
|         user = User.objects.create(username="foo", email="foo@bar.baz") | ||||
|         request = get_request("/", user=user) | ||||
|         flow_manager = OAuthSourceFlowManager( | ||||
|             self.source, request, self.identifier, {"info": {}}, {} | ||||
|         ) | ||||
|         flow_manager = OAuthSourceFlowManager(self.source, request, self.identifier, {}) | ||||
|         action, connection = flow_manager.get_action() | ||||
|         self.assertEqual(action, Action.LINK) | ||||
|         self.assertIsNone(connection.pk) | ||||
| @ -83,9 +77,7 @@ class TestSourceFlowManager(TestCase): | ||||
|  | ||||
|     def test_unauthenticated_link(self): | ||||
|         """Test un-authenticated user linking""" | ||||
|         flow_manager = OAuthSourceFlowManager( | ||||
|             self.source, get_request("/"), self.identifier, {"info": {}}, {} | ||||
|         ) | ||||
|         flow_manager = OAuthSourceFlowManager(self.source, get_request("/"), self.identifier, {}) | ||||
|         action, connection = flow_manager.get_action() | ||||
|         self.assertEqual(action, Action.LINK) | ||||
|         self.assertIsNone(connection.pk) | ||||
| @ -98,7 +90,7 @@ class TestSourceFlowManager(TestCase): | ||||
|  | ||||
|         # Without email, deny | ||||
|         flow_manager = OAuthSourceFlowManager( | ||||
|             self.source, get_request("/", user=AnonymousUser()), self.identifier, {"info": {}}, {} | ||||
|             self.source, get_request("/", user=AnonymousUser()), self.identifier, {} | ||||
|         ) | ||||
|         action, _ = flow_manager.get_action() | ||||
|         self.assertEqual(action, Action.DENY) | ||||
| @ -108,12 +100,7 @@ class TestSourceFlowManager(TestCase): | ||||
|             self.source, | ||||
|             get_request("/", user=AnonymousUser()), | ||||
|             self.identifier, | ||||
|             { | ||||
|                 "info": { | ||||
|                     "email": "foo@bar.baz", | ||||
|                 }, | ||||
|             }, | ||||
|             {}, | ||||
|             {"email": "foo@bar.baz"}, | ||||
|         ) | ||||
|         action, _ = flow_manager.get_action() | ||||
|         self.assertEqual(action, Action.LINK) | ||||
| @ -126,7 +113,7 @@ class TestSourceFlowManager(TestCase): | ||||
|  | ||||
|         # Without username, deny | ||||
|         flow_manager = OAuthSourceFlowManager( | ||||
|             self.source, get_request("/", user=AnonymousUser()), self.identifier, {"info": {}}, {} | ||||
|             self.source, get_request("/", user=AnonymousUser()), self.identifier, {} | ||||
|         ) | ||||
|         action, _ = flow_manager.get_action() | ||||
|         self.assertEqual(action, Action.DENY) | ||||
| @ -136,10 +123,7 @@ class TestSourceFlowManager(TestCase): | ||||
|             self.source, | ||||
|             get_request("/", user=AnonymousUser()), | ||||
|             self.identifier, | ||||
|             { | ||||
|                 "info": {"username": "foo"}, | ||||
|             }, | ||||
|             {}, | ||||
|             {"username": "foo"}, | ||||
|         ) | ||||
|         action, _ = flow_manager.get_action() | ||||
|         self.assertEqual(action, Action.LINK) | ||||
| @ -156,11 +140,8 @@ class TestSourceFlowManager(TestCase): | ||||
|             get_request("/", user=AnonymousUser()), | ||||
|             self.identifier, | ||||
|             { | ||
|                 "info": { | ||
|                     "username": "bar", | ||
|                 }, | ||
|             }, | ||
|             {}, | ||
|             { | ||
|                 "username": "bar", | ||
|             }, | ||
|         ) | ||||
|         action, _ = flow_manager.get_action() | ||||
|         self.assertEqual(action, Action.ENROLL) | ||||
| @ -170,10 +151,7 @@ class TestSourceFlowManager(TestCase): | ||||
|             self.source, | ||||
|             get_request("/", user=AnonymousUser()), | ||||
|             self.identifier, | ||||
|             { | ||||
|                 "info": {"username": "foo"}, | ||||
|             }, | ||||
|             {}, | ||||
|             {"username": "foo"}, | ||||
|         ) | ||||
|         action, _ = flow_manager.get_action() | ||||
|         self.assertEqual(action, Action.DENY) | ||||
| @ -187,10 +165,7 @@ class TestSourceFlowManager(TestCase): | ||||
|             self.source, | ||||
|             get_request("/", user=AnonymousUser()), | ||||
|             self.identifier, | ||||
|             { | ||||
|                 "info": {"username": "foo"}, | ||||
|             }, | ||||
|             {}, | ||||
|             {"username": "foo"}, | ||||
|         ) | ||||
|         action, _ = flow_manager.get_action() | ||||
|         self.assertEqual(action, Action.ENROLL) | ||||
| @ -216,10 +191,7 @@ class TestSourceFlowManager(TestCase): | ||||
|             self.source, | ||||
|             get_request("/", user=AnonymousUser()), | ||||
|             self.identifier, | ||||
|             { | ||||
|                 "info": {"username": "foo"}, | ||||
|             }, | ||||
|             {}, | ||||
|             {"username": "foo"}, | ||||
|         ) | ||||
|         action, _ = flow_manager.get_action() | ||||
|         self.assertEqual(action, Action.ENROLL) | ||||
|  | ||||
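The recurring change throughout these tests is the shape of the data handed to OAuthSourceFlowManager: one side passes user info nested under an "info" key plus a separate (empty) policy-context dict, the other a single flat dict. A tiny standalone helper illustrates the difference in shape; the helper and its name are purely illustrative and not part of authentik.

    def get_enroll_field(data: dict, key: str):
        """Read a field from either the flat or the "info"-nested enrollment dict."""
        return data.get("info", data).get(key)

    assert get_enroll_field({"username": "foo"}, "username") == "foo"            # flat shape
    assert get_enroll_field({"info": {"username": "foo"}}, "username") == "foo"  # nested shape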
| @ -1,237 +0,0 @@ | ||||
| """Test Source flow_manager group update stage""" | ||||
|  | ||||
| from django.test import RequestFactory | ||||
|  | ||||
| from authentik.core.models import Group, SourceGroupMatchingModes | ||||
| from authentik.core.sources.flow_manager import PLAN_CONTEXT_SOURCE_GROUPS, GroupUpdateStage | ||||
| from authentik.core.tests.utils import create_test_admin_user, create_test_flow | ||||
| from authentik.flows.models import in_memory_stage | ||||
| from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, PLAN_CONTEXT_SOURCE, FlowPlan | ||||
| from authentik.flows.tests import FlowTestCase | ||||
| from authentik.flows.views.executor import FlowExecutorView | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.sources.oauth.models import GroupOAuthSourceConnection, OAuthSource | ||||
|  | ||||
|  | ||||
| class TestSourceFlowManager(FlowTestCase): | ||||
|     """Test Source flow_manager group update stage""" | ||||
|  | ||||
|     def setUp(self) -> None: | ||||
|         super().setUp() | ||||
|         self.factory = RequestFactory() | ||||
|         self.authentication_flow = create_test_flow() | ||||
|         self.enrollment_flow = create_test_flow() | ||||
|         self.source: OAuthSource = OAuthSource.objects.create( | ||||
|             name=generate_id(), | ||||
|             slug=generate_id(), | ||||
|             authentication_flow=self.authentication_flow, | ||||
|             enrollment_flow=self.enrollment_flow, | ||||
|         ) | ||||
|         self.identifier = generate_id() | ||||
|         self.user = create_test_admin_user() | ||||
|  | ||||
|     def test_nonexistant_group(self): | ||||
|         request = self.factory.get("/") | ||||
|         stage = GroupUpdateStage( | ||||
|             FlowExecutorView( | ||||
|                 current_stage=in_memory_stage( | ||||
|                     GroupUpdateStage, group_connection_type=GroupOAuthSourceConnection | ||||
|                 ), | ||||
|                 plan=FlowPlan( | ||||
|                     flow_pk=generate_id(), | ||||
|                     context={ | ||||
|                         PLAN_CONTEXT_SOURCE: self.source, | ||||
|                         PLAN_CONTEXT_PENDING_USER: self.user, | ||||
|                         PLAN_CONTEXT_SOURCE_GROUPS: { | ||||
|                             "group 1": { | ||||
|                                 "name": "group 1", | ||||
|                             }, | ||||
|                         }, | ||||
|                     }, | ||||
|                 ), | ||||
|             ), | ||||
|             request=request, | ||||
|         ) | ||||
|         self.assertTrue(stage.handle_groups()) | ||||
|         self.assertTrue(Group.objects.filter(name="group 1").exists()) | ||||
|         self.assertTrue(self.user.ak_groups.filter(name="group 1").exists()) | ||||
|         self.assertTrue( | ||||
|             GroupOAuthSourceConnection.objects.filter( | ||||
|                 group=Group.objects.get(name="group 1"), source=self.source | ||||
|             ).exists() | ||||
|         ) | ||||
|  | ||||
|     def test_nonexistant_group_name_link(self): | ||||
|         self.source.group_matching_mode = SourceGroupMatchingModes.NAME_LINK | ||||
|         self.source.save() | ||||
|  | ||||
|         request = self.factory.get("/") | ||||
|         stage = GroupUpdateStage( | ||||
|             FlowExecutorView( | ||||
|                 current_stage=in_memory_stage( | ||||
|                     GroupUpdateStage, group_connection_type=GroupOAuthSourceConnection | ||||
|                 ), | ||||
|                 plan=FlowPlan( | ||||
|                     flow_pk=generate_id(), | ||||
|                     context={ | ||||
|                         PLAN_CONTEXT_SOURCE: self.source, | ||||
|                         PLAN_CONTEXT_PENDING_USER: self.user, | ||||
|                         PLAN_CONTEXT_SOURCE_GROUPS: { | ||||
|                             "group 1": { | ||||
|                                 "name": "group 1", | ||||
|                             }, | ||||
|                         }, | ||||
|                     }, | ||||
|                 ), | ||||
|             ), | ||||
|             request=request, | ||||
|         ) | ||||
|         self.assertTrue(stage.handle_groups()) | ||||
|         self.assertTrue(Group.objects.filter(name="group 1").exists()) | ||||
|         self.assertTrue(self.user.ak_groups.filter(name="group 1").exists()) | ||||
|         self.assertTrue( | ||||
|             GroupOAuthSourceConnection.objects.filter( | ||||
|                 group=Group.objects.get(name="group 1"), source=self.source | ||||
|             ).exists() | ||||
|         ) | ||||
|  | ||||
|     def test_existant_group_name_link(self): | ||||
|         self.source.group_matching_mode = SourceGroupMatchingModes.NAME_LINK | ||||
|         self.source.save() | ||||
|         group = Group.objects.create(name="group 1") | ||||
|  | ||||
|         request = self.factory.get("/") | ||||
|         stage = GroupUpdateStage( | ||||
|             FlowExecutorView( | ||||
|                 current_stage=in_memory_stage( | ||||
|                     GroupUpdateStage, group_connection_type=GroupOAuthSourceConnection | ||||
|                 ), | ||||
|                 plan=FlowPlan( | ||||
|                     flow_pk=generate_id(), | ||||
|                     context={ | ||||
|                         PLAN_CONTEXT_SOURCE: self.source, | ||||
|                         PLAN_CONTEXT_PENDING_USER: self.user, | ||||
|                         PLAN_CONTEXT_SOURCE_GROUPS: { | ||||
|                             "group 1": { | ||||
|                                 "name": "group 1", | ||||
|                             }, | ||||
|                         }, | ||||
|                     }, | ||||
|                 ), | ||||
|             ), | ||||
|             request=request, | ||||
|         ) | ||||
|         self.assertTrue(stage.handle_groups()) | ||||
|         self.assertTrue(Group.objects.filter(name="group 1").exists()) | ||||
|         self.assertTrue(self.user.ak_groups.filter(name="group 1").exists()) | ||||
|         self.assertTrue( | ||||
|             GroupOAuthSourceConnection.objects.filter(group=group, source=self.source).exists() | ||||
|         ) | ||||
|  | ||||
|     def test_nonexistant_group_name_deny(self): | ||||
|         self.source.group_matching_mode = SourceGroupMatchingModes.NAME_DENY | ||||
|         self.source.save() | ||||
|  | ||||
|         request = self.factory.get("/") | ||||
|         stage = GroupUpdateStage( | ||||
|             FlowExecutorView( | ||||
|                 current_stage=in_memory_stage( | ||||
|                     GroupUpdateStage, group_connection_type=GroupOAuthSourceConnection | ||||
|                 ), | ||||
|                 plan=FlowPlan( | ||||
|                     flow_pk=generate_id(), | ||||
|                     context={ | ||||
|                         PLAN_CONTEXT_SOURCE: self.source, | ||||
|                         PLAN_CONTEXT_PENDING_USER: self.user, | ||||
|                         PLAN_CONTEXT_SOURCE_GROUPS: { | ||||
|                             "group 1": { | ||||
|                                 "name": "group 1", | ||||
|                             }, | ||||
|                         }, | ||||
|                     }, | ||||
|                 ), | ||||
|             ), | ||||
|             request=request, | ||||
|         ) | ||||
|         self.assertTrue(stage.handle_groups()) | ||||
|         self.assertTrue(Group.objects.filter(name="group 1").exists()) | ||||
|         self.assertTrue(self.user.ak_groups.filter(name="group 1").exists()) | ||||
|         self.assertTrue( | ||||
|             GroupOAuthSourceConnection.objects.filter( | ||||
|                 group=Group.objects.get(name="group 1"), source=self.source | ||||
|             ).exists() | ||||
|         ) | ||||
|  | ||||
|     def test_existant_group_name_deny(self): | ||||
|         self.source.group_matching_mode = SourceGroupMatchingModes.NAME_DENY | ||||
|         self.source.save() | ||||
|         group = Group.objects.create(name="group 1") | ||||
|  | ||||
|         request = self.factory.get("/") | ||||
|         stage = GroupUpdateStage( | ||||
|             FlowExecutorView( | ||||
|                 current_stage=in_memory_stage( | ||||
|                     GroupUpdateStage, group_connection_type=GroupOAuthSourceConnection | ||||
|                 ), | ||||
|                 plan=FlowPlan( | ||||
|                     flow_pk=generate_id(), | ||||
|                     context={ | ||||
|                         PLAN_CONTEXT_SOURCE: self.source, | ||||
|                         PLAN_CONTEXT_PENDING_USER: self.user, | ||||
|                         PLAN_CONTEXT_SOURCE_GROUPS: { | ||||
|                             "group 1": { | ||||
|                                 "name": "group 1", | ||||
|                             }, | ||||
|                         }, | ||||
|                     }, | ||||
|                 ), | ||||
|             ), | ||||
|             request=request, | ||||
|         ) | ||||
|         self.assertFalse(stage.handle_groups()) | ||||
|         self.assertFalse(self.user.ak_groups.filter(name="group 1").exists()) | ||||
|         self.assertFalse( | ||||
|             GroupOAuthSourceConnection.objects.filter(group=group, source=self.source).exists() | ||||
|         ) | ||||
|  | ||||
|     def test_group_updates(self): | ||||
|         self.source.group_matching_mode = SourceGroupMatchingModes.NAME_LINK | ||||
|         self.source.save() | ||||
|  | ||||
|         other_group = Group.objects.create(name="other group") | ||||
|         old_group = Group.objects.create(name="old group") | ||||
|         new_group = Group.objects.create(name="new group") | ||||
|         self.user.ak_groups.set([other_group, old_group]) | ||||
|         GroupOAuthSourceConnection.objects.create( | ||||
|             group=old_group, source=self.source, identifier=old_group.name | ||||
|         ) | ||||
|         GroupOAuthSourceConnection.objects.create( | ||||
|             group=new_group, source=self.source, identifier=new_group.name | ||||
|         ) | ||||
|  | ||||
|         request = self.factory.get("/") | ||||
|         stage = GroupUpdateStage( | ||||
|             FlowExecutorView( | ||||
|                 current_stage=in_memory_stage( | ||||
|                     GroupUpdateStage, group_connection_type=GroupOAuthSourceConnection | ||||
|                 ), | ||||
|                 plan=FlowPlan( | ||||
|                     flow_pk=generate_id(), | ||||
|                     context={ | ||||
|                         PLAN_CONTEXT_SOURCE: self.source, | ||||
|                         PLAN_CONTEXT_PENDING_USER: self.user, | ||||
|                         PLAN_CONTEXT_SOURCE_GROUPS: { | ||||
|                             "new group": { | ||||
|                                 "name": "new group", | ||||
|                             }, | ||||
|                         }, | ||||
|                     }, | ||||
|                 ), | ||||
|             ), | ||||
|             request=request, | ||||
|         ) | ||||
|         self.assertTrue(stage.handle_groups()) | ||||
|         self.assertFalse(self.user.ak_groups.filter(name="old group").exists()) | ||||
|         self.assertTrue(self.user.ak_groups.filter(name="other group").exists()) | ||||
|         self.assertTrue(self.user.ak_groups.filter(name="new group").exists()) | ||||
|         self.assertEqual(self.user.ak_groups.count(), 2) | ||||
| @ -1,72 +0,0 @@ | ||||
| """Test Source Property mappings""" | ||||
|  | ||||
| from django.test import TestCase | ||||
|  | ||||
| from authentik.core.models import Group, PropertyMapping, Source, User | ||||
| from authentik.core.sources.mapper import SourceMapper | ||||
| from authentik.lib.generators import generate_id | ||||
|  | ||||
|  | ||||
| class ProxySource(Source): | ||||
|     @property | ||||
|     def property_mapping_type(self): | ||||
|         return PropertyMapping | ||||
|  | ||||
|     def get_base_user_properties(self, **kwargs): | ||||
|         return { | ||||
|             "username": kwargs.get("username", None), | ||||
|             "email": kwargs.get("email", "default@authentik"), | ||||
|         } | ||||
|  | ||||
|     def get_base_group_properties(self, **kwargs): | ||||
|         return {"name": kwargs.get("name", None)} | ||||
|  | ||||
|     class Meta: | ||||
|         proxy = True | ||||
|  | ||||
|  | ||||
| class TestSourcePropertyMappings(TestCase): | ||||
|     """Test Source PropertyMappings""" | ||||
|  | ||||
|     def test_base_properties(self): | ||||
|         source = ProxySource.objects.create(name=generate_id(), slug=generate_id(), enabled=True) | ||||
|         mapper = SourceMapper(source) | ||||
|  | ||||
|         user_base_properties = mapper.get_base_properties(User, username="test1") | ||||
|         self.assertEqual( | ||||
|             user_base_properties, | ||||
|             { | ||||
|                 "username": "test1", | ||||
|                 "email": "default@authentik", | ||||
|                 "path": f"goauthentik.io/sources/{source.slug}", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|         group_base_properties = mapper.get_base_properties(Group) | ||||
|         self.assertEqual(group_base_properties, {"name": None}) | ||||
|  | ||||
|     def test_build_properties(self): | ||||
|         source = ProxySource.objects.create(name=generate_id(), slug=generate_id(), enabled=True) | ||||
|         mapper = SourceMapper(source) | ||||
|  | ||||
|         source.user_property_mappings.add( | ||||
|             PropertyMapping.objects.create( | ||||
|                 name=generate_id(), | ||||
|                 expression=""" | ||||
|                     return {"username": data.get("username", None), "email": None} | ||||
|                 """, | ||||
|             ) | ||||
|         ) | ||||
|  | ||||
|         properties = mapper.build_object_properties( | ||||
|             object_type=User, user=None, request=None, username="test1", data={"username": "test2"} | ||||
|         ) | ||||
|  | ||||
|         self.assertEqual( | ||||
|             properties, | ||||
|             { | ||||
|                 "username": "test2", | ||||
|                 "path": f"goauthentik.io/sources/{source.slug}", | ||||
|                 "attributes": {}, | ||||
|             }, | ||||
|         ) | ||||
| @ -31,7 +31,6 @@ class TestTransactionalApplicationsAPI(APITestCase): | ||||
|                 "provider": { | ||||
|                     "name": uid, | ||||
|                     "authorization_flow": str(authorization_flow.pk), | ||||
|                     "redirect_uris": [], | ||||
|                 }, | ||||
|             }, | ||||
|         ) | ||||
| @ -57,7 +56,6 @@ class TestTransactionalApplicationsAPI(APITestCase): | ||||
|                 "provider": { | ||||
|                     "name": uid, | ||||
|                     "authorization_flow": "", | ||||
|                     "redirect_uris": [], | ||||
|                 }, | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
| @ -6,6 +6,7 @@ from django.conf import settings | ||||
| from django.contrib.auth.decorators import login_required | ||||
| from django.urls import path | ||||
| from django.views.decorators.csrf import ensure_csrf_cookie | ||||
| from django.views.generic import RedirectView | ||||
|  | ||||
| from authentik.core.api.applications import ApplicationViewSet | ||||
| from authentik.core.api.authenticated_sessions import AuthenticatedSessionViewSet | ||||
| @ -17,13 +18,9 @@ from authentik.core.api.sources import SourceViewSet, UserSourceConnectionViewSe | ||||
| from authentik.core.api.tokens import TokenViewSet | ||||
| from authentik.core.api.transactional_applications import TransactionalApplicationView | ||||
| from authentik.core.api.users import UserViewSet | ||||
| from authentik.core.views.apps import RedirectToAppLaunch | ||||
| from authentik.core.views import apps | ||||
| from authentik.core.views.debug import AccessDeniedView | ||||
| from authentik.core.views.interface import ( | ||||
|     BrandDefaultRedirectView, | ||||
|     InterfaceView, | ||||
|     RootRedirectView, | ||||
| ) | ||||
| from authentik.core.views.interface import InterfaceView | ||||
| from authentik.core.views.session import EndSessionView | ||||
| from authentik.flows.views.interface import FlowInterfaceView | ||||
| from authentik.root.asgi_middleware import SessionMiddleware | ||||
| @ -33,24 +30,26 @@ from authentik.root.middleware import ChannelsLoggingMiddleware | ||||
| urlpatterns = [ | ||||
|     path( | ||||
|         "", | ||||
|         login_required(RootRedirectView.as_view()), | ||||
|         login_required( | ||||
|             RedirectView.as_view(pattern_name="authentik_core:if-user", query_string=True) | ||||
|         ), | ||||
|         name="root-redirect", | ||||
|     ), | ||||
|     path( | ||||
|         # We have to use this format since everything else uses application/o or application/saml | ||||
|         # We have to use this format since everything else uses applications/o or applications/saml | ||||
|         "application/launch/<slug:application_slug>/", | ||||
|         RedirectToAppLaunch.as_view(), | ||||
|         apps.RedirectToAppLaunch.as_view(), | ||||
|         name="application-launch", | ||||
|     ), | ||||
|     # Interfaces | ||||
|     path( | ||||
|         "if/admin/", | ||||
|         ensure_csrf_cookie(BrandDefaultRedirectView.as_view(template_name="if/admin.html")), | ||||
|         ensure_csrf_cookie(InterfaceView.as_view(template_name="if/admin.html")), | ||||
|         name="if-admin", | ||||
|     ), | ||||
|     path( | ||||
|         "if/user/", | ||||
|         ensure_csrf_cookie(BrandDefaultRedirectView.as_view(template_name="if/user.html")), | ||||
|         ensure_csrf_cookie(InterfaceView.as_view(template_name="if/user.html")), | ||||
|         name="if-user", | ||||
|     ), | ||||
|     path( | ||||
|  | ||||
| @ -3,42 +3,13 @@ | ||||
| from json import dumps | ||||
| from typing import Any | ||||
|  | ||||
| from django.http import HttpRequest | ||||
| from django.http.response import HttpResponse | ||||
| from django.shortcuts import redirect | ||||
| from django.utils.translation import gettext as _ | ||||
| from django.views.generic.base import RedirectView, TemplateView | ||||
| from django.views.generic.base import TemplateView | ||||
| from rest_framework.request import Request | ||||
|  | ||||
| from authentik import get_build_hash | ||||
| from authentik.admin.tasks import LOCAL_VERSION | ||||
| from authentik.api.v3.config import ConfigView | ||||
| from authentik.brands.api import CurrentBrandSerializer | ||||
| from authentik.brands.models import Brand | ||||
| from authentik.core.models import UserTypes | ||||
| from authentik.policies.denied import AccessDeniedResponse | ||||
|  | ||||
|  | ||||
| class RootRedirectView(RedirectView): | ||||
|     """Root redirect view, redirect to brand's default application if set""" | ||||
|  | ||||
|     pattern_name = "authentik_core:if-user" | ||||
|     query_string = True | ||||
|  | ||||
|     def redirect_to_app(self, request: HttpRequest): | ||||
|         if request.user.is_authenticated and request.user.type == UserTypes.EXTERNAL: | ||||
|             brand: Brand = request.brand | ||||
|             if brand.default_application: | ||||
|                 return redirect( | ||||
|                     "authentik_core:application-launch", | ||||
|                     application_slug=brand.default_application.slug, | ||||
|                 ) | ||||
|         return None | ||||
|  | ||||
|     def dispatch(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse: | ||||
|         if redirect_response := RootRedirectView().redirect_to_app(request): | ||||
|             return redirect_response | ||||
|         return super().dispatch(request, *args, **kwargs) | ||||
|  | ||||
|  | ||||
| class InterfaceView(TemplateView): | ||||
| @ -52,20 +23,3 @@ class InterfaceView(TemplateView): | ||||
|         kwargs["build"] = get_build_hash() | ||||
|         kwargs["url_kwargs"] = self.kwargs | ||||
|         return super().get_context_data(**kwargs) | ||||
|  | ||||
|  | ||||
| class BrandDefaultRedirectView(InterfaceView): | ||||
|     """By default redirect to default app""" | ||||
|  | ||||
|     def dispatch(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse: | ||||
|         if request.user.is_authenticated and request.user.type == UserTypes.EXTERNAL: | ||||
|             brand: Brand = request.brand | ||||
|             if brand.default_application: | ||||
|                 return redirect( | ||||
|                     "authentik_core:application-launch", | ||||
|                     application_slug=brand.default_application.slug, | ||||
|                 ) | ||||
|             response = AccessDeniedResponse(self.request) | ||||
|             response.error_message = _("Interface can only be accessed by internal users.") | ||||
|             return response | ||||
|         return super().dispatch(request, *args, **kwargs) | ||||
|  | ||||
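Both the urls.py hunk and the interface-view hunk above swap a brand-aware redirect (RootRedirectView / BrandDefaultRedirectView) for Django's plain RedirectView or InterfaceView. For reference, the plain-Django behaviour relied on here is that query_string=True re-appends the incoming query string to the reversed URL, which is what keeps a ?next= parameter intact across the root redirect. A minimal definition, with the route name taken from the diff:

    from django.views.generic import RedirectView

    # Redirects "/" to the "authentik_core:if-user" route, preserving e.g. "?next=/app".
    root_redirect = RedirectView.as_view(
        pattern_name="authentik_core:if-user",
        query_string=True,
    )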
| @ -35,7 +35,6 @@ from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg | ||||
| from authentik.crypto.models import CertificateKeyPair | ||||
| from authentik.events.models import Event, EventAction | ||||
| from authentik.rbac.decorators import permission_required | ||||
| from authentik.rbac.filters import ObjectFilter | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
| @ -266,7 +265,7 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet): | ||||
|         ], | ||||
|         responses={200: CertificateDataSerializer(many=False)}, | ||||
|     ) | ||||
|     @action(detail=True, pagination_class=None, filter_backends=[ObjectFilter]) | ||||
|     @action(detail=True, pagination_class=None, filter_backends=[]) | ||||
|     def view_certificate(self, request: Request, pk: str) -> Response: | ||||
|         """Return certificate-key pairs certificate and log access""" | ||||
|         certificate: CertificateKeyPair = self.get_object() | ||||
| @ -296,7 +295,7 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet): | ||||
|         ], | ||||
|         responses={200: CertificateDataSerializer(many=False)}, | ||||
|     ) | ||||
|     @action(detail=True, pagination_class=None, filter_backends=[ObjectFilter]) | ||||
|     @action(detail=True, pagination_class=None, filter_backends=[]) | ||||
|     def view_private_key(self, request: Request, pk: str) -> Response: | ||||
|         """Return certificate-key pairs private key and log access""" | ||||
|         certificate: CertificateKeyPair = self.get_object() | ||||
|  | ||||
| @ -76,7 +76,7 @@ class CertificateBuilder: | ||||
|             .subject_name( | ||||
|                 x509.Name( | ||||
|                     [ | ||||
|                         x509.NameAttribute(NameOID.COMMON_NAME, self.common_name[:64]), | ||||
|                         x509.NameAttribute(NameOID.COMMON_NAME, self.common_name), | ||||
|                         x509.NameAttribute(NameOID.ORGANIZATION_NAME, "authentik"), | ||||
|                         x509.NameAttribute(NameOID.ORGANIZATIONAL_UNIT_NAME, "Self-signed"), | ||||
|                     ] | ||||
|  | ||||
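The only change in the certificate builder above is whether the requested common name is sliced to 64 characters. RFC 5280's upper bound for the commonName attribute is 64 characters, so over-long names can make certificate generation fail further down the stack. The guard is easy to reproduce standalone; this helper is illustrative, not authentik code.

    def safe_common_name(requested: str, limit: int = 64) -> str:
        """Clamp a requested CN to the RFC 5280 upper bound of 64 characters."""
        return requested[:limit]

    assert len(safe_common_name("x" * 200)) == 64
    assert safe_common_name("authentik Self-signed Certificate") == "authentik Self-signed Certificate"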
| @ -18,7 +18,7 @@ from authentik.crypto.models import CertificateKeyPair | ||||
| from authentik.crypto.tasks import MANAGED_DISCOVERED, certificate_discovery | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.lib.generators import generate_id, generate_key | ||||
| from authentik.providers.oauth2.models import OAuth2Provider, RedirectURI, RedirectURIMatchingMode | ||||
| from authentik.providers.oauth2.models import OAuth2Provider | ||||
|  | ||||
|  | ||||
| class TestCrypto(APITestCase): | ||||
| @ -214,46 +214,6 @@ class TestCrypto(APITestCase): | ||||
|         self.assertEqual(200, response.status_code) | ||||
|         self.assertIn("Content-Disposition", response) | ||||
|  | ||||
|     def test_certificate_download_denied(self): | ||||
|         """Test certificate export (download)""" | ||||
|         self.client.logout() | ||||
|         keypair = create_test_cert() | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
|                 "authentik_api:certificatekeypair-view-certificate", | ||||
|                 kwargs={"pk": keypair.pk}, | ||||
|             ) | ||||
|         ) | ||||
|         self.assertEqual(403, response.status_code) | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
|                 "authentik_api:certificatekeypair-view-certificate", | ||||
|                 kwargs={"pk": keypair.pk}, | ||||
|             ), | ||||
|             data={"download": True}, | ||||
|         ) | ||||
|         self.assertEqual(403, response.status_code) | ||||
|  | ||||
|     def test_private_key_download_denied(self): | ||||
|         """Test private_key export (download)""" | ||||
|         self.client.logout() | ||||
|         keypair = create_test_cert() | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
|                 "authentik_api:certificatekeypair-view-private-key", | ||||
|                 kwargs={"pk": keypair.pk}, | ||||
|             ) | ||||
|         ) | ||||
|         self.assertEqual(403, response.status_code) | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
|                 "authentik_api:certificatekeypair-view-private-key", | ||||
|                 kwargs={"pk": keypair.pk}, | ||||
|             ), | ||||
|             data={"download": True}, | ||||
|         ) | ||||
|         self.assertEqual(403, response.status_code) | ||||
|  | ||||
|     def test_used_by(self): | ||||
|         """Test used_by endpoint""" | ||||
|         self.client.force_login(create_test_admin_user()) | ||||
| @ -263,7 +223,7 @@ class TestCrypto(APITestCase): | ||||
|             client_id="test", | ||||
|             client_secret=generate_key(), | ||||
|             authorization_flow=create_test_flow(), | ||||
|             redirect_uris=[RedirectURI(RedirectURIMatchingMode.STRICT, "http://localhost")], | ||||
|             redirect_uris="http://localhost", | ||||
|             signing_key=keypair, | ||||
|         ) | ||||
|         response = self.client.get( | ||||
| @ -286,26 +246,6 @@ class TestCrypto(APITestCase): | ||||
|             ], | ||||
|         ) | ||||
|  | ||||
|     def test_used_by_denied(self): | ||||
|         """Test used_by endpoint""" | ||||
|         self.client.logout() | ||||
|         keypair = create_test_cert() | ||||
|         OAuth2Provider.objects.create( | ||||
|             name=generate_id(), | ||||
|             client_id="test", | ||||
|             client_secret=generate_key(), | ||||
|             authorization_flow=create_test_flow(), | ||||
|             redirect_uris=[RedirectURI(RedirectURIMatchingMode.STRICT, "http://localhost")], | ||||
|             signing_key=keypair, | ||||
|         ) | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
|                 "authentik_api:certificatekeypair-used-by", | ||||
|                 kwargs={"pk": keypair.pk}, | ||||
|             ) | ||||
|         ) | ||||
|         self.assertEqual(403, response.status_code) | ||||
|  | ||||
|     def test_discovery(self): | ||||
|         """Test certificate discovery""" | ||||
|         name = generate_id() | ||||
|  | ||||
| @ -1,11 +1,12 @@ | ||||
| """Enterprise API Views""" | ||||
|  | ||||
| from dataclasses import asdict | ||||
| from datetime import timedelta | ||||
|  | ||||
| from django.utils.timezone import now | ||||
| from django.utils.translation import gettext as _ | ||||
| from drf_spectacular.types import OpenApiTypes | ||||
| from drf_spectacular.utils import OpenApiParameter, extend_schema, inline_serializer | ||||
| from drf_spectacular.utils import extend_schema, inline_serializer | ||||
| from rest_framework.decorators import action | ||||
| from rest_framework.exceptions import ValidationError | ||||
| from rest_framework.fields import CharField, IntegerField | ||||
| @ -29,7 +30,7 @@ class EnterpriseRequiredMixin: | ||||
|  | ||||
|     def validate(self, attrs: dict) -> dict: | ||||
|         """Check that a valid license exists""" | ||||
|         if not LicenseKey.cached_summary().status.is_valid: | ||||
|         if not LicenseKey.cached_summary().has_license: | ||||
|             raise ValidationError(_("Enterprise is required to create/update this object.")) | ||||
|         return super().validate(attrs) | ||||
|  | ||||
| @ -86,7 +87,7 @@ class LicenseViewSet(UsedByMixin, ModelViewSet): | ||||
|         }, | ||||
|     ) | ||||
|     @action(detail=False, methods=["GET"]) | ||||
|     def install_id(self, request: Request) -> Response: | ||||
|     def get_install_id(self, request: Request) -> Response: | ||||
|         """Get install_id""" | ||||
|         return Response( | ||||
|             data={ | ||||
| @ -99,22 +100,12 @@ class LicenseViewSet(UsedByMixin, ModelViewSet): | ||||
|         responses={ | ||||
|             200: LicenseSummarySerializer(), | ||||
|         }, | ||||
|         parameters=[ | ||||
|             OpenApiParameter( | ||||
|                 name="cached", | ||||
|                 location=OpenApiParameter.QUERY, | ||||
|                 type=OpenApiTypes.BOOL, | ||||
|                 default=True, | ||||
|             ) | ||||
|         ], | ||||
|     ) | ||||
|     @action(detail=False, methods=["GET"], permission_classes=[IsAuthenticated]) | ||||
|     def summary(self, request: Request) -> Response: | ||||
|         """Get the total license status""" | ||||
|         summary = LicenseKey.cached_summary() | ||||
|         if request.query_params.get("cached", "true").lower() == "false": | ||||
|             summary = LicenseKey.get_total().summary() | ||||
|         response = LicenseSummarySerializer(instance=summary) | ||||
|         response = LicenseSummarySerializer(data=asdict(LicenseKey.cached_summary())) | ||||
|         response.is_valid(raise_exception=True) | ||||
|         return Response(response.data) | ||||
|  | ||||
|     @permission_required(None, ["authentik_enterprise.view_license"]) | ||||
| @ -137,7 +128,7 @@ class LicenseViewSet(UsedByMixin, ModelViewSet): | ||||
|         forecast_for_months = 12 | ||||
|         response = LicenseForecastSerializer( | ||||
|             data={ | ||||
|                 "internal_users": LicenseKey.get_internal_user_count(), | ||||
|                 "internal_users": LicenseKey.get_default_user_count(), | ||||
|                 "external_users": LicenseKey.get_external_user_count(), | ||||
|                 "forecasted_internal_users": (internal_in_last_month * forecast_for_months), | ||||
|                 "forecasted_external_users": (external_in_last_month * forecast_for_months), | ||||
|  | ||||
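One side of the enterprise API diff above adds a cached query parameter to the license summary action (defaulting to true) and falls back to a freshly computed LicenseKey.get_total().summary() when it is false. A hedged usage sketch follows; the URL path and the authentication header are assumptions based on the ViewSet name, not verified against the published API schema.

    import requests

    # Force a fresh (non-cached) license summary; drop the params argument to use the cache.
    response = requests.get(
        "https://authentik.example.com/api/v3/enterprise/license/summary/",
        params={"cached": "false"},
        headers={"Authorization": "Bearer <api-token>"},  # placeholder token
        timeout=10,
    )
    print(response.json())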
| @ -25,4 +25,4 @@ class AuthentikEnterpriseConfig(EnterpriseConfig): | ||||
|         """Actual enterprise check, cached""" | ||||
|         from authentik.enterprise.license import LicenseKey | ||||
|  | ||||
|         return LicenseKey.cached_summary().status.is_valid | ||||
|         return LicenseKey.cached_summary().valid | ||||
|  | ||||
| @ -3,37 +3,24 @@ | ||||
| from base64 import b64decode | ||||
| from binascii import Error | ||||
| from dataclasses import asdict, dataclass, field | ||||
| from datetime import UTC, datetime, timedelta | ||||
| from datetime import datetime, timedelta | ||||
| from enum import Enum | ||||
| from functools import lru_cache | ||||
| from time import mktime | ||||
|  | ||||
| from cryptography.exceptions import InvalidSignature | ||||
| from cryptography.x509 import Certificate, load_der_x509_certificate, load_pem_x509_certificate | ||||
| from dacite import DaciteError, from_dict | ||||
| from dacite import from_dict | ||||
| from django.core.cache import cache | ||||
| from django.db.models.query import QuerySet | ||||
| from django.utils.timezone import now | ||||
| from jwt import PyJWTError, decode, get_unverified_header | ||||
| from rest_framework.exceptions import ValidationError | ||||
| from rest_framework.fields import ( | ||||
|     ChoiceField, | ||||
|     DateTimeField, | ||||
|     IntegerField, | ||||
|     ListField, | ||||
| ) | ||||
| from rest_framework.fields import BooleanField, DateTimeField, IntegerField | ||||
|  | ||||
| from authentik.core.api.utils import PassiveSerializer | ||||
| from authentik.core.models import User, UserTypes | ||||
| from authentik.enterprise.models import ( | ||||
|     THRESHOLD_READ_ONLY_WEEKS, | ||||
|     THRESHOLD_WARNING_ADMIN_WEEKS, | ||||
|     THRESHOLD_WARNING_EXPIRY_WEEKS, | ||||
|     THRESHOLD_WARNING_USER_WEEKS, | ||||
|     License, | ||||
|     LicenseUsage, | ||||
|     LicenseUsageStatus, | ||||
| ) | ||||
| from authentik.enterprise.models import License, LicenseUsage | ||||
| from authentik.tenants.utils import get_unique_identifier | ||||
|  | ||||
| CACHE_KEY_ENTERPRISE_LICENSE = "goauthentik.io/enterprise/license" | ||||
| @ -55,9 +42,6 @@ def get_license_aud() -> str: | ||||
| class LicenseFlags(Enum): | ||||
|     """License flags""" | ||||
|  | ||||
|     TRIAL = "trial" | ||||
|     NON_PRODUCTION = "non_production" | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
| class LicenseSummary: | ||||
| @ -65,9 +49,12 @@ class LicenseSummary: | ||||
|  | ||||
|     internal_users: int | ||||
|     external_users: int | ||||
|     status: LicenseUsageStatus | ||||
|     valid: bool | ||||
|     show_admin_warning: bool | ||||
|     show_user_warning: bool | ||||
|     read_only: bool | ||||
|     latest_valid: datetime | ||||
|     license_flags: list[LicenseFlags] | ||||
|     has_license: bool | ||||
|  | ||||
|  | ||||
| class LicenseSummarySerializer(PassiveSerializer): | ||||
| @ -75,9 +62,12 @@ class LicenseSummarySerializer(PassiveSerializer): | ||||
|  | ||||
|     internal_users = IntegerField(required=True) | ||||
|     external_users = IntegerField(required=True) | ||||
|     status = ChoiceField(choices=LicenseUsageStatus.choices) | ||||
|     valid = BooleanField() | ||||
|     show_admin_warning = BooleanField() | ||||
|     show_user_warning = BooleanField() | ||||
|     read_only = BooleanField() | ||||
|     latest_valid = DateTimeField() | ||||
|     license_flags = ListField(child=ChoiceField(choices=tuple(x.value for x in LicenseFlags))) | ||||
|     has_license = BooleanField() | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
| @ -90,10 +80,10 @@ class LicenseKey: | ||||
|     name: str | ||||
|     internal_users: int = 0 | ||||
|     external_users: int = 0 | ||||
|     license_flags: list[LicenseFlags] = field(default_factory=list) | ||||
|     flags: list[LicenseFlags] = field(default_factory=list) | ||||
|  | ||||
|     @staticmethod | ||||
|     def validate(jwt: str, check_expiry=True) -> "LicenseKey": | ||||
|     def validate(jwt: str) -> "LicenseKey": | ||||
|         """Validate the license from a given JWT""" | ||||
|         try: | ||||
|             headers = get_unverified_header(jwt) | ||||
| @ -117,28 +107,26 @@ class LicenseKey: | ||||
|                     our_cert.public_key(), | ||||
|                     algorithms=["ES512"], | ||||
|                     audience=get_license_aud(), | ||||
|                     options={"verify_exp": check_expiry, "verify_signature": check_expiry}, | ||||
|                 ), | ||||
|             ) | ||||
|         except PyJWTError: | ||||
|             unverified = decode(jwt, options={"verify_signature": False}) | ||||
|             if unverified["aud"] != get_license_aud(): | ||||
|                 raise ValidationError("Invalid Install ID in license") from None | ||||
|             raise ValidationError("Unable to verify license") from None | ||||
|         return body | ||||
|  | ||||
|     @staticmethod | ||||
|     def get_total() -> "LicenseKey": | ||||
|         """Get a summarized version of all (not expired) licenses""" | ||||
|         active_licenses = License.objects.filter(expiry__gte=now()) | ||||
|         total = LicenseKey(get_license_aud(), 0, "Summarized license", 0, 0) | ||||
|         for lic in License.objects.all(): | ||||
|         for lic in active_licenses: | ||||
|             total.internal_users += lic.internal_users | ||||
|             total.external_users += lic.external_users | ||||
|             exp_ts = int(mktime(lic.expiry.timetuple())) | ||||
|             if total.exp == 0: | ||||
|                 total.exp = exp_ts | ||||
|             total.exp = max(total.exp, exp_ts) | ||||
|             total.license_flags.extend(lic.status.license_flags) | ||||
|             if exp_ts <= total.exp: | ||||
|                 total.exp = exp_ts | ||||
|             total.flags.extend(lic.status.flags) | ||||
|         return total | ||||
|  | ||||
|     @staticmethod | ||||
| @ -147,7 +135,7 @@ class LicenseKey: | ||||
|         return User.objects.all().exclude_anonymous().exclude(is_active=False) | ||||
|  | ||||
|     @staticmethod | ||||
|     def get_internal_user_count(): | ||||
|     def get_default_user_count(): | ||||
|         """Get current default user count""" | ||||
|         return LicenseKey.base_user_qs().filter(type=UserTypes.INTERNAL).count() | ||||
|  | ||||
| @ -156,73 +144,59 @@ class LicenseKey: | ||||
|         """Get current external user count""" | ||||
|         return LicenseKey.base_user_qs().filter(type=UserTypes.EXTERNAL).count() | ||||
|  | ||||
|     def _last_valid_date(self): | ||||
|         last_valid_date = ( | ||||
|             LicenseUsage.objects.order_by("-record_date") | ||||
|             .filter(status=LicenseUsageStatus.VALID) | ||||
|             .first() | ||||
|         ) | ||||
|         if not last_valid_date: | ||||
|             return datetime.fromtimestamp(0, UTC) | ||||
|         return last_valid_date.record_date | ||||
|     def is_valid(self) -> bool: | ||||
|         """Check if the given license body covers all users | ||||
|  | ||||
|     def status(self) -> LicenseUsageStatus: | ||||
|         """Check if the given license body covers all users, and is valid.""" | ||||
|         last_valid = self._last_valid_date() | ||||
|         if self.exp == 0 and not License.objects.exists(): | ||||
|             return LicenseUsageStatus.UNLICENSED | ||||
|         _now = now() | ||||
|         # Check limit-exceeded based status | ||||
|         internal_users = self.get_internal_user_count() | ||||
|         external_users = self.get_external_user_count() | ||||
|         if internal_users > self.internal_users or external_users > self.external_users: | ||||
|             if last_valid < _now - timedelta(weeks=THRESHOLD_READ_ONLY_WEEKS): | ||||
|                 return LicenseUsageStatus.READ_ONLY | ||||
|             if last_valid < _now - timedelta(weeks=THRESHOLD_WARNING_USER_WEEKS): | ||||
|                 return LicenseUsageStatus.LIMIT_EXCEEDED_USER | ||||
|             if last_valid < _now - timedelta(weeks=THRESHOLD_WARNING_ADMIN_WEEKS): | ||||
|                 return LicenseUsageStatus.LIMIT_EXCEEDED_ADMIN | ||||
|         # Check expiry based status | ||||
|         if datetime.fromtimestamp(self.exp, UTC) < _now: | ||||
|             if datetime.fromtimestamp(self.exp, UTC) < _now - timedelta( | ||||
|                 weeks=THRESHOLD_READ_ONLY_WEEKS | ||||
|             ): | ||||
|                 return LicenseUsageStatus.READ_ONLY | ||||
|             return LicenseUsageStatus.EXPIRED | ||||
|         # Expiry warning | ||||
|         if datetime.fromtimestamp(self.exp, UTC) <= _now + timedelta( | ||||
|             weeks=THRESHOLD_WARNING_EXPIRY_WEEKS | ||||
|         ): | ||||
|             return LicenseUsageStatus.EXPIRY_SOON | ||||
|         return LicenseUsageStatus.VALID | ||||
|         Only checks the current count; no historical data is checked""" | ||||
|         default_users = self.get_default_user_count() | ||||
|         if default_users > self.internal_users: | ||||
|             return False | ||||
|         active_users = self.get_external_user_count() | ||||
|         if active_users > self.external_users: | ||||
|             return False | ||||
|         return True | ||||
|  | ||||
|     def record_usage(self): | ||||
|         """Capture the current validity status and metrics and save them""" | ||||
|         threshold = now() - timedelta(hours=8) | ||||
|         usage = ( | ||||
|             LicenseUsage.objects.order_by("-record_date").filter(record_date__gte=threshold).first() | ||||
|         ) | ||||
|         if not usage: | ||||
|             usage = LicenseUsage.objects.create( | ||||
|                 internal_user_count=self.get_internal_user_count(), | ||||
|         if not LicenseUsage.objects.filter(record_date__gte=threshold).exists(): | ||||
|             LicenseUsage.objects.create( | ||||
|                 user_count=self.get_default_user_count(), | ||||
|                 external_user_count=self.get_external_user_count(), | ||||
|                 status=self.status(), | ||||
|                 within_limits=self.is_valid(), | ||||
|             ) | ||||
|         summary = asdict(self.summary()) | ||||
|         # Also cache the latest summary for the middleware | ||||
|         cache.set(CACHE_KEY_ENTERPRISE_LICENSE, summary, timeout=CACHE_EXPIRY_ENTERPRISE_LICENSE) | ||||
|         return usage | ||||
|         return summary | ||||
|  | ||||
|     @staticmethod | ||||
|     def last_valid_date() -> datetime: | ||||
|         """Get the last date the license was valid""" | ||||
|         usage: LicenseUsage = ( | ||||
|             LicenseUsage.filter_not_expired(within_limits=True).order_by("-record_date").first() | ||||
|         ) | ||||
|         if not usage: | ||||
|             return now() | ||||
|         return usage.record_date | ||||
|  | ||||
|     def summary(self) -> LicenseSummary: | ||||
|         """Summary of license status""" | ||||
|         status = self.status() | ||||
|         has_license = License.objects.all().count() > 0 | ||||
|         last_valid = LicenseKey.last_valid_date() | ||||
|         show_admin_warning = last_valid < now() - timedelta(weeks=2) | ||||
|         show_user_warning = last_valid < now() - timedelta(weeks=4) | ||||
|         read_only = last_valid < now() - timedelta(weeks=6) | ||||
|         latest_valid = datetime.fromtimestamp(self.exp) | ||||
|         return LicenseSummary( | ||||
|             show_admin_warning=show_admin_warning and has_license, | ||||
|             show_user_warning=show_user_warning and has_license, | ||||
|             read_only=read_only and has_license, | ||||
|             latest_valid=latest_valid, | ||||
|             internal_users=self.internal_users, | ||||
|             external_users=self.external_users, | ||||
|             status=status, | ||||
|             license_flags=self.license_flags, | ||||
|             valid=self.is_valid(), | ||||
|             has_license=has_license, | ||||
|         ) | ||||
|  | ||||
|     @staticmethod | ||||
| @ -231,8 +205,4 @@ class LicenseKey: | ||||
|         summary = cache.get(CACHE_KEY_ENTERPRISE_LICENSE) | ||||
|         if not summary: | ||||
|             return LicenseKey.get_total().summary() | ||||
|         try: | ||||
|         return from_dict(LicenseSummary, summary) | ||||
|         except DaciteError: | ||||
|             cache.delete(CACHE_KEY_ENTERPRISE_LICENSE) | ||||
|             return LicenseKey.get_total().summary() | ||||
|  | ||||
| @ -8,7 +8,6 @@ from structlog.stdlib import BoundLogger, get_logger | ||||
|  | ||||
| from authentik.enterprise.api import LicenseViewSet | ||||
| from authentik.enterprise.license import LicenseKey | ||||
| from authentik.enterprise.models import LicenseUsageStatus | ||||
| from authentik.flows.views.executor import FlowExecutorView | ||||
| from authentik.lib.utils.reflection import class_to_path | ||||
|  | ||||
| @ -44,7 +43,7 @@ class EnterpriseMiddleware: | ||||
|         cached_status = LicenseKey.cached_summary() | ||||
|         if not cached_status: | ||||
|             return True | ||||
|         if cached_status.status == LicenseUsageStatus.READ_ONLY: | ||||
|         if cached_status.read_only: | ||||
|             return False | ||||
|         return True | ||||
|  | ||||
| @ -54,10 +53,10 @@ class EnterpriseMiddleware: | ||||
|         if request.method.lower() in ["get", "head", "options", "trace"]: | ||||
|             return True | ||||
|         # Always allow requests to manage licenses | ||||
|         if request.resolver_match._func_path == class_to_path(LicenseViewSet): | ||||
|         if class_to_path(request.resolver_match.func) == class_to_path(LicenseViewSet): | ||||
|             return True | ||||
|         # Flow executor is mounted as an API path but explicitly allowed | ||||
|         if request.resolver_match._func_path == class_to_path(FlowExecutorView): | ||||
|         if class_to_path(request.resolver_match.func) == class_to_path(FlowExecutorView): | ||||
|             return True | ||||
|         # Only apply these restrictions to the API | ||||
|         if "authentik_api" not in request.resolver_match.app_names: | ||||
|  | ||||
| @ -1,68 +0,0 @@ | ||||
| # Generated by Django 5.0.8 on 2024-08-08 14:15 | ||||
|  | ||||
| from django.db import migrations, models | ||||
| from django.apps.registry import Apps | ||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||
|  | ||||
|  | ||||
| def migrate_license_usage(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||
|     LicenseUsage = apps.get_model("authentik_enterprise", "licenseusage") | ||||
|     db_alias = schema_editor.connection.alias | ||||
|  | ||||
|     for usage in LicenseUsage.objects.using(db_alias).all(): | ||||
|         usage.status = "valid" if usage.within_limits else "limit_exceeded_admin" | ||||
|         usage.save() | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_enterprise", "0002_rename_users_license_internal_users_and_more"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="licenseusage", | ||||
|             name="status", | ||||
|             field=models.TextField( | ||||
|                 choices=[ | ||||
|                     ("unlicensed", "Unlicensed"), | ||||
|                     ("valid", "Valid"), | ||||
|                     ("expired", "Expired"), | ||||
|                     ("expiry_soon", "Expiry Soon"), | ||||
|                     ("limit_exceeded_admin", "Limit Exceeded Admin"), | ||||
|                     ("limit_exceeded_user", "Limit Exceeded User"), | ||||
|                     ("read_only", "Read Only"), | ||||
|                 ], | ||||
|                 default=None, | ||||
|                 null=True, | ||||
|             ), | ||||
|             preserve_default=False, | ||||
|         ), | ||||
|         migrations.RunPython(migrate_license_usage), | ||||
|         migrations.RemoveField( | ||||
|             model_name="licenseusage", | ||||
|             name="within_limits", | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="licenseusage", | ||||
|             name="status", | ||||
|             field=models.TextField( | ||||
|                 choices=[ | ||||
|                     ("unlicensed", "Unlicensed"), | ||||
|                     ("valid", "Valid"), | ||||
|                     ("expired", "Expired"), | ||||
|                     ("expiry_soon", "Expiry Soon"), | ||||
|                     ("limit_exceeded_admin", "Limit Exceeded Admin"), | ||||
|                     ("limit_exceeded_user", "Limit Exceeded User"), | ||||
|                     ("read_only", "Read Only"), | ||||
|                 ], | ||||
|             ), | ||||
|             preserve_default=False, | ||||
|         ), | ||||
|         migrations.RenameField( | ||||
|             model_name="licenseusage", | ||||
|             old_name="user_count", | ||||
|             new_name="internal_user_count", | ||||
|         ), | ||||
|     ] | ||||
| @ -17,17 +17,6 @@ if TYPE_CHECKING: | ||||
|     from authentik.enterprise.license import LicenseKey | ||||
|  | ||||
|  | ||||
| def usage_expiry(): | ||||
|     """Keep license usage records for 3 months""" | ||||
|     return now() + timedelta(days=30 * 3) | ||||
|  | ||||
|  | ||||
| THRESHOLD_WARNING_ADMIN_WEEKS = 2 | ||||
| THRESHOLD_WARNING_USER_WEEKS = 4 | ||||
| THRESHOLD_WARNING_EXPIRY_WEEKS = 2 | ||||
| THRESHOLD_READ_ONLY_WEEKS = 6 | ||||
|  | ||||
|  | ||||
| class License(SerializerModel): | ||||
|     """An authentik enterprise license""" | ||||
|  | ||||
| @ -50,7 +39,7 @@ class License(SerializerModel): | ||||
|         """Get parsed license status""" | ||||
|         from authentik.enterprise.license import LicenseKey | ||||
|  | ||||
|         return LicenseKey.validate(self.key, check_expiry=False) | ||||
|         return LicenseKey.validate(self.key) | ||||
|  | ||||
|     class Meta: | ||||
|         indexes = (HashIndex(fields=("key",)),) | ||||
| @ -58,23 +47,9 @@ class License(SerializerModel): | ||||
|         verbose_name_plural = _("Licenses") | ||||
|  | ||||
|  | ||||
| class LicenseUsageStatus(models.TextChoices): | ||||
|     """License states an instance/tenant can be in""" | ||||
|  | ||||
|     UNLICENSED = "unlicensed" | ||||
|     VALID = "valid" | ||||
|     EXPIRED = "expired" | ||||
|     EXPIRY_SOON = "expiry_soon" | ||||
|     # User limit exceeded, 2 week threshold, show message in admin interface | ||||
|     LIMIT_EXCEEDED_ADMIN = "limit_exceeded_admin" | ||||
|     # User limit exceeded, 4 week threshold, show message in user interface | ||||
|     LIMIT_EXCEEDED_USER = "limit_exceeded_user" | ||||
|     READ_ONLY = "read_only" | ||||
|  | ||||
|     @property | ||||
|     def is_valid(self) -> bool: | ||||
|         """Quickly check if a license is valid""" | ||||
|         return self in [LicenseUsageStatus.VALID, LicenseUsageStatus.EXPIRY_SOON] | ||||
| def usage_expiry(): | ||||
|     """Keep license usage records for 3 months""" | ||||
|     return now() + timedelta(days=30 * 3) | ||||
|  | ||||
|  | ||||
| class LicenseUsage(ExpiringModel): | ||||
| @ -84,9 +59,9 @@ class LicenseUsage(ExpiringModel): | ||||
|  | ||||
|     usage_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4) | ||||
|  | ||||
|     internal_user_count = models.BigIntegerField() | ||||
|     user_count = models.BigIntegerField() | ||||
|     external_user_count = models.BigIntegerField() | ||||
|     status = models.TextField(choices=LicenseUsageStatus.choices) | ||||
|     within_limits = models.BooleanField() | ||||
|  | ||||
|     record_date = models.DateTimeField(auto_now_add=True) | ||||
|  | ||||
|  | ||||
| @ -13,7 +13,7 @@ class EnterprisePolicyAccessView(PolicyAccessView): | ||||
|  | ||||
|     def check_license(self): | ||||
|         """Check license""" | ||||
|         if not LicenseKey.get_total().status().is_valid: | ||||
|         if not LicenseKey.get_total().is_valid(): | ||||
|             return PolicyResult(False, _("Enterprise required to access this feature.")) | ||||
|         if self.request.user.type != UserTypes.INTERNAL: | ||||
|             return PolicyResult(False, _("Feature only accessible for internal users.")) | ||||
|  | ||||
| @ -6,10 +6,7 @@ from authentik.core.api.providers import ProviderSerializer | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.enterprise.api import EnterpriseRequiredMixin | ||||
| from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider | ||||
| from authentik.enterprise.providers.google_workspace.tasks import ( | ||||
|     google_workspace_sync, | ||||
|     google_workspace_sync_objects, | ||||
| ) | ||||
| from authentik.enterprise.providers.google_workspace.tasks import google_workspace_sync | ||||
| from authentik.lib.sync.outgoing.api import OutgoingSyncProviderStatusMixin | ||||
|  | ||||
|  | ||||
| @ -55,4 +52,3 @@ class GoogleWorkspaceProviderViewSet(OutgoingSyncProviderStatusMixin, UsedByMixi | ||||
|     search_fields = ["name"] | ||||
|     ordering = ["name"] | ||||
|     sync_single_task = google_workspace_sync | ||||
|     sync_objects_task = google_workspace_sync_objects | ||||
|  | ||||
| @ -181,7 +181,7 @@ class GoogleWorkspaceProviderMapping(PropertyMapping): | ||||
|  | ||||
|     @property | ||||
|     def component(self) -> str: | ||||
|         return "ak-property-mapping-provider-google-workspace-form" | ||||
|         return "ak-property-mapping-google-workspace-form" | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> type[Serializer]: | ||||
|  | ||||
| @ -6,10 +6,7 @@ from authentik.core.api.providers import ProviderSerializer | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.enterprise.api import EnterpriseRequiredMixin | ||||
| from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider | ||||
| from authentik.enterprise.providers.microsoft_entra.tasks import ( | ||||
|     microsoft_entra_sync, | ||||
|     microsoft_entra_sync_objects, | ||||
| ) | ||||
| from authentik.enterprise.providers.microsoft_entra.tasks import microsoft_entra_sync | ||||
| from authentik.lib.sync.outgoing.api import OutgoingSyncProviderStatusMixin | ||||
|  | ||||
|  | ||||
| @ -53,4 +50,3 @@ class MicrosoftEntraProviderViewSet(OutgoingSyncProviderStatusMixin, UsedByMixin | ||||
|     search_fields = ["name"] | ||||
|     ordering = ["name"] | ||||
|     sync_single_task = microsoft_entra_sync | ||||
|     sync_objects_task = microsoft_entra_sync_objects | ||||
|  | ||||
| @ -170,7 +170,7 @@ class MicrosoftEntraProviderMapping(PropertyMapping): | ||||
|  | ||||
|     @property | ||||
|     def component(self) -> str: | ||||
|         return "ak-property-mapping-provider-microsoft-entra-form" | ||||
|         return "ak-property-mapping-microsoft-entra-form" | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> type[Serializer]: | ||||
|  | ||||
| @ -34,12 +34,6 @@ class ConnectionTokenSerializer(EnterpriseRequiredMixin, ModelSerializer): | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class ConnectionTokenOwnerFilter(OwnerFilter): | ||||
|     """Owner filter for connection tokens (checks session's user)""" | ||||
|  | ||||
|     owner_key = "session__user" | ||||
|  | ||||
|  | ||||
| class ConnectionTokenViewSet( | ||||
|     mixins.RetrieveModelMixin, | ||||
|     mixins.UpdateModelMixin, | ||||
| @ -56,9 +50,4 @@ class ConnectionTokenViewSet( | ||||
|     search_fields = ["endpoint__name", "provider__name"] | ||||
|     ordering = ["endpoint__name", "provider__name"] | ||||
|     permission_classes = [OwnerSuperuserPermissions] | ||||
|     filter_backends = [ | ||||
|         ConnectionTokenOwnerFilter, | ||||
|         DjangoFilterBackend, | ||||
|         OrderingFilter, | ||||
|         SearchFilter, | ||||
|     ] | ||||
|     filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter] | ||||
|  | ||||
| @ -1,20 +0,0 @@ | ||||
| # Generated by Django 5.0.8 on 2024-08-12 12:54 | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_providers_rac", "0004_alter_connectiontoken_expires"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterModelOptions( | ||||
|             name="racpropertymapping", | ||||
|             options={ | ||||
|                 "verbose_name": "RAC Provider Property Mapping", | ||||
|                 "verbose_name_plural": "RAC Provider Property Mappings", | ||||
|             }, | ||||
|         ), | ||||
|     ] | ||||
| @ -125,7 +125,7 @@ class RACPropertyMapping(PropertyMapping): | ||||
|  | ||||
|     @property | ||||
|     def component(self) -> str: | ||||
|         return "ak-property-mapping-provider-rac-form" | ||||
|         return "ak-property-mapping-rac-form" | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> type[Serializer]: | ||||
| @ -136,8 +136,8 @@ class RACPropertyMapping(PropertyMapping): | ||||
|         return RACPropertyMappingSerializer | ||||
|  | ||||
|     class Meta: | ||||
|         verbose_name = _("RAC Provider Property Mapping") | ||||
|         verbose_name_plural = _("RAC Provider Property Mappings") | ||||
|         verbose_name = _("RAC Property Mapping") | ||||
|         verbose_name_plural = _("RAC Property Mappings") | ||||
|  | ||||
|  | ||||
| class ConnectionToken(ExpiringModel): | ||||
|  | ||||
| @ -21,8 +21,6 @@ from authentik.enterprise.providers.rac.models import ConnectionToken, Endpoint | ||||
| @receiver(user_logged_out) | ||||
| def user_logged_out_session(sender, request: HttpRequest, user: User, **_): | ||||
|     """Disconnect any open RAC connections""" | ||||
|     if not request.session or not request.session.session_key: | ||||
|         return | ||||
|     layer = get_channel_layer() | ||||
|     async_to_sync(layer.group_send)( | ||||
|         RAC_CLIENT_GROUP_SESSION | ||||
|  | ||||
| @ -5,6 +5,7 @@ from channels.sessions import CookieMiddleware | ||||
| from django.urls import path | ||||
| from django.views.decorators.csrf import ensure_csrf_cookie | ||||
|  | ||||
| from authentik.core.channels import TokenOutpostMiddleware | ||||
| from authentik.enterprise.providers.rac.api.connection_tokens import ConnectionTokenViewSet | ||||
| from authentik.enterprise.providers.rac.api.endpoints import EndpointViewSet | ||||
| from authentik.enterprise.providers.rac.api.property_mappings import RACPropertyMappingViewSet | ||||
| @ -12,7 +13,6 @@ from authentik.enterprise.providers.rac.api.providers import RACProviderViewSet | ||||
| from authentik.enterprise.providers.rac.consumer_client import RACClientConsumer | ||||
| from authentik.enterprise.providers.rac.consumer_outpost import RACOutpostConsumer | ||||
| from authentik.enterprise.providers.rac.views import RACInterface, RACStartView | ||||
| from authentik.outposts.channels import TokenOutpostMiddleware | ||||
| from authentik.root.asgi_middleware import SessionMiddleware | ||||
| from authentik.root.middleware import ChannelsLoggingMiddleware | ||||
|  | ||||
| @ -44,7 +44,7 @@ websocket_urlpatterns = [ | ||||
|  | ||||
| api_urlpatterns = [ | ||||
|     ("providers/rac", RACProviderViewSet), | ||||
|     ("propertymappings/provider/rac", RACPropertyMappingViewSet), | ||||
|     ("propertymappings/rac", RACPropertyMappingViewSet), | ||||
|     ("rac/endpoints", EndpointViewSet), | ||||
|     ("rac/connection_tokens", ConnectionTokenViewSet), | ||||
| ] | ||||
|  | ||||
| @ -3,7 +3,7 @@ | ||||
| from datetime import datetime | ||||
|  | ||||
| from django.core.cache import cache | ||||
| from django.db.models.signals import post_delete, post_save, pre_save | ||||
| from django.db.models.signals import post_save, pre_save | ||||
| from django.dispatch import receiver | ||||
| from django.utils.timezone import get_current_timezone | ||||
|  | ||||
| @ -27,9 +27,3 @@ def post_save_license(sender: type[License], instance: License, **_): | ||||
|     """Trigger license usage calculation when license is saved""" | ||||
|     cache.delete(CACHE_KEY_ENTERPRISE_LICENSE) | ||||
|     enterprise_update_usage.delay() | ||||
|  | ||||
|  | ||||
| @receiver(post_delete, sender=License) | ||||
| def post_delete_license(sender: type[License], instance: License, **_): | ||||
|     """Clear license cache when license is deleted""" | ||||
|     cache.delete(CACHE_KEY_ENTERPRISE_LICENSE) | ||||
|  | ||||
| @ -9,26 +9,10 @@ from django.utils.timezone import now | ||||
| from rest_framework.exceptions import ValidationError | ||||
|  | ||||
| from authentik.enterprise.license import LicenseKey | ||||
| from authentik.enterprise.models import ( | ||||
|     THRESHOLD_READ_ONLY_WEEKS, | ||||
|     THRESHOLD_WARNING_ADMIN_WEEKS, | ||||
|     THRESHOLD_WARNING_USER_WEEKS, | ||||
|     License, | ||||
|     LicenseUsage, | ||||
|     LicenseUsageStatus, | ||||
| ) | ||||
| from authentik.enterprise.models import License | ||||
| from authentik.lib.generators import generate_id | ||||
|  | ||||
| # Valid license expiry | ||||
| expiry_valid = int(mktime((now() + timedelta(days=3000)).timetuple())) | ||||
| # Valid license expiry, expires soon | ||||
| expiry_soon = int(mktime((now() + timedelta(hours=10)).timetuple())) | ||||
| # Invalid license expiry, recently expired | ||||
| expiry_expired = int(mktime((now() - timedelta(hours=10)).timetuple())) | ||||
| # Invalid license expiry, expired longer ago | ||||
| expiry_expired_read_only = int( | ||||
|     mktime((now() - timedelta(weeks=THRESHOLD_READ_ONLY_WEEKS + 1)).timetuple()) | ||||
| ) | ||||
| _exp = int(mktime((now() + timedelta(days=3000)).timetuple())) | ||||
|  | ||||
|  | ||||
| class TestEnterpriseLicense(TestCase): | ||||
| @ -39,7 +23,7 @@ class TestEnterpriseLicense(TestCase): | ||||
|         MagicMock( | ||||
|             return_value=LicenseKey( | ||||
|                 aud="", | ||||
|                 exp=expiry_valid, | ||||
|                 exp=_exp, | ||||
|                 name=generate_id(), | ||||
|                 internal_users=100, | ||||
|                 external_users=100, | ||||
| @ -49,7 +33,7 @@ class TestEnterpriseLicense(TestCase): | ||||
|     def test_valid(self): | ||||
|         """Check license verification""" | ||||
|         lic = License.objects.create(key=generate_id()) | ||||
|         self.assertTrue(lic.status.status().is_valid) | ||||
|         self.assertTrue(lic.status.is_valid()) | ||||
|         self.assertEqual(lic.internal_users, 100) | ||||
|  | ||||
|     def test_invalid(self): | ||||
| @ -62,7 +46,7 @@ class TestEnterpriseLicense(TestCase): | ||||
|         MagicMock( | ||||
|             return_value=LicenseKey( | ||||
|                 aud="", | ||||
|                 exp=expiry_valid, | ||||
|                 exp=_exp, | ||||
|                 name=generate_id(), | ||||
|                 internal_users=100, | ||||
|                 external_users=100, | ||||
| @ -72,186 +56,11 @@ class TestEnterpriseLicense(TestCase): | ||||
|     def test_valid_multiple(self): | ||||
|         """Check license verification""" | ||||
|         lic = License.objects.create(key=generate_id()) | ||||
|         self.assertTrue(lic.status.status().is_valid) | ||||
|         self.assertTrue(lic.status.is_valid()) | ||||
|         lic2 = License.objects.create(key=generate_id()) | ||||
|         self.assertTrue(lic2.status.status().is_valid) | ||||
|         self.assertTrue(lic2.status.is_valid()) | ||||
|         total = LicenseKey.get_total() | ||||
|         self.assertEqual(total.internal_users, 200) | ||||
|         self.assertEqual(total.external_users, 200) | ||||
|         self.assertEqual(total.exp, expiry_valid) | ||||
|         self.assertTrue(total.status().is_valid) | ||||
|  | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.validate", | ||||
|         MagicMock( | ||||
|             return_value=LicenseKey( | ||||
|                 aud="", | ||||
|                 exp=expiry_valid, | ||||
|                 name=generate_id(), | ||||
|                 internal_users=100, | ||||
|                 external_users=100, | ||||
|             ) | ||||
|         ), | ||||
|     ) | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.get_internal_user_count", | ||||
|         MagicMock(return_value=1000), | ||||
|     ) | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.get_external_user_count", | ||||
|         MagicMock(return_value=1000), | ||||
|     ) | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.record_usage", | ||||
|         MagicMock(), | ||||
|     ) | ||||
|     def test_limit_exceeded_read_only(self): | ||||
|         """Check license verification""" | ||||
|         License.objects.create(key=generate_id()) | ||||
|         usage = LicenseUsage.objects.create( | ||||
|             internal_user_count=100, | ||||
|             external_user_count=100, | ||||
|             status=LicenseUsageStatus.VALID, | ||||
|         ) | ||||
|         usage.record_date = now() - timedelta(weeks=THRESHOLD_READ_ONLY_WEEKS + 1) | ||||
|         usage.save(update_fields=["record_date"]) | ||||
|         self.assertEqual(LicenseKey.get_total().summary().status, LicenseUsageStatus.READ_ONLY) | ||||
|  | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.validate", | ||||
|         MagicMock( | ||||
|             return_value=LicenseKey( | ||||
|                 aud="", | ||||
|                 exp=expiry_valid, | ||||
|                 name=generate_id(), | ||||
|                 internal_users=100, | ||||
|                 external_users=100, | ||||
|             ) | ||||
|         ), | ||||
|     ) | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.get_internal_user_count", | ||||
|         MagicMock(return_value=1000), | ||||
|     ) | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.get_external_user_count", | ||||
|         MagicMock(return_value=1000), | ||||
|     ) | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.record_usage", | ||||
|         MagicMock(), | ||||
|     ) | ||||
|     def test_limit_exceeded_user_warning(self): | ||||
|         """Check license verification""" | ||||
|         License.objects.create(key=generate_id()) | ||||
|         usage = LicenseUsage.objects.create( | ||||
|             internal_user_count=100, | ||||
|             external_user_count=100, | ||||
|             status=LicenseUsageStatus.VALID, | ||||
|         ) | ||||
|         usage.record_date = now() - timedelta(weeks=THRESHOLD_WARNING_USER_WEEKS + 1) | ||||
|         usage.save(update_fields=["record_date"]) | ||||
|         self.assertEqual( | ||||
|             LicenseKey.get_total().summary().status, LicenseUsageStatus.LIMIT_EXCEEDED_USER | ||||
|         ) | ||||
|  | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.validate", | ||||
|         MagicMock( | ||||
|             return_value=LicenseKey( | ||||
|                 aud="", | ||||
|                 exp=expiry_valid, | ||||
|                 name=generate_id(), | ||||
|                 internal_users=100, | ||||
|                 external_users=100, | ||||
|             ) | ||||
|         ), | ||||
|     ) | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.get_internal_user_count", | ||||
|         MagicMock(return_value=1000), | ||||
|     ) | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.get_external_user_count", | ||||
|         MagicMock(return_value=1000), | ||||
|     ) | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.record_usage", | ||||
|         MagicMock(), | ||||
|     ) | ||||
|     def test_limit_exceeded_admin_warning(self): | ||||
|         """Check license verification""" | ||||
|         License.objects.create(key=generate_id()) | ||||
|         usage = LicenseUsage.objects.create( | ||||
|             internal_user_count=100, | ||||
|             external_user_count=100, | ||||
|             status=LicenseUsageStatus.VALID, | ||||
|         ) | ||||
|         usage.record_date = now() - timedelta(weeks=THRESHOLD_WARNING_ADMIN_WEEKS + 1) | ||||
|         usage.save(update_fields=["record_date"]) | ||||
|         self.assertEqual( | ||||
|             LicenseKey.get_total().summary().status, LicenseUsageStatus.LIMIT_EXCEEDED_ADMIN | ||||
|         ) | ||||
|  | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.validate", | ||||
|         MagicMock( | ||||
|             return_value=LicenseKey( | ||||
|                 aud="", | ||||
|                 exp=expiry_expired_read_only, | ||||
|                 name=generate_id(), | ||||
|                 internal_users=100, | ||||
|                 external_users=100, | ||||
|             ) | ||||
|         ), | ||||
|     ) | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.record_usage", | ||||
|         MagicMock(), | ||||
|     ) | ||||
|     def test_expiry_read_only(self): | ||||
|         """Check license verification""" | ||||
|         License.objects.create(key=generate_id()) | ||||
|         self.assertEqual(LicenseKey.get_total().summary().status, LicenseUsageStatus.READ_ONLY) | ||||
|  | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.validate", | ||||
|         MagicMock( | ||||
|             return_value=LicenseKey( | ||||
|                 aud="", | ||||
|                 exp=expiry_expired, | ||||
|                 name=generate_id(), | ||||
|                 internal_users=100, | ||||
|                 external_users=100, | ||||
|             ) | ||||
|         ), | ||||
|     ) | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.record_usage", | ||||
|         MagicMock(), | ||||
|     ) | ||||
|     def test_expiry_expired(self): | ||||
|         """Check license verification""" | ||||
|         License.objects.create(key=generate_id()) | ||||
|         self.assertEqual(LicenseKey.get_total().summary().status, LicenseUsageStatus.EXPIRED) | ||||
|  | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.validate", | ||||
|         MagicMock( | ||||
|             return_value=LicenseKey( | ||||
|                 aud="", | ||||
|                 exp=expiry_soon, | ||||
|                 name=generate_id(), | ||||
|                 internal_users=100, | ||||
|                 external_users=100, | ||||
|             ) | ||||
|         ), | ||||
|     ) | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.record_usage", | ||||
|         MagicMock(), | ||||
|     ) | ||||
|     def test_expiry_soon(self): | ||||
|         """Check license verification""" | ||||
|         License.objects.create(key=generate_id()) | ||||
|         self.assertEqual(LicenseKey.get_total().summary().status, LicenseUsageStatus.EXPIRY_SOON) | ||||
|         self.assertEqual(total.exp, _exp) | ||||
|         self.assertTrue(total.is_valid()) | ||||
|  | ||||
| @ -1,217 +0,0 @@ | ||||
| """read only tests""" | ||||
|  | ||||
| from datetime import timedelta | ||||
| from unittest.mock import MagicMock, patch | ||||
|  | ||||
| from django.urls import reverse | ||||
| from django.utils.timezone import now | ||||
|  | ||||
| from authentik.core.tests.utils import create_test_admin_user, create_test_flow, create_test_user | ||||
| from authentik.enterprise.license import LicenseKey | ||||
| from authentik.enterprise.models import ( | ||||
|     THRESHOLD_READ_ONLY_WEEKS, | ||||
|     License, | ||||
|     LicenseUsage, | ||||
|     LicenseUsageStatus, | ||||
| ) | ||||
| from authentik.enterprise.tests.test_license import expiry_valid | ||||
| from authentik.flows.models import ( | ||||
|     FlowDesignation, | ||||
|     FlowStageBinding, | ||||
| ) | ||||
| from authentik.flows.tests import FlowTestCase | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.stages.identification.models import IdentificationStage, UserFields | ||||
| from authentik.stages.password import BACKEND_INBUILT | ||||
| from authentik.stages.password.models import PasswordStage | ||||
| from authentik.stages.user_login.models import UserLoginStage | ||||
|  | ||||
|  | ||||
| class TestReadOnly(FlowTestCase): | ||||
|     """Test read_only""" | ||||
|  | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.validate", | ||||
|         MagicMock( | ||||
|             return_value=LicenseKey( | ||||
|                 aud="", | ||||
|                 exp=expiry_valid, | ||||
|                 name=generate_id(), | ||||
|                 internal_users=100, | ||||
|                 external_users=100, | ||||
|             ) | ||||
|         ), | ||||
|     ) | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.get_internal_user_count", | ||||
|         MagicMock(return_value=1000), | ||||
|     ) | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.get_external_user_count", | ||||
|         MagicMock(return_value=1000), | ||||
|     ) | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.record_usage", | ||||
|         MagicMock(), | ||||
|     ) | ||||
|     def test_login(self): | ||||
|         """Test flow, ensure login is still possible in read-only mode""" | ||||
|         License.objects.create(key=generate_id()) | ||||
|         usage = LicenseUsage.objects.create( | ||||
|             internal_user_count=100, | ||||
|             external_user_count=100, | ||||
|             status=LicenseUsageStatus.VALID, | ||||
|         ) | ||||
|         usage.record_date = now() - timedelta(weeks=THRESHOLD_READ_ONLY_WEEKS + 1) | ||||
|         usage.save(update_fields=["record_date"]) | ||||
|  | ||||
|         flow = create_test_flow( | ||||
|             FlowDesignation.AUTHENTICATION, | ||||
|         ) | ||||
|  | ||||
|         ident_stage = IdentificationStage.objects.create( | ||||
|             name=generate_id(), | ||||
|             user_fields=[UserFields.E_MAIL], | ||||
|             pretend_user_exists=False, | ||||
|         ) | ||||
|         FlowStageBinding.objects.create( | ||||
|             target=flow, | ||||
|             stage=ident_stage, | ||||
|             order=0, | ||||
|         ) | ||||
|         password_stage = PasswordStage.objects.create( | ||||
|             name=generate_id(), backends=[BACKEND_INBUILT] | ||||
|         ) | ||||
|         FlowStageBinding.objects.create( | ||||
|             target=flow, | ||||
|             stage=password_stage, | ||||
|             order=1, | ||||
|         ) | ||||
|         login_stage = UserLoginStage.objects.create( | ||||
|             name=generate_id(), | ||||
|         ) | ||||
|         FlowStageBinding.objects.create( | ||||
|             target=flow, | ||||
|             stage=login_stage, | ||||
|             order=2, | ||||
|         ) | ||||
|  | ||||
|         user = create_test_user() | ||||
|  | ||||
|         exec_url = reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}) | ||||
|         response = self.client.get(exec_url) | ||||
|         self.assertStageResponse( | ||||
|             response, | ||||
|             flow, | ||||
|             component="ak-stage-identification", | ||||
|             password_fields=False, | ||||
|             primary_action="Log in", | ||||
|             sources=[], | ||||
|             show_source_labels=False, | ||||
|             user_fields=[UserFields.E_MAIL], | ||||
|         ) | ||||
|         response = self.client.post(exec_url, {"uid_field": user.email}, follow=True) | ||||
|         self.assertStageResponse(response, flow, component="ak-stage-password") | ||||
|         response = self.client.post(exec_url, {"password": user.username}, follow=True) | ||||
|         self.assertStageRedirects(response, reverse("authentik_core:root-redirect")) | ||||
|  | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.validate", | ||||
|         MagicMock( | ||||
|             return_value=LicenseKey( | ||||
|                 aud="", | ||||
|                 exp=expiry_valid, | ||||
|                 name=generate_id(), | ||||
|                 internal_users=100, | ||||
|                 external_users=100, | ||||
|             ) | ||||
|         ), | ||||
|     ) | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.get_internal_user_count", | ||||
|         MagicMock(return_value=1000), | ||||
|     ) | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.get_external_user_count", | ||||
|         MagicMock(return_value=1000), | ||||
|     ) | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.record_usage", | ||||
|         MagicMock(), | ||||
|     ) | ||||
|     def test_manage_licenses(self): | ||||
|         """Test that managing licenses is still possible""" | ||||
|         license = License.objects.create(key=generate_id()) | ||||
|         usage = LicenseUsage.objects.create( | ||||
|             internal_user_count=100, | ||||
|             external_user_count=100, | ||||
|             status=LicenseUsageStatus.VALID, | ||||
|         ) | ||||
|         usage.record_date = now() - timedelta(weeks=THRESHOLD_READ_ONLY_WEEKS + 1) | ||||
|         usage.save(update_fields=["record_date"]) | ||||
|  | ||||
|         admin = create_test_admin_user() | ||||
|         self.client.force_login(admin) | ||||
|  | ||||
|         # Reading is always allowed | ||||
|         response = self.client.get(reverse("authentik_api:license-list")) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|  | ||||
|         # Writing should also be allowed | ||||
|         response = self.client.patch( | ||||
|             reverse("authentik_api:license-detail", kwargs={"pk": license.pk}) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|  | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.validate", | ||||
|         MagicMock( | ||||
|             return_value=LicenseKey( | ||||
|                 aud="", | ||||
|                 exp=expiry_valid, | ||||
|                 name=generate_id(), | ||||
|                 internal_users=100, | ||||
|                 external_users=100, | ||||
|             ) | ||||
|         ), | ||||
|     ) | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.get_internal_user_count", | ||||
|         MagicMock(return_value=1000), | ||||
|     ) | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.get_external_user_count", | ||||
|         MagicMock(return_value=1000), | ||||
|     ) | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.record_usage", | ||||
|         MagicMock(), | ||||
|     ) | ||||
|     def test_manage_flows(self): | ||||
|         """Test flow""" | ||||
|         License.objects.create(key=generate_id()) | ||||
|         usage = LicenseUsage.objects.create( | ||||
|             internal_user_count=100, | ||||
|             external_user_count=100, | ||||
|             status=LicenseUsageStatus.VALID, | ||||
|         ) | ||||
|         usage.record_date = now() - timedelta(weeks=THRESHOLD_READ_ONLY_WEEKS + 1) | ||||
|         usage.save(update_fields=["record_date"]) | ||||
|  | ||||
|         admin = create_test_admin_user() | ||||
|         self.client.force_login(admin) | ||||
|  | ||||
|         # Read only is still allowed | ||||
|         response = self.client.get(reverse("authentik_api:flow-list")) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|  | ||||
|         flow = create_test_flow() | ||||
|         # Writing is not | ||||
|         response = self.client.patch( | ||||
|             reverse("authentik_api:flow-detail", kwargs={"slug": flow.slug}) | ||||
|         ) | ||||
|         self.assertJSONEqual( | ||||
|             response.content, | ||||
|             {"detail": "Request denied due to expired/invalid license.", "code": "denied_license"}, | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 400) | ||||
| @ -69,5 +69,8 @@ class NotificationViewSet( | ||||
|     @action(detail=False, methods=["post"]) | ||||
|     def mark_all_seen(self, request: Request) -> Response: | ||||
|         """Mark all the user's notifications as seen""" | ||||
|         Notification.objects.filter(user=request.user, seen=False).update(seen=True) | ||||
|         notifications = Notification.objects.filter(user=request.user) | ||||
|         for notification in notifications: | ||||
|             notification.seen = True | ||||
|         Notification.objects.bulk_update(notifications, ["seen"]) | ||||
|         return Response({}, status=204) | ||||
|  | ||||
| @ -5,7 +5,7 @@ from typing import TYPE_CHECKING, Optional, TypedDict | ||||
| from django.http import HttpRequest | ||||
| from geoip2.errors import GeoIP2Error | ||||
| from geoip2.models import ASN | ||||
| from sentry_sdk import start_span | ||||
| from sentry_sdk import Hub | ||||
|  | ||||
| from authentik.events.context_processors.mmdb import MMDBContextProcessor | ||||
| from authentik.lib.config import CONFIG | ||||
| @ -48,7 +48,7 @@ class ASNContextProcessor(MMDBContextProcessor): | ||||
|  | ||||
|     def asn(self, ip_address: str) -> ASN | None: | ||||
|         """Wrapper for Reader.asn""" | ||||
|         with start_span( | ||||
|         with Hub.current.start_span( | ||||
|             op="authentik.events.asn.asn", | ||||
|             description=ip_address, | ||||
|         ): | ||||
|  | ||||
| @ -5,7 +5,7 @@ from typing import TYPE_CHECKING, Optional, TypedDict | ||||
| from django.http import HttpRequest | ||||
| from geoip2.errors import GeoIP2Error | ||||
| from geoip2.models import City | ||||
| from sentry_sdk import start_span | ||||
| from sentry_sdk.hub import Hub | ||||
|  | ||||
| from authentik.events.context_processors.mmdb import MMDBContextProcessor | ||||
| from authentik.lib.config import CONFIG | ||||
| @ -49,7 +49,7 @@ class GeoIPContextProcessor(MMDBContextProcessor): | ||||
|  | ||||
|     def city(self, ip_address: str) -> City | None: | ||||
|         """Wrapper for Reader.city""" | ||||
|         with start_span( | ||||
|         with Hub.current.start_span( | ||||
|             op="authentik.events.geo.city", | ||||
|             description=ip_address, | ||||
|         ): | ||||
|  | ||||
| @ -35,7 +35,6 @@ IGNORED_MODELS = tuple( | ||||
|  | ||||
| _CTX_OVERWRITE_USER = ContextVar[User | None]("authentik_events_log_overwrite_user", default=None) | ||||
| _CTX_IGNORE = ContextVar[bool]("authentik_events_log_ignore", default=False) | ||||
| _CTX_REQUEST = ContextVar[HttpRequest | None]("authentik_events_log_request", default=None) | ||||
|  | ||||
|  | ||||
| def should_log_model(model: Model) -> bool: | ||||
| @ -150,13 +149,11 @@ class AuditMiddleware: | ||||
|         m2m_changed.disconnect(dispatch_uid=request.request_id) | ||||
|  | ||||
|     def __call__(self, request: HttpRequest) -> HttpResponse: | ||||
|         _CTX_REQUEST.set(request) | ||||
|         self.connect(request) | ||||
|  | ||||
|         response = self.get_response(request) | ||||
|  | ||||
|         self.disconnect(request) | ||||
|         _CTX_REQUEST.set(None) | ||||
|         return response | ||||
|  | ||||
|     def process_exception(self, request: HttpRequest, exception: Exception): | ||||
| @ -170,7 +167,7 @@ class AuditMiddleware: | ||||
|             thread = EventNewThread( | ||||
|                 EventAction.SUSPICIOUS_REQUEST, | ||||
|                 request, | ||||
|                 message=exception_to_string(exception), | ||||
|                 message=str(exception), | ||||
|             ) | ||||
|             thread.run() | ||||
|         elif before_send({}, {"exc_info": (None, exception, None)}) is not None: | ||||
| @ -195,8 +192,6 @@ class AuditMiddleware: | ||||
|             return | ||||
|         if _CTX_IGNORE.get(): | ||||
|             return | ||||
|         if request.request_id != _CTX_REQUEST.get().request_id: | ||||
|             return | ||||
|         user = self.get_user(request) | ||||
|  | ||||
|         action = EventAction.MODEL_CREATED if created else EventAction.MODEL_UPDATED | ||||
| @ -210,8 +205,6 @@ class AuditMiddleware: | ||||
|             return | ||||
|         if _CTX_IGNORE.get(): | ||||
|             return | ||||
|         if request.request_id != _CTX_REQUEST.get().request_id: | ||||
|             return | ||||
|         user = self.get_user(request) | ||||
|  | ||||
|         EventNewThread( | ||||
| @ -237,8 +230,6 @@ class AuditMiddleware: | ||||
|             return | ||||
|         if _CTX_IGNORE.get(): | ||||
|             return | ||||
|         if request.request_id != _CTX_REQUEST.get().request_id: | ||||
|             return | ||||
|         user = self.get_user(request) | ||||
|  | ||||
|         EventNewThread( | ||||
|  | ||||
| @ -49,7 +49,6 @@ from authentik.policies.models import PolicyBindingModel | ||||
| from authentik.root.middleware import ClientIPMiddleware | ||||
| from authentik.stages.email.utils import TemplateEmailMessage | ||||
| from authentik.tenants.models import Tenant | ||||
| from authentik.tenants.utils import get_current_tenant | ||||
|  | ||||
| LOGGER = get_logger() | ||||
| DISCORD_FIELD_LIMIT = 25 | ||||
| @ -59,10 +58,6 @@ NOTIFICATION_SUMMARY_LENGTH = 75 | ||||
| def default_event_duration(): | ||||
|     """Default duration an Event is saved for. | ||||
|     This is used as a fallback when no brand is available""" | ||||
|     try: | ||||
|         tenant = get_current_tenant() | ||||
|         return now() + timedelta_from_string(tenant.event_retention) | ||||
|     except Tenant.DoesNotExist: | ||||
|     return now() + timedelta(days=365) | ||||
|  | ||||
|  | ||||
| @ -243,13 +238,17 @@ class Event(SerializerModel, ExpiringModel): | ||||
|                 "args": cleanse_dict(QueryDict(request.META.get("QUERY_STRING", ""))), | ||||
|                 "user_agent": request.META.get("HTTP_USER_AGENT", ""), | ||||
|             } | ||||
|             if hasattr(request, "request_id"): | ||||
|                 self.context["http_request"]["request_id"] = request.request_id | ||||
|             # Special case for events created during flow execution | ||||
|             # since they keep the http query within a wrapped query | ||||
|             if QS_QUERY in self.context["http_request"]["args"]: | ||||
|                 wrapped = self.context["http_request"]["args"][QS_QUERY] | ||||
|                 self.context["http_request"]["args"] = cleanse_dict(QueryDict(wrapped)) | ||||
|         if hasattr(request, "tenant"): | ||||
|             tenant: Tenant = request.tenant | ||||
|             # Because self.created only gets set on save, we can't use its value here | ||||
|             # hence we set self.created to now and then use it | ||||
|             self.created = now() | ||||
|             self.expires = self.created + timedelta_from_string(tenant.event_retention) | ||||
|         if hasattr(request, "brand"): | ||||
|             brand: Brand = request.brand | ||||
|             self.brand = sanitize_dict(model_to_dict(brand)) | ||||
|  | ||||
| @ -1,16 +1,13 @@ | ||||
| """authentik events signal listener""" | ||||
|  | ||||
| from importlib import import_module | ||||
| from typing import Any | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.contrib.auth.signals import user_logged_in, user_logged_out | ||||
| from django.db.models.signals import post_save, pre_delete | ||||
| from django.dispatch import receiver | ||||
| from django.http import HttpRequest | ||||
| from rest_framework.request import Request | ||||
|  | ||||
| from authentik.core.models import AuthenticatedSession, User | ||||
| from authentik.core.models import User | ||||
| from authentik.core.signals import login_failed, password_changed | ||||
| from authentik.events.apps import SYSTEM_TASK_STATUS | ||||
| from authentik.events.models import Event, EventAction, SystemTask | ||||
| @ -26,7 +23,6 @@ from authentik.stages.user_write.signals import user_write | ||||
| from authentik.tenants.utils import get_current_tenant | ||||
|  | ||||
| SESSION_LOGIN_EVENT = "login_event" | ||||
| _session_engine = import_module(settings.SESSION_ENGINE) | ||||
|  | ||||
|  | ||||
| @receiver(user_logged_in) | ||||
| @ -44,20 +40,11 @@ def on_user_logged_in(sender, request: HttpRequest, user: User, **_): | ||||
|             kwargs[PLAN_CONTEXT_METHOD_ARGS] = flow_plan.context.get(PLAN_CONTEXT_METHOD_ARGS, {}) | ||||
|     event = Event.new(EventAction.LOGIN, **kwargs).from_http(request, user=user) | ||||
|     request.session[SESSION_LOGIN_EVENT] = event | ||||
|     request.session.save() | ||||
|  | ||||
|  | ||||
| def get_login_event(request_or_session: HttpRequest | AuthenticatedSession | None) -> Event | None: | ||||
| def get_login_event(request: HttpRequest) -> Event | None: | ||||
|     """Wrapper to get login event that can be mocked in tests""" | ||||
|     session = None | ||||
|     if not request_or_session: | ||||
|         return None | ||||
|     if isinstance(request_or_session, HttpRequest | Request): | ||||
|         session = request_or_session.session | ||||
|     if isinstance(request_or_session, AuthenticatedSession): | ||||
|         SessionStore = _session_engine.SessionStore | ||||
|         session = SessionStore(request_or_session.session_key) | ||||
|     return session.get(SESSION_LOGIN_EVENT, None) | ||||
|     return request.session.get(SESSION_LOGIN_EVENT, None) | ||||
|  | ||||
|  | ||||
| @receiver(user_logged_out) | ||||
|  | ||||
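The change above narrows `get_login_event` back to taking only an `HttpRequest`; the removed variant also accepted an `AuthenticatedSession` and loaded the session store by key via the configured session engine. The sketch below illustrates that dual-input lookup with plain Python objects; all names here are illustrative, and a real Django app would obtain the store with `import_module(settings.SESSION_ENGINE).SessionStore(session_key)` as in the removed code.

# Editor's sketch: accept either a live request (with .session) or a stored
# session reference (with .session_key) and read the same key from either.
from types import SimpleNamespace

SESSION_LOGIN_EVENT = "login_event"


def load_session_by_key(session_key: str) -> dict:
    """Stand-in for a Django SessionStore lookup keyed by session_key."""
    fake_backend = {"abc123": {SESSION_LOGIN_EVENT: "event-from-store"}}
    return fake_backend.get(session_key, {})


def get_login_event(request_or_session) -> str | None:
    if request_or_session is None:
        return None
    if hasattr(request_or_session, "session"):        # HttpRequest-like
        session = request_or_session.session
    elif hasattr(request_or_session, "session_key"):  # AuthenticatedSession-like
        session = load_session_by_key(request_or_session.session_key)
    else:
        return None
    return session.get(SESSION_LOGIN_EVENT)


print(get_login_event(SimpleNamespace(session={SESSION_LOGIN_EVENT: "event-from-request"})))
print(get_login_event(SimpleNamespace(session_key="abc123")))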
| @ -6,7 +6,6 @@ from django.db.models import Model | ||||
| from django.test import TestCase | ||||
|  | ||||
| from authentik.core.models import default_token_key | ||||
| from authentik.events.models import default_event_duration | ||||
| from authentik.lib.utils.reflection import get_apps | ||||
|  | ||||
|  | ||||
| @ -21,7 +20,7 @@ def model_tester_factory(test_model: type[Model]) -> Callable: | ||||
|         allowed = 0 | ||||
|         # Token-like objects need to look up the current tenant to get the default token length | ||||
|         for field in test_model._meta.fields: | ||||
|             if field.default in [default_token_key, default_event_duration]: | ||||
|             if field.default == default_token_key: | ||||
|                 allowed += 1 | ||||
|         with self.assertNumQueries(allowed): | ||||
|             str(test_model()) | ||||
|  | ||||
| @ -2,8 +2,7 @@ | ||||
|  | ||||
| from unittest.mock import MagicMock, patch | ||||
|  | ||||
| from django.urls import reverse | ||||
| from rest_framework.test import APITestCase | ||||
| from django.test import TestCase | ||||
|  | ||||
| from authentik.core.models import Group, User | ||||
| from authentik.events.models import ( | ||||
| @ -11,7 +10,6 @@ from authentik.events.models import ( | ||||
|     EventAction, | ||||
|     Notification, | ||||
|     NotificationRule, | ||||
|     NotificationSeverity, | ||||
|     NotificationTransport, | ||||
|     NotificationWebhookMapping, | ||||
|     TransportMode, | ||||
| @ -22,7 +20,7 @@ from authentik.policies.exceptions import PolicyException | ||||
| from authentik.policies.models import PolicyBinding | ||||
|  | ||||
|  | ||||
| class TestEventsNotifications(APITestCase): | ||||
| class TestEventsNotifications(TestCase): | ||||
|     """Test Event Notifications""" | ||||
|  | ||||
|     def setUp(self) -> None: | ||||
| @ -133,15 +131,3 @@ class TestEventsNotifications(APITestCase): | ||||
|         Notification.objects.all().delete() | ||||
|         Event.new(EventAction.CUSTOM_PREFIX).save() | ||||
|         self.assertEqual(Notification.objects.first().body, "foo") | ||||
|  | ||||
|     def test_api_mark_all_seen(self): | ||||
|         """Test mark_all_seen""" | ||||
|         self.client.force_login(self.user) | ||||
|  | ||||
|         Notification.objects.create( | ||||
|             severity=NotificationSeverity.NOTICE, body="foo", user=self.user, seen=False | ||||
|         ) | ||||
|  | ||||
|         response = self.client.post(reverse("authentik_api:notification-mark-all-seen")) | ||||
|         self.assertEqual(response.status_code, 204) | ||||
|         self.assertFalse(Notification.objects.filter(body="foo", seen=False).exists()) | ||||
|  | ||||
| @ -37,7 +37,6 @@ from authentik.lib.utils.file import ( | ||||
| ) | ||||
| from authentik.lib.views import bad_request_message | ||||
| from authentik.rbac.decorators import permission_required | ||||
| from authentik.rbac.filters import ObjectFilter | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
| @ -282,7 +281,7 @@ class FlowViewSet(UsedByMixin, ModelViewSet): | ||||
|             400: OpenApiResponse(description="Flow not applicable"), | ||||
|         }, | ||||
|     ) | ||||
|     @action(detail=True, pagination_class=None, filter_backends=[ObjectFilter]) | ||||
|     @action(detail=True, pagination_class=None, filter_backends=[]) | ||||
|     def execute(self, request: Request, slug: str): | ||||
|         """Execute flow for current user""" | ||||
|         # Because we pre-plan the flow here, and not in the planner, we need to manually clear | ||||
|  | ||||
| @ -5,7 +5,7 @@ from typing import Any | ||||
|  | ||||
| from django.core.cache import cache | ||||
| from django.http import HttpRequest | ||||
| from sentry_sdk import start_span | ||||
| from sentry_sdk.hub import Hub | ||||
| from sentry_sdk.tracing import Span | ||||
| from structlog.stdlib import BoundLogger, get_logger | ||||
|  | ||||
| @ -151,7 +151,9 @@ class FlowPlanner: | ||||
|     def plan(self, request: HttpRequest, default_context: dict[str, Any] | None = None) -> FlowPlan: | ||||
|         """Check each of the flows' policies, check policies for each stage with PolicyBinding | ||||
|         and return ordered list""" | ||||
|         with start_span(op="authentik.flow.planner.plan", description=self.flow.slug) as span: | ||||
|         with Hub.current.start_span( | ||||
|             op="authentik.flow.planner.plan", description=self.flow.slug | ||||
|         ) as span: | ||||
|             span: Span | ||||
|             span.set_data("flow", self.flow) | ||||
|             span.set_data("request", request) | ||||
| @ -216,7 +218,7 @@ class FlowPlanner: | ||||
|         """Build flow plan by checking each stage in their respective | ||||
|         order and checking the applied policies""" | ||||
|         with ( | ||||
|             start_span( | ||||
|             Hub.current.start_span( | ||||
|                 op="authentik.flow.planner.build_plan", | ||||
|                 description=self.flow.slug, | ||||
|             ) as span, | ||||
|  | ||||
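This and the following hunks swap `sentry_sdk.start_span(...)` for the older `Hub.current.start_span(...)`. Both forms exist in the Sentry Python SDK, but which is appropriate depends on the installed major version (the Hub API is deprecated in sentry-sdk 2.x). A hedged sketch of the two call styles, assuming a sentry-sdk release that still ships the Hub API:

# Both span styles side by side; an empty DSN initialises the SDK without sending events.
import sentry_sdk
from sentry_sdk.hub import Hub

sentry_sdk.init(dsn="", traces_sample_rate=1.0)

# Top-level helper (the style this branch removes):
with sentry_sdk.start_span(op="authentik.example", description="demo") as span:
    span.set_data("key", "value")

# Hub-based helper (the style this branch restores):
with Hub.current.start_span(op="authentik.example", description="demo") as span:
    span.set_data("key", "value")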
| @ -10,7 +10,7 @@ from django.urls import reverse | ||||
| from django.views.generic.base import View | ||||
| from prometheus_client import Histogram | ||||
| from rest_framework.request import Request | ||||
| from sentry_sdk import start_span | ||||
| from sentry_sdk.hub import Hub | ||||
| from structlog.stdlib import BoundLogger, get_logger | ||||
|  | ||||
| from authentik.core.models import User | ||||
| @ -123,7 +123,7 @@ class ChallengeStageView(StageView): | ||||
|                 ) | ||||
|                 return self.executor.restart_flow(keep_context) | ||||
|             with ( | ||||
|                 start_span( | ||||
|                 Hub.current.start_span( | ||||
|                     op="authentik.flow.stage.challenge_invalid", | ||||
|                     description=self.__class__.__name__, | ||||
|                 ), | ||||
| @ -133,7 +133,7 @@ class ChallengeStageView(StageView): | ||||
|             ): | ||||
|                 return self.challenge_invalid(challenge) | ||||
|         with ( | ||||
|             start_span( | ||||
|             Hub.current.start_span( | ||||
|                 op="authentik.flow.stage.challenge_valid", | ||||
|                 description=self.__class__.__name__, | ||||
|             ), | ||||
| @ -159,7 +159,7 @@ class ChallengeStageView(StageView): | ||||
|  | ||||
|     def _get_challenge(self, *args, **kwargs) -> Challenge: | ||||
|         with ( | ||||
|             start_span( | ||||
|             Hub.current.start_span( | ||||
|                 op="authentik.flow.stage.get_challenge", | ||||
|                 description=self.__class__.__name__, | ||||
|             ), | ||||
| @ -172,7 +172,7 @@ class ChallengeStageView(StageView): | ||||
|             except StageInvalidException as exc: | ||||
|                 self.logger.debug("Got StageInvalidException", exc=exc) | ||||
|                 return self.executor.stage_invalid() | ||||
|         with start_span( | ||||
|         with Hub.current.start_span( | ||||
|             op="authentik.flow.stage._get_challenge", | ||||
|             description=self.__class__.__name__, | ||||
|         ): | ||||
|  | ||||
| @ -18,8 +18,9 @@ from drf_spectacular.types import OpenApiTypes | ||||
| from drf_spectacular.utils import OpenApiParameter, PolymorphicProxySerializer, extend_schema | ||||
| from rest_framework.permissions import AllowAny | ||||
| from rest_framework.views import APIView | ||||
| from sentry_sdk import capture_exception, start_span | ||||
| from sentry_sdk import capture_exception | ||||
| from sentry_sdk.api import set_tag | ||||
| from sentry_sdk.hub import Hub | ||||
| from structlog.stdlib import BoundLogger, get_logger | ||||
|  | ||||
| from authentik.brands.models import Brand | ||||
| @ -153,7 +154,9 @@ class FlowExecutorView(APIView): | ||||
|         return plan | ||||
|  | ||||
|     def dispatch(self, request: HttpRequest, flow_slug: str) -> HttpResponse: | ||||
|         with start_span(op="authentik.flow.executor.dispatch", description=self.flow.slug) as span: | ||||
|         with Hub.current.start_span( | ||||
|             op="authentik.flow.executor.dispatch", description=self.flow.slug | ||||
|         ) as span: | ||||
|             span.set_data("authentik Flow", self.flow.slug) | ||||
|             get_params = QueryDict(request.GET.get(QS_QUERY, "")) | ||||
|             if QS_KEY_TOKEN in get_params: | ||||
| @ -271,7 +274,7 @@ class FlowExecutorView(APIView): | ||||
|         ) | ||||
|         try: | ||||
|             with ( | ||||
|                 start_span( | ||||
|                 Hub.current.start_span( | ||||
|                     op="authentik.flow.executor.stage", | ||||
|                     description=class_path, | ||||
|                 ) as span, | ||||
| @ -322,7 +325,7 @@ class FlowExecutorView(APIView): | ||||
|         ) | ||||
|         try: | ||||
|             with ( | ||||
|                 start_span( | ||||
|                 Hub.current.start_span( | ||||
|                     op="authentik.flow.executor.stage", | ||||
|                     description=class_path, | ||||
|                 ) as span, | ||||
|  | ||||
| @ -13,7 +13,7 @@ from lxml import etree  # nosec | ||||
| from lxml.etree import Element, SubElement  # nosec | ||||
| from requests.exceptions import ConnectionError, HTTPError, RequestException, Timeout | ||||
|  | ||||
| from authentik.lib.utils.dict import get_path_from_dict | ||||
| from authentik.lib.config import get_path_from_dict | ||||
| from authentik.lib.utils.http import get_http_session | ||||
| from authentik.tenants.utils import get_current_tenant | ||||
|  | ||||
|  | ||||
| @ -19,8 +19,6 @@ from urllib.parse import quote_plus, urlparse | ||||
| import yaml | ||||
| from django.conf import ImproperlyConfigured | ||||
|  | ||||
| from authentik.lib.utils.dict import get_path_from_dict, set_path_in_dict | ||||
|  | ||||
| SEARCH_PATHS = ["authentik/lib/default.yml", "/etc/authentik/config.yml", ""] + glob( | ||||
|     "/etc/authentik/config.d/*.yml", recursive=True | ||||
| ) | ||||
| @ -49,6 +47,29 @@ DEPRECATIONS = { | ||||
| } | ||||
|  | ||||
|  | ||||
| def get_path_from_dict(root: dict, path: str, sep=".", default=None) -> Any: | ||||
|     """Recursively walk through `root`, checking each part of `path` separated by `sep`. | ||||
|     If at any point a dict does not exist, return default""" | ||||
|     for comp in path.split(sep): | ||||
|         if root and comp in root: | ||||
|             root = root.get(comp) | ||||
|         else: | ||||
|             return default | ||||
|     return root | ||||
|  | ||||
|  | ||||
| def set_path_in_dict(root: dict, path: str, value: Any, sep="."): | ||||
|     """Recursively walk through `root`, checking each part of `path` separated by `sep` | ||||
|     and setting the last value to `value`""" | ||||
|     # Walk each component of the path | ||||
|     path_parts = path.split(sep) | ||||
|     for comp in path_parts[:-1]: | ||||
|         if comp not in root: | ||||
|             root[comp] = {} | ||||
|         root = root.get(comp, {}) | ||||
|     root[path_parts[-1]] = value | ||||
|  | ||||
|  | ||||
| @dataclass(slots=True) | ||||
| class Attr: | ||||
|     """Single configuration attribute""" | ||||
|  | ||||
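Since `get_path_from_dict` and `set_path_in_dict` are now defined inline in the config module (rather than imported from `authentik.lib.utils.dict`), a short usage sketch of the two helpers shown above may help; the `postgresql.*` keys are purely illustrative.

# Usage sketch for the helpers defined above (illustrative keys only).
config = {}
set_path_in_dict(config, "postgresql.host", "localhost")
set_path_in_dict(config, "postgresql.port", 5432)

print(get_path_from_dict(config, "postgresql.host"))                    # localhost
print(get_path_from_dict(config, "postgresql.missing", default="n/a"))  # n/a
print(config)  # {'postgresql': {'host': 'localhost', 'port': 5432}}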
| @ -2,6 +2,7 @@ | ||||
|  | ||||
| import re | ||||
| import socket | ||||
| from collections.abc import Iterable | ||||
| from ipaddress import ip_address, ip_network | ||||
| from textwrap import indent | ||||
| from types import CodeType | ||||
| @ -12,7 +13,7 @@ from django.core.exceptions import FieldError | ||||
| from django.utils.text import slugify | ||||
| from guardian.shortcuts import get_anonymous_user | ||||
| from rest_framework.serializers import ValidationError | ||||
| from sentry_sdk import start_span | ||||
| from sentry_sdk.hub import Hub | ||||
| from sentry_sdk.tracing import Span | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| @ -27,12 +28,6 @@ from authentik.stages.authenticator import devices_for_user | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
| ARG_SANITIZE = re.compile(r"[:.-]") | ||||
|  | ||||
|  | ||||
| def sanitize_arg(arg_name: str) -> str: | ||||
|     return re.sub(ARG_SANITIZE, "_", arg_name) | ||||
|  | ||||
|  | ||||
| class BaseEvaluator: | ||||
|     """Validate and evaluate python-based expressions""" | ||||
| @ -182,9 +177,9 @@ class BaseEvaluator: | ||||
|         proc = PolicyProcess(PolicyBinding(policy=policy), request=req, connection=None) | ||||
|         return proc.profiling_wrapper() | ||||
|  | ||||
|     def wrap_expression(self, expression: str) -> str: | ||||
|     def wrap_expression(self, expression: str, params: Iterable[str]) -> str: | ||||
|         """Wrap expression in a function, call it, and save the result as `result`""" | ||||
|         handler_signature = ",".join(sanitize_arg(x) for x in self._context.keys()) | ||||
|         handler_signature = ",".join(params) | ||||
|         full_expression = "" | ||||
|         full_expression += f"def handler({handler_signature}):\n" | ||||
|         full_expression += indent(expression, "    ") | ||||
| @ -193,14 +188,14 @@ class BaseEvaluator: | ||||
|  | ||||
|     def compile(self, expression: str) -> CodeType: | ||||
|         """Parse expression. Raises SyntaxError or ValueError if the syntax is incorrect.""" | ||||
|         expression = self.wrap_expression(expression) | ||||
|         return compile(expression, self._filename, "exec") | ||||
|         param_keys = self._context.keys() | ||||
|         return compile(self.wrap_expression(expression, param_keys), self._filename, "exec") | ||||
|  | ||||
|     def evaluate(self, expression_source: str) -> Any: | ||||
|         """Parse and evaluate expression. If the syntax is incorrect, a SyntaxError is raised. | ||||
|         If any exception is raised during execution, it is raised. | ||||
|         The result is returned without any type-checking.""" | ||||
|         with start_span(op="authentik.lib.evaluator.evaluate") as span: | ||||
|         with Hub.current.start_span(op="authentik.lib.evaluator.evaluate") as span: | ||||
|             span: Span | ||||
|             span.description = self._filename | ||||
|             span.set_data("expression", expression_source) | ||||
| @ -210,7 +205,7 @@ class BaseEvaluator: | ||||
|                 self.handle_error(exc, expression_source) | ||||
|                 raise exc | ||||
|             try: | ||||
|                 _locals = {sanitize_arg(x): y for x, y in self._context.items()} | ||||
|                 _locals = self._context | ||||
|                 # Yes this is an exec, yes it is potentially bad. Since we limit what variables are | ||||
|                 # available here, and these policies can only be edited by admins, this is a risk | ||||
|                 # we're willing to take. | ||||
|  | ||||
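The evaluator hunks above drop the argument sanitisation added in the newer code: context keys are turned into valid Python parameter names, the expression is wrapped in a generated `handler(...)`, and the sanitised context is passed as the `exec` locals. A self-contained sketch of that wrapping approach follows; the `foo-bar.baz` key is a hypothetical example, not a real authentik context key.

# Editor's sketch of the expression-wrapping approach shown above.
import re
from textwrap import indent

ARG_SANITIZE = re.compile(r"[:.-]")


def sanitize_arg(arg_name: str) -> str:
    """Turn context keys such as 'foo-bar.baz' into valid parameter names."""
    return ARG_SANITIZE.sub("_", arg_name)


def wrap_expression(expression: str, context: dict) -> str:
    """Wrap the expression in a generated handler() whose parameters are the
    (sanitised) context keys, mirroring the evaluator code above."""
    handler_signature = ",".join(sanitize_arg(k) for k in context)
    src = f"def handler({handler_signature}):\n"
    src += indent(expression, "    ")
    src += f"\nresult = handler({handler_signature})"
    return src


context = {"request": object(), "foo-bar.baz": {"name": "demo"}}  # hypothetical context
source = wrap_expression("return foo_bar_baz['name']", context)
_locals = {sanitize_arg(k): v for k, v in context.items()}
exec(compile(source, "<expression>", "exec"), {}, _locals)  # nosec: illustrative only
print(_locals["result"])  # demo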
| @ -68,7 +68,7 @@ def sentry_init(**sentry_init_kwargs): | ||||
|         integrations=[ | ||||
|             ArgvIntegration(), | ||||
|             StdlibIntegration(), | ||||
|             DjangoIntegration(transaction_style="function_name", cache_spans=True), | ||||
|             DjangoIntegration(transaction_style="function_name"), | ||||
|             CeleryIntegration(), | ||||
|             RedisIntegration(), | ||||
|             ThreadingIntegration(propagate_hub=True), | ||||
|  | ||||
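The final hunk drops `cache_spans=True` from `DjangoIntegration`. `cache_spans` is an option in recent sentry-sdk releases that emits spans for Django cache operations; a hedged configuration sketch, assuming Django and sentry-sdk are installed:

# Sketch of the integration setup with cache spans enabled (the 2024.8 behaviour).
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration

sentry_sdk.init(
    dsn="",  # empty DSN: initialise the SDK without sending events
    traces_sample_rate=0.1,
    integrations=[
        DjangoIntegration(
            transaction_style="function_name",
            cache_spans=True,  # emit spans for Django cache.get/set calls
        ),
    ],
)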
Some files were not shown because too many files have changed in this diff.