Compare commits
	
		
			13 Commits
		
	
	
		
			version/20
			...
			version/20
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| e87a17fd81 | |||
| bb1bcb29cd | |||
| 0a5bdad972 | |||
| d313225956 | |||
| 249dc276d4 | |||
| 5fb7dc4cb3 | |||
| 82930ee807 | |||
| ac25fbab54 | |||
| 15cb6b18f6 | |||
| fdd39b4b4c | |||
| 589304df4f | |||
| 4d920ff477 | |||
| 88dc616c5e | 
| @ -1,5 +1,5 @@ | ||||
| [bumpversion] | ||||
| current_version = 2025.2.2 | ||||
| current_version = 2024.12.1 | ||||
| tag = True | ||||
| commit = True | ||||
| parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? | ||||
| @ -31,4 +31,4 @@ optional_value = final | ||||
|  | ||||
| [bumpversion:file:web/src/common/constants.ts] | ||||
|  | ||||
| [bumpversion:file:lifecycle/aws/template.yaml] | ||||
| [bumpversion:file:website/docs/install-config/install/aws/template.yaml] | ||||
|  | ||||
| @ -35,6 +35,14 @@ runs: | ||||
|             AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s | ||||
|             ``` | ||||
|  | ||||
|             For arm64, use these values: | ||||
|  | ||||
|             ```shell | ||||
|             AUTHENTIK_IMAGE=ghcr.io/goauthentik/dev-server | ||||
|             AUTHENTIK_TAG=${{ inputs.tag }}-arm64 | ||||
|             AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s | ||||
|             ``` | ||||
|  | ||||
|             Afterwards, run the upgrade commands from the latest release notes. | ||||
|           </details> | ||||
|           <details> | ||||
| @ -52,6 +60,18 @@ runs: | ||||
|                     tag: ${{ inputs.tag }} | ||||
|             ``` | ||||
|  | ||||
|             For arm64, use these values: | ||||
|  | ||||
|             ```yaml | ||||
|             authentik: | ||||
|                 outposts: | ||||
|                     container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s | ||||
|             global: | ||||
|                 image: | ||||
|                     repository: ghcr.io/goauthentik/dev-server | ||||
|                     tag: ${{ inputs.tag }}-arm64 | ||||
|             ``` | ||||
|  | ||||
|             Afterwards, run the upgrade commands from the latest release notes. | ||||
|           </details> | ||||
|         edit-mode: replace | ||||
|  | ||||
							
								
								
									
										14
									
								
								.github/actions/docker-push-variables/action.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										14
									
								
								.github/actions/docker-push-variables/action.yml
									
									
									
									
										vendored
									
									
								
							| @ -9,9 +9,6 @@ inputs: | ||||
|   image-arch: | ||||
|     required: false | ||||
|     description: "Docker image arch" | ||||
|   release: | ||||
|     required: true | ||||
|     description: "True if this is a release build, false if this is a dev/PR build" | ||||
|  | ||||
| outputs: | ||||
|   shouldPush: | ||||
| @ -32,24 +29,15 @@ outputs: | ||||
|   imageTags: | ||||
|     description: "Docker image tags" | ||||
|     value: ${{ steps.ev.outputs.imageTags }} | ||||
|   imageTagsJSON: | ||||
|     description: "Docker image tags, as a JSON array" | ||||
|     value: ${{ steps.ev.outputs.imageTagsJSON }} | ||||
|   attestImageNames: | ||||
|     description: "Docker image names used for attestation" | ||||
|     value: ${{ steps.ev.outputs.attestImageNames }} | ||||
|   cacheTo: | ||||
|     description: "cache-to value for the docker build step" | ||||
|     value: ${{ steps.ev.outputs.cacheTo }} | ||||
|   imageMainTag: | ||||
|     description: "Docker image main tag" | ||||
|     value: ${{ steps.ev.outputs.imageMainTag }} | ||||
|   imageMainName: | ||||
|     description: "Docker image main name" | ||||
|     value: ${{ steps.ev.outputs.imageMainName }} | ||||
|   imageBuildArgs: | ||||
|     description: "Docker image build args" | ||||
|     value: ${{ steps.ev.outputs.imageBuildArgs }} | ||||
|  | ||||
| runs: | ||||
|   using: "composite" | ||||
| @ -60,8 +48,6 @@ runs: | ||||
|       env: | ||||
|         IMAGE_NAME: ${{ inputs.image-name }} | ||||
|         IMAGE_ARCH: ${{ inputs.image-arch }} | ||||
|         RELEASE: ${{ inputs.release }} | ||||
|         PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }} | ||||
|         REF: ${{ github.ref }} | ||||
|       run: | | ||||
|         python3 ${{ github.action_path }}/push_vars.py | ||||
|  | ||||
| @ -2,7 +2,6 @@ | ||||
|  | ||||
| import configparser | ||||
| import os | ||||
| from json import dumps | ||||
| from time import time | ||||
|  | ||||
| parser = configparser.ConfigParser() | ||||
| @ -49,7 +48,7 @@ if is_release: | ||||
|             ] | ||||
| else: | ||||
|     suffix = "" | ||||
|     if image_arch: | ||||
|     if image_arch and image_arch != "amd64": | ||||
|         suffix = f"-{image_arch}" | ||||
|     for name in image_names: | ||||
|         image_tags += [ | ||||
| @ -71,31 +70,12 @@ def get_attest_image_names(image_with_tags: list[str]): | ||||
|     return ",".join(set(image_tags)) | ||||
|  | ||||
|  | ||||
| # Generate `cache-to` param | ||||
| cache_to = "" | ||||
| if should_push: | ||||
|     _cache_tag = "buildcache" | ||||
|     if image_arch: | ||||
|         _cache_tag += f"-{image_arch}" | ||||
|     cache_to = f"type=registry,ref={get_attest_image_names(image_tags)}:{_cache_tag},mode=max" | ||||
|  | ||||
|  | ||||
| image_build_args = [] | ||||
| if os.getenv("RELEASE", "false").lower() == "true": | ||||
|     image_build_args = [f"VERSION={os.getenv('REF')}"] | ||||
| else: | ||||
|     image_build_args = [f"GIT_BUILD_HASH={sha}"] | ||||
| image_build_args = "\n".join(image_build_args) | ||||
|  | ||||
| with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output: | ||||
|     print(f"shouldPush={str(should_push).lower()}", file=_output) | ||||
|     print(f"sha={sha}", file=_output) | ||||
|     print(f"version={version}", file=_output) | ||||
|     print(f"prerelease={prerelease}", file=_output) | ||||
|     print(f"imageTags={','.join(image_tags)}", file=_output) | ||||
|     print(f"imageTagsJSON={dumps(image_tags)}", file=_output) | ||||
|     print(f"attestImageNames={get_attest_image_names(image_tags)}", file=_output) | ||||
|     print(f"imageMainTag={image_main_tag}", file=_output) | ||||
|     print(f"imageMainName={image_tags[0]}", file=_output) | ||||
|     print(f"cacheTo={cache_to}", file=_output) | ||||
|     print(f"imageBuildArgs={image_build_args}", file=_output) | ||||
|  | ||||
							
								
								
									
										11
									
								
								.github/actions/docker-push-variables/test.sh
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										11
									
								
								.github/actions/docker-push-variables/test.sh
									
									
									
									
										vendored
									
									
								
							| @ -1,18 +1,7 @@ | ||||
| #!/bin/bash -x | ||||
| SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) | ||||
| # Non-pushing PR | ||||
| GITHUB_OUTPUT=/dev/stdout \ | ||||
|     GITHUB_REF=ref \ | ||||
|     GITHUB_SHA=sha \ | ||||
|     IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \ | ||||
|     GITHUB_REPOSITORY=goauthentik/authentik \ | ||||
|     python $SCRIPT_DIR/push_vars.py | ||||
|  | ||||
| # Pushing PR/main | ||||
| GITHUB_OUTPUT=/dev/stdout \ | ||||
|     GITHUB_REF=ref \ | ||||
|     GITHUB_SHA=sha \ | ||||
|     IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \ | ||||
|     GITHUB_REPOSITORY=goauthentik/authentik \ | ||||
|     DOCKER_USERNAME=foo \ | ||||
|     python $SCRIPT_DIR/push_vars.py | ||||
|  | ||||
							
								
								
									
										10
									
								
								.github/dependabot.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										10
									
								
								.github/dependabot.yml
									
									
									
									
										vendored
									
									
								
							| @ -82,16 +82,6 @@ updates: | ||||
|       docusaurus: | ||||
|         patterns: | ||||
|           - "@docusaurus/*" | ||||
|   - package-ecosystem: npm | ||||
|     directory: "/lifecycle/aws" | ||||
|     schedule: | ||||
|       interval: daily | ||||
|       time: "04:00" | ||||
|     open-pull-requests-limit: 10 | ||||
|     commit-message: | ||||
|       prefix: "lifecycle/aws:" | ||||
|     labels: | ||||
|       - dependencies | ||||
|   - package-ecosystem: pip | ||||
|     directory: "/" | ||||
|     schedule: | ||||
|  | ||||
| @ -1,96 +0,0 @@ | ||||
| # Re-usable workflow for a single-architecture build | ||||
| name: Single-arch Container build | ||||
|  | ||||
| on: | ||||
|   workflow_call: | ||||
|     inputs: | ||||
|       image_name: | ||||
|         required: true | ||||
|         type: string | ||||
|       image_arch: | ||||
|         required: true | ||||
|         type: string | ||||
|       runs-on: | ||||
|         required: true | ||||
|         type: string | ||||
|       registry_dockerhub: | ||||
|         default: false | ||||
|         type: boolean | ||||
|       registry_ghcr: | ||||
|         default: false | ||||
|         type: boolean | ||||
|       release: | ||||
|         default: false | ||||
|         type: boolean | ||||
|     outputs: | ||||
|       image-digest: | ||||
|         value: ${{ jobs.build.outputs.image-digest }} | ||||
|  | ||||
| jobs: | ||||
|   build: | ||||
|     name: Build ${{ inputs.image_arch }} | ||||
|     runs-on: ${{ inputs.runs-on }} | ||||
|     outputs: | ||||
|       image-digest: ${{ steps.push.outputs.digest }} | ||||
|     permissions: | ||||
|       # Needed to upload container images to ghcr.io | ||||
|       packages: write | ||||
|       # Needed for attestation | ||||
|       id-token: write | ||||
|       attestations: write | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: docker/setup-qemu-action@v3.4.0 | ||||
|       - uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ${{ inputs.image_name }} | ||||
|           image-arch: ${{ inputs.image_arch }} | ||||
|           release: ${{ inputs.release }} | ||||
|       - name: Login to Docker Hub | ||||
|         if: ${{ inputs.registry_dockerhub }} | ||||
|         uses: docker/login-action@v3 | ||||
|         with: | ||||
|           username: ${{ secrets.DOCKER_USERNAME }} | ||||
|           password: ${{ secrets.DOCKER_PASSWORD }} | ||||
|       - name: Login to GitHub Container Registry | ||||
|         if: ${{ inputs.registry_ghcr }} | ||||
|         uses: docker/login-action@v3 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
|           password: ${{ secrets.GITHUB_TOKEN }} | ||||
|       - name: make empty clients | ||||
|         if: ${{ inputs.release }} | ||||
|         run: | | ||||
|           mkdir -p ./gen-ts-api | ||||
|           mkdir -p ./gen-go-api | ||||
|       - name: generate ts client | ||||
|         if: ${{ !inputs.release }} | ||||
|         run: make gen-client-ts | ||||
|       - name: Build Docker Image | ||||
|         uses: docker/build-push-action@v6 | ||||
|         id: push | ||||
|         with: | ||||
|           context: . | ||||
|           push: ${{ steps.ev.outputs.shouldPush == 'true' }} | ||||
|           secrets: | | ||||
|             GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }} | ||||
|             GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }} | ||||
|           build-args: | | ||||
|             ${{ steps.ev.outputs.imageBuildArgs }} | ||||
|           tags: ${{ steps.ev.outputs.imageTags }} | ||||
|           platforms: linux/${{ inputs.image_arch }} | ||||
|           cache-from: type=registry,ref=${{ steps.ev.outputs.attestImageNames }}:buildcache-${{ inputs.image_arch }} | ||||
|           cache-to: ${{ steps.ev.outputs.cacheTo }} | ||||
|       - uses: actions/attest-build-provenance@v2 | ||||
|         id: attest | ||||
|         if: ${{ steps.ev.outputs.shouldPush == 'true' }} | ||||
|         with: | ||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} | ||||
|           subject-digest: ${{ steps.push.outputs.digest }} | ||||
|           push-to-registry: true | ||||
							
								
								
									
										104
									
								
								.github/workflows/_reusable-docker-build.yaml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										104
									
								
								.github/workflows/_reusable-docker-build.yaml
									
									
									
									
										vendored
									
									
								
							| @ -1,104 +0,0 @@ | ||||
| # Re-usable workflow for a multi-architecture build | ||||
| name: Multi-arch container build | ||||
|  | ||||
| on: | ||||
|   workflow_call: | ||||
|     inputs: | ||||
|       image_name: | ||||
|         required: true | ||||
|         type: string | ||||
|       registry_dockerhub: | ||||
|         default: false | ||||
|         type: boolean | ||||
|       registry_ghcr: | ||||
|         default: true | ||||
|         type: boolean | ||||
|       release: | ||||
|         default: false | ||||
|         type: boolean | ||||
|     outputs: {} | ||||
|  | ||||
| jobs: | ||||
|   build-server-amd64: | ||||
|     uses: ./.github/workflows/_reusable-docker-build-single.yaml | ||||
|     secrets: inherit | ||||
|     with: | ||||
|       image_name: ${{ inputs.image_name }} | ||||
|       image_arch: amd64 | ||||
|       runs-on: ubuntu-latest | ||||
|       registry_dockerhub: ${{ inputs.registry_dockerhub }} | ||||
|       registry_ghcr: ${{ inputs.registry_ghcr }} | ||||
|       release: ${{ inputs.release }} | ||||
|   build-server-arm64: | ||||
|     uses: ./.github/workflows/_reusable-docker-build-single.yaml | ||||
|     secrets: inherit | ||||
|     with: | ||||
|       image_name: ${{ inputs.image_name }} | ||||
|       image_arch: arm64 | ||||
|       runs-on: ubuntu-22.04-arm | ||||
|       registry_dockerhub: ${{ inputs.registry_dockerhub }} | ||||
|       registry_ghcr: ${{ inputs.registry_ghcr }} | ||||
|       release: ${{ inputs.release }} | ||||
|   get-tags: | ||||
|     runs-on: ubuntu-latest | ||||
|     needs: | ||||
|       - build-server-amd64 | ||||
|       - build-server-arm64 | ||||
|     outputs: | ||||
|       tags: ${{ steps.ev.outputs.imageTagsJSON }} | ||||
|       shouldPush: ${{ steps.ev.outputs.shouldPush }} | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ${{ inputs.image_name }} | ||||
|   merge-server: | ||||
|     runs-on: ubuntu-latest | ||||
|     if: ${{ needs.get-tags.outputs.shouldPush == 'true' }} | ||||
|     needs: | ||||
|       - get-tags | ||||
|       - build-server-amd64 | ||||
|       - build-server-arm64 | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         tag: ${{ fromJson(needs.get-tags.outputs.tags) }} | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ${{ inputs.image_name }} | ||||
|       - name: Login to Docker Hub | ||||
|         if: ${{ inputs.registry_dockerhub }} | ||||
|         uses: docker/login-action@v3 | ||||
|         with: | ||||
|           username: ${{ secrets.DOCKER_USERNAME }} | ||||
|           password: ${{ secrets.DOCKER_PASSWORD }} | ||||
|       - name: Login to GitHub Container Registry | ||||
|         if: ${{ inputs.registry_ghcr }} | ||||
|         uses: docker/login-action@v3 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
|           password: ${{ secrets.GITHUB_TOKEN }} | ||||
|       - uses: int128/docker-manifest-create-action@v2 | ||||
|         id: build | ||||
|         with: | ||||
|           tags: ${{ matrix.tag }} | ||||
|           sources: | | ||||
|             ${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-amd64.outputs.image-digest }} | ||||
|             ${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-arm64.outputs.image-digest }} | ||||
|       - uses: actions/attest-build-provenance@v2 | ||||
|         id: attest | ||||
|         with: | ||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} | ||||
|           subject-digest: ${{ steps.build.outputs.digest }} | ||||
|           push-to-registry: true | ||||
							
								
								
									
										6
									
								
								.github/workflows/ci-aws-cfn.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										6
									
								
								.github/workflows/ci-aws-cfn.yml
									
									
									
									
										vendored
									
									
								
							| @ -25,10 +25,10 @@ jobs: | ||||
|         uses: ./.github/actions/setup | ||||
|       - uses: actions/setup-node@v4 | ||||
|         with: | ||||
|           node-version-file: lifecycle/aws/package.json | ||||
|           node-version-file: website/package.json | ||||
|           cache: "npm" | ||||
|           cache-dependency-path: lifecycle/aws/package-lock.json | ||||
|       - working-directory: lifecycle/aws/ | ||||
|           cache-dependency-path: website/package-lock.json | ||||
|       - working-directory: website/ | ||||
|         run: | | ||||
|           npm ci | ||||
|       - name: Check changes have been applied | ||||
|  | ||||
							
								
								
									
										28
									
								
								.github/workflows/ci-main-daily.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										28
									
								
								.github/workflows/ci-main-daily.yml
									
									
									
									
										vendored
									
									
								
							| @ -1,28 +0,0 @@ | ||||
| --- | ||||
| name: authentik-ci-main-daily | ||||
|  | ||||
| on: | ||||
|   workflow_dispatch: | ||||
|   schedule: | ||||
|     # Every night at 3am | ||||
|     - cron: "0 3 * * *" | ||||
|  | ||||
| jobs: | ||||
|   test-container: | ||||
|     runs-on: ubuntu-latest | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         version: | ||||
|           - docs | ||||
|           - version-2024-12 | ||||
|           - version-2024-10 | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - run: | | ||||
|           current="$(pwd)" | ||||
|           dir="/tmp/authentik/${{ matrix.version }}" | ||||
|           mkdir -p $dir | ||||
|           cd $dir | ||||
|           wget https://${{ matrix.version }}.goauthentik.io/docker-compose.yml | ||||
|           ${current}/scripts/test_docker.sh | ||||
							
								
								
									
										97
									
								
								.github/workflows/ci-main.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										97
									
								
								.github/workflows/ci-main.yml
									
									
									
									
										vendored
									
									
								
							| @ -43,26 +43,15 @@ jobs: | ||||
|         uses: ./.github/actions/setup | ||||
|       - name: run migrations | ||||
|         run: poetry run python -m lifecycle.migrate | ||||
|   test-make-seed: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - id: seed | ||||
|         run: | | ||||
|           echo "seed=$(printf "%d\n" "0x$(openssl rand -hex 4)")" >> "$GITHUB_OUTPUT" | ||||
|     outputs: | ||||
|       seed: ${{ steps.seed.outputs.seed }} | ||||
|   test-migrations-from-stable: | ||||
|     name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5 | ||||
|     name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }} | ||||
|     runs-on: ubuntu-latest | ||||
|     timeout-minutes: 20 | ||||
|     needs: test-make-seed | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         psql: | ||||
|           - 15-alpine | ||||
|           - 16-alpine | ||||
|         run_id: [1, 2, 3, 4, 5] | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
| @ -104,23 +93,18 @@ jobs: | ||||
|         env: | ||||
|           # Test in the main database that we just migrated from the previous stable version | ||||
|           AUTHENTIK_POSTGRESQL__TEST__NAME: authentik | ||||
|           CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }} | ||||
|           CI_RUN_ID: ${{ matrix.run_id }} | ||||
|           CI_TOTAL_RUNS: "5" | ||||
|         run: | | ||||
|           poetry run make ci-test | ||||
|           poetry run make test | ||||
|   test-unittest: | ||||
|     name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5 | ||||
|     name: test-unittest - PostgreSQL ${{ matrix.psql }} | ||||
|     runs-on: ubuntu-latest | ||||
|     timeout-minutes: 20 | ||||
|     needs: test-make-seed | ||||
|     timeout-minutes: 30 | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         psql: | ||||
|           - 15-alpine | ||||
|           - 16-alpine | ||||
|         run_id: [1, 2, 3, 4, 5] | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Setup authentik env | ||||
| @ -128,12 +112,9 @@ jobs: | ||||
|         with: | ||||
|           postgresql_version: ${{ matrix.psql }} | ||||
|       - name: run unittest | ||||
|         env: | ||||
|           CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }} | ||||
|           CI_RUN_ID: ${{ matrix.run_id }} | ||||
|           CI_TOTAL_RUNS: "5" | ||||
|         run: | | ||||
|           poetry run make ci-test | ||||
|           poetry run make test | ||||
|           poetry run coverage xml | ||||
|       - if: ${{ always() }} | ||||
|         uses: codecov/codecov-action@v5 | ||||
|         with: | ||||
| @ -153,7 +134,7 @@ jobs: | ||||
|       - name: Setup authentik env | ||||
|         uses: ./.github/actions/setup | ||||
|       - name: Create k8s Kind Cluster | ||||
|         uses: helm/kind-action@v1.12.0 | ||||
|         uses: helm/kind-action@v1.11.0 | ||||
|       - name: run integration | ||||
|         run: | | ||||
|           poetry run coverage run manage.py test tests/integration | ||||
| @ -242,18 +223,68 @@ jobs: | ||||
|         with: | ||||
|           jobs: ${{ toJSON(needs) }} | ||||
|   build: | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         arch: | ||||
|           - amd64 | ||||
|           - arm64 | ||||
|     needs: ci-core-mark | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
|       # Needed to upload container images to ghcr.io | ||||
|       # Needed to upload contianer images to ghcr.io | ||||
|       packages: write | ||||
|       # Needed for attestation | ||||
|       id-token: write | ||||
|       attestations: write | ||||
|     needs: ci-core-mark | ||||
|     uses: ./.github/workflows/_reusable-docker-build.yaml | ||||
|     secrets: inherit | ||||
|     with: | ||||
|       image_name: ghcr.io/goauthentik/dev-server | ||||
|       release: false | ||||
|     timeout-minutes: 120 | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           ref: ${{ github.event.pull_request.head.sha }} | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.2.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/dev-server | ||||
|           image-arch: ${{ matrix.arch }} | ||||
|       - name: Login to Container Registry | ||||
|         if: ${{ steps.ev.outputs.shouldPush == 'true' }} | ||||
|         uses: docker/login-action@v3 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
|           password: ${{ secrets.GITHUB_TOKEN }} | ||||
|       - name: generate ts client | ||||
|         run: make gen-client-ts | ||||
|       - name: Build Docker Image | ||||
|         uses: docker/build-push-action@v6 | ||||
|         id: push | ||||
|         with: | ||||
|           context: . | ||||
|           secrets: | | ||||
|             GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }} | ||||
|             GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }} | ||||
|           tags: ${{ steps.ev.outputs.imageTags }} | ||||
|           push: ${{ steps.ev.outputs.shouldPush == 'true' }} | ||||
|           build-args: | | ||||
|             GIT_BUILD_HASH=${{ steps.ev.outputs.sha }} | ||||
|           cache-from: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache | ||||
|           cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache,mode=max' || '' }} | ||||
|           platforms: linux/${{ matrix.arch }} | ||||
|       - uses: actions/attest-build-provenance@v2 | ||||
|         id: attest | ||||
|         if: ${{ steps.ev.outputs.shouldPush == 'true' }} | ||||
|         with: | ||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} | ||||
|           subject-digest: ${{ steps.push.outputs.digest }} | ||||
|           push-to-registry: true | ||||
|   pr-comment: | ||||
|     needs: | ||||
|       - build | ||||
|  | ||||
							
								
								
									
										4
									
								
								.github/workflows/ci-outpost.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										4
									
								
								.github/workflows/ci-outpost.yml
									
									
									
									
										vendored
									
									
								
							| @ -72,7 +72,7 @@ jobs: | ||||
|           - rac | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
|       # Needed to upload container images to ghcr.io | ||||
|       # Needed to upload contianer images to ghcr.io | ||||
|       packages: write | ||||
|       # Needed for attestation | ||||
|       id-token: write | ||||
| @ -82,7 +82,7 @@ jobs: | ||||
|         with: | ||||
|           ref: ${{ github.event.pull_request.head.sha }} | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.4.0 | ||||
|         uses: docker/setup-qemu-action@v3.2.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
|  | ||||
							
								
								
									
										2
									
								
								.github/workflows/codeql-analysis.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/workflows/codeql-analysis.yml
									
									
									
									
										vendored
									
									
								
							| @ -2,7 +2,7 @@ name: "CodeQL" | ||||
|  | ||||
| on: | ||||
|   push: | ||||
|     branches: [main, "*", next, version*] | ||||
|     branches: [main, next, version*] | ||||
|   pull_request: | ||||
|     branches: [main] | ||||
|   schedule: | ||||
|  | ||||
							
								
								
									
										65
									
								
								.github/workflows/release-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										65
									
								
								.github/workflows/release-publish.yml
									
									
									
									
										vendored
									
									
								
							| @ -7,23 +7,64 @@ on: | ||||
|  | ||||
| jobs: | ||||
|   build-server: | ||||
|     uses: ./.github/workflows/_reusable-docker-build.yaml | ||||
|     secrets: inherit | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
|       # Needed to upload container images to ghcr.io | ||||
|       # Needed to upload contianer images to ghcr.io | ||||
|       packages: write | ||||
|       # Needed for attestation | ||||
|       id-token: write | ||||
|       attestations: write | ||||
|     with: | ||||
|       image_name: ghcr.io/goauthentik/server,beryju/authentik | ||||
|       release: true | ||||
|       registry_dockerhub: true | ||||
|       registry_ghcr: true | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.2.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/server,beryju/authentik | ||||
|       - name: Docker Login Registry | ||||
|         uses: docker/login-action@v3 | ||||
|         with: | ||||
|           username: ${{ secrets.DOCKER_USERNAME }} | ||||
|           password: ${{ secrets.DOCKER_PASSWORD }} | ||||
|       - name: Login to GitHub Container Registry | ||||
|         uses: docker/login-action@v3 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
|           password: ${{ secrets.GITHUB_TOKEN }} | ||||
|       - name: make empty clients | ||||
|         run: | | ||||
|           mkdir -p ./gen-ts-api | ||||
|           mkdir -p ./gen-go-api | ||||
|       - name: Build Docker Image | ||||
|         uses: docker/build-push-action@v6 | ||||
|         id: push | ||||
|         with: | ||||
|           context: . | ||||
|           push: true | ||||
|           secrets: | | ||||
|             GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }} | ||||
|             GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }} | ||||
|           build-args: | | ||||
|             VERSION=${{ github.ref }} | ||||
|           tags: ${{ steps.ev.outputs.imageTags }} | ||||
|           platforms: linux/amd64,linux/arm64 | ||||
|       - uses: actions/attest-build-provenance@v2 | ||||
|         id: attest | ||||
|         with: | ||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} | ||||
|           subject-digest: ${{ steps.push.outputs.digest }} | ||||
|           push-to-registry: true | ||||
|   build-outpost: | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
|       # Needed to upload container images to ghcr.io | ||||
|       # Needed to upload contianer images to ghcr.io | ||||
|       packages: write | ||||
|       # Needed for attestation | ||||
|       id-token: write | ||||
| @ -42,7 +83,7 @@ jobs: | ||||
|         with: | ||||
|           go-version-file: "go.mod" | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.4.0 | ||||
|         uses: docker/setup-qemu-action@v3.2.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
| @ -147,8 +188,8 @@ jobs: | ||||
|           aws-region: ${{ env.AWS_REGION }} | ||||
|       - name: Upload template | ||||
|         run: | | ||||
|           aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.${{ github.ref }}.yaml | ||||
|           aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.latest.yaml | ||||
|           aws s3 cp website/docs/install-config/install/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.${{ github.ref }}.yaml | ||||
|           aws s3 cp website/docs/install-config/install/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.latest.yaml | ||||
|   test-release: | ||||
|     needs: | ||||
|       - build-server | ||||
|  | ||||
							
								
								
									
										11
									
								
								.github/workflows/release-tag.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										11
									
								
								.github/workflows/release-tag.yml
									
									
									
									
										vendored
									
									
								
							| @ -14,7 +14,16 @@ jobs: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Pre-release test | ||||
|         run: | | ||||
|           make test-docker | ||||
|           echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env | ||||
|           echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env | ||||
|           docker buildx install | ||||
|           mkdir -p ./gen-ts-api | ||||
|           docker build -t testing:latest . | ||||
|           echo "AUTHENTIK_IMAGE=testing" >> .env | ||||
|           echo "AUTHENTIK_TAG=latest" >> .env | ||||
|           docker compose up --no-start | ||||
|           docker compose start postgresql redis | ||||
|           docker compose run -u root server test-all | ||||
|       - id: generate_token | ||||
|         uses: tibdex/github-app-token@v2 | ||||
|         with: | ||||
|  | ||||
							
								
								
									
										6
									
								
								.github/workflows/repo-stale.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										6
									
								
								.github/workflows/repo-stale.yml
									
									
									
									
										vendored
									
									
								
							| @ -1,8 +1,8 @@ | ||||
| name: "authentik-repo-stale" | ||||
| name: 'authentik-repo-stale' | ||||
|  | ||||
| on: | ||||
|   schedule: | ||||
|     - cron: "30 1 * * *" | ||||
|     - cron: '30 1 * * *' | ||||
|   workflow_dispatch: | ||||
|  | ||||
| permissions: | ||||
| @ -25,7 +25,7 @@ jobs: | ||||
|           days-before-stale: 60 | ||||
|           days-before-close: 7 | ||||
|           exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing | ||||
|           stale-issue-label: status/stale | ||||
|           stale-issue-label: wontfix | ||||
|           stale-issue-message: > | ||||
|             This issue has been automatically marked as stale because it has not had | ||||
|             recent activity. It will be closed if no further activity occurs. Thank you | ||||
|  | ||||
							
								
								
									
										3
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										3
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @ -209,6 +209,3 @@ source_docs/ | ||||
|  | ||||
| ### Golang ### | ||||
| /vendor/ | ||||
|  | ||||
| ### Docker ### | ||||
| docker-compose.override.yml | ||||
|  | ||||
							
								
								
									
										7
									
								
								.vscode/extensions.json
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										7
									
								
								.vscode/extensions.json
									
									
									
									
										vendored
									
									
								
							| @ -2,7 +2,6 @@ | ||||
|     "recommendations": [ | ||||
|         "bashmish.es6-string-css", | ||||
|         "bpruitt-goddard.mermaid-markdown-syntax-highlighting", | ||||
|         "charliermarsh.ruff", | ||||
|         "dbaeumer.vscode-eslint", | ||||
|         "EditorConfig.EditorConfig", | ||||
|         "esbenp.prettier-vscode", | ||||
| @ -11,12 +10,12 @@ | ||||
|         "Gruntfuggly.todo-tree", | ||||
|         "mechatroner.rainbow-csv", | ||||
|         "ms-python.black-formatter", | ||||
|         "ms-python.black-formatter", | ||||
|         "ms-python.debugpy", | ||||
|         "charliermarsh.ruff", | ||||
|         "ms-python.python", | ||||
|         "ms-python.vscode-pylance", | ||||
|         "ms-python.black-formatter", | ||||
|         "redhat.vscode-yaml", | ||||
|         "Tobermory.es6-string-html", | ||||
|         "unifiedjs.vscode-mdx", | ||||
|         "unifiedjs.vscode-mdx" | ||||
|     ] | ||||
| } | ||||
|  | ||||
							
								
								
									
										66
									
								
								.vscode/launch.json
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										66
									
								
								.vscode/launch.json
									
									
									
									
										vendored
									
									
								
							| @ -2,76 +2,26 @@ | ||||
|     "version": "0.2.0", | ||||
|     "configurations": [ | ||||
|         { | ||||
|             "name": "Debug: Attach Server Core", | ||||
|             "type": "debugpy", | ||||
|             "name": "Python: PDB attach Server", | ||||
|             "type": "python", | ||||
|             "request": "attach", | ||||
|             "connect": { | ||||
|                 "host": "localhost", | ||||
|                 "port": 9901 | ||||
|                 "port": 6800 | ||||
|             }, | ||||
|             "pathMappings": [ | ||||
|                 { | ||||
|                     "localRoot": "${workspaceFolder}", | ||||
|                     "remoteRoot": "." | ||||
|                 } | ||||
|             ], | ||||
|             "justMyCode": true, | ||||
|             "django": true | ||||
|         }, | ||||
|         { | ||||
|             "name": "Debug: Attach Worker", | ||||
|             "type": "debugpy", | ||||
|             "name": "Python: PDB attach Worker", | ||||
|             "type": "python", | ||||
|             "request": "attach", | ||||
|             "connect": { | ||||
|                 "host": "localhost", | ||||
|                 "port": 9901 | ||||
|                 "port": 6900 | ||||
|             }, | ||||
|             "pathMappings": [ | ||||
|                 { | ||||
|                     "localRoot": "${workspaceFolder}", | ||||
|                     "remoteRoot": "." | ||||
|                 } | ||||
|             ], | ||||
|             "justMyCode": true, | ||||
|             "django": true | ||||
|         }, | ||||
|         { | ||||
|             "name": "Debug: Start Server Router", | ||||
|             "type": "go", | ||||
|             "request": "launch", | ||||
|             "mode": "auto", | ||||
|             "program": "${workspaceFolder}/cmd/server", | ||||
|             "cwd": "${workspaceFolder}" | ||||
|         }, | ||||
|         { | ||||
|             "name": "Debug: Start LDAP Outpost", | ||||
|             "type": "go", | ||||
|             "request": "launch", | ||||
|             "mode": "auto", | ||||
|             "program": "${workspaceFolder}/cmd/ldap", | ||||
|             "cwd": "${workspaceFolder}" | ||||
|         }, | ||||
|         { | ||||
|             "name": "Debug: Start Proxy Outpost", | ||||
|             "type": "go", | ||||
|             "request": "launch", | ||||
|             "mode": "auto", | ||||
|             "program": "${workspaceFolder}/cmd/proxy", | ||||
|             "cwd": "${workspaceFolder}" | ||||
|         }, | ||||
|         { | ||||
|             "name": "Debug: Start RAC Outpost", | ||||
|             "type": "go", | ||||
|             "request": "launch", | ||||
|             "mode": "auto", | ||||
|             "program": "${workspaceFolder}/cmd/rac", | ||||
|             "cwd": "${workspaceFolder}" | ||||
|         }, | ||||
|         { | ||||
|             "name": "Debug: Start Radius Outpost", | ||||
|             "type": "go", | ||||
|             "request": "launch", | ||||
|             "mode": "auto", | ||||
|             "program": "${workspaceFolder}/cmd/radius", | ||||
|             "cwd": "${workspaceFolder}" | ||||
|         } | ||||
|     ] | ||||
| } | ||||
|  | ||||
| @ -15,7 +15,6 @@ go.mod                          @goauthentik/backend | ||||
| go.sum                          @goauthentik/backend | ||||
| # Infrastructure | ||||
| .github/                        @goauthentik/infrastructure | ||||
| lifecycle/aws/                  @goauthentik/infrastructure | ||||
| Dockerfile                      @goauthentik/infrastructure | ||||
| *Dockerfile                     @goauthentik/infrastructure | ||||
| .dockerignore                   @goauthentik/infrastructure | ||||
|  | ||||
							
								
								
									
										34
									
								
								Dockerfile
									
									
									
									
									
								
							
							
						
						
									
										34
									
								
								Dockerfile
									
									
									
									
									
								
							| @ -94,7 +94,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ | ||||
|     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" | ||||
|  | ||||
| # Stage 5: Python dependencies | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS python-deps | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips-full AS python-deps | ||||
|  | ||||
| ARG TARGETARCH | ||||
| ARG TARGETVARIANT | ||||
| @ -116,30 +116,15 @@ RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \ | ||||
|     --mount=type=bind,target=./poetry.lock,src=./poetry.lock \ | ||||
|     --mount=type=cache,target=/root/.cache/pip \ | ||||
|     --mount=type=cache,target=/root/.cache/pypoetry \ | ||||
|     pip install --no-cache cffi && \ | ||||
|     apt-get update && \ | ||||
|     apt-get install -y --no-install-recommends \ | ||||
|         build-essential libffi-dev \ | ||||
|         # Required for cryptography | ||||
|         curl pkg-config \ | ||||
|         # Required for lxml | ||||
|         libxslt-dev zlib1g-dev \ | ||||
|         # Required for xmlsec | ||||
|         libltdl-dev \ | ||||
|         # Required for kadmin | ||||
|         sccache clang && \ | ||||
|     curl https://sh.rustup.rs -sSf | sh -s -- -y && \ | ||||
|     . "$HOME/.cargo/env" && \ | ||||
|     python -m venv /ak-root/venv/ && \ | ||||
|     bash -c "source ${VENV_PATH}/bin/activate && \ | ||||
|     pip3 install --upgrade pip poetry && \ | ||||
|     poetry config --local installer.no-binary cryptography,xmlsec,lxml,python-kadmin-rs && \ | ||||
|     pip3 install --upgrade pip && \ | ||||
|     pip3 install poetry && \ | ||||
|     poetry install --only=main --no-ansi --no-interaction --no-root && \ | ||||
|     pip uninstall cryptography -y && \ | ||||
|     poetry install --only=main --no-ansi --no-interaction --no-root" | ||||
|     pip install --force-reinstall /wheels/*" | ||||
|  | ||||
| # Stage 6: Run | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS final-image | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips-full AS final-image | ||||
|  | ||||
| ARG VERSION | ||||
| ARG GIT_BUILD_HASH | ||||
| @ -155,12 +140,10 @@ WORKDIR / | ||||
|  | ||||
| # We cannot cache this layer otherwise we'll end up with a bigger image | ||||
| RUN apt-get update && \ | ||||
|     apt-get upgrade -y && \ | ||||
|     # Required for runtime | ||||
|     apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates libkrb5-3 libkadm5clnt-mit12 libkdb5-10 libltdl7 libxslt1.1 && \ | ||||
|     apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates libkrb5-3 libkadm5clnt-mit12 libkdb5-10 && \ | ||||
|     # Required for bootstrap & healtcheck | ||||
|     apt-get install -y --no-install-recommends runit && \ | ||||
|     pip3 install --no-cache-dir --upgrade pip && \ | ||||
|     apt-get clean && \ | ||||
|     rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \ | ||||
|     adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \ | ||||
| @ -193,8 +176,9 @@ ENV TMPDIR=/dev/shm/ \ | ||||
|     PYTHONUNBUFFERED=1 \ | ||||
|     PATH="/ak-root/venv/bin:/lifecycle:$PATH" \ | ||||
|     VENV_PATH="/ak-root/venv" \ | ||||
|     POETRY_VIRTUALENVS_CREATE=false \ | ||||
|     GOFIPS=1 | ||||
|     POETRY_VIRTUALENVS_CREATE=false | ||||
|  | ||||
| ENV GOFIPS=1 | ||||
|  | ||||
| HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ] | ||||
|  | ||||
|  | ||||
							
								
								
									
										38
									
								
								Makefile
									
									
									
									
									
								
							
							
						
						
									
										38
									
								
								Makefile
									
									
									
									
									
								
							| @ -5,9 +5,7 @@ PWD = $(shell pwd) | ||||
| UID = $(shell id -u) | ||||
| GID = $(shell id -g) | ||||
| NPM_VERSION = $(shell python -m scripts.npm_version) | ||||
| PY_SOURCES = authentik tests scripts lifecycle .github | ||||
| GO_SOURCES = cmd internal | ||||
| WEB_SOURCES = web/src web/packages | ||||
| PY_SOURCES = authentik tests scripts lifecycle .github website/docs/install-config/install/aws | ||||
| DOCKER_IMAGE ?= "authentik:test" | ||||
|  | ||||
| GEN_API_TS = "gen-ts-api" | ||||
| @ -22,11 +20,10 @@ CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \ | ||||
| 		-I .github/codespell-words.txt \ | ||||
| 		-S 'web/src/locales/**' \ | ||||
| 		-S 'website/docs/developer-docs/api/reference/**' \ | ||||
| 		-S '**/node_modules/**' \ | ||||
| 		-S '**/dist/**' \ | ||||
| 		$(PY_SOURCES) \ | ||||
| 		$(GO_SOURCES) \ | ||||
| 		$(WEB_SOURCES) \ | ||||
| 		authentik \ | ||||
| 		internal \ | ||||
| 		cmd \ | ||||
| 		web/src \ | ||||
| 		website/src \ | ||||
| 		website/blog \ | ||||
| 		website/docs \ | ||||
| @ -48,6 +45,15 @@ help:  ## Show this help | ||||
| go-test: | ||||
| 	go test -timeout 0 -v -race -cover ./... | ||||
|  | ||||
| test-docker:  ## Run all tests in a docker-compose | ||||
| 	echo "PG_PASS=$(shell openssl rand 32 | base64 -w 0)" >> .env | ||||
| 	echo "AUTHENTIK_SECRET_KEY=$(shell openssl rand 32 | base64 -w 0)" >> .env | ||||
| 	docker compose pull -q | ||||
| 	docker compose up --no-start | ||||
| 	docker compose start postgresql redis | ||||
| 	docker compose run -u root server test-all | ||||
| 	rm -f .env | ||||
|  | ||||
| test: ## Run the server tests and produce a coverage report (locally) | ||||
| 	coverage run manage.py test --keepdb authentik | ||||
| 	coverage html | ||||
| @ -72,9 +78,6 @@ migrate: ## Run the Authentik Django server's migrations | ||||
|  | ||||
| i18n-extract: core-i18n-extract web-i18n-extract  ## Extract strings that require translation into files to send to a translation service | ||||
|  | ||||
| aws-cfn: | ||||
| 	cd lifecycle/aws && npm run aws-cfn | ||||
|  | ||||
| core-i18n-extract: | ||||
| 	ak makemessages \ | ||||
| 		--add-location file \ | ||||
| @ -146,7 +149,7 @@ gen-client-ts: gen-clean-ts  ## Build and install the authentik API for Typescri | ||||
| 	docker run \ | ||||
| 		--rm -v ${PWD}:/local \ | ||||
| 		--user ${UID}:${GID} \ | ||||
| 		docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \ | ||||
| 		docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \ | ||||
| 		-i /local/schema.yml \ | ||||
| 		-g typescript-fetch \ | ||||
| 		-o /local/${GEN_API_TS} \ | ||||
| @ -249,6 +252,9 @@ website-build: | ||||
| website-watch:  ## Build and watch the documentation website, updating automatically | ||||
| 	cd website && npm run watch | ||||
|  | ||||
| aws-cfn: | ||||
| 	cd website && npm run aws-cfn | ||||
|  | ||||
| ######################### | ||||
| ## Docker | ||||
| ######################### | ||||
| @ -257,9 +263,6 @@ docker:  ## Build a docker image of the current source tree | ||||
| 	mkdir -p ${GEN_API_TS} | ||||
| 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} | ||||
|  | ||||
| test-docker: | ||||
| 	BUILD=true ./scripts/test_docker.sh | ||||
|  | ||||
| ######################### | ||||
| ## CI | ||||
| ######################### | ||||
| @ -284,8 +287,3 @@ ci-bandit: ci--meta-debug | ||||
|  | ||||
| ci-pending-migrations: ci--meta-debug | ||||
| 	ak makemigrations --check | ||||
|  | ||||
| ci-test: ci--meta-debug | ||||
| 	coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik | ||||
| 	coverage report | ||||
| 	coverage xml | ||||
|  | ||||
| @ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni | ||||
|  | ||||
| | Version   | Supported | | ||||
| | --------- | --------- | | ||||
| | 2024.10.x | ✅        | | ||||
| | 2024.12.x | ✅        | | ||||
| | 2025.2.x  | ✅        | | ||||
|  | ||||
| ## Reporting a Vulnerability | ||||
|  | ||||
|  | ||||
| @ -2,7 +2,7 @@ | ||||
|  | ||||
| from os import environ | ||||
|  | ||||
| __version__ = "2025.2.2" | ||||
| __version__ = "2024.12.1" | ||||
| ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ||||
|  | ||||
|  | ||||
| @ -16,5 +16,5 @@ def get_full_version() -> str: | ||||
|     """Get full version, with build hash appended""" | ||||
|     version = __version__ | ||||
|     if (build_hash := get_build_hash()) != "": | ||||
|         return f"{version}+{build_hash}" | ||||
|         version += "." + build_hash | ||||
|     return version | ||||
|  | ||||
| @ -7,9 +7,7 @@ from sys import version as python_version | ||||
| from typing import TypedDict | ||||
|  | ||||
| from cryptography.hazmat.backends.openssl.backend import backend | ||||
| from django.conf import settings | ||||
| from django.utils.timezone import now | ||||
| from django.views.debug import SafeExceptionReporterFilter | ||||
| from drf_spectacular.utils import extend_schema | ||||
| from rest_framework.fields import SerializerMethodField | ||||
| from rest_framework.request import Request | ||||
| @ -54,16 +52,10 @@ class SystemInfoSerializer(PassiveSerializer): | ||||
|     def get_http_headers(self, request: Request) -> dict[str, str]: | ||||
|         """Get HTTP Request headers""" | ||||
|         headers = {} | ||||
|         raw_session = request._request.COOKIES.get(settings.SESSION_COOKIE_NAME) | ||||
|         for key, value in request.META.items(): | ||||
|             if not isinstance(value, str): | ||||
|                 continue | ||||
|             actual_value = value | ||||
|             if raw_session in actual_value: | ||||
|                 actual_value = actual_value.replace( | ||||
|                     raw_session, SafeExceptionReporterFilter.cleansed_substitute | ||||
|                 ) | ||||
|             headers[key] = actual_value | ||||
|             headers[key] = value | ||||
|         return headers | ||||
|  | ||||
|     def get_http_host(self, request: Request) -> str: | ||||
|  | ||||
| @ -1,16 +1,12 @@ | ||||
| """authentik administration overview""" | ||||
|  | ||||
| from socket import gethostname | ||||
|  | ||||
| from django.conf import settings | ||||
| from drf_spectacular.utils import extend_schema, inline_serializer | ||||
| from packaging.version import parse | ||||
| from rest_framework.fields import BooleanField, CharField | ||||
| from rest_framework.fields import IntegerField | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.views import APIView | ||||
|  | ||||
| from authentik import get_full_version | ||||
| from authentik.rbac.permissions import HasPermission | ||||
| from authentik.root.celery import CELERY_APP | ||||
|  | ||||
| @ -20,38 +16,11 @@ class WorkerView(APIView): | ||||
|  | ||||
|     permission_classes = [HasPermission("authentik_rbac.view_system_info")] | ||||
|  | ||||
|     @extend_schema( | ||||
|         responses=inline_serializer( | ||||
|             "Worker", | ||||
|             fields={ | ||||
|                 "worker_id": CharField(), | ||||
|                 "version": CharField(), | ||||
|                 "version_matching": BooleanField(), | ||||
|             }, | ||||
|             many=True, | ||||
|         ) | ||||
|     ) | ||||
|     @extend_schema(responses=inline_serializer("Workers", fields={"count": IntegerField()})) | ||||
|     def get(self, request: Request) -> Response: | ||||
|         """Get currently connected worker count.""" | ||||
|         raw: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5) | ||||
|         our_version = parse(get_full_version()) | ||||
|         response = [] | ||||
|         for worker in raw: | ||||
|             key = list(worker.keys())[0] | ||||
|             version = worker[key].get("version") | ||||
|             version_matching = False | ||||
|             if version: | ||||
|                 version_matching = parse(version) == our_version | ||||
|             response.append( | ||||
|                 {"worker_id": key, "version": version, "version_matching": version_matching} | ||||
|             ) | ||||
|         count = len(CELERY_APP.control.ping(timeout=0.5)) | ||||
|         # In debug we run with `task_always_eager`, so tasks are ran on the main process | ||||
|         if settings.DEBUG:  # pragma: no cover | ||||
|             response.append( | ||||
|                 { | ||||
|                     "worker_id": f"authentik-debug@{gethostname()}", | ||||
|                     "version": get_full_version(), | ||||
|                     "version_matching": True, | ||||
|                 } | ||||
|             ) | ||||
|         return Response(response) | ||||
|             count += 1 | ||||
|         return Response({"count": count}) | ||||
|  | ||||
| @ -1,10 +1,11 @@ | ||||
| """authentik admin app config""" | ||||
|  | ||||
| from prometheus_client import Info | ||||
| from prometheus_client import Gauge, Info | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
|  | ||||
| PROM_INFO = Info("authentik_version", "Currently running authentik version") | ||||
| GAUGE_WORKERS = Gauge("authentik_admin_workers", "Currently connected workers") | ||||
|  | ||||
|  | ||||
| class AuthentikAdminConfig(ManagedAppConfig): | ||||
|  | ||||
| @ -1,35 +1,14 @@ | ||||
| """admin signals""" | ||||
|  | ||||
| from django.dispatch import receiver | ||||
| from packaging.version import parse | ||||
| from prometheus_client import Gauge | ||||
|  | ||||
| from authentik import get_full_version | ||||
| from authentik.admin.apps import GAUGE_WORKERS | ||||
| from authentik.root.celery import CELERY_APP | ||||
| from authentik.root.monitoring import monitoring_set | ||||
|  | ||||
| GAUGE_WORKERS = Gauge( | ||||
|     "authentik_admin_workers", | ||||
|     "Currently connected workers, their versions and if they are the same version as authentik", | ||||
|     ["version", "version_matched"], | ||||
| ) | ||||
|  | ||||
|  | ||||
| _version = parse(get_full_version()) | ||||
|  | ||||
|  | ||||
| @receiver(monitoring_set) | ||||
| def monitoring_set_workers(sender, **kwargs): | ||||
|     """Set worker gauge""" | ||||
|     raw: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5) | ||||
|     worker_version_count = {} | ||||
|     for worker in raw: | ||||
|         key = list(worker.keys())[0] | ||||
|         version = worker[key].get("version") | ||||
|         version_matching = False | ||||
|         if version: | ||||
|             version_matching = parse(version) == _version | ||||
|         worker_version_count.setdefault(version, {"count": 0, "matching": version_matching}) | ||||
|         worker_version_count[version]["count"] += 1 | ||||
|     for version, stats in worker_version_count.items(): | ||||
|         GAUGE_WORKERS.labels(version, stats["matching"]).set(stats["count"]) | ||||
|     count = len(CELERY_APP.control.ping(timeout=0.5)) | ||||
|     GAUGE_WORKERS.set(count) | ||||
|  | ||||
| @ -34,7 +34,7 @@ class TestAdminAPI(TestCase): | ||||
|         response = self.client.get(reverse("authentik_api:admin_workers")) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         body = loads(response.content) | ||||
|         self.assertEqual(len(body), 0) | ||||
|         self.assertEqual(body["count"], 0) | ||||
|  | ||||
|     def test_metrics(self): | ||||
|         """Test metrics API""" | ||||
|  | ||||
							
								
								
									
										67
									
								
								authentik/api/authorization.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										67
									
								
								authentik/api/authorization.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,67 @@ | ||||
| """API Authorization""" | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.db.models import Model | ||||
| from django.db.models.query import QuerySet | ||||
| from django_filters.rest_framework import DjangoFilterBackend | ||||
| from rest_framework.authentication import get_authorization_header | ||||
| from rest_framework.filters import BaseFilterBackend | ||||
| from rest_framework.permissions import BasePermission | ||||
| from rest_framework.request import Request | ||||
|  | ||||
| from authentik.api.authentication import validate_auth | ||||
| from authentik.rbac.filters import ObjectFilter | ||||
|  | ||||
|  | ||||
| class OwnerFilter(BaseFilterBackend): | ||||
|     """Filter objects by their owner""" | ||||
|  | ||||
|     owner_key = "user" | ||||
|  | ||||
|     def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet: | ||||
|         if request.user.is_superuser: | ||||
|             return queryset | ||||
|         return queryset.filter(**{self.owner_key: request.user}) | ||||
|  | ||||
|  | ||||
| class SecretKeyFilter(DjangoFilterBackend): | ||||
|     """Allow access to all objects when authenticated with secret key as token. | ||||
|  | ||||
|     Replaces both DjangoFilterBackend and ObjectFilter""" | ||||
|  | ||||
|     def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet: | ||||
|         auth_header = get_authorization_header(request) | ||||
|         token = validate_auth(auth_header) | ||||
|         if token and token == settings.SECRET_KEY: | ||||
|             return queryset | ||||
|         queryset = ObjectFilter().filter_queryset(request, queryset, view) | ||||
|         return super().filter_queryset(request, queryset, view) | ||||
|  | ||||
|  | ||||
| class OwnerPermissions(BasePermission): | ||||
|     """Authorize requests by an object's owner matching the requesting user""" | ||||
|  | ||||
|     owner_key = "user" | ||||
|  | ||||
|     def has_permission(self, request: Request, view) -> bool: | ||||
|         """If the user is authenticated, we allow all requests here. For listing, the | ||||
|         object-level permissions are done by the filter backend""" | ||||
|         return request.user.is_authenticated | ||||
|  | ||||
|     def has_object_permission(self, request: Request, view, obj: Model) -> bool: | ||||
|         """Check if the object's owner matches the currently logged in user""" | ||||
|         if not hasattr(obj, self.owner_key): | ||||
|             return False | ||||
|         owner = getattr(obj, self.owner_key) | ||||
|         if owner != request.user: | ||||
|             return False | ||||
|         return True | ||||
|  | ||||
|  | ||||
| class OwnerSuperuserPermissions(OwnerPermissions): | ||||
|     """Similar to OwnerPermissions, except always allow access for superusers""" | ||||
|  | ||||
|     def has_object_permission(self, request: Request, view, obj: Model) -> bool: | ||||
|         if request.user.is_superuser: | ||||
|             return True | ||||
|         return super().has_object_permission(request, view, obj) | ||||
| @ -1,68 +0,0 @@ | ||||
| """Test and debug Blueprints""" | ||||
|  | ||||
| import atexit | ||||
| import readline | ||||
| from pathlib import Path | ||||
| from pprint import pformat | ||||
| from sys import exit as sysexit | ||||
| from textwrap import indent | ||||
|  | ||||
| from django.core.management.base import BaseCommand, no_translations | ||||
| from structlog.stdlib import get_logger | ||||
| from yaml import load | ||||
|  | ||||
| from authentik.blueprints.v1.common import BlueprintLoader, EntryInvalidError | ||||
| from authentik.core.management.commands.shell import get_banner_text | ||||
| from authentik.lib.utils.errors import exception_to_string | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     """Test and debug Blueprints""" | ||||
|  | ||||
|     lines = [] | ||||
|  | ||||
|     def __init__(self, *args, **kwargs) -> None: | ||||
|         super().__init__(*args, **kwargs) | ||||
|         histfolder = Path("~").expanduser() / Path(".local/share/authentik") | ||||
|         histfolder.mkdir(parents=True, exist_ok=True) | ||||
|         histfile = histfolder / Path("blueprint_shell_history") | ||||
|         readline.parse_and_bind("tab: complete") | ||||
|         readline.parse_and_bind("set editing-mode vi") | ||||
|  | ||||
|         try: | ||||
|             readline.read_history_file(str(histfile)) | ||||
|         except FileNotFoundError: | ||||
|             pass | ||||
|  | ||||
|         atexit.register(readline.write_history_file, str(histfile)) | ||||
|  | ||||
|     @no_translations | ||||
|     def handle(self, *args, **options): | ||||
|         """Interactively debug blueprint files""" | ||||
|         self.stdout.write(get_banner_text("Blueprint shell")) | ||||
|         self.stdout.write("Type '.eval' to evaluate previously entered statement(s).") | ||||
|  | ||||
|         def do_eval(): | ||||
|             yaml_input = "\n".join([line for line in self.lines if line]) | ||||
|             data = load(yaml_input, BlueprintLoader) | ||||
|             self.stdout.write(pformat(data)) | ||||
|             self.lines = [] | ||||
|  | ||||
|         while True: | ||||
|             try: | ||||
|                 line = input("> ") | ||||
|                 if line == ".eval": | ||||
|                     do_eval() | ||||
|                 else: | ||||
|                     self.lines.append(line) | ||||
|             except EntryInvalidError as exc: | ||||
|                 self.stdout.write("Failed to evaluate expression:") | ||||
|                 self.stdout.write(indent(exception_to_string(exc), prefix="  ")) | ||||
|             except EOFError: | ||||
|                 break | ||||
|             except KeyboardInterrupt: | ||||
|                 self.stdout.write() | ||||
|                 sysexit(0) | ||||
|         self.stdout.write() | ||||
| @ -126,7 +126,7 @@ class Command(BaseCommand): | ||||
|         def_name_perm = f"model_{model_path}_permissions" | ||||
|         def_path_perm = f"#/$defs/{def_name_perm}" | ||||
|         self.schema["$defs"][def_name_perm] = self.model_permissions(model) | ||||
|         template = { | ||||
|         return { | ||||
|             "type": "object", | ||||
|             "required": ["model", "identifiers"], | ||||
|             "properties": { | ||||
| @ -143,11 +143,6 @@ class Command(BaseCommand): | ||||
|                 "identifiers": {"$ref": def_path}, | ||||
|             }, | ||||
|         } | ||||
|         # Meta models don't require identifiers, as there's no matching database model to find | ||||
|         if issubclass(model, BaseMetaModel): | ||||
|             del template["properties"]["identifiers"] | ||||
|             template["required"].remove("identifiers") | ||||
|         return template | ||||
|  | ||||
|     def field_to_jsonschema(self, field: Field) -> dict: | ||||
|         """Convert a single field to json schema""" | ||||
|  | ||||
| @ -202,9 +202,6 @@ class Blueprint: | ||||
| class YAMLTag: | ||||
|     """Base class for all YAML Tags""" | ||||
|  | ||||
|     def __repr__(self) -> str: | ||||
|         return str(self.resolve(BlueprintEntry(""), Blueprint())) | ||||
|  | ||||
|     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: | ||||
|         """Implement yaml tag logic""" | ||||
|         raise NotImplementedError | ||||
|  | ||||
| @ -50,7 +50,7 @@ from authentik.enterprise.providers.microsoft_entra.models import ( | ||||
|     MicrosoftEntraProviderGroup, | ||||
|     MicrosoftEntraProviderUser, | ||||
| ) | ||||
| from authentik.enterprise.providers.ssf.models import StreamEvent | ||||
| from authentik.enterprise.providers.rac.models import ConnectionToken | ||||
| from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import ( | ||||
|     EndpointDevice, | ||||
|     EndpointDeviceConnection, | ||||
| @ -71,7 +71,6 @@ from authentik.providers.oauth2.models import ( | ||||
|     DeviceToken, | ||||
|     RefreshToken, | ||||
| ) | ||||
| from authentik.providers.rac.models import ConnectionToken | ||||
| from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser | ||||
| from authentik.rbac.models import Role | ||||
| from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser | ||||
| @ -132,7 +131,6 @@ def excluded_models() -> list[type[Model]]: | ||||
|         EndpointDevice, | ||||
|         EndpointDeviceConnection, | ||||
|         DeviceToken, | ||||
|         StreamEvent, | ||||
|     ) | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -14,10 +14,10 @@ from rest_framework.response import Response | ||||
| from rest_framework.validators import UniqueValidator | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
|  | ||||
| from authentik.api.authorization import SecretKeyFilter | ||||
| from authentik.brands.models import Brand | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import ModelSerializer, PassiveSerializer | ||||
| from authentik.rbac.filters import SecretKeyFilter | ||||
| from authentik.tenants.utils import get_current_tenant | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -1,16 +1,15 @@ | ||||
| """Application Roles API Viewset""" | ||||
|  | ||||
| from django.http import HttpRequest | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from rest_framework.exceptions import ValidationError | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
|  | ||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import ModelSerializer | ||||
| from authentik.core.models import ( | ||||
|     Application, | ||||
|     ApplicationEntitlement, | ||||
|     User, | ||||
| ) | ||||
|  | ||||
|  | ||||
| @ -19,10 +18,7 @@ class ApplicationEntitlementSerializer(ModelSerializer): | ||||
|  | ||||
|     def validate_app(self, app: Application) -> Application: | ||||
|         """Ensure user has permission to view""" | ||||
|         request: HttpRequest = self.context.get("request") | ||||
|         if not request and SERIALIZER_CONTEXT_BLUEPRINT in self.context: | ||||
|             return app | ||||
|         user = request.user | ||||
|         user: User = self._context["request"].user | ||||
|         if user.has_perm("view_application", app) or user.has_perm( | ||||
|             "authentik_core.view_application" | ||||
|         ): | ||||
|  | ||||
| @ -2,12 +2,16 @@ | ||||
|  | ||||
| from typing import TypedDict | ||||
|  | ||||
| from django_filters.rest_framework import DjangoFilterBackend | ||||
| from guardian.utils import get_anonymous_user | ||||
| from rest_framework import mixins | ||||
| from rest_framework.fields import SerializerMethodField | ||||
| from rest_framework.filters import OrderingFilter, SearchFilter | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.viewsets import GenericViewSet | ||||
| from ua_parser import user_agent_parser | ||||
|  | ||||
| from authentik.api.authorization import OwnerSuperuserPermissions | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import ModelSerializer | ||||
| from authentik.core.models import AuthenticatedSession | ||||
| @ -106,4 +110,11 @@ class AuthenticatedSessionViewSet( | ||||
|     search_fields = ["user__username", "last_ip", "last_user_agent"] | ||||
|     filterset_fields = ["user__username", "last_ip", "last_user_agent"] | ||||
|     ordering = ["user__username"] | ||||
|     owner_field = "user" | ||||
|     permission_classes = [OwnerSuperuserPermissions] | ||||
|     filter_backends = [DjangoFilterBackend, OrderingFilter, SearchFilter] | ||||
|  | ||||
|     def get_queryset(self): | ||||
|         user = self.request.user if self.request else get_anonymous_user() | ||||
|         if user.is_superuser: | ||||
|             return super().get_queryset() | ||||
|         return super().get_queryset().filter(user=user.pk) | ||||
|  | ||||
| @ -3,7 +3,6 @@ | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from drf_spectacular.types import OpenApiTypes | ||||
| from drf_spectacular.utils import OpenApiParameter, extend_schema | ||||
| from guardian.shortcuts import get_objects_for_user | ||||
| from rest_framework.fields import ( | ||||
|     BooleanField, | ||||
|     CharField, | ||||
| @ -17,6 +16,7 @@ from rest_framework.viewsets import ViewSet | ||||
|  | ||||
| from authentik.core.api.utils import MetaNameSerializer | ||||
| from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice | ||||
| from authentik.rbac.decorators import permission_required | ||||
| from authentik.stages.authenticator import device_classes, devices_for_user | ||||
| from authentik.stages.authenticator.models import Device | ||||
| from authentik.stages.authenticator_webauthn.models import WebAuthnDevice | ||||
| @ -73,9 +73,7 @@ class AdminDeviceViewSet(ViewSet): | ||||
|     def get_devices(self, **kwargs): | ||||
|         """Get all devices in all child classes""" | ||||
|         for model in device_classes(): | ||||
|             device_set = get_objects_for_user( | ||||
|                 self.request.user, f"{model._meta.app_label}.view_{model._meta.model_name}", model | ||||
|             ).filter(**kwargs) | ||||
|             device_set = model.objects.filter(**kwargs) | ||||
|             yield from device_set | ||||
|  | ||||
|     @extend_schema( | ||||
| @ -88,6 +86,10 @@ class AdminDeviceViewSet(ViewSet): | ||||
|         ], | ||||
|         responses={200: DeviceSerializer(many=True)}, | ||||
|     ) | ||||
|     @permission_required( | ||||
|         None, | ||||
|         [f"{model._meta.app_label}.view_{model._meta.model_name}" for model in device_classes()], | ||||
|     ) | ||||
|     def list(self, request: Request) -> Response: | ||||
|         """Get all devices for current user""" | ||||
|         kwargs = {} | ||||
|  | ||||
| @ -4,7 +4,6 @@ from json import loads | ||||
|  | ||||
| from django.db.models import Prefetch | ||||
| from django.http import Http404 | ||||
| from django.utils.translation import gettext as _ | ||||
| from django_filters.filters import CharFilter, ModelMultipleChoiceFilter | ||||
| from django_filters.filterset import FilterSet | ||||
| from drf_spectacular.utils import ( | ||||
| @ -82,37 +81,9 @@ class GroupSerializer(ModelSerializer): | ||||
|         if not self.instance or not parent: | ||||
|             return parent | ||||
|         if str(parent.group_uuid) == str(self.instance.group_uuid): | ||||
|             raise ValidationError(_("Cannot set group as parent of itself.")) | ||||
|             raise ValidationError("Cannot set group as parent of itself.") | ||||
|         return parent | ||||
|  | ||||
|     def validate_is_superuser(self, superuser: bool): | ||||
|         """Ensure that the user creating this group has permissions to set the superuser flag""" | ||||
|         request: Request = self.context.get("request", None) | ||||
|         if not request: | ||||
|             return superuser | ||||
|         # If we're updating an instance, and the state hasn't changed, we don't need to check perms | ||||
|         if self.instance and superuser == self.instance.is_superuser: | ||||
|             return superuser | ||||
|         user: User = request.user | ||||
|         perm = ( | ||||
|             "authentik_core.enable_group_superuser" | ||||
|             if superuser | ||||
|             else "authentik_core.disable_group_superuser" | ||||
|         ) | ||||
|         has_perm = user.has_perm(perm) | ||||
|         if self.instance and not has_perm: | ||||
|             has_perm = user.has_perm(perm, self.instance) | ||||
|         if not has_perm: | ||||
|             raise ValidationError( | ||||
|                 _( | ||||
|                     ( | ||||
|                         "User does not have permission to set " | ||||
|                         "superuser status to {superuser_status}." | ||||
|                     ).format_map({"superuser_status": superuser}) | ||||
|                 ) | ||||
|             ) | ||||
|         return superuser | ||||
|  | ||||
|     class Meta: | ||||
|         model = Group | ||||
|         fields = [ | ||||
|  | ||||
| @ -2,16 +2,19 @@ | ||||
|  | ||||
| from collections.abc import Iterable | ||||
|  | ||||
| from django_filters.rest_framework import DjangoFilterBackend | ||||
| from drf_spectacular.utils import OpenApiResponse, extend_schema | ||||
| from rest_framework import mixins | ||||
| from rest_framework.decorators import action | ||||
| from rest_framework.fields import CharField, ReadOnlyField, SerializerMethodField | ||||
| from rest_framework.filters import OrderingFilter, SearchFilter | ||||
| from rest_framework.parsers import MultiPartParser | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.viewsets import GenericViewSet | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions | ||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | ||||
| from authentik.core.api.object_types import TypesMixin | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| @ -85,7 +88,7 @@ class SourceViewSet( | ||||
|     serializer_class = SourceSerializer | ||||
|     lookup_field = "slug" | ||||
|     search_fields = ["slug", "name"] | ||||
|     filterset_fields = ["slug", "name", "managed", "pbm_uuid"] | ||||
|     filterset_fields = ["slug", "name", "managed"] | ||||
|  | ||||
|     def get_queryset(self):  # pragma: no cover | ||||
|         return Source.objects.select_subclasses() | ||||
| @ -186,10 +189,11 @@ class UserSourceConnectionViewSet( | ||||
|  | ||||
|     queryset = UserSourceConnection.objects.all() | ||||
|     serializer_class = UserSourceConnectionSerializer | ||||
|     permission_classes = [OwnerSuperuserPermissions] | ||||
|     filterset_fields = ["user", "source__slug"] | ||||
|     search_fields = ["source__slug"] | ||||
|     filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter] | ||||
|     ordering = ["source__slug", "pk"] | ||||
|     owner_field = "user" | ||||
|  | ||||
|  | ||||
| class GroupSourceConnectionSerializer(SourceSerializer): | ||||
| @ -224,7 +228,8 @@ class GroupSourceConnectionViewSet( | ||||
|  | ||||
|     queryset = GroupSourceConnection.objects.all() | ||||
|     serializer_class = GroupSourceConnectionSerializer | ||||
|     permission_classes = [OwnerSuperuserPermissions] | ||||
|     filterset_fields = ["group", "source__slug"] | ||||
|     search_fields = ["source__slug"] | ||||
|     filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter] | ||||
|     ordering = ["source__slug", "pk"] | ||||
|     owner_field = "user" | ||||
|  | ||||
| @ -3,15 +3,18 @@ | ||||
| from typing import Any | ||||
|  | ||||
| from django.utils.timezone import now | ||||
| from django_filters.rest_framework import DjangoFilterBackend | ||||
| from drf_spectacular.utils import OpenApiResponse, extend_schema, inline_serializer | ||||
| from guardian.shortcuts import assign_perm, get_anonymous_user | ||||
| from rest_framework.decorators import action | ||||
| from rest_framework.exceptions import ValidationError | ||||
| from rest_framework.fields import CharField | ||||
| from rest_framework.filters import OrderingFilter, SearchFilter | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
|  | ||||
| from authentik.api.authorization import OwnerSuperuserPermissions | ||||
| from authentik.blueprints.api import ManagedSerializer | ||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| @ -135,8 +138,8 @@ class TokenViewSet(UsedByMixin, ModelViewSet): | ||||
|         "managed", | ||||
|     ] | ||||
|     ordering = ["identifier", "expires"] | ||||
|     owner_field = "user" | ||||
|     rbac_allow_create_without_perm = True | ||||
|     permission_classes = [OwnerSuperuserPermissions] | ||||
|     filter_backends = [DjangoFilterBackend, OrderingFilter, SearchFilter] | ||||
|  | ||||
|     def get_queryset(self): | ||||
|         user = self.request.user if self.request else get_anonymous_user() | ||||
|  | ||||
| @ -236,11 +236,9 @@ class UserSerializer(ModelSerializer): | ||||
|             "path", | ||||
|             "type", | ||||
|             "uuid", | ||||
|             "password_change_date", | ||||
|         ] | ||||
|         extra_kwargs = { | ||||
|             "name": {"allow_blank": True}, | ||||
|             "password_change_date": {"read_only": True}, | ||||
|         } | ||||
|  | ||||
|  | ||||
| @ -429,7 +427,7 @@ class UserViewSet(UsedByMixin, ModelViewSet): | ||||
|     queryset = User.objects.none() | ||||
|     ordering = ["username"] | ||||
|     serializer_class = UserSerializer | ||||
|     search_fields = ["username", "name", "is_active", "email", "uuid", "attributes"] | ||||
|     search_fields = ["username", "name", "is_active", "email", "uuid"] | ||||
|     filterset_class = UsersFilter | ||||
|  | ||||
|     def get_queryset(self): | ||||
| @ -587,7 +585,7 @@ class UserViewSet(UsedByMixin, ModelViewSet): | ||||
|         """Set password for user""" | ||||
|         user: User = self.get_object() | ||||
|         try: | ||||
|             user.set_password(request.data.get("password"), request=request) | ||||
|             user.set_password(request.data.get("password")) | ||||
|             user.save() | ||||
|         except (ValidationError, IntegrityError) as exc: | ||||
|             LOGGER.debug("Failed to set password", exc=exc) | ||||
|  | ||||
| @ -44,12 +44,13 @@ class TokenBackend(InbuiltBackend): | ||||
|         self, request: HttpRequest, username: str | None, password: str | None, **kwargs: Any | ||||
|     ) -> User | None: | ||||
|         try: | ||||
|  | ||||
|             user = User._default_manager.get_by_natural_key(username) | ||||
|  | ||||
|         except User.DoesNotExist: | ||||
|             # Run the default password hasher once to reduce the timing | ||||
|             # difference between an existing and a nonexistent user (#20760). | ||||
|             User().set_password(password, request=request) | ||||
|             User().set_password(password) | ||||
|             return None | ||||
|  | ||||
|         tokens = Token.filter_not_expired( | ||||
|  | ||||
| @ -58,7 +58,6 @@ class PropertyMappingEvaluator(BaseEvaluator): | ||||
|             self._context["user"] = user | ||||
|         if request: | ||||
|             req.http_request = request | ||||
|             self._context["http_request"] = request | ||||
|         req.context.update(**kwargs) | ||||
|         self._context["request"] = req | ||||
|         self._context.update(**kwargs) | ||||
|  | ||||
| @ -5,7 +5,6 @@ from typing import TextIO | ||||
| from daphne.management.commands.runserver import Command as RunServer | ||||
| from daphne.server import Server | ||||
|  | ||||
| from authentik.lib.debug import start_debug_server | ||||
| from authentik.root.signals import post_startup, pre_startup, startup | ||||
|  | ||||
|  | ||||
| @ -14,7 +13,6 @@ class SignalServer(Server): | ||||
|  | ||||
|     def __init__(self, *args, **kwargs): | ||||
|         super().__init__(*args, **kwargs) | ||||
|         start_debug_server() | ||||
|  | ||||
|         def ready_callable(): | ||||
|             pre_startup.send(sender=self) | ||||
|  | ||||
| @ -17,9 +17,7 @@ from authentik.events.middleware import should_log_model | ||||
| from authentik.events.models import Event, EventAction | ||||
| from authentik.events.utils import model_to_dict | ||||
|  | ||||
|  | ||||
| def get_banner_text(shell_type="shell") -> str: | ||||
|     return f"""### authentik {shell_type} ({get_full_version()}) | ||||
| BANNER_TEXT = f"""### authentik shell ({get_full_version()}) | ||||
| ### Node {platform.node()} | Arch {platform.machine()} | Python {platform.python_version()} """ | ||||
|  | ||||
|  | ||||
| @ -116,4 +114,4 @@ class Command(BaseCommand): | ||||
|             readline.parse_and_bind("tab: complete") | ||||
|  | ||||
|         # Run interactive shell | ||||
|         code.interact(banner=get_banner_text(), local=namespace) | ||||
|         code.interact(banner=BANNER_TEXT, local=namespace) | ||||
|  | ||||
| @ -9,7 +9,6 @@ from django.db import close_old_connections | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.lib.debug import start_debug_server | ||||
| from authentik.root.celery import CELERY_APP | ||||
|  | ||||
| LOGGER = get_logger() | ||||
| @ -29,7 +28,10 @@ class Command(BaseCommand): | ||||
|     def handle(self, **options): | ||||
|         LOGGER.debug("Celery options", **options) | ||||
|         close_old_connections() | ||||
|         start_debug_server() | ||||
|         if CONFIG.get_bool("remote_debug"): | ||||
|             import debugpy | ||||
|  | ||||
|             debugpy.listen(("0.0.0.0", 6900))  # nosec | ||||
|         worker: Worker = CELERY_APP.Worker( | ||||
|             no_color=False, | ||||
|             quiet=True, | ||||
|  | ||||
| @ -1,45 +0,0 @@ | ||||
| # Generated by Django 5.0.10 on 2025-01-13 18:05 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_core", "0041_applicationentitlement"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddIndex( | ||||
|             model_name="authenticatedsession", | ||||
|             index=models.Index(fields=["expires"], name="authentik_c_expires_08251d_idx"), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="authenticatedsession", | ||||
|             index=models.Index(fields=["expiring"], name="authentik_c_expirin_9cd839_idx"), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="authenticatedsession", | ||||
|             index=models.Index( | ||||
|                 fields=["expiring", "expires"], name="authentik_c_expirin_195a84_idx" | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="authenticatedsession", | ||||
|             index=models.Index(fields=["session_key"], name="authentik_c_session_d0f005_idx"), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="token", | ||||
|             index=models.Index(fields=["expires"], name="authentik_c_expires_a62b4b_idx"), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="token", | ||||
|             index=models.Index(fields=["expiring"], name="authentik_c_expirin_a1b838_idx"), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="token", | ||||
|             index=models.Index( | ||||
|                 fields=["expiring", "expires"], name="authentik_c_expirin_ba04d9_idx" | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @ -1,26 +0,0 @@ | ||||
| # Generated by Django 5.0.11 on 2025-01-30 23:55 | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_core", "0042_authenticatedsession_authentik_c_expires_08251d_idx_and_more"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterModelOptions( | ||||
|             name="group", | ||||
|             options={ | ||||
|                 "permissions": [ | ||||
|                     ("add_user_to_group", "Add user to group"), | ||||
|                     ("remove_user_from_group", "Remove user from group"), | ||||
|                     ("enable_group_superuser", "Enable superuser status"), | ||||
|                     ("disable_group_superuser", "Disable superuser status"), | ||||
|                 ], | ||||
|                 "verbose_name": "Group", | ||||
|                 "verbose_name_plural": "Groups", | ||||
|             }, | ||||
|         ), | ||||
|     ] | ||||
| @ -204,8 +204,6 @@ class Group(SerializerModel, AttributesMixin): | ||||
|         permissions = [ | ||||
|             ("add_user_to_group", _("Add user to group")), | ||||
|             ("remove_user_from_group", _("Remove user from group")), | ||||
|             ("enable_group_superuser", _("Enable superuser status")), | ||||
|             ("disable_group_superuser", _("Disable superuser status")), | ||||
|         ] | ||||
|  | ||||
|     def __str__(self): | ||||
| @ -358,13 +356,13 @@ class User(SerializerModel, GuardianUserMixin, AttributesMixin, AbstractUser): | ||||
|         """superuser == staff user""" | ||||
|         return self.is_superuser  # type: ignore | ||||
|  | ||||
|     def set_password(self, raw_password, signal=True, sender=None, request=None): | ||||
|     def set_password(self, raw_password, signal=True, sender=None): | ||||
|         if self.pk and signal: | ||||
|             from authentik.core.signals import password_changed | ||||
|  | ||||
|             if not sender: | ||||
|                 sender = self | ||||
|             password_changed.send(sender=sender, user=self, password=raw_password, request=request) | ||||
|             password_changed.send(sender=sender, user=self, password=raw_password) | ||||
|         self.password_change_date = now() | ||||
|         return super().set_password(raw_password) | ||||
|  | ||||
| @ -601,14 +599,6 @@ class Application(SerializerModel, PolicyBindingModel): | ||||
|             return None | ||||
|         return candidates[-1] | ||||
|  | ||||
|     def backchannel_provider_for[T: Provider](self, provider_type: type[T], **kwargs) -> T | None: | ||||
|         """Get Backchannel provider for a specific type""" | ||||
|         providers = self.backchannel_providers.filter( | ||||
|             **{f"{provider_type._meta.model_name}__isnull": False}, | ||||
|             **kwargs, | ||||
|         ) | ||||
|         return getattr(providers.first(), provider_type._meta.model_name) | ||||
|  | ||||
|     def __str__(self): | ||||
|         return str(self.name) | ||||
|  | ||||
| @ -856,11 +846,6 @@ class ExpiringModel(models.Model): | ||||
|  | ||||
|     class Meta: | ||||
|         abstract = True | ||||
|         indexes = [ | ||||
|             models.Index(fields=["expires"]), | ||||
|             models.Index(fields=["expiring"]), | ||||
|             models.Index(fields=["expiring", "expires"]), | ||||
|         ] | ||||
|  | ||||
|     def expire_action(self, *args, **kwargs): | ||||
|         """Handler which is called when this object is expired. By | ||||
| @ -916,7 +901,7 @@ class Token(SerializerModel, ManagedModel, ExpiringModel): | ||||
|     class Meta: | ||||
|         verbose_name = _("Token") | ||||
|         verbose_name_plural = _("Tokens") | ||||
|         indexes = ExpiringModel.Meta.indexes + [ | ||||
|         indexes = [ | ||||
|             models.Index(fields=["identifier"]), | ||||
|             models.Index(fields=["key"]), | ||||
|         ] | ||||
| @ -1016,9 +1001,6 @@ class AuthenticatedSession(ExpiringModel): | ||||
|     class Meta: | ||||
|         verbose_name = _("Authenticated Session") | ||||
|         verbose_name_plural = _("Authenticated Sessions") | ||||
|         indexes = ExpiringModel.Meta.indexes + [ | ||||
|             models.Index(fields=["session_key"]), | ||||
|         ] | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         return f"Authenticated Session {self.session_key[:10]}" | ||||
|  | ||||
| @ -35,7 +35,8 @@ from authentik.flows.planner import ( | ||||
|     FlowPlanner, | ||||
| ) | ||||
| from authentik.flows.stage import StageView | ||||
| from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_GET | ||||
| from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_GET, SESSION_KEY_PLAN | ||||
| from authentik.lib.utils.urls import redirect_with_qs | ||||
| from authentik.lib.views import bad_request_message | ||||
| from authentik.policies.denied import AccessDeniedResponse | ||||
| from authentik.policies.utils import delete_none_values | ||||
| @ -46,9 +47,8 @@ from authentik.stages.user_write.stage import PLAN_CONTEXT_USER_PATH | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
| PLAN_CONTEXT_SOURCE_GROUPS = "source_groups" | ||||
| SESSION_KEY_SOURCE_FLOW_STAGES = "authentik/flows/source_flow_stages" | ||||
| SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token"  # nosec | ||||
| PLAN_CONTEXT_SOURCE_GROUPS = "source_groups" | ||||
|  | ||||
|  | ||||
| class MessageStage(StageView): | ||||
| @ -219,28 +219,28 @@ class SourceFlowManager: | ||||
|             } | ||||
|         ) | ||||
|         flow_context.update(self.policy_context) | ||||
|         if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session: | ||||
|             token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN) | ||||
|             self._logger.info("Replacing source flow with overridden flow", flow=token.flow.slug) | ||||
|             plan = token.plan | ||||
|             plan.context[PLAN_CONTEXT_IS_RESTORED] = token | ||||
|             plan.context.update(flow_context) | ||||
|             for stage in self.get_stages_to_append(flow): | ||||
|                 plan.append_stage(stage) | ||||
|             if stages: | ||||
|                 for stage in stages: | ||||
|                     plan.append_stage(stage) | ||||
|             self.request.session[SESSION_KEY_PLAN] = plan | ||||
|             flow_slug = token.flow.slug | ||||
|             token.delete() | ||||
|             return redirect_with_qs( | ||||
|                 "authentik_core:if-flow", | ||||
|                 self.request.GET, | ||||
|                 flow_slug=flow_slug, | ||||
|             ) | ||||
|         flow_context.setdefault(PLAN_CONTEXT_REDIRECT, final_redirect) | ||||
|  | ||||
|         if not flow: | ||||
|             # We only check for the flow token here if we don't have a flow, otherwise we rely on | ||||
|             # SESSION_KEY_SOURCE_FLOW_STAGES to delegate the usage of this token and dynamically add | ||||
|             # stages that deal with this token to return to another flow | ||||
|             if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session: | ||||
|                 token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN) | ||||
|                 self._logger.info( | ||||
|                     "Replacing source flow with overridden flow", flow=token.flow.slug | ||||
|                 ) | ||||
|                 plan = token.plan | ||||
|                 plan.context[PLAN_CONTEXT_IS_RESTORED] = token | ||||
|                 plan.context.update(flow_context) | ||||
|                 for stage in self.get_stages_to_append(flow): | ||||
|                     plan.append_stage(stage) | ||||
|                 if stages: | ||||
|                     for stage in stages: | ||||
|                         plan.append_stage(stage) | ||||
|                 redirect = plan.to_redirect(self.request, token.flow) | ||||
|                 token.delete() | ||||
|                 return redirect | ||||
|             return bad_request_message( | ||||
|                 self.request, | ||||
|                 _("Configured flow does not exist."), | ||||
| @ -259,8 +259,6 @@ class SourceFlowManager: | ||||
|         if stages: | ||||
|             for stage in stages: | ||||
|                 plan.append_stage(stage) | ||||
|         for stage in self.request.session.get(SESSION_KEY_SOURCE_FLOW_STAGES, []): | ||||
|             plan.append_stage(stage) | ||||
|         return plan.to_redirect(self.request, flow) | ||||
|  | ||||
|     def handle_auth( | ||||
| @ -297,8 +295,6 @@ class SourceFlowManager: | ||||
|         # When request isn't authenticated we jump straight to auth | ||||
|         if not self.request.user.is_authenticated: | ||||
|             return self.handle_auth(connection) | ||||
|         # When an override flow token exists we actually still use a flow for link | ||||
|         # to continue the existing flow we came from | ||||
|         if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session: | ||||
|             return self._prepare_flow(None, connection) | ||||
|         connection.save() | ||||
|  | ||||
| @ -67,8 +67,6 @@ def clean_expired_models(self: SystemTask): | ||||
|                 raise ImproperlyConfigured( | ||||
|                     "Invalid session_storage setting, allowed values are db and cache" | ||||
|                 ) | ||||
|     if CONFIG.get("session_storage", "cache") == "db": | ||||
|         DBSessionStore.clear_expired() | ||||
|     LOGGER.debug("Expired sessions", model=AuthenticatedSession, amount=amount) | ||||
|  | ||||
|     messages.append(f"Expired {amount} {AuthenticatedSession._meta.verbose_name_plural}") | ||||
|  | ||||
| @ -11,7 +11,6 @@ | ||||
|         build: "{{ build }}", | ||||
|         api: { | ||||
|             base: "{{ base_url }}", | ||||
|             relBase: "{{ base_url_rel }}", | ||||
|         }, | ||||
|     }; | ||||
|     window.addEventListener("DOMContentLoaded", function () { | ||||
|  | ||||
| @ -8,8 +8,6 @@ | ||||
|     <head> | ||||
|         <meta charset="UTF-8"> | ||||
|         <meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1"> | ||||
|         {# Darkreader breaks the site regardless of theme as its not compatible with webcomponents, and we default to a dark theme based on preferred colour-scheme #} | ||||
|         <meta name="darkreader-lock"> | ||||
|         <title>{% block title %}{% trans title|default:brand.branding_title %}{% endblock %}</title> | ||||
|         <link rel="icon" href="{{ brand.branding_favicon_url }}"> | ||||
|         <link rel="shortcut icon" href="{{ brand.branding_favicon_url }}"> | ||||
|  | ||||
| @ -4,7 +4,7 @@ from django.urls.base import reverse | ||||
| from guardian.shortcuts import assign_perm | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import Group | ||||
| from authentik.core.models import Group, User | ||||
| from authentik.core.tests.utils import create_test_admin_user, create_test_user | ||||
| from authentik.lib.generators import generate_id | ||||
|  | ||||
| @ -14,7 +14,7 @@ class TestGroupsAPI(APITestCase): | ||||
|  | ||||
|     def setUp(self) -> None: | ||||
|         self.login_user = create_test_user() | ||||
|         self.user = create_test_user() | ||||
|         self.user = User.objects.create(username="test-user") | ||||
|  | ||||
|     def test_list_with_users(self): | ||||
|         """Test listing with users""" | ||||
| @ -109,57 +109,3 @@ class TestGroupsAPI(APITestCase): | ||||
|             }, | ||||
|         ) | ||||
|         self.assertEqual(res.status_code, 400) | ||||
|  | ||||
|     def test_superuser_no_perm(self): | ||||
|         """Test creating a superuser group without permission""" | ||||
|         assign_perm("authentik_core.add_group", self.login_user) | ||||
|         self.client.force_login(self.login_user) | ||||
|         res = self.client.post( | ||||
|             reverse("authentik_api:group-list"), | ||||
|             data={"name": generate_id(), "is_superuser": True}, | ||||
|         ) | ||||
|         self.assertEqual(res.status_code, 400) | ||||
|         self.assertJSONEqual( | ||||
|             res.content, | ||||
|             {"is_superuser": ["User does not have permission to set superuser status to True."]}, | ||||
|         ) | ||||
|  | ||||
|     def test_superuser_update_no_perm(self): | ||||
|         """Test updating a superuser group without permission""" | ||||
|         group = Group.objects.create(name=generate_id(), is_superuser=True) | ||||
|         assign_perm("view_group", self.login_user, group) | ||||
|         assign_perm("change_group", self.login_user, group) | ||||
|         self.client.force_login(self.login_user) | ||||
|         res = self.client.patch( | ||||
|             reverse("authentik_api:group-detail", kwargs={"pk": group.pk}), | ||||
|             data={"is_superuser": False}, | ||||
|         ) | ||||
|         self.assertEqual(res.status_code, 400) | ||||
|         self.assertJSONEqual( | ||||
|             res.content, | ||||
|             {"is_superuser": ["User does not have permission to set superuser status to False."]}, | ||||
|         ) | ||||
|  | ||||
|     def test_superuser_update_no_change(self): | ||||
|         """Test updating a superuser group without permission | ||||
|         and without changing the superuser status""" | ||||
|         group = Group.objects.create(name=generate_id(), is_superuser=True) | ||||
|         assign_perm("view_group", self.login_user, group) | ||||
|         assign_perm("change_group", self.login_user, group) | ||||
|         self.client.force_login(self.login_user) | ||||
|         res = self.client.patch( | ||||
|             reverse("authentik_api:group-detail", kwargs={"pk": group.pk}), | ||||
|             data={"name": generate_id(), "is_superuser": True}, | ||||
|         ) | ||||
|         self.assertEqual(res.status_code, 200) | ||||
|  | ||||
|     def test_superuser_create(self): | ||||
|         """Test creating a superuser group with permission""" | ||||
|         assign_perm("authentik_core.add_group", self.login_user) | ||||
|         assign_perm("authentik_core.enable_group_superuser", self.login_user) | ||||
|         self.client.force_login(self.login_user) | ||||
|         res = self.client.post( | ||||
|             reverse("authentik_api:group-list"), | ||||
|             data={"name": generate_id(), "is_superuser": True}, | ||||
|         ) | ||||
|         self.assertEqual(res.status_code, 201) | ||||
|  | ||||
| @ -55,7 +55,7 @@ class RedirectToAppLaunch(View): | ||||
|             ) | ||||
|         except FlowNonApplicableException: | ||||
|             raise Http404 from None | ||||
|         plan.append_stage(in_memory_stage(RedirectToAppStage)) | ||||
|         plan.insert_stage(in_memory_stage(RedirectToAppStage)) | ||||
|         return plan.to_redirect(request, flow) | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -53,7 +53,6 @@ class InterfaceView(TemplateView): | ||||
|         kwargs["build"] = get_build_hash() | ||||
|         kwargs["url_kwargs"] = self.kwargs | ||||
|         kwargs["base_url"] = self.request.build_absolute_uri(CONFIG.get("web.path", "/")) | ||||
|         kwargs["base_url_rel"] = CONFIG.get("web.path", "/") | ||||
|         return super().get_context_data(**kwargs) | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -28,6 +28,7 @@ from rest_framework.validators import UniqueValidator | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.api.authorization import SecretKeyFilter | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import ModelSerializer, PassiveSerializer | ||||
| from authentik.crypto.apps import MANAGED_KEY | ||||
| @ -35,7 +36,7 @@ from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg | ||||
| from authentik.crypto.models import CertificateKeyPair | ||||
| from authentik.events.models import Event, EventAction | ||||
| from authentik.rbac.decorators import permission_required | ||||
| from authentik.rbac.filters import ObjectFilter, SecretKeyFilter | ||||
| from authentik.rbac.filters import ObjectFilter | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| @ -97,8 +97,6 @@ class EnterpriseAuditMiddleware(AuditMiddleware): | ||||
|         thread_kwargs: dict | None = None, | ||||
|         **_, | ||||
|     ): | ||||
|         if not self.enabled: | ||||
|             return super().post_save_handler(request, sender, instance, created, thread_kwargs, **_) | ||||
|         if not should_log_model(instance): | ||||
|             return None | ||||
|         thread_kwargs = {} | ||||
| @ -124,8 +122,6 @@ class EnterpriseAuditMiddleware(AuditMiddleware): | ||||
|     ): | ||||
|         thread_kwargs = {} | ||||
|         m2m_field = None | ||||
|         if not self.enabled: | ||||
|             return super().m2m_changed_handler(request, sender, instance, action, thread_kwargs) | ||||
|         # For the audit log we don't care about `pre_` or `post_` so we trim that part off | ||||
|         _, _, action_direction = action.partition("_") | ||||
|         # resolve the "through" model to an actual field | ||||
|  | ||||
| @ -1,27 +0,0 @@ | ||||
| # Generated by Django 5.0.10 on 2025-01-13 18:05 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_enterprise", "0003_remove_licenseusage_within_limits_and_more"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddIndex( | ||||
|             model_name="licenseusage", | ||||
|             index=models.Index(fields=["expires"], name="authentik_e_expires_3f2956_idx"), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="licenseusage", | ||||
|             index=models.Index(fields=["expiring"], name="authentik_e_expirin_11d3d7_idx"), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="licenseusage", | ||||
|             index=models.Index( | ||||
|                 fields=["expiring", "expires"], name="authentik_e_expirin_4d558f_idx" | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @ -93,4 +93,3 @@ class LicenseUsage(ExpiringModel): | ||||
|     class Meta: | ||||
|         verbose_name = _("License Usage") | ||||
|         verbose_name_plural = _("License Usage Records") | ||||
|         indexes = ExpiringModel.Meta.indexes | ||||
|  | ||||
| @ -1,17 +1,21 @@ | ||||
| """RAC Provider API Views""" | ||||
| 
 | ||||
| from django_filters.rest_framework.backends import DjangoFilterBackend | ||||
| from rest_framework import mixins | ||||
| from rest_framework.filters import OrderingFilter, SearchFilter | ||||
| from rest_framework.viewsets import GenericViewSet | ||||
| 
 | ||||
| from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions | ||||
| from authentik.core.api.groups import GroupMemberSerializer | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import ModelSerializer | ||||
| from authentik.providers.rac.api.endpoints import EndpointSerializer | ||||
| from authentik.providers.rac.api.providers import RACProviderSerializer | ||||
| from authentik.providers.rac.models import ConnectionToken | ||||
| from authentik.enterprise.api import EnterpriseRequiredMixin | ||||
| from authentik.enterprise.providers.rac.api.endpoints import EndpointSerializer | ||||
| from authentik.enterprise.providers.rac.api.providers import RACProviderSerializer | ||||
| from authentik.enterprise.providers.rac.models import ConnectionToken | ||||
| 
 | ||||
| 
 | ||||
| class ConnectionTokenSerializer(ModelSerializer): | ||||
| class ConnectionTokenSerializer(EnterpriseRequiredMixin, ModelSerializer): | ||||
|     """ConnectionToken Serializer""" | ||||
| 
 | ||||
|     provider_obj = RACProviderSerializer(source="provider", read_only=True) | ||||
| @ -30,6 +34,12 @@ class ConnectionTokenSerializer(ModelSerializer): | ||||
|         ] | ||||
| 
 | ||||
| 
 | ||||
| class ConnectionTokenOwnerFilter(OwnerFilter): | ||||
|     """Owner filter for connection tokens (checks session's user)""" | ||||
| 
 | ||||
|     owner_key = "session__user" | ||||
| 
 | ||||
| 
 | ||||
| class ConnectionTokenViewSet( | ||||
|     mixins.RetrieveModelMixin, | ||||
|     mixins.UpdateModelMixin, | ||||
| @ -45,4 +55,10 @@ class ConnectionTokenViewSet( | ||||
|     filterset_fields = ["endpoint", "session__user", "provider"] | ||||
|     search_fields = ["endpoint__name", "provider__name"] | ||||
|     ordering = ["endpoint__name", "provider__name"] | ||||
|     owner_field = "session__user" | ||||
|     permission_classes = [OwnerSuperuserPermissions] | ||||
|     filter_backends = [ | ||||
|         ConnectionTokenOwnerFilter, | ||||
|         DjangoFilterBackend, | ||||
|         OrderingFilter, | ||||
|         SearchFilter, | ||||
|     ] | ||||
| @ -14,9 +14,10 @@ from structlog.stdlib import get_logger | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import ModelSerializer | ||||
| from authentik.core.models import Provider | ||||
| from authentik.enterprise.api import EnterpriseRequiredMixin | ||||
| from authentik.enterprise.providers.rac.api.providers import RACProviderSerializer | ||||
| from authentik.enterprise.providers.rac.models import Endpoint | ||||
| from authentik.policies.engine import PolicyEngine | ||||
| from authentik.providers.rac.api.providers import RACProviderSerializer | ||||
| from authentik.providers.rac.models import Endpoint | ||||
| from authentik.rbac.filters import ObjectFilter | ||||
| 
 | ||||
| LOGGER = get_logger() | ||||
| @ -27,7 +28,7 @@ def user_endpoint_cache_key(user_pk: str) -> str: | ||||
|     return f"goauthentik.io/providers/rac/endpoint_access/{user_pk}" | ||||
| 
 | ||||
| 
 | ||||
| class EndpointSerializer(ModelSerializer): | ||||
| class EndpointSerializer(EnterpriseRequiredMixin, ModelSerializer): | ||||
|     """Endpoint Serializer""" | ||||
| 
 | ||||
|     provider_obj = RACProviderSerializer(source="provider", read_only=True) | ||||
| @ -10,7 +10,7 @@ from rest_framework.viewsets import ModelViewSet | ||||
| from authentik.core.api.property_mappings import PropertyMappingSerializer | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import JSONDictField | ||||
| from authentik.providers.rac.models import RACPropertyMapping | ||||
| from authentik.enterprise.providers.rac.models import RACPropertyMapping | ||||
| 
 | ||||
| 
 | ||||
| class RACPropertyMappingSerializer(PropertyMappingSerializer): | ||||
| @ -5,10 +5,11 @@ from rest_framework.viewsets import ModelViewSet | ||||
| 
 | ||||
| from authentik.core.api.providers import ProviderSerializer | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.providers.rac.models import RACProvider | ||||
| from authentik.enterprise.api import EnterpriseRequiredMixin | ||||
| from authentik.enterprise.providers.rac.models import RACProvider | ||||
| 
 | ||||
| 
 | ||||
| class RACProviderSerializer(ProviderSerializer): | ||||
| class RACProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer): | ||||
|     """RACProvider Serializer""" | ||||
| 
 | ||||
|     outpost_set = ListField(child=CharField(), read_only=True, source="outpost_set.all") | ||||
							
								
								
									
										14
									
								
								authentik/enterprise/providers/rac/apps.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										14
									
								
								authentik/enterprise/providers/rac/apps.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,14 @@ | ||||
| """RAC app config""" | ||||
|  | ||||
| from authentik.enterprise.apps import EnterpriseConfig | ||||
|  | ||||
|  | ||||
| class AuthentikEnterpriseProviderRAC(EnterpriseConfig): | ||||
|     """authentik enterprise rac app config""" | ||||
|  | ||||
|     name = "authentik.enterprise.providers.rac" | ||||
|     label = "authentik_providers_rac" | ||||
|     verbose_name = "authentik Enterprise.Providers.RAC" | ||||
|     default = True | ||||
|     mountpoint = "" | ||||
|     ws_mountpoint = "authentik.enterprise.providers.rac.urls" | ||||
| @ -7,22 +7,22 @@ from channels.generic.websocket import AsyncWebsocketConsumer | ||||
| from django.http.request import QueryDict | ||||
| from structlog.stdlib import BoundLogger, get_logger | ||||
| 
 | ||||
| from authentik.enterprise.providers.rac.models import ConnectionToken, RACProvider | ||||
| from authentik.outposts.consumer import OUTPOST_GROUP_INSTANCE | ||||
| from authentik.outposts.models import Outpost, OutpostState, OutpostType | ||||
| from authentik.providers.rac.models import ConnectionToken, RACProvider | ||||
| 
 | ||||
| # Global broadcast group, which messages are sent to when the outpost connects back | ||||
| # to authentik for a specific connection | ||||
| # The `RACClientConsumer` consumer adds itself to this group on connection, | ||||
| # and removes itself once it has been assigned a specific outpost channel | ||||
| RAC_CLIENT_GROUP = "group_rac_client" | ||||
| RAC_CLIENT_GROUP = "group_enterprise_rac_client" | ||||
| # A group for all connections in a given authentik session ID | ||||
| # A disconnect message is sent to this group when the session expires/is deleted | ||||
| RAC_CLIENT_GROUP_SESSION = "group_rac_client_%(session)s" | ||||
| RAC_CLIENT_GROUP_SESSION = "group_enterprise_rac_client_%(session)s" | ||||
| # A group for all connections with a specific token, which in almost all cases | ||||
| # is just one connection, however this is used to disconnect the connection | ||||
| # when the token is deleted | ||||
| RAC_CLIENT_GROUP_TOKEN = "group_rac_token_%(token)s"  # nosec | ||||
| RAC_CLIENT_GROUP_TOKEN = "group_enterprise_rac_token_%(token)s"  # nosec | ||||
| 
 | ||||
| # Step 1: Client connects to this websocket endpoint | ||||
| # Step 2: We prepare all the connection args for Guac | ||||
| @ -3,7 +3,7 @@ | ||||
| from channels.exceptions import ChannelFull | ||||
| from channels.generic.websocket import AsyncWebsocketConsumer | ||||
| 
 | ||||
| from authentik.providers.rac.consumer_client import RAC_CLIENT_GROUP | ||||
| from authentik.enterprise.providers.rac.consumer_client import RAC_CLIENT_GROUP | ||||
| 
 | ||||
| 
 | ||||
| class RACOutpostConsumer(AsyncWebsocketConsumer): | ||||
| @ -74,7 +74,7 @@ class RACProvider(Provider): | ||||
| 
 | ||||
|     @property | ||||
|     def serializer(self) -> type[Serializer]: | ||||
|         from authentik.providers.rac.api.providers import RACProviderSerializer | ||||
|         from authentik.enterprise.providers.rac.api.providers import RACProviderSerializer | ||||
| 
 | ||||
|         return RACProviderSerializer | ||||
| 
 | ||||
| @ -100,7 +100,7 @@ class Endpoint(SerializerModel, PolicyBindingModel): | ||||
| 
 | ||||
|     @property | ||||
|     def serializer(self) -> type[Serializer]: | ||||
|         from authentik.providers.rac.api.endpoints import EndpointSerializer | ||||
|         from authentik.enterprise.providers.rac.api.endpoints import EndpointSerializer | ||||
| 
 | ||||
|         return EndpointSerializer | ||||
| 
 | ||||
| @ -129,7 +129,7 @@ class RACPropertyMapping(PropertyMapping): | ||||
| 
 | ||||
|     @property | ||||
|     def serializer(self) -> type[Serializer]: | ||||
|         from authentik.providers.rac.api.property_mappings import ( | ||||
|         from authentik.enterprise.providers.rac.api.property_mappings import ( | ||||
|             RACPropertyMappingSerializer, | ||||
|         ) | ||||
| 
 | ||||
| @ -159,9 +159,9 @@ class ConnectionToken(ExpiringModel): | ||||
|             default_settings["port"] = str(port) | ||||
|         else: | ||||
|             default_settings["hostname"] = self.endpoint.host | ||||
|         if self.endpoint.protocol == Protocols.RDP: | ||||
|             default_settings["resize-method"] = "display-update" | ||||
|         default_settings["client-name"] = f"authentik - {self.session.user}" | ||||
|         default_settings["client-name"] = "authentik" | ||||
|         # default_settings["enable-drive"] = "true" | ||||
|         # default_settings["drive-name"] = "authentik" | ||||
|         settings = {} | ||||
|         always_merger.merge(settings, default_settings) | ||||
|         always_merger.merge(settings, self.endpoint.provider.settings) | ||||
| @ -211,4 +211,3 @@ class ConnectionToken(ExpiringModel): | ||||
|     class Meta: | ||||
|         verbose_name = _("RAC Connection token") | ||||
|         verbose_name_plural = _("RAC Connection tokens") | ||||
|         indexes = ExpiringModel.Meta.indexes | ||||
| @ -4,17 +4,18 @@ from asgiref.sync import async_to_sync | ||||
| from channels.layers import get_channel_layer | ||||
| from django.contrib.auth.signals import user_logged_out | ||||
| from django.core.cache import cache | ||||
| from django.db.models.signals import post_delete, post_save, pre_delete | ||||
| from django.db.models import Model | ||||
| from django.db.models.signals import post_save, pre_delete | ||||
| from django.dispatch import receiver | ||||
| from django.http import HttpRequest | ||||
| 
 | ||||
| from authentik.core.models import User | ||||
| from authentik.providers.rac.api.endpoints import user_endpoint_cache_key | ||||
| from authentik.providers.rac.consumer_client import ( | ||||
| from authentik.enterprise.providers.rac.api.endpoints import user_endpoint_cache_key | ||||
| from authentik.enterprise.providers.rac.consumer_client import ( | ||||
|     RAC_CLIENT_GROUP_SESSION, | ||||
|     RAC_CLIENT_GROUP_TOKEN, | ||||
| ) | ||||
| from authentik.providers.rac.models import ConnectionToken, Endpoint | ||||
| from authentik.enterprise.providers.rac.models import ConnectionToken, Endpoint | ||||
| 
 | ||||
| 
 | ||||
| @receiver(user_logged_out) | ||||
| @ -45,8 +46,12 @@ def pre_delete_connection_token_disconnect(sender, instance: ConnectionToken, ** | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| @receiver([post_save, post_delete], sender=Endpoint) | ||||
| def post_save_post_delete_endpoint(**_): | ||||
|     """Clear user's endpoint cache upon endpoint creation or deletion""" | ||||
| @receiver(post_save, sender=Endpoint) | ||||
| def post_save_endpoint(sender: type[Model], instance, created: bool, **_): | ||||
|     """Clear user's endpoint cache upon endpoint creation""" | ||||
|     if not created:  # pragma: no cover | ||||
|         return | ||||
| 
 | ||||
|     # Delete user endpoint cache | ||||
|     keys = cache.keys(user_endpoint_cache_key("*")) | ||||
|     cache.delete_many(keys) | ||||
| @ -3,7 +3,7 @@ | ||||
| {% load authentik_core %} | ||||
| 
 | ||||
| {% block head %} | ||||
| <script src="{% versioned_script 'dist/rac/index-%v.js' %}" type="module"></script> | ||||
| <script src="{% versioned_script 'dist/enterprise/rac/index-%v.js' %}" type="module"></script> | ||||
| <meta name="theme-color" content="#18191a" media="(prefers-color-scheme: dark)"> | ||||
| <meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)"> | ||||
| <link rel="icon" href="{{ tenant.branding_favicon_url }}"> | ||||
| @ -1,9 +1,16 @@ | ||||
| """Test RAC Provider""" | ||||
| 
 | ||||
| from datetime import timedelta | ||||
| from time import mktime | ||||
| from unittest.mock import MagicMock, patch | ||||
| 
 | ||||
| from django.urls import reverse | ||||
| from django.utils.timezone import now | ||||
| from rest_framework.test import APITestCase | ||||
| 
 | ||||
| from authentik.core.tests.utils import create_test_admin_user, create_test_flow | ||||
| from authentik.enterprise.license import LicenseKey | ||||
| from authentik.enterprise.models import License | ||||
| from authentik.lib.generators import generate_id | ||||
| 
 | ||||
| 
 | ||||
| @ -13,8 +20,21 @@ class TestAPI(APITestCase): | ||||
|     def setUp(self) -> None: | ||||
|         self.user = create_test_admin_user() | ||||
| 
 | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.validate", | ||||
|         MagicMock( | ||||
|             return_value=LicenseKey( | ||||
|                 aud="", | ||||
|                 exp=int(mktime((now() + timedelta(days=3000)).timetuple())), | ||||
|                 name=generate_id(), | ||||
|                 internal_users=100, | ||||
|                 external_users=100, | ||||
|             ) | ||||
|         ), | ||||
|     ) | ||||
|     def test_create(self): | ||||
|         """Test creation of RAC Provider""" | ||||
|         License.objects.create(key=generate_id()) | ||||
|         self.client.force_login(self.user) | ||||
|         response = self.client.post( | ||||
|             reverse("authentik_api:racprovider-list"), | ||||
| @ -5,10 +5,10 @@ from rest_framework.test import APITestCase | ||||
| 
 | ||||
| from authentik.core.models import Application | ||||
| from authentik.core.tests.utils import create_test_admin_user | ||||
| from authentik.enterprise.providers.rac.models import Endpoint, Protocols, RACProvider | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.policies.dummy.models import DummyPolicy | ||||
| from authentik.policies.models import PolicyBinding | ||||
| from authentik.providers.rac.models import Endpoint, Protocols, RACProvider | ||||
| 
 | ||||
| 
 | ||||
| class TestEndpointsAPI(APITestCase): | ||||
| @ -4,14 +4,14 @@ from django.test import TransactionTestCase | ||||
| 
 | ||||
| from authentik.core.models import Application, AuthenticatedSession | ||||
| from authentik.core.tests.utils import create_test_admin_user | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.providers.rac.models import ( | ||||
| from authentik.enterprise.providers.rac.models import ( | ||||
|     ConnectionToken, | ||||
|     Endpoint, | ||||
|     Protocols, | ||||
|     RACPropertyMapping, | ||||
|     RACProvider, | ||||
| ) | ||||
| from authentik.lib.generators import generate_id | ||||
| 
 | ||||
| 
 | ||||
| class TestModels(TransactionTestCase): | ||||
| @ -50,10 +50,9 @@ class TestModels(TransactionTestCase): | ||||
|             { | ||||
|                 "hostname": self.endpoint.host.split(":")[0], | ||||
|                 "port": "1324", | ||||
|                 "client-name": f"authentik - {self.user}", | ||||
|                 "client-name": "authentik", | ||||
|                 "drive-path": path, | ||||
|                 "create-drive-path": "true", | ||||
|                 "resize-method": "display-update", | ||||
|             }, | ||||
|         ) | ||||
|         # Set settings in provider | ||||
| @ -64,11 +63,10 @@ class TestModels(TransactionTestCase): | ||||
|             { | ||||
|                 "hostname": self.endpoint.host.split(":")[0], | ||||
|                 "port": "1324", | ||||
|                 "client-name": f"authentik - {self.user}", | ||||
|                 "client-name": "authentik", | ||||
|                 "drive-path": path, | ||||
|                 "create-drive-path": "true", | ||||
|                 "level": "provider", | ||||
|                 "resize-method": "display-update", | ||||
|             }, | ||||
|         ) | ||||
|         # Set settings in endpoint | ||||
| @ -81,11 +79,10 @@ class TestModels(TransactionTestCase): | ||||
|             { | ||||
|                 "hostname": self.endpoint.host.split(":")[0], | ||||
|                 "port": "1324", | ||||
|                 "client-name": f"authentik - {self.user}", | ||||
|                 "client-name": "authentik", | ||||
|                 "drive-path": path, | ||||
|                 "create-drive-path": "true", | ||||
|                 "level": "endpoint", | ||||
|                 "resize-method": "display-update", | ||||
|             }, | ||||
|         ) | ||||
|         # Set settings in token | ||||
| @ -98,11 +95,10 @@ class TestModels(TransactionTestCase): | ||||
|             { | ||||
|                 "hostname": self.endpoint.host.split(":")[0], | ||||
|                 "port": "1324", | ||||
|                 "client-name": f"authentik - {self.user}", | ||||
|                 "client-name": "authentik", | ||||
|                 "drive-path": path, | ||||
|                 "create-drive-path": "true", | ||||
|                 "level": "token", | ||||
|                 "resize-method": "display-update", | ||||
|             }, | ||||
|         ) | ||||
|         # Set settings in property mapping (provider) | ||||
| @ -118,11 +114,10 @@ class TestModels(TransactionTestCase): | ||||
|             { | ||||
|                 "hostname": self.endpoint.host.split(":")[0], | ||||
|                 "port": "1324", | ||||
|                 "client-name": f"authentik - {self.user}", | ||||
|                 "client-name": "authentik", | ||||
|                 "drive-path": path, | ||||
|                 "create-drive-path": "true", | ||||
|                 "level": "property_mapping_provider", | ||||
|                 "resize-method": "display-update", | ||||
|             }, | ||||
|         ) | ||||
|         # Set settings in property mapping (endpoint) | ||||
| @ -140,12 +135,11 @@ class TestModels(TransactionTestCase): | ||||
|             { | ||||
|                 "hostname": self.endpoint.host.split(":")[0], | ||||
|                 "port": "1324", | ||||
|                 "client-name": f"authentik - {self.user}", | ||||
|                 "client-name": "authentik", | ||||
|                 "drive-path": path, | ||||
|                 "create-drive-path": "true", | ||||
|                 "level": "property_mapping_endpoint", | ||||
|                 "foo": "true", | ||||
|                 "bar": "6", | ||||
|                 "resize-method": "display-update", | ||||
|             }, | ||||
|         ) | ||||
| @ -1,17 +1,23 @@ | ||||
| """RAC Views tests""" | ||||
| 
 | ||||
| from datetime import timedelta | ||||
| from json import loads | ||||
| from time import mktime | ||||
| from unittest.mock import MagicMock, patch | ||||
| 
 | ||||
| from django.urls import reverse | ||||
| from django.utils.timezone import now | ||||
| from rest_framework.test import APITestCase | ||||
| 
 | ||||
| from authentik.core.models import Application | ||||
| from authentik.core.tests.utils import create_test_admin_user, create_test_flow | ||||
| from authentik.enterprise.license import LicenseKey | ||||
| from authentik.enterprise.models import License | ||||
| from authentik.enterprise.providers.rac.models import Endpoint, Protocols, RACProvider | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.policies.denied import AccessDeniedResponse | ||||
| from authentik.policies.dummy.models import DummyPolicy | ||||
| from authentik.policies.models import PolicyBinding | ||||
| from authentik.providers.rac.models import Endpoint, Protocols, RACProvider | ||||
| 
 | ||||
| 
 | ||||
| class TestRACViews(APITestCase): | ||||
| @ -33,8 +39,21 @@ class TestRACViews(APITestCase): | ||||
|             provider=self.provider, | ||||
|         ) | ||||
| 
 | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.validate", | ||||
|         MagicMock( | ||||
|             return_value=LicenseKey( | ||||
|                 aud="", | ||||
|                 exp=int(mktime((now() + timedelta(days=3000)).timetuple())), | ||||
|                 name=generate_id(), | ||||
|                 internal_users=100, | ||||
|                 external_users=100, | ||||
|             ) | ||||
|         ), | ||||
|     ) | ||||
|     def test_no_policy(self): | ||||
|         """Test request""" | ||||
|         License.objects.create(key=generate_id()) | ||||
|         self.client.force_login(self.user) | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
| @ -51,6 +70,18 @@ class TestRACViews(APITestCase): | ||||
|         final_response = self.client.get(next_url) | ||||
|         self.assertEqual(final_response.status_code, 200) | ||||
| 
 | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.validate", | ||||
|         MagicMock( | ||||
|             return_value=LicenseKey( | ||||
|                 aud="", | ||||
|                 exp=int(mktime((now() + timedelta(days=3000)).timetuple())), | ||||
|                 name=generate_id(), | ||||
|                 internal_users=100, | ||||
|                 external_users=100, | ||||
|             ) | ||||
|         ), | ||||
|     ) | ||||
|     def test_app_deny(self): | ||||
|         """Test request (deny on app level)""" | ||||
|         PolicyBinding.objects.create( | ||||
| @ -58,6 +89,7 @@ class TestRACViews(APITestCase): | ||||
|             policy=DummyPolicy.objects.create(name="deny", result=False, wait_min=1, wait_max=2), | ||||
|             order=0, | ||||
|         ) | ||||
|         License.objects.create(key=generate_id()) | ||||
|         self.client.force_login(self.user) | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
| @ -67,6 +99,18 @@ class TestRACViews(APITestCase): | ||||
|         ) | ||||
|         self.assertIsInstance(response, AccessDeniedResponse) | ||||
| 
 | ||||
|     @patch( | ||||
|         "authentik.enterprise.license.LicenseKey.validate", | ||||
|         MagicMock( | ||||
|             return_value=LicenseKey( | ||||
|                 aud="", | ||||
|                 exp=int(mktime((now() + timedelta(days=3000)).timetuple())), | ||||
|                 name=generate_id(), | ||||
|                 internal_users=100, | ||||
|                 external_users=100, | ||||
|             ) | ||||
|         ), | ||||
|     ) | ||||
|     def test_endpoint_deny(self): | ||||
|         """Test request (deny on endpoint level)""" | ||||
|         PolicyBinding.objects.create( | ||||
| @ -74,6 +118,7 @@ class TestRACViews(APITestCase): | ||||
|             policy=DummyPolicy.objects.create(name="deny", result=False, wait_min=1, wait_max=2), | ||||
|             order=0, | ||||
|         ) | ||||
|         License.objects.create(key=generate_id()) | ||||
|         self.client.force_login(self.user) | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
| @ -4,14 +4,14 @@ from channels.auth import AuthMiddleware | ||||
| from channels.sessions import CookieMiddleware | ||||
| from django.urls import path | ||||
| 
 | ||||
| from authentik.enterprise.providers.rac.api.connection_tokens import ConnectionTokenViewSet | ||||
| from authentik.enterprise.providers.rac.api.endpoints import EndpointViewSet | ||||
| from authentik.enterprise.providers.rac.api.property_mappings import RACPropertyMappingViewSet | ||||
| from authentik.enterprise.providers.rac.api.providers import RACProviderViewSet | ||||
| from authentik.enterprise.providers.rac.consumer_client import RACClientConsumer | ||||
| from authentik.enterprise.providers.rac.consumer_outpost import RACOutpostConsumer | ||||
| from authentik.enterprise.providers.rac.views import RACInterface, RACStartView | ||||
| from authentik.outposts.channels import TokenOutpostMiddleware | ||||
| from authentik.providers.rac.api.connection_tokens import ConnectionTokenViewSet | ||||
| from authentik.providers.rac.api.endpoints import EndpointViewSet | ||||
| from authentik.providers.rac.api.property_mappings import RACPropertyMappingViewSet | ||||
| from authentik.providers.rac.api.providers import RACProviderViewSet | ||||
| from authentik.providers.rac.consumer_client import RACClientConsumer | ||||
| from authentik.providers.rac.consumer_outpost import RACOutpostConsumer | ||||
| from authentik.providers.rac.views import RACInterface, RACStartView | ||||
| from authentik.root.asgi_middleware import SessionMiddleware | ||||
| from authentik.root.middleware import ChannelsLoggingMiddleware | ||||
| 
 | ||||
| @ -10,6 +10,8 @@ from django.utils.translation import gettext as _ | ||||
| 
 | ||||
| from authentik.core.models import Application, AuthenticatedSession | ||||
| from authentik.core.views.interface import InterfaceView | ||||
| from authentik.enterprise.policy import EnterprisePolicyAccessView | ||||
| from authentik.enterprise.providers.rac.models import ConnectionToken, Endpoint, RACProvider | ||||
| from authentik.events.models import Event, EventAction | ||||
| from authentik.flows.challenge import RedirectChallenge | ||||
| from authentik.flows.exceptions import FlowNonApplicableException | ||||
| @ -18,11 +20,9 @@ from authentik.flows.planner import PLAN_CONTEXT_APPLICATION, FlowPlanner | ||||
| from authentik.flows.stage import RedirectStage | ||||
| from authentik.lib.utils.time import timedelta_from_string | ||||
| from authentik.policies.engine import PolicyEngine | ||||
| from authentik.policies.views import PolicyAccessView | ||||
| from authentik.providers.rac.models import ConnectionToken, Endpoint, RACProvider | ||||
| 
 | ||||
| 
 | ||||
| class RACStartView(PolicyAccessView): | ||||
| class RACStartView(EnterprisePolicyAccessView): | ||||
|     """Start a RAC connection by checking access and creating a connection token""" | ||||
| 
 | ||||
|     endpoint: Endpoint | ||||
| @ -46,7 +46,7 @@ class RACStartView(PolicyAccessView): | ||||
|             ) | ||||
|         except FlowNonApplicableException: | ||||
|             raise Http404 from None | ||||
|         plan.append_stage( | ||||
|         plan.insert_stage( | ||||
|             in_memory_stage( | ||||
|                 RACFinalStage, | ||||
|                 application=self.application, | ||||
| @ -1,64 +0,0 @@ | ||||
| """SSF Provider API Views""" | ||||
|  | ||||
| from django.urls import reverse | ||||
| from rest_framework.fields import SerializerMethodField | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
|  | ||||
| from authentik.core.api.providers import ProviderSerializer | ||||
| from authentik.core.api.tokens import TokenSerializer | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.enterprise.api import EnterpriseRequiredMixin | ||||
| from authentik.enterprise.providers.ssf.models import SSFProvider | ||||
|  | ||||
|  | ||||
class SSFProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
    """Serializer for SSFProvider, exposing the provider's token and the
    derived SSF configuration URL."""

    ssf_url = SerializerMethodField()
    token_obj = TokenSerializer(source="token", required=False, read_only=True)

    def get_ssf_url(self, instance: SSFProvider) -> str | None:
        """Return the absolute SSF configuration URL for *instance*, or None
        when no request is in the serializer context or the provider has no
        backchannel application."""
        request: Request = self._context.get("request")
        application = instance.backchannel_application
        if not request or not application:
            return None
        config_path = reverse(
            "authentik_providers_ssf:configuration",
            kwargs={
                "application_slug": application.slug,
            },
        )
        return request.build_absolute_uri(config_path)

    class Meta:
        model = SSFProvider
        fields = [
            "pk",
            "name",
            "component",
            "verbose_name",
            "verbose_name_plural",
            "meta_model_name",
            "signing_key",
            "token_obj",
            "oidc_auth_providers",
            "ssf_url",
            "event_retention",
        ]
        extra_kwargs = {}
|  | ||||
|  | ||||
class SSFProviderViewSet(UsedByMixin, ModelViewSet):
    """SSFProvider Viewset"""

    queryset = SSFProvider.objects.all()
    serializer_class = SSFProviderSerializer
    # Filter by whether a backchannel application is attached, and by
    # case-insensitive exact name match.
    filterset_fields = {
        "application": ["isnull"],
        "name": ["iexact"],
    }
    search_fields = ["name"]
    ordering = ["name"]
| @ -1,37 +0,0 @@ | ||||
| """SSF Stream API Views""" | ||||
|  | ||||
| from rest_framework.viewsets import ReadOnlyModelViewSet | ||||
|  | ||||
| from authentik.core.api.utils import ModelSerializer | ||||
| from authentik.enterprise.providers.ssf.api.providers import SSFProviderSerializer | ||||
| from authentik.enterprise.providers.ssf.models import Stream | ||||
|  | ||||
|  | ||||
class SSFStreamSerializer(ModelSerializer):
    """SSFStream Serializer.

    Read-only embeds the owning provider via `provider_obj` alongside the
    raw `provider` foreign key."""

    provider_obj = SSFProviderSerializer(source="provider", read_only=True)

    class Meta:
        model = Stream
        fields = [
            "pk",
            "provider",
            "provider_obj",
            "delivery_method",
            "endpoint_url",
            "events_requested",
            "format",
            "aud",
            "iss",
        ]
|  | ||||
|  | ||||
class SSFStreamViewSet(ReadOnlyModelViewSet):
    """SSFStream Viewset (read-only; streams are created through the SSF
    protocol endpoints, not this API)."""

    queryset = Stream.objects.all()
    serializer_class = SSFStreamSerializer
    filterset_fields = ["provider", "endpoint_url", "delivery_method"]
    search_fields = ["provider__name", "endpoint_url"]
    ordering = ["provider", "uuid"]
| @ -1,13 +0,0 @@ | ||||
| """SSF app config""" | ||||
|  | ||||
| from authentik.enterprise.apps import EnterpriseConfig | ||||
|  | ||||
|  | ||||
class AuthentikEnterpriseProviderSSF(EnterpriseConfig):
    """authentik enterprise ssf app config"""

    name = "authentik.enterprise.providers.ssf"
    label = "authentik_providers_ssf"
    verbose_name = "authentik Enterprise.Providers.SSF"
    default = True
    # Mounted at the URL root (no path prefix).
    mountpoint = ""
| @ -1,201 +0,0 @@ | ||||
| # Generated by Django 5.0.11 on 2025-02-05 16:20 | ||||
|  | ||||
| import authentik.lib.utils.time | ||||
| import django.contrib.postgres.fields | ||||
| import django.db.models.deletion | ||||
| import uuid | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
class Migration(migrations.Migration):
    """Initial schema for the SSF provider app: creates the SSFProvider,
    Stream and StreamEvent tables.

    Auto-generated by Django; do not hand-edit field definitions, as they
    must stay in sync with the models module."""

    initial = True

    dependencies = [
        ("authentik_core", "0042_authenticatedsession_authentik_c_expires_08251d_idx_and_more"),
        ("authentik_crypto", "0004_alter_certificatekeypair_name"),
        ("authentik_providers_oauth2", "0027_accesstoken_authentik_p_expires_9f24a5_idx_and_more"),
    ]

    operations = [
        # SSFProvider: multi-table-inheritance child of authentik_core.Provider.
        migrations.CreateModel(
            name="SSFProvider",
            fields=[
                (
                    "provider_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="authentik_core.provider",
                    ),
                ),
                (
                    "event_retention",
                    models.TextField(
                        default="days=30",
                        validators=[authentik.lib.utils.time.timedelta_string_validator],
                    ),
                ),
                (
                    "oidc_auth_providers",
                    models.ManyToManyField(
                        blank=True, default=None, to="authentik_providers_oauth2.oauth2provider"
                    ),
                ),
                (
                    "signing_key",
                    models.ForeignKey(
                        help_text="Key used to sign the SSF Events.",
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_crypto.certificatekeypair",
                        verbose_name="Signing Key",
                    ),
                ),
                (
                    "token",
                    models.ForeignKey(
                        default=None,
                        null=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_core.token",
                    ),
                ),
            ],
            options={
                "verbose_name": "Shared Signals Framework Provider",
                "verbose_name_plural": "Shared Signals Framework Providers",
                "permissions": [("add_stream", "Add stream to SSF provider")],
            },
            bases=("authentik_core.provider",),
        ),
        # Stream: a receiver's subscription to events from one SSFProvider.
        migrations.CreateModel(
            name="Stream",
            fields=[
                (
                    "uuid",
                    models.UUIDField(
                        default=uuid.uuid4, editable=False, primary_key=True, serialize=False
                    ),
                ),
                (
                    "delivery_method",
                    models.TextField(
                        choices=[
                            (
                                "https://schemas.openid.net/secevent/risc/delivery-method/push",
                                "Risc Push",
                            ),
                            (
                                "https://schemas.openid.net/secevent/risc/delivery-method/poll",
                                "Risc Poll",
                            ),
                        ]
                    ),
                ),
                ("endpoint_url", models.TextField(null=True)),
                (
                    "events_requested",
                    django.contrib.postgres.fields.ArrayField(
                        base_field=models.TextField(
                            choices=[
                                (
                                    "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                                    "Caep Session Revoked",
                                ),
                                (
                                    "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                                    "Caep Credential Change",
                                ),
                                (
                                    "https://schemas.openid.net/secevent/ssf/event-type/verification",
                                    "Set Verification",
                                ),
                            ]
                        ),
                        default=list,
                        size=None,
                    ),
                ),
                ("format", models.TextField()),
                (
                    "aud",
                    django.contrib.postgres.fields.ArrayField(
                        base_field=models.TextField(), default=list, size=None
                    ),
                ),
                ("iss", models.TextField()),
                (
                    "provider",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_providers_ssf.ssfprovider",
                    ),
                ),
            ],
            options={
                "verbose_name": "SSF Stream",
                "verbose_name_plural": "SSF Streams",
                "default_permissions": ["change", "delete", "view"],
            },
        ),
        # StreamEvent: a single queued event to be delivered on a Stream.
        migrations.CreateModel(
            name="StreamEvent",
            fields=[
                ("created", models.DateTimeField(auto_now_add=True)),
                ("last_updated", models.DateTimeField(auto_now=True)),
                ("expires", models.DateTimeField(default=None, null=True)),
                ("expiring", models.BooleanField(default=True)),
                (
                    "uuid",
                    models.UUIDField(
                        default=uuid.uuid4, editable=False, primary_key=True, serialize=False
                    ),
                ),
                (
                    "status",
                    models.TextField(
                        choices=[
                            ("pending_new", "Pending New"),
                            ("pending_failed", "Pending Failed"),
                            ("sent", "Sent"),
                        ]
                    ),
                ),
                (
                    "type",
                    models.TextField(
                        choices=[
                            (
                                "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                                "Caep Session Revoked",
                            ),
                            (
                                "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                                "Caep Credential Change",
                            ),
                            (
                                "https://schemas.openid.net/secevent/ssf/event-type/verification",
                                "Set Verification",
                            ),
                        ]
                    ),
                ),
                ("payload", models.JSONField(default=dict)),
                (
                    "stream",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_providers_ssf.stream",
                    ),
                ),
            ],
            options={
                "verbose_name": "SSF Stream Event",
                "verbose_name_plural": "SSF Stream Events",
                "ordering": ("-created",),
            },
        ),
    ]
| @ -1,178 +0,0 @@ | ||||
| from datetime import datetime | ||||
| from functools import cached_property | ||||
| from uuid import uuid4 | ||||
|  | ||||
| from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey | ||||
| from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey | ||||
| from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes | ||||
| from django.contrib.postgres.fields import ArrayField | ||||
| from django.db import models | ||||
| from django.templatetags.static import static | ||||
| from django.utils.timezone import now | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from jwt import encode | ||||
|  | ||||
| from authentik.core.models import BackchannelProvider, ExpiringModel, Token | ||||
| from authentik.crypto.models import CertificateKeyPair | ||||
| from authentik.lib.models import CreatedUpdatedModel | ||||
| from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator | ||||
| from authentik.providers.oauth2.models import JWTAlgorithms, OAuth2Provider | ||||
|  | ||||
|  | ||||
class EventTypes(models.TextChoices):
    """SSF Event types supported by authentik.

    Values are the spec-defined event-type URIs used as keys in the
    Security Event Token `events` claim."""

    CAEP_SESSION_REVOKED = "https://schemas.openid.net/secevent/caep/event-type/session-revoked"
    CAEP_CREDENTIAL_CHANGE = "https://schemas.openid.net/secevent/caep/event-type/credential-change"
    SET_VERIFICATION = "https://schemas.openid.net/secevent/ssf/event-type/verification"
|  | ||||
|  | ||||
class DeliveryMethods(models.TextChoices):
    """SSF Delivery methods (spec-defined URIs: push to a receiver endpoint,
    or poll by the receiver)."""

    RISC_PUSH = "https://schemas.openid.net/secevent/risc/delivery-method/push"
    RISC_POLL = "https://schemas.openid.net/secevent/risc/delivery-method/poll"
|  | ||||
|  | ||||
class SSFEventStatus(models.TextChoices):
    """SSF Event status"""

    PENDING_NEW = "pending_new"  # not yet delivered
    PENDING_FAILED = "pending_failed"  # delivery attempted and failed
    SENT = "sent"  # delivered; eligible for expiry cleanup (see StreamEvent)
|  | ||||
|  | ||||
class SSFProvider(BackchannelProvider):
    """Shared Signals Framework provider to allow applications to
    receive user events from authentik."""

    # Certificate/keypair used to sign outgoing Security Event Tokens.
    signing_key = models.ForeignKey(
        CertificateKeyPair,
        verbose_name=_("Signing Key"),
        on_delete=models.CASCADE,
        help_text=_("Key used to sign the SSF Events."),
    )

    # OAuth2 providers whose JWTs may authenticate against this provider's
    # endpoints -- TODO confirm (consumers not visible here).
    oidc_auth_providers = models.ManyToManyField(OAuth2Provider, blank=True, default=None)

    # API token of the provider's service account; nullable because it is
    # populated after creation by the ssf_providers_post_save signal.
    token = models.ForeignKey(Token, on_delete=models.CASCADE, null=True, default=None)

    # How long StreamEvents are retained, as a timedelta string (e.g. "days=30").
    event_retention = models.TextField(
        default="days=30",
        validators=[timedelta_string_validator],
    )

    @cached_property
    def jwt_key(self) -> tuple[PrivateKeyTypes, str]:
        """Get either the configured certificate or the client secret.

        Returns the private key plus the matching JWT algorithm (RS256 for
        RSA keys, ES256 for EC keys); raises ValueError for any other key type.
        """
        key: CertificateKeyPair = self.signing_key
        private_key = key.private_key
        if isinstance(private_key, RSAPrivateKey):
            return private_key, JWTAlgorithms.RS256
        if isinstance(private_key, EllipticCurvePrivateKey):
            return private_key, JWTAlgorithms.ES256
        raise ValueError(f"Invalid private key type: {type(private_key)}")

    @property
    def service_account_identifier(self) -> str:
        """Username/token identifier of the per-provider service account."""
        return f"ak-providers-ssf-{self.pk}"

    @property
    def serializer(self):
        # Imported lazily to avoid a circular import with the API module.
        from authentik.enterprise.providers.ssf.api.providers import SSFProviderSerializer

        return SSFProviderSerializer

    @property
    def icon_url(self) -> str | None:
        return static("authentik/sources/ssf.svg")

    @property
    def component(self) -> str:
        return "ak-provider-ssf-form"

    class Meta:
        verbose_name = _("Shared Signals Framework Provider")
        verbose_name_plural = _("Shared Signals Framework Providers")
        permissions = [
            # This overrides the default "add_stream" permission of the Stream object,
            # as the user requesting to add a stream must have the permission on the provider
            ("add_stream", _("Add stream to SSF provider")),
        ]
|  | ||||
|  | ||||
class Stream(models.Model):
    """SSF Stream: a receiver's subscription to a subset of event types
    from one provider."""

    uuid = models.UUIDField(default=uuid4, primary_key=True, editable=False)
    provider = models.ForeignKey(SSFProvider, on_delete=models.CASCADE)

    delivery_method = models.TextField(choices=DeliveryMethods.choices)
    # Receiver endpoint; nullable (only meaningful for push delivery -- TODO confirm).
    endpoint_url = models.TextField(null=True)

    events_requested = ArrayField(models.TextField(choices=EventTypes.choices), default=list)
    format = models.TextField()
    # Audience(s) placed in the SET "aud" claim.
    aud = ArrayField(models.TextField(), default=list)

    # Issuer placed in the SET "iss" claim.
    iss = models.TextField()

    class Meta:
        verbose_name = _("SSF Stream")
        verbose_name_plural = _("SSF Streams")
        default_permissions = ["change", "delete", "view"]

    def __str__(self) -> str:
        return "SSF Stream"

    def prepare_event_payload(self, type: EventTypes, event_data: dict, **kwargs) -> dict:
        """Build the field values for a new StreamEvent of *type*.

        The returned dict mirrors StreamEvent fields (uuid, status, expires,
        payload, ...); `payload` is the SET claim set, with *event_data*
        nested under the event-type URI and *kwargs* merged into the claims.
        Expiry is derived from the provider's event_retention.
        NOTE: `type` shadows the builtin; kept for signature compatibility.
        """
        jti = uuid4()
        _now = now()
        return {
            "uuid": jti,
            "stream_id": str(self.pk),
            "type": type,
            "expiring": True,
            "status": SSFEventStatus.PENDING_NEW,
            "expires": _now + timedelta_from_string(self.provider.event_retention),
            "payload": {
                "jti": jti.hex,
                "aud": self.aud,
                "iat": int(datetime.now().timestamp()),
                "iss": self.iss,
                "events": {type: event_data},
                **kwargs,
            },
        }

    def encode(self, data: dict) -> str:
        """Encode *data* as a JWT signed with the provider's key, setting the
        `kid` header when a signing key is configured."""
        headers = {}
        if self.provider.signing_key:
            headers["kid"] = self.provider.signing_key.kid
        key, alg = self.provider.jwt_key
        return encode(data, key, algorithm=alg, headers=headers)
|  | ||||
|  | ||||
class StreamEvent(CreatedUpdatedModel, ExpiringModel):
    """Single stream event to be sent"""

    uuid = models.UUIDField(default=uuid4, primary_key=True, editable=False)

    stream = models.ForeignKey(Stream, on_delete=models.CASCADE)
    status = models.TextField(choices=SSFEventStatus.choices)

    type = models.TextField(choices=EventTypes.choices)
    # Full Security Event Token claim set, as built by Stream.prepare_event_payload.
    payload = models.JSONField(default=dict)

    def expire_action(self, *args, **kwargs):
        """Only allow automatic cleanup of successfully sent event"""
        # Pending/failed events are kept so delivery can still be retried.
        if self.status != SSFEventStatus.SENT:
            return
        return super().expire_action(*args, **kwargs)

    def __str__(self):
        return f"Stream event {self.type}"

    class Meta:
        verbose_name = _("SSF Stream Event")
        verbose_name_plural = _("SSF Stream Events")
        ordering = ("-created",)
| @ -1,193 +0,0 @@ | ||||
| from hashlib import sha256 | ||||
|  | ||||
| from django.contrib.auth.signals import user_logged_out | ||||
| from django.db.models import Model | ||||
| from django.db.models.signals import post_delete, post_save, pre_delete | ||||
| from django.dispatch import receiver | ||||
| from django.http.request import HttpRequest | ||||
| from guardian.shortcuts import assign_perm | ||||
|  | ||||
| from authentik.core.models import ( | ||||
|     USER_PATH_SYSTEM_PREFIX, | ||||
|     AuthenticatedSession, | ||||
|     Token, | ||||
|     TokenIntents, | ||||
|     User, | ||||
|     UserTypes, | ||||
| ) | ||||
| from authentik.core.signals import password_changed | ||||
| from authentik.enterprise.providers.ssf.models import ( | ||||
|     EventTypes, | ||||
|     SSFProvider, | ||||
| ) | ||||
| from authentik.enterprise.providers.ssf.tasks import send_ssf_event | ||||
| from authentik.events.middleware import audit_ignore | ||||
| from authentik.stages.authenticator.models import Device | ||||
| from authentik.stages.authenticator_duo.models import DuoDevice | ||||
| from authentik.stages.authenticator_static.models import StaticDevice | ||||
| from authentik.stages.authenticator_totp.models import TOTPDevice | ||||
| from authentik.stages.authenticator_webauthn.models import ( | ||||
|     UNKNOWN_DEVICE_TYPE_AAGUID, | ||||
|     WebAuthnDevice, | ||||
| ) | ||||
|  | ||||
# User-directory path under which per-provider SSF service accounts are created.
USER_PATH_PROVIDERS_SSF = USER_PATH_SYSTEM_PREFIX + "/providers/ssf"
|  | ||||
|  | ||||
@receiver(post_save, sender=SSFProvider)
def ssf_providers_post_save(sender: type[Model], instance: SSFProvider, created: bool, **_):
    """Ensure the provider's service account and API token exist after save.

    Runs on every save of an SSFProvider; update_or_create makes it
    idempotent. When the provider or token was just created, the token is
    attached to the provider and re-saved (the re-entrant signal run is a
    no-op since neither `created` nor `token_created` will be True then).
    """
    identifier = instance.service_account_identifier
    user, _ = User.objects.update_or_create(
        username=identifier,
        defaults={
            "name": f"SSF Provider {instance.name} Service-Account",
            "type": UserTypes.INTERNAL_SERVICE_ACCOUNT,
            "path": USER_PATH_PROVIDERS_SSF,
        },
    )
    # Object-level permission: the service account may only add streams
    # to this specific provider.
    assign_perm("add_stream", user, instance)
    token, token_created = Token.objects.update_or_create(
        identifier=identifier,
        defaults={
            "user": user,
            "intent": TokenIntents.INTENT_API,
            "expiring": False,
            "managed": f"goauthentik.io/providers/ssf/{instance.pk}",
        },
    )
    if created or token_created:
        # audit_ignore: this bookkeeping save should not emit an audit event.
        with audit_ignore():
            instance.token = token
            instance.save()
|  | ||||
|  | ||||
@receiver(user_logged_out)
def ssf_user_logged_out_session_revoked(sender, request: HttpRequest, user: User, **_):
    """Session revoked trigger (user logged out).

    Emits a CAEP session-revoked event identifying the session by the
    SHA-256 hash of its key and the user by email."""
    session_key = request.session.session_key if request.session else None
    if not session_key or not user:
        return
    subject = {
        "format": "complex",
        "session": {
            "format": "opaque",
            "id": sha256(session_key.encode("ascii")).hexdigest(),
        },
        "user": {
            "format": "email",
            "email": user.email,
        },
    }
    send_ssf_event(
        EventTypes.CAEP_SESSION_REVOKED,
        {"initiating_entity": "user"},
        sub_id=subject,
        request=request,
    )
|  | ||||
|  | ||||
@receiver(pre_delete, sender=AuthenticatedSession)
def ssf_user_session_delete_session_revoked(sender, instance: AuthenticatedSession, **_):
    """Session revoked trigger (users' session has been deleted)

    As this signal is also triggered with a regular logout, we can't be sure
    if the session has been deleted by an admin or by the user themselves."""
    send_ssf_event(
        EventTypes.CAEP_SESSION_REVOKED,
        {
            # Reported as "user" even for admin deletions (see docstring).
            "initiating_entity": "user",
        },
        sub_id={
            "format": "complex",
            "session": {
                "format": "opaque",
                # Session identified by the SHA-256 hash of its key, never the raw key.
                "id": sha256(instance.session_key.encode("ascii")).hexdigest(),
            },
            "user": {
                "format": "email",
                "email": instance.user.email,
            },
        },
    )
|  | ||||
|  | ||||
@receiver(password_changed)
def ssf_password_changed_cred_change(sender, user: User, password: str | None, **_):
    """Credential change trigger (password changed).

    A None password is reported as a revocation, any other value as an update."""
    change_type = "revoke" if password is None else "update"
    send_ssf_event(
        EventTypes.CAEP_CREDENTIAL_CHANGE,
        {
            "credential_type": "password",
            "change_type": change_type,
        },
        sub_id={
            "format": "complex",
            "user": {
                "format": "email",
                "email": user.email,
            },
        },
    )
|  | ||||
|  | ||||
# Maps authenticator device classes to the credential_type reported in
# CAEP credential-change events. Classes not listed here yield None.
device_type_map = {
    StaticDevice: "pin",
    TOTPDevice: "pin",
    WebAuthnDevice: "fido-u2f",
    DuoDevice: "app",
}
|  | ||||
|  | ||||
@receiver(post_save)
def ssf_device_post_save(sender: type[Model], instance: Device, created: bool, **_):
    """Emit a CAEP credential-change event when an authenticator device is saved.

    Registered without a sender filter, so non-device saves are ignored here,
    as are devices that have not been confirmed yet."""
    if not isinstance(instance, Device) or not instance.confirmed:
        return
    payload = {
        "credential_type": device_type_map.get(instance.__class__),
        "change_type": "create" if created else "update",
        "friendly_name": instance.name,
    }
    # Only attach the AAGUID for WebAuthn devices whose authenticator type is known.
    if isinstance(instance, WebAuthnDevice) and instance.aaguid != UNKNOWN_DEVICE_TYPE_AAGUID:
        payload["fido2_aaguid"] = instance.aaguid
    send_ssf_event(
        EventTypes.CAEP_CREDENTIAL_CHANGE,
        payload,
        sub_id={
            "format": "complex",
            "user": {"format": "email", "email": instance.user.email},
        },
    )
|  | ||||
|  | ||||
@receiver(post_delete)
def ssf_device_post_delete(sender: type[Model], instance: Device, **_):
    """Emit a CAEP credential-change (delete) event when a device is removed.

    Registered without a sender filter, so non-device deletions are ignored
    here, as are devices that were never confirmed."""
    if not isinstance(instance, Device) or not instance.confirmed:
        return
    payload = {
        "credential_type": device_type_map.get(instance.__class__),
        "change_type": "delete",
        "friendly_name": instance.name,
    }
    # Only attach the AAGUID for WebAuthn devices whose authenticator type is known.
    if isinstance(instance, WebAuthnDevice) and instance.aaguid != UNKNOWN_DEVICE_TYPE_AAGUID:
        payload["fido2_aaguid"] = instance.aaguid
    send_ssf_event(
        EventTypes.CAEP_CREDENTIAL_CHANGE,
        payload,
        sub_id={
            "format": "complex",
            "user": {"format": "email", "email": instance.user.email},
        },
    )
| @ -1,136 +0,0 @@ | ||||
| from celery import group | ||||
| from django.http import HttpRequest | ||||
| from django.utils.timezone import now | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from requests.exceptions import RequestException | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.core.models import User | ||||
| from authentik.enterprise.providers.ssf.models import ( | ||||
|     DeliveryMethods, | ||||
|     EventTypes, | ||||
|     SSFEventStatus, | ||||
|     Stream, | ||||
|     StreamEvent, | ||||
| ) | ||||
| from authentik.events.logs import LogEvent | ||||
| from authentik.events.models import TaskStatus | ||||
| from authentik.events.system_tasks import SystemTask | ||||
| from authentik.lib.utils.http import get_http_session | ||||
| from authentik.lib.utils.time import timedelta_from_string | ||||
| from authentik.policies.engine import PolicyEngine | ||||
| from authentik.root.celery import CELERY_APP | ||||
|  | ||||
# Module-level HTTP session, shared across pushes for connection pooling.
session = get_http_session()
LOGGER = get_logger()
|  | ||||
|  | ||||
def send_ssf_event(
    event_type: EventTypes,
    data: dict,
    stream_filter: dict | None = None,
    request: HttpRequest | None = None,
    **extra_data,
):
    """Wrapper to send an SSF event to multiple streams.

    Builds the per-stream payloads for every Stream matching *stream_filter*
    (restricted to streams that requested *event_type*) and hands delivery off
    to the ``_send_ssf_event`` celery task.

    Returns the celery AsyncResult of the dispatched task."""
    payload = []
    # Copy the filter so the caller's dict is never mutated
    # (previously the key below was written into the caller's object).
    stream_filter = dict(stream_filter) if stream_filter else {}
    stream_filter["events_requested__contains"] = [event_type]
    # Propagate the request ID as the SET `txn` claim when available.
    if request and hasattr(request, "request_id"):
        extra_data.setdefault("txn", request.request_id)
    for stream in Stream.objects.filter(**stream_filter):
        event_data = stream.prepare_event_payload(event_type, data, **extra_data)
        payload.append((str(stream.uuid), event_data))
    return _send_ssf_event.delay(payload)
|  | ||||
|  | ||||
def _check_app_access(stream_uuid: str, event_data: dict) -> bool:
    """Check if event is related to user and if so, check
    if the user has access to the application"""
    stream = Stream.objects.filter(pk=stream_uuid).first()
    if not stream:
        # Stream vanished between dispatch and delivery; drop the event.
        return False
    # `event_data` is a dict version of a StreamEvent
    subject = event_data.get("payload", {}).get("sub_id", {})
    subject_email = subject.get("user", {}).get("email", None)
    if not subject_email:
        # Event isn't tied to a user; nothing to check.
        return True
    user = User.objects.filter(email=subject_email).first()
    if not user:
        return True
    # Evaluate the backchannel application's policies without the cache,
    # so a just-changed binding takes effect immediately.
    engine = PolicyEngine(stream.provider.backchannel_application, user)
    engine.use_cache = False
    engine.build()
    return engine.passing
|  | ||||
|  | ||||
@CELERY_APP.task()
def _send_ssf_event(event_data: list[tuple[str, dict]]):
    """Persist pending stream events and fan out their delivery sub-tasks."""
    delivery_tasks = []
    for stream_uuid, payload in event_data:
        # Skip events whose subject has no access to the backchannel application.
        if not _check_app_access(stream_uuid, payload):
            continue
        stream_event = StreamEvent.objects.create(**payload)
        delivery_tasks.extend(send_single_ssf_event(stream_uuid, str(stream_event.uuid)))
    # Run all delivery tasks as one celery group.
    group(*delivery_tasks)()
|  | ||||
|  | ||||
def send_single_ssf_event(stream_id: str, evt_id: str):
    """Return the list of celery signatures needed to deliver a single event.

    Always returns a list (possibly empty): the caller extends a task list
    with the result, and the previous bare ``return`` paths returned ``None``,
    which made ``tasks.extend(...)`` raise TypeError."""
    stream = Stream.objects.filter(pk=stream_id).first()
    if not stream:
        return []
    event = StreamEvent.objects.filter(pk=evt_id).first()
    if not event:
        return []
    if event.status == SSFEventStatus.SENT:
        # Already delivered, nothing to do.
        return []
    if stream.delivery_method == DeliveryMethods.RISC_PUSH:
        return [ssf_push_event.si(str(event.pk))]
    # Poll-based streams are collected by the receiver; no push task needed.
    return []
|  | ||||
|  | ||||
@CELERY_APP.task(bind=True, base=SystemTask)
def ssf_push_event(self: SystemTask, event_id: str):
    """Push a single StreamEvent to its stream's endpoint as a SET JWT.

    On delivery failure the event is marked PENDING_FAILED and its expiry is
    extended, so a retry run can pick it up again."""
    self.save_on_success = False
    event = StreamEvent.objects.filter(pk=event_id).first()
    if not event:
        return
    self.set_uid(event_id)
    if event.status == SSFEventStatus.SENT:
        self.set_status(TaskStatus.SUCCESSFUL)
        return
    try:
        response = session.post(
            event.stream.endpoint_url,
            data=event.stream.encode(event.payload),
            headers={"Content-Type": "application/secevent+jwt", "Accept": "application/json"},
        )
        response.raise_for_status()
        event.status = SSFEventStatus.SENT
        event.save()
        self.set_status(TaskStatus.SUCCESSFUL)
        return
    except RequestException as exc:
        LOGGER.warning("Failed to send SSF event", exc=exc)
        self.set_status(TaskStatus.ERROR)
        attrs = {}
        # Must compare against None: requests' Response is falsy for 4xx/5xx
        # status codes (Response.__bool__ returns `ok`), which are exactly the
        # responses raise_for_status() raises for — `if exc.response:` would
        # never record them.
        if exc.response is not None:
            attrs["response"] = {
                "content": exc.response.text,
                "status": exc.response.status_code,
            }
        self.set_error(
            exc,
            LogEvent(
                _("Failed to send request"),
                log_level="warning",
                logger=self.__name__,
                attributes=attrs,
            ),
        )
        # Re-up the expiry of the stream event
        event.expires = now() + timedelta_from_string(event.stream.provider.event_retention)
        event.status = SSFEventStatus.PENDING_FAILED
        event.save()
| @ -1,46 +0,0 @@ | ||||
| import json | ||||
|  | ||||
| from django.urls import reverse | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import Application | ||||
| from authentik.core.tests.utils import create_test_cert | ||||
| from authentik.enterprise.providers.ssf.models import ( | ||||
|     SSFProvider, | ||||
| ) | ||||
| from authentik.lib.generators import generate_id | ||||
|  | ||||
|  | ||||
class TestConfiguration(APITestCase):
    """Test the SSF well-known configuration endpoint."""

    def setUp(self):
        self.application = Application.objects.create(name=generate_id(), slug=generate_id())
        self.provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
            backchannel_application=self.application,
        )

    def _config_url(self) -> str:
        # Configuration endpoint for the backchannel application created in setUp.
        return reverse(
            "authentik_providers_ssf:configuration",
            kwargs={"application_slug": self.application.slug},
        )

    def test_config_fetch(self):
        """test SSF configuration (unauthenticated)"""
        res = self.client.get(self._config_url())
        self.assertEqual(res.status_code, 200)
        body = json.loads(res.content)
        self.assertEqual(body["spec_version"], "1_0-ID2")

    def test_config_fetch_authenticated(self):
        """test SSF configuration (authenticated)"""
        res = self.client.get(
            self._config_url(),
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        self.assertEqual(res.status_code, 200)
        body = json.loads(res.content)
        self.assertEqual(body["spec_version"], "1_0-ID2")
| @ -1,51 +0,0 @@ | ||||
| """JWKS tests""" | ||||
|  | ||||
| import base64 | ||||
| import json | ||||
|  | ||||
| from cryptography.hazmat.backends import default_backend | ||||
| from cryptography.x509 import load_der_x509_certificate | ||||
| from django.test import TestCase | ||||
| from django.urls.base import reverse | ||||
| from jwt import PyJWKSet | ||||
|  | ||||
| from authentik.core.models import Application | ||||
| from authentik.core.tests.utils import create_test_cert | ||||
| from authentik.enterprise.providers.ssf.models import SSFProvider | ||||
| from authentik.lib.generators import generate_id | ||||
|  | ||||
|  | ||||
class TestJWKS(TestCase):
    """Test JWKS view"""

    def _fetch_jwks(self) -> dict:
        # Create a provider with a freshly generated signing cert, attach it to
        # a new application as a backchannel provider, and fetch its JWKS.
        provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
        )
        app = Application.objects.create(name=generate_id(), slug=generate_id())
        app.backchannel_providers.add(provider)
        response = self.client.get(
            reverse("authentik_providers_ssf:jwks", kwargs={"application_slug": app.slug})
        )
        return json.loads(response.content.decode())

    def test_rs256(self):
        """Test JWKS request with RS256"""
        body = self._fetch_jwks()
        self.assertEqual(len(body["keys"]), 1)
        PyJWKSet.from_dict(body)
        key = body["keys"][0]
        # The embedded x5c entry must decode back into a valid DER certificate.
        load_der_x509_certificate(base64.b64decode(key["x5c"][0]), default_backend()).public_key()

    def test_es256(self):
        """Test JWKS request with ES256"""
        # NOTE(review): this uses the same default create_test_cert() as the
        # RS256 test — presumably an EC key was intended here; confirm.
        body = self._fetch_jwks()
        self.assertEqual(len(body["keys"]), 1)
        PyJWKSet.from_dict(body)
| @ -1,168 +0,0 @@ | ||||
| from uuid import uuid4 | ||||
|  | ||||
| from django.urls import reverse | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import Application, Group | ||||
| from authentik.core.tests.utils import ( | ||||
|     create_test_cert, | ||||
|     create_test_user, | ||||
| ) | ||||
| from authentik.enterprise.providers.ssf.models import ( | ||||
|     EventTypes, | ||||
|     SSFEventStatus, | ||||
|     SSFProvider, | ||||
|     Stream, | ||||
|     StreamEvent, | ||||
| ) | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.policies.models import PolicyBinding | ||||
| from authentik.stages.authenticator_webauthn.models import WebAuthnDevice | ||||
|  | ||||
|  | ||||
class TestSignals(APITestCase):
    """Test individual SSF Signals.

    Each test creates a stream via the API, triggers a Django signal, and
    asserts that the matching StreamEvent was created with the expected CAEP
    payload. Events end up PENDING_FAILED because the push endpoint used here
    is not a real receiver."""

    def setUp(self):
        self.application = Application.objects.create(name=generate_id(), slug=generate_id())
        self.provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
            backchannel_application=self.application,
        )
        # Register a push stream subscribed to credential-change and
        # session-revoked events.
        res = self.client.post(
            reverse(
                "authentik_providers_ssf:stream",
                kwargs={"application_slug": self.application.slug},
            ),
            data={
                "iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
                "aud": ["https://app.authentik.company"],
                "delivery": {
                    "method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
                    "endpoint_url": "https://app.authentik.company",
                },
                "events_requested": [
                    "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                    "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                ],
                "format": "iss_sub",
            },
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        self.assertEqual(res.status_code, 201, res.content)

    def test_signal_logout(self):
        """Test user logout"""
        user = create_test_user()
        self.client.force_login(user)
        self.client.logout()

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        event_payload = event.payload["events"][
            "https://schemas.openid.net/secevent/caep/event-type/session-revoked"
        ]
        self.assertEqual(event_payload["initiating_entity"], "user")
        self.assertEqual(event.payload["sub_id"]["format"], "complex")
        self.assertEqual(event.payload["sub_id"]["session"]["format"], "opaque")
        self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
        self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)

    def test_signal_password_change(self):
        """Test user password change"""
        user = create_test_user()
        self.client.force_login(user)
        user.set_password(generate_id())
        user.save()

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        event_payload = event.payload["events"][
            "https://schemas.openid.net/secevent/caep/event-type/credential-change"
        ]
        self.assertEqual(event_payload["change_type"], "update")
        self.assertEqual(event_payload["credential_type"], "password")
        self.assertEqual(event.payload["sub_id"]["format"], "complex")
        self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
        self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)

    def test_signal_authenticator_added(self):
        """Test authenticator creation signal"""
        user = create_test_user()
        self.client.force_login(user)
        dev = WebAuthnDevice.objects.create(
            user=user,
            name=generate_id(),
            credential_id=generate_id(),
            public_key=generate_id(),
            aaguid=str(uuid4()),
        )

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        # (removed a no-op argument-less .exclude() from this queryset chain)
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        event_payload = event.payload["events"][
            "https://schemas.openid.net/secevent/caep/event-type/credential-change"
        ]
        self.assertEqual(event_payload["change_type"], "create")
        self.assertEqual(event_payload["fido2_aaguid"], dev.aaguid)
        self.assertEqual(event_payload["friendly_name"], dev.name)
        self.assertEqual(event_payload["credential_type"], "fido-u2f")
        self.assertEqual(event.payload["sub_id"]["format"], "complex")
        self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
        self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)

    def test_signal_authenticator_deleted(self):
        """Test authenticator deletion signal"""
        user = create_test_user()
        self.client.force_login(user)
        dev = WebAuthnDevice.objects.create(
            user=user,
            name=generate_id(),
            credential_id=generate_id(),
            public_key=generate_id(),
            aaguid=str(uuid4()),
        )
        dev.delete()

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        # (removed a no-op argument-less .exclude() from this queryset chain)
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        event_payload = event.payload["events"][
            "https://schemas.openid.net/secevent/caep/event-type/credential-change"
        ]
        self.assertEqual(event_payload["change_type"], "delete")
        self.assertEqual(event_payload["fido2_aaguid"], dev.aaguid)
        self.assertEqual(event_payload["friendly_name"], dev.name)
        self.assertEqual(event_payload["credential_type"], "fido-u2f")
        self.assertEqual(event.payload["sub_id"]["format"], "complex")
        self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
        self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)

    def test_signal_policy_ignore(self):
        """Test event not being created for user that doesn't have access to the application"""
        # Bind an empty group to the application so no user passes the policy check.
        PolicyBinding.objects.create(
            target=self.application, group=Group.objects.create(name=generate_id()), order=0
        )
        user = create_test_user()
        self.client.force_login(user)
        user.set_password(generate_id())
        user.save()

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(
            stream=stream, type=EventTypes.CAEP_CREDENTIAL_CHANGE
        ).first()
        self.assertIsNone(event)
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user
	