Compare commits

24 Commits

web/add-ht ... version/20
| Author | SHA1 | Date |
| --- | --- | --- |
|  | 8256f1897d |  |
|  | 16d321835d |  |
|  | f34612efe6 |  |
|  | e82f147130 |  |
|  | 0ea6ad8eea |  |
|  | f731443220 |  |
|  | b70a66cde5 |  |
|  | b733dbbcb0 |  |
|  | e34d4c0669 |  |
|  | 310983a4d0 |  |
|  | 47b0fc86f7 |  |
|  | b6e961b1f3 |  |
|  | 874d7ff320 |  |
|  | e4a5bc9df6 |  |
|  | 318e0cf9f8 |  |
|  | bd0815d894 |  |
|  | af35ecfe66 |  |
|  | 0c05cd64bb |  |
|  | cb80b76490 |  |
|  | 061d4bc758 |  |
|  | 8ff27f69e1 |  |
|  | 045cd98276 |  |
|  | b520843984 |  |
|  | 92216e4ea8 |  |
| @ -1,5 +1,5 @@ | ||||
| [bumpversion] | ||||
| current_version = 2024.6.1 | ||||
| current_version = 2024.2.1 | ||||
| tag = True | ||||
| commit = True | ||||
| parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? | ||||
| @ -17,14 +17,10 @@ optional_value = final | ||||
|  | ||||
| [bumpversion:file:pyproject.toml] | ||||
|  | ||||
| [bumpversion:file:package.json] | ||||
|  | ||||
| [bumpversion:file:docker-compose.yml] | ||||
|  | ||||
| [bumpversion:file:schema.yml] | ||||
|  | ||||
| [bumpversion:file:blueprints/schema.json] | ||||
|  | ||||
| [bumpversion:file:authentik/__init__.py] | ||||
|  | ||||
| [bumpversion:file:internal/constants/constants.go] | ||||
|  | ||||
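The `parse` pattern in the `[bumpversion]` hunk above is a plain Python regular expression. Below is a minimal standalone sketch (not part of this comparison; the doubled backslash in the config line is assumed to be config-file escaping, so a single `\d` is used here) of how it decomposes a release and a release-candidate version:

```python
import re

# Pattern from the bumpversion config shown above.
VERSION_RE = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?"
)

for version in ("2024.2.1", "2024.6.1-rc2"):
    match = VERSION_RE.fullmatch(version)
    print(version, match.groupdict() if match else "no match")
# 2024.2.1     -> major/minor/patch set, rc_t and rc_n are None
# 2024.6.1-rc2 -> rc_t='rc', rc_n='2' in addition to major/minor/patch
```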
							
								
								
									
.github/FUNDING.yml (2 changes)
							| @ -1 +1 @@ | ||||
| custom: https://goauthentik.io/pricing/ | ||||
| github: [BeryJu] | ||||
|  | ||||
							
								
								
									
.github/ISSUE_TEMPLATE/question.md (2 changes)
							| @ -9,7 +9,7 @@ assignees: "" | ||||
| **Describe your question/** | ||||
| A clear and concise description of what you're trying to do. | ||||
|  | ||||
| **Relevant info** | ||||
| **Relevant infos** | ||||
| i.e. Version of other software you're using, specifics of your setup | ||||
|  | ||||
| **Screenshots** | ||||
|  | ||||
| @ -54,10 +54,9 @@ runs: | ||||
|             authentik: | ||||
|                 outposts: | ||||
|                     container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s | ||||
|             global: | ||||
|                 image: | ||||
|                     repository: ghcr.io/goauthentik/dev-server | ||||
|                     tag: ${{ inputs.tag }} | ||||
|             image: | ||||
|                 repository: ghcr.io/goauthentik/dev-server | ||||
|                 tag: ${{ inputs.tag }} | ||||
|             ``` | ||||
|  | ||||
|             For arm64, use these values: | ||||
| @ -66,10 +65,9 @@ runs: | ||||
|             authentik: | ||||
|                 outposts: | ||||
|                     container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s | ||||
|             global: | ||||
|                 image: | ||||
|                     repository: ghcr.io/goauthentik/dev-server | ||||
|                     tag: ${{ inputs.tag }}-arm64 | ||||
|             image: | ||||
|                 repository: ghcr.io/goauthentik/dev-server | ||||
|                 tag: ${{ inputs.tag }}-arm64 | ||||
|             ``` | ||||
|  | ||||
|             Afterwards, run the upgrade commands from the latest release notes. | ||||
|  | ||||
| @ -11,10 +11,6 @@ inputs: | ||||
|     description: "Docker image arch" | ||||
|  | ||||
| outputs: | ||||
|   shouldBuild: | ||||
|     description: "Whether to build image or not" | ||||
|     value: ${{ steps.ev.outputs.shouldBuild }} | ||||
|  | ||||
|   sha: | ||||
|     description: "sha" | ||||
|     value: ${{ steps.ev.outputs.sha }} | ||||
|  | ||||
| @ -7,12 +7,10 @@ from time import time | ||||
| parser = configparser.ConfigParser() | ||||
| parser.read(".bumpversion.cfg") | ||||
|  | ||||
| should_build = str(os.environ.get("DOCKER_USERNAME", None) is not None).lower() | ||||
|  | ||||
| branch_name = os.environ["GITHUB_REF"] | ||||
| if os.environ.get("GITHUB_HEAD_REF", "") != "": | ||||
|     branch_name = os.environ["GITHUB_HEAD_REF"] | ||||
| safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-").replace("'", "-") | ||||
| safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-") | ||||
|  | ||||
| image_names = os.getenv("IMAGE_NAME").split(",") | ||||
| image_arch = os.getenv("IMAGE_ARCH") or None | ||||
| @ -54,9 +52,8 @@ image_main_tag = image_tags[0] | ||||
| image_tags_rendered = ",".join(image_tags) | ||||
|  | ||||
| with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output: | ||||
|     print(f"shouldBuild={should_build}", file=_output) | ||||
|     print(f"sha={sha}", file=_output) | ||||
|     print(f"version={version}", file=_output) | ||||
|     print(f"prerelease={prerelease}", file=_output) | ||||
|     print(f"imageTags={image_tags_rendered}", file=_output) | ||||
|     print(f"imageMainTag={image_main_tag}", file=_output) | ||||
|     print("sha=%s" % sha, file=_output) | ||||
|     print("version=%s" % version, file=_output) | ||||
|     print("prerelease=%s" % prerelease, file=_output) | ||||
|     print("imageTags=%s" % image_tags_rendered, file=_output) | ||||
|     print("imageMainTag=%s" % image_main_tag, file=_output) | ||||
|  | ||||
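The script in the hunk above appends its results to the file referenced by `GITHUB_OUTPUT`, which GitHub Actions parses as plain `name=value` lines and exposes as `steps.<id>.outputs.<name>`. A small sketch with made-up values (none of these are taken from the diff) of what that file ends up containing:

```python
import tempfile

# Stand-in values; the real script derives these from git metadata and the
# IMAGE_NAME / IMAGE_ARCH environment variables.
outputs = {
    "sha": "abc1234",
    "version": "2024.2.1",
    "prerelease": "false",
    "imageTags": "ghcr.io/goauthentik/dev-server:gh-abc1234",
    "imageMainTag": "gh-abc1234",
}

with tempfile.NamedTemporaryFile("w+", suffix=".txt", delete=False) as _output:
    for key, value in outputs.items():
        print(f"{key}={value}", file=_output)

with open(_output.name, encoding="utf-8") as handle:
    print(handle.read())
# sha=abc1234
# version=2024.2.1
# ... each line becomes available as steps.<step id>.outputs.<name>
```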
							
								
								
									
.github/actions/setup/action.yml (8 changes)
							| @ -16,25 +16,25 @@ runs: | ||||
|         sudo apt-get update | ||||
|         sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext | ||||
|     - name: Setup python and restore poetry | ||||
|       uses: actions/setup-python@v5 | ||||
|       uses: actions/setup-python@v4 | ||||
|       with: | ||||
|         python-version-file: "pyproject.toml" | ||||
|         cache: "poetry" | ||||
|     - name: Setup node | ||||
|       uses: actions/setup-node@v4 | ||||
|       uses: actions/setup-node@v3 | ||||
|       with: | ||||
|         node-version-file: web/package.json | ||||
|         cache: "npm" | ||||
|         cache-dependency-path: web/package-lock.json | ||||
|     - name: Setup go | ||||
|       uses: actions/setup-go@v5 | ||||
|       uses: actions/setup-go@v4 | ||||
|       with: | ||||
|         go-version-file: "go.mod" | ||||
|     - name: Setup dependencies | ||||
|       shell: bash | ||||
|       run: | | ||||
|         export PSQL_TAG=${{ inputs.postgresql_version }} | ||||
|         docker compose -f .github/actions/setup/docker-compose.yml up -d | ||||
|         docker-compose -f .github/actions/setup/docker-compose.yml up -d | ||||
|         poetry install | ||||
|         cd web && npm ci | ||||
|     - name: Generate config | ||||
|  | ||||
							
								
								
									
.github/actions/setup/docker-compose.yml (2 changes)
							| @ -1,3 +1,5 @@ | ||||
| version: "3.7" | ||||
|  | ||||
| services: | ||||
|   postgresql: | ||||
|     image: docker.io/library/postgres:${PSQL_TAG:-16} | ||||
|  | ||||
							
								
								
									
.github/codespell-words.txt (1 change)
							| @ -4,4 +4,3 @@ hass | ||||
| warmup | ||||
| ontext | ||||
| singed | ||||
| assertIn | ||||
|  | ||||
							
								
								
									
.github/dependabot.yml (40 changes)
							| @ -21,10 +21,7 @@ updates: | ||||
|     labels: | ||||
|       - dependencies | ||||
|   - package-ecosystem: npm | ||||
|     directories: | ||||
|       - "/web" | ||||
|       - "/tests/wdio" | ||||
|       - "/web/sfe" | ||||
|     directory: "/web" | ||||
|     schedule: | ||||
|       interval: daily | ||||
|       time: "04:00" | ||||
| @ -33,6 +30,7 @@ updates: | ||||
|     open-pull-requests-limit: 10 | ||||
|     commit-message: | ||||
|       prefix: "web:" | ||||
|     # TODO: deduplicate these groups | ||||
|     groups: | ||||
|       sentry: | ||||
|         patterns: | ||||
| @ -54,10 +52,38 @@ updates: | ||||
|       esbuild: | ||||
|         patterns: | ||||
|           - "@esbuild/*" | ||||
|       rollup: | ||||
|   - package-ecosystem: npm | ||||
|     directory: "/tests/wdio" | ||||
|     schedule: | ||||
|       interval: daily | ||||
|       time: "04:00" | ||||
|     labels: | ||||
|       - dependencies | ||||
|     open-pull-requests-limit: 10 | ||||
|     commit-message: | ||||
|       prefix: "web:" | ||||
|     # TODO: deduplicate these groups | ||||
|     groups: | ||||
|       sentry: | ||||
|         patterns: | ||||
|           - "@rollup/*" | ||||
|           - "rollup-*" | ||||
|           - "@sentry/*" | ||||
|           - "@spotlightjs/*" | ||||
|       babel: | ||||
|         patterns: | ||||
|           - "@babel/*" | ||||
|           - "babel-*" | ||||
|       eslint: | ||||
|         patterns: | ||||
|           - "@typescript-eslint/*" | ||||
|           - "eslint" | ||||
|           - "eslint-*" | ||||
|       storybook: | ||||
|         patterns: | ||||
|           - "@storybook/*" | ||||
|           - "*storybook*" | ||||
|       esbuild: | ||||
|         patterns: | ||||
|           - "@esbuild/*" | ||||
|       wdio: | ||||
|         patterns: | ||||
|           - "@wdio/*" | ||||
|  | ||||
							
								
								
									
.github/workflows/api-py-publish.yml (65 changes)
							| @ -1,65 +0,0 @@ | ||||
| name: authentik-api-py-publish | ||||
| on: | ||||
|   push: | ||||
|     branches: [main] | ||||
|     paths: | ||||
|       - "schema.yml" | ||||
|   workflow_dispatch: | ||||
| jobs: | ||||
|   build: | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
|       id-token: write | ||||
|     steps: | ||||
|       - id: generate_token | ||||
|         uses: tibdex/github-app-token@v2 | ||||
|         with: | ||||
|           app_id: ${{ secrets.GH_APP_ID }} | ||||
|           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }} | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
|       - name: Install poetry & deps | ||||
|         shell: bash | ||||
|         run: | | ||||
|           pipx install poetry || true | ||||
|           sudo apt-get update | ||||
|           sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext | ||||
|       - name: Setup python and restore poetry | ||||
|         uses: actions/setup-python@v5 | ||||
|         with: | ||||
|           python-version-file: "pyproject.toml" | ||||
|           cache: "poetry" | ||||
|       - name: Generate API Client | ||||
|         run: make gen-client-py | ||||
|       - name: Publish package | ||||
|         working-directory: gen-py-api/ | ||||
|         run: | | ||||
|           poetry build | ||||
|       - name: Publish package to PyPI | ||||
|         uses: pypa/gh-action-pypi-publish@release/v1 | ||||
|         with: | ||||
|           packages-dir: gen-py-api/dist/ | ||||
|       # We can't easily upgrade the API client being used due to poetry being poetry | ||||
|       # so we'll have to rely on dependabot | ||||
|       # - name: Upgrade / | ||||
|       #   run: | | ||||
|       #     export VERSION=$(cd gen-py-api && poetry version -s) | ||||
|       #     poetry add "authentik_client=$VERSION" --allow-prereleases --lock | ||||
|       # - uses: peter-evans/create-pull-request@v6 | ||||
|       #   id: cpr | ||||
|       #   with: | ||||
|       #     token: ${{ steps.generate_token.outputs.token }} | ||||
|       #     branch: update-root-api-client | ||||
|       #     commit-message: "root: bump API Client version" | ||||
|       #     title: "root: bump API Client version" | ||||
|       #     body: "root: bump API Client version" | ||||
|       #     delete-branch: true | ||||
|       #     signoff: true | ||||
|       #     # ID from https://api.github.com/users/authentik-automation[bot] | ||||
|       #     author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> | ||||
|       # - uses: peter-evans/enable-pull-request-automerge@v3 | ||||
|       #   with: | ||||
|       #     token: ${{ steps.generate_token.outputs.token }} | ||||
|       #     pull-request-number: ${{ steps.cpr.outputs.pull-request-number }} | ||||
|       #     merge-method: squash | ||||
							
								
								
									
.github/workflows/ci-main.yml (29 changes)
							| @ -7,6 +7,8 @@ on: | ||||
|       - main | ||||
|       - next | ||||
|       - version-* | ||||
|     paths-ignore: | ||||
|       - website/** | ||||
|   pull_request: | ||||
|     branches: | ||||
|       - main | ||||
| @ -26,7 +28,10 @@ jobs: | ||||
|           - bandit | ||||
|           - black | ||||
|           - codespell | ||||
|           - isort | ||||
|           - pending-migrations | ||||
|           # - pylint | ||||
|           - pyright | ||||
|           - ruff | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
| @ -50,6 +55,7 @@ jobs: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         psql: | ||||
|           - 12-alpine | ||||
|           - 15-alpine | ||||
|           - 16-alpine | ||||
|     steps: | ||||
| @ -103,6 +109,7 @@ jobs: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         psql: | ||||
|           - 12-alpine | ||||
|           - 15-alpine | ||||
|           - 16-alpine | ||||
|     steps: | ||||
| @ -128,7 +135,7 @@ jobs: | ||||
|       - name: Setup authentik env | ||||
|         uses: ./.github/actions/setup | ||||
|       - name: Create k8s Kind Cluster | ||||
|         uses: helm/kind-action@v1.10.0 | ||||
|         uses: helm/kind-action@v1.9.0 | ||||
|       - name: run integration | ||||
|         run: | | ||||
|           poetry run coverage run manage.py test tests/integration | ||||
| @ -158,8 +165,6 @@ jobs: | ||||
|             glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap* | ||||
|           - name: radius | ||||
|             glob: tests/e2e/test_provider_radius* | ||||
|           - name: scim | ||||
|             glob: tests/e2e/test_source_scim* | ||||
|           - name: flows | ||||
|             glob: tests/e2e/test_flows* | ||||
|     steps: | ||||
| @ -168,7 +173,7 @@ jobs: | ||||
|         uses: ./.github/actions/setup | ||||
|       - name: Setup e2e env (chrome, etc) | ||||
|         run: | | ||||
|           docker compose -f tests/e2e/docker-compose.yml up -d | ||||
|           docker-compose -f tests/e2e/docker-compose.yml up -d | ||||
|       - id: cache-web | ||||
|         uses: actions/cache@v4 | ||||
|         with: | ||||
| @ -214,24 +219,22 @@ jobs: | ||||
|       # Needed to upload contianer images to ghcr.io | ||||
|       packages: write | ||||
|     timeout-minutes: 120 | ||||
|     if: "github.repository == 'goauthentik/authentik'" | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           ref: ${{ github.event.pull_request.head.sha }} | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.1.0 | ||||
|         uses: docker/setup-qemu-action@v3.0.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/dev-server | ||||
|           image-arch: ${{ matrix.arch }} | ||||
|       - name: Login to Container Registry | ||||
|         if: ${{ steps.ev.outputs.shouldBuild == 'true' }} | ||||
|         uses: docker/login-action@v3 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
| @ -240,18 +243,18 @@ jobs: | ||||
|       - name: generate ts client | ||||
|         run: make gen-client-ts | ||||
|       - name: Build Docker Image | ||||
|         uses: docker/build-push-action@v6 | ||||
|         uses: docker/build-push-action@v5 | ||||
|         with: | ||||
|           context: . | ||||
|           secrets: | | ||||
|             GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }} | ||||
|             GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }} | ||||
|           tags: ${{ steps.ev.outputs.imageTags }} | ||||
|           push: ${{ steps.ev.outputs.shouldBuild == 'true' }} | ||||
|           push: true | ||||
|           build-args: | | ||||
|             GIT_BUILD_HASH=${{ steps.ev.outputs.sha }} | ||||
|           cache-from: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache | ||||
|           cache-to: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache,mode=max | ||||
|           cache-from: type=gha | ||||
|           cache-to: type=gha,mode=max | ||||
|           platforms: linux/${{ matrix.arch }} | ||||
|   pr-comment: | ||||
|     needs: | ||||
| @ -269,8 +272,6 @@ jobs: | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/dev-server | ||||
|       - name: Comment on PR | ||||
|  | ||||
							
								
								
									
.github/workflows/ci-outpost.yml (16 changes)
							| @ -29,7 +29,7 @@ jobs: | ||||
|       - name: Generate API | ||||
|         run: make gen-client-go | ||||
|       - name: golangci-lint | ||||
|         uses: golangci/golangci-lint-action@v6 | ||||
|         uses: golangci/golangci-lint-action@v4 | ||||
|         with: | ||||
|           version: v1.54.2 | ||||
|           args: --timeout 5000s --verbose | ||||
| @ -71,23 +71,21 @@ jobs: | ||||
|     permissions: | ||||
|       # Needed to upload contianer images to ghcr.io | ||||
|       packages: write | ||||
|     if: "github.repository == 'goauthentik/authentik'" | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           ref: ${{ github.event.pull_request.head.sha }} | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.1.0 | ||||
|         uses: docker/setup-qemu-action@v3.0.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/dev-${{ matrix.type }} | ||||
|       - name: Login to Container Registry | ||||
|         if: ${{ steps.ev.outputs.shouldBuild == 'true' }} | ||||
|         uses: docker/login-action@v3 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
| @ -96,17 +94,17 @@ jobs: | ||||
|       - name: Generate API | ||||
|         run: make gen-client-go | ||||
|       - name: Build Docker Image | ||||
|         uses: docker/build-push-action@v6 | ||||
|         uses: docker/build-push-action@v5 | ||||
|         with: | ||||
|           tags: ${{ steps.ev.outputs.imageTags }} | ||||
|           file: ${{ matrix.type }}.Dockerfile | ||||
|           push: ${{ steps.ev.outputs.shouldBuild == 'true' }} | ||||
|           push: true | ||||
|           build-args: | | ||||
|             GIT_BUILD_HASH=${{ steps.ev.outputs.sha }} | ||||
|           platforms: linux/amd64,linux/arm64 | ||||
|           context: . | ||||
|           cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache | ||||
|           cache-to: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache,mode=max | ||||
|           cache-from: type=gha | ||||
|           cache-to: type=gha,mode=max | ||||
|   build-binary: | ||||
|     timeout-minutes: 120 | ||||
|     needs: | ||||
|  | ||||
							
								
								
									
.github/workflows/ci-web.yml (114 changes)
							| @ -12,36 +12,14 @@ on: | ||||
|       - version-* | ||||
|  | ||||
| jobs: | ||||
|   lint: | ||||
|   lint-eslint: | ||||
|     runs-on: ubuntu-latest | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         command: | ||||
|           - lint | ||||
|           - lint:lockfile | ||||
|           - tsc | ||||
|           - prettier-check | ||||
|         project: | ||||
|           - web | ||||
|           - tests/wdio | ||||
|         include: | ||||
|           - command: tsc | ||||
|             project: web | ||||
|             extra_setup: | | ||||
|               cd sfe/ && npm ci | ||||
|           - command: lit-analyse | ||||
|             project: web | ||||
|             extra_setup: | | ||||
|               # lit-analyse doesn't understand path rewrites, so make it | ||||
|               # belive it's an actual module | ||||
|               cd node_modules/@goauthentik | ||||
|               ln -s ../../src/ web | ||||
|         exclude: | ||||
|           - command: lint:lockfile | ||||
|             project: tests/wdio | ||||
|           - command: tsc | ||||
|             project: tests/wdio | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: actions/setup-node@v4 | ||||
| @ -50,17 +28,77 @@ jobs: | ||||
|           cache: "npm" | ||||
|           cache-dependency-path: ${{ matrix.project }}/package-lock.json | ||||
|       - working-directory: ${{ matrix.project }}/ | ||||
|         run: | | ||||
|           npm ci | ||||
|           ${{ matrix.extra_setup }} | ||||
|         run: npm ci | ||||
|       - name: Generate API | ||||
|         run: make gen-client-ts | ||||
|       - name: Lint | ||||
|       - name: Eslint | ||||
|         working-directory: ${{ matrix.project }}/ | ||||
|         run: npm run ${{ matrix.command }} | ||||
|         run: npm run lint | ||||
|   lint-build: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: actions/setup-node@v4 | ||||
|         with: | ||||
|           node-version-file: web/package.json | ||||
|           cache: "npm" | ||||
|           cache-dependency-path: web/package-lock.json | ||||
|       - working-directory: web/ | ||||
|         run: npm ci | ||||
|       - name: Generate API | ||||
|         run: make gen-client-ts | ||||
|       - name: TSC | ||||
|         working-directory: web/ | ||||
|         run: npm run tsc | ||||
|   lint-prettier: | ||||
|     runs-on: ubuntu-latest | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         project: | ||||
|           - web | ||||
|           - tests/wdio | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: actions/setup-node@v4 | ||||
|         with: | ||||
|           node-version-file: ${{ matrix.project }}/package.json | ||||
|           cache: "npm" | ||||
|           cache-dependency-path: ${{ matrix.project }}/package-lock.json | ||||
|       - working-directory: ${{ matrix.project }}/ | ||||
|         run: npm ci | ||||
|       - name: Generate API | ||||
|         run: make gen-client-ts | ||||
|       - name: prettier | ||||
|         working-directory: ${{ matrix.project }}/ | ||||
|         run: npm run prettier-check | ||||
|   lint-lit-analyse: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: actions/setup-node@v4 | ||||
|         with: | ||||
|           node-version-file: web/package.json | ||||
|           cache: "npm" | ||||
|           cache-dependency-path: web/package-lock.json | ||||
|       - working-directory: web/ | ||||
|         run: | | ||||
|           npm ci | ||||
|           # lit-analyse doesn't understand path rewrites, so make it | ||||
|           # belive it's an actual module | ||||
|           cd node_modules/@goauthentik | ||||
|           ln -s ../../src/ web | ||||
|       - name: Generate API | ||||
|         run: make gen-client-ts | ||||
|       - name: lit-analyse | ||||
|         working-directory: web/ | ||||
|         run: npm run lit-analyse | ||||
|   ci-web-mark: | ||||
|     needs: | ||||
|       - lint | ||||
|       - lint-eslint | ||||
|       - lint-prettier | ||||
|       - lint-lit-analyse | ||||
|       - lint-build | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - run: echo mark | ||||
| @ -82,21 +120,3 @@ jobs: | ||||
|       - name: build | ||||
|         working-directory: web/ | ||||
|         run: npm run build | ||||
|   test: | ||||
|     needs: | ||||
|       - ci-web-mark | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: actions/setup-node@v4 | ||||
|         with: | ||||
|           node-version-file: web/package.json | ||||
|           cache: "npm" | ||||
|           cache-dependency-path: web/package-lock.json | ||||
|       - working-directory: web/ | ||||
|         run: npm ci | ||||
|       - name: Generate API | ||||
|         run: make gen-client-ts | ||||
|       - name: test | ||||
|         working-directory: web/ | ||||
|         run: npm run test | ||||
|  | ||||
							
								
								
									
.github/workflows/ci-website.yml (20 changes)
							| @ -12,21 +12,20 @@ on: | ||||
|       - version-* | ||||
|  | ||||
| jobs: | ||||
|   lint: | ||||
|   lint-prettier: | ||||
|     runs-on: ubuntu-latest | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         command: | ||||
|           - lint:lockfile | ||||
|           - prettier-check | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: actions/setup-node@v4 | ||||
|         with: | ||||
|           node-version-file: website/package.json | ||||
|           cache: "npm" | ||||
|           cache-dependency-path: website/package-lock.json | ||||
|       - working-directory: website/ | ||||
|         run: npm ci | ||||
|       - name: Lint | ||||
|       - name: prettier | ||||
|         working-directory: website/ | ||||
|         run: npm run ${{ matrix.command }} | ||||
|         run: npm run prettier-check | ||||
|   test: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
| @ -49,6 +48,7 @@ jobs: | ||||
|       matrix: | ||||
|         job: | ||||
|           - build | ||||
|           - build-docs-only | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: actions/setup-node@v4 | ||||
| @ -63,7 +63,7 @@ jobs: | ||||
|         run: npm run ${{ matrix.job }} | ||||
|   ci-website-mark: | ||||
|     needs: | ||||
|       - lint | ||||
|       - lint-prettier | ||||
|       - test | ||||
|       - build | ||||
|     runs-on: ubuntu-latest | ||||
|  | ||||
							
								
								
									
.github/workflows/gen-update-webauthn-mds.yml (43 changes)
							| @ -1,43 +0,0 @@ | ||||
| name: authentik-gen-update-webauthn-mds | ||||
| on: | ||||
|   workflow_dispatch: | ||||
|   schedule: | ||||
|     - cron: '30 1 1,15 * *' | ||||
|  | ||||
| env: | ||||
|   POSTGRES_DB: authentik | ||||
|   POSTGRES_USER: authentik | ||||
|   POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77" | ||||
|  | ||||
| jobs: | ||||
|   build: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - id: generate_token | ||||
|         uses: tibdex/github-app-token@v2 | ||||
|         with: | ||||
|           app_id: ${{ secrets.GH_APP_ID }} | ||||
|           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }} | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
|       - name: Setup authentik env | ||||
|         uses: ./.github/actions/setup | ||||
|       - run: poetry run ak update_webauthn_mds | ||||
|       - uses: peter-evans/create-pull-request@v6 | ||||
|         id: cpr | ||||
|         with: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
|           branch: update-fido-mds-client | ||||
|           commit-message: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs" | ||||
|           title: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs" | ||||
|           body: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs" | ||||
|           delete-branch: true | ||||
|           signoff: true | ||||
|           # ID from https://api.github.com/users/authentik-automation[bot] | ||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> | ||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||
|         with: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
|           pull-request-number: ${{ steps.cpr.outputs.pull-request-number }} | ||||
|           merge-method: squash | ||||
							
								
								
									
.github/workflows/release-publish.yml (26 changes)
							| @ -14,14 +14,12 @@ jobs: | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.1.0 | ||||
|         uses: docker/setup-qemu-action@v3.0.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/server,beryju/authentik | ||||
|       - name: Docker Login Registry | ||||
| @ -40,7 +38,7 @@ jobs: | ||||
|           mkdir -p ./gen-ts-api | ||||
|           mkdir -p ./gen-go-api | ||||
|       - name: Build Docker Image | ||||
|         uses: docker/build-push-action@v6 | ||||
|         uses: docker/build-push-action@v5 | ||||
|         with: | ||||
|           context: . | ||||
|           push: true | ||||
| @ -68,14 +66,12 @@ jobs: | ||||
|         with: | ||||
|           go-version-file: "go.mod" | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.1.0 | ||||
|         uses: docker/setup-qemu-action@v3.0.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/${{ matrix.type }},beryju/authentik-${{ matrix.type }} | ||||
|       - name: make empty clients | ||||
| @ -94,7 +90,7 @@ jobs: | ||||
|           username: ${{ github.repository_owner }} | ||||
|           password: ${{ secrets.GITHUB_TOKEN }} | ||||
|       - name: Build Docker Image | ||||
|         uses: docker/build-push-action@v6 | ||||
|         uses: docker/build-push-action@v5 | ||||
|         with: | ||||
|           push: true | ||||
|           tags: ${{ steps.ev.outputs.imageTags }} | ||||
| @ -155,12 +151,12 @@ jobs: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Run test suite in final docker images | ||||
|         run: | | ||||
|           echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env | ||||
|           echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env | ||||
|           docker compose pull -q | ||||
|           docker compose up --no-start | ||||
|           docker compose start postgresql redis | ||||
|           docker compose run -u root server test-all | ||||
|           echo "PG_PASS=$(openssl rand -base64 32)" >> .env | ||||
|           echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env | ||||
|           docker-compose pull -q | ||||
|           docker-compose up --no-start | ||||
|           docker-compose start postgresql redis | ||||
|           docker-compose run -u root server test-all | ||||
|   sentry-release: | ||||
|     needs: | ||||
|       - build-server | ||||
| @ -172,8 +168,6 @@ jobs: | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/server | ||||
|       - name: Get static files from docker image | ||||
|  | ||||
							
								
								
									
.github/workflows/release-tag.yml (12 changes)
							| @ -14,16 +14,16 @@ jobs: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Pre-release test | ||||
|         run: | | ||||
|           echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env | ||||
|           echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env | ||||
|           echo "PG_PASS=$(openssl rand -base64 32)" >> .env | ||||
|           echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env | ||||
|           docker buildx install | ||||
|           mkdir -p ./gen-ts-api | ||||
|           docker build -t testing:latest . | ||||
|           echo "AUTHENTIK_IMAGE=testing" >> .env | ||||
|           echo "AUTHENTIK_TAG=latest" >> .env | ||||
|           docker compose up --no-start | ||||
|           docker compose start postgresql redis | ||||
|           docker compose run -u root server test-all | ||||
|           docker-compose up --no-start | ||||
|           docker-compose start postgresql redis | ||||
|           docker-compose run -u root server test-all | ||||
|       - id: generate_token | ||||
|         uses: tibdex/github-app-token@v2 | ||||
|         with: | ||||
| @ -32,8 +32,6 @@ jobs: | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/server | ||||
|       - name: Create Release | ||||
|  | ||||
							
								
								
									
.github/workflows/repo-stale.yml (2 changes)
							| @ -23,7 +23,7 @@ jobs: | ||||
|           repo-token: ${{ steps.generate_token.outputs.token }} | ||||
|           days-before-stale: 60 | ||||
|           days-before-close: 7 | ||||
|           exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing | ||||
|           exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question | ||||
|           stale-issue-label: wontfix | ||||
|           stale-issue-message: > | ||||
|             This issue has been automatically marked as stale because it has not had | ||||
|  | ||||
| @ -1,4 +1,4 @@ | ||||
| name: authentik-api-ts-publish | ||||
| name: authentik-web-api-publish | ||||
| on: | ||||
|   push: | ||||
|     branches: [main] | ||||
| @ -31,12 +31,7 @@ jobs: | ||||
|         env: | ||||
|           NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }} | ||||
|       - name: Upgrade /web | ||||
|         working-directory: web | ||||
|         run: | | ||||
|           export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'` | ||||
|           npm i @goauthentik/api@$VERSION | ||||
|       - name: Upgrade /web/sfe | ||||
|         working-directory: web/sfe | ||||
|         working-directory: web/ | ||||
|         run: | | ||||
|           export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'` | ||||
|           npm i @goauthentik/api@$VERSION | ||||
							
								
								
									
.vscode/extensions.json (3 changes)
							| @ -10,7 +10,8 @@ | ||||
|         "Gruntfuggly.todo-tree", | ||||
|         "mechatroner.rainbow-csv", | ||||
|         "ms-python.black-formatter", | ||||
|         "charliermarsh.ruff", | ||||
|         "ms-python.isort", | ||||
|         "ms-python.pylint", | ||||
|         "ms-python.python", | ||||
|         "ms-python.vscode-pylance", | ||||
|         "ms-python.black-formatter", | ||||
|  | ||||
							
								
								
									
.vscode/settings.json (13 changes)
							| @ -4,21 +4,20 @@ | ||||
|         "asgi", | ||||
|         "authentik", | ||||
|         "authn", | ||||
|         "entra", | ||||
|         "goauthentik", | ||||
|         "jwks", | ||||
|         "kubernetes", | ||||
|         "oidc", | ||||
|         "openid", | ||||
|         "passwordless", | ||||
|         "plex", | ||||
|         "saml", | ||||
|         "scim", | ||||
|         "slo", | ||||
|         "sso", | ||||
|         "totp", | ||||
|         "traefik", | ||||
|         "webauthn", | ||||
|         "traefik", | ||||
|         "passwordless", | ||||
|         "kubernetes", | ||||
|         "sso", | ||||
|         "slo", | ||||
|         "scim", | ||||
|     ], | ||||
|     "todo-tree.tree.showCountsInTree": true, | ||||
|     "todo-tree.tree.showBadges": true, | ||||
|  | ||||
							
								
								
									
Dockerfile (52 changes)
							| @ -1,7 +1,7 @@ | ||||
| # syntax=docker/dockerfile:1 | ||||
|  | ||||
| # Stage 1: Build website | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/node:22 as website-builder | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/node:21 as website-builder | ||||
|  | ||||
| ENV NODE_ENV=production | ||||
|  | ||||
| @ -14,41 +14,30 @@ RUN --mount=type=bind,target=/work/website/package.json,src=./website/package.js | ||||
|  | ||||
| COPY ./website /work/website/ | ||||
| COPY ./blueprints /work/blueprints/ | ||||
| COPY ./schema.yml /work/ | ||||
| COPY ./SECURITY.md /work/ | ||||
|  | ||||
| RUN npm run build-bundled | ||||
| RUN npm run build-docs-only | ||||
|  | ||||
| # Stage 2: Build webui | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/node:22 as web-builder | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/node:21 as web-builder | ||||
|  | ||||
| ARG GIT_BUILD_HASH | ||||
| ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ||||
| ENV NODE_ENV=production | ||||
|  | ||||
| WORKDIR /work/web | ||||
|  | ||||
| RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \ | ||||
|     --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \ | ||||
|     --mount=type=bind,target=/work/web/sfe/package.json,src=./web/sfe/package.json \ | ||||
|     --mount=type=bind,target=/work/web/sfe/package-lock.json,src=./web/sfe/package-lock.json \ | ||||
|     --mount=type=bind,target=/work/web/scripts,src=./web/scripts \ | ||||
|     --mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \ | ||||
|     npm ci --include=dev && \ | ||||
|     cd sfe && \ | ||||
|     npm ci --include=dev | ||||
|  | ||||
| COPY ./package.json /work | ||||
| COPY ./web /work/web/ | ||||
| COPY ./website /work/website/ | ||||
| COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api | ||||
|  | ||||
| RUN npm run build && \ | ||||
|     cd sfe && \ | ||||
|     npm run build | ||||
| RUN npm run build | ||||
|  | ||||
| # Stage 3: Build go proxy | ||||
| FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/oss/go/microsoft/golang:1.22-fips-bookworm AS go-builder | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/golang:1.22.0-bookworm AS go-builder | ||||
|  | ||||
| ARG TARGETOS | ||||
| ARG TARGETARCH | ||||
| @ -59,11 +48,6 @@ ARG GOARCH=$TARGETARCH | ||||
|  | ||||
| WORKDIR /go/src/goauthentik.io | ||||
|  | ||||
| RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \ | ||||
|     dpkg --add-architecture arm64 && \ | ||||
|     apt-get update && \ | ||||
|     apt-get install -y --no-install-recommends crossbuild-essential-arm64 gcc-aarch64-linux-gnu | ||||
|  | ||||
| RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \ | ||||
|     --mount=type=bind,target=/go/src/goauthentik.io/go.sum,src=./go.sum \ | ||||
|     --mount=type=cache,target=/go/pkg/mod \ | ||||
| @ -78,17 +62,17 @@ COPY ./internal /go/src/goauthentik.io/internal | ||||
| COPY ./go.mod /go/src/goauthentik.io/go.mod | ||||
| COPY ./go.sum /go/src/goauthentik.io/go.sum | ||||
|  | ||||
| ENV CGO_ENABLED=0 | ||||
|  | ||||
| RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \ | ||||
|     --mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \ | ||||
|     if [ "$TARGETARCH" = "arm64" ]; then export CC=aarch64-linux-gnu-gcc && export CC_FOR_TARGET=gcc-aarch64-linux-gnu; fi && \ | ||||
|     CGO_ENABLED=1 GOEXPERIMENT="systemcrypto" GOFLAGS="-tags=requirefips" GOARM="${TARGETVARIANT#v}" \ | ||||
|     go build -o /go/authentik ./cmd/server | ||||
|     GOARM="${TARGETVARIANT#v}" go build -o /go/authentik ./cmd/server | ||||
|  | ||||
| # Stage 4: MaxMind GeoIP | ||||
| FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.0.1 as geoip | ||||
| FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v6.1 as geoip | ||||
|  | ||||
| ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN" | ||||
| ENV GEOIPUPDATE_VERBOSE="1" | ||||
| ENV GEOIPUPDATE_VERBOSE="true" | ||||
| ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID" | ||||
| ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY" | ||||
|  | ||||
| @ -99,7 +83,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ | ||||
|     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" | ||||
|  | ||||
| # Stage 5: Python dependencies | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.3-slim-bookworm-fips-full AS python-deps | ||||
| FROM docker.io/python:3.12.2-slim-bookworm AS python-deps | ||||
|  | ||||
| WORKDIR /ak-root/poetry | ||||
|  | ||||
| @ -112,21 +96,19 @@ RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloa | ||||
| RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \ | ||||
|     apt-get update && \ | ||||
|     # Required for installing pip packages | ||||
|     apt-get install -y --no-install-recommends build-essential pkg-config libpq-dev | ||||
|     apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev libpq-dev | ||||
|  | ||||
| RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \ | ||||
|     --mount=type=bind,target=./poetry.lock,src=./poetry.lock \ | ||||
|     --mount=type=cache,target=/root/.cache/pip \ | ||||
|     --mount=type=cache,target=/root/.cache/pypoetry \ | ||||
|     python -m venv /ak-root/venv/ && \ | ||||
|     bash -c "source ${VENV_PATH}/bin/activate && \ | ||||
|     pip3 install --upgrade pip && \ | ||||
|     pip3 install poetry && \ | ||||
|     poetry install --only=main --no-ansi --no-interaction --no-root && \ | ||||
|     pip install --force-reinstall /wheels/*" | ||||
|     poetry install --only=main --no-ansi --no-interaction | ||||
|  | ||||
| # Stage 6: Run | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.3-slim-bookworm-fips-full AS final-image | ||||
| FROM docker.io/python:3.12.2-slim-bookworm AS final-image | ||||
|  | ||||
| ARG GIT_BUILD_HASH | ||||
| ARG VERSION | ||||
| @ -143,7 +125,7 @@ WORKDIR / | ||||
| # We cannot cache this layer otherwise we'll end up with a bigger image | ||||
| RUN apt-get update && \ | ||||
|     # Required for runtime | ||||
|     apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates && \ | ||||
|     apt-get install -y --no-install-recommends libpq5 openssl libxmlsec1-openssl libmaxminddb0 ca-certificates && \ | ||||
|     # Required for bootstrap & healtcheck | ||||
|     apt-get install -y --no-install-recommends runit && \ | ||||
|     apt-get clean && \ | ||||
| @ -167,7 +149,7 @@ COPY --from=go-builder /go/authentik /bin/authentik | ||||
| COPY --from=python-deps /ak-root/venv /ak-root/venv | ||||
| COPY --from=web-builder /work/web/dist/ /web/dist/ | ||||
| COPY --from=web-builder /work/web/authentik/ /web/authentik/ | ||||
| COPY --from=website-builder /work/website/build/ /website/help/ | ||||
| COPY --from=website-builder /work/website/help/ /website/help/ | ||||
| COPY --from=geoip /usr/share/GeoIP /geoip | ||||
|  | ||||
| USER 1000 | ||||
| @ -179,8 +161,6 @@ ENV TMPDIR=/dev/shm/ \ | ||||
|     VENV_PATH="/ak-root/venv" \ | ||||
|     POETRY_VIRTUALENVS_CREATE=false | ||||
|  | ||||
| ENV GOFIPS=1 | ||||
|  | ||||
| HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ] | ||||
|  | ||||
| ENTRYPOINT [ "dumb-init", "--", "ak" ] | ||||
|  | ||||
							
								
								
									
Makefile (56 changes)
							| @ -9,7 +9,6 @@ PY_SOURCES = authentik tests scripts lifecycle .github | ||||
| DOCKER_IMAGE ?= "authentik:test" | ||||
|  | ||||
| GEN_API_TS = "gen-ts-api" | ||||
| GEN_API_PY = "gen-py-api" | ||||
| GEN_API_GO = "gen-go-api" | ||||
|  | ||||
| pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null) | ||||
| @ -19,7 +18,6 @@ pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null) | ||||
| CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \ | ||||
| 		-I .github/codespell-words.txt \ | ||||
| 		-S 'web/src/locales/**' \ | ||||
| 		-S 'website/developer-docs/api/reference/**' \ | ||||
| 		authentik \ | ||||
| 		internal \ | ||||
| 		cmd \ | ||||
| @ -47,12 +45,12 @@ test-go: | ||||
| 	go test -timeout 0 -v -race -cover ./... | ||||
|  | ||||
| test-docker:  ## Run all tests in a docker-compose | ||||
| 	echo "PG_PASS=$(shell openssl rand 32 | base64 -w 0)" >> .env | ||||
| 	echo "AUTHENTIK_SECRET_KEY=$(shell openssl rand 32 | base64 -w 0)" >> .env | ||||
| 	docker compose pull -q | ||||
| 	docker compose up --no-start | ||||
| 	docker compose start postgresql redis | ||||
| 	docker compose run -u root server test-all | ||||
| 	echo "PG_PASS=$(openssl rand -base64 32)" >> .env | ||||
| 	echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env | ||||
| 	docker-compose pull -q | ||||
| 	docker-compose up --no-start | ||||
| 	docker-compose start postgresql redis | ||||
| 	docker-compose run -u root server test-all | ||||
| 	rm -f .env | ||||
|  | ||||
| test: ## Run the server tests and produce a coverage report (locally) | ||||
| @ -60,15 +58,16 @@ test: ## Run the server tests and produce a coverage report (locally) | ||||
| 	coverage html | ||||
| 	coverage report | ||||
|  | ||||
| lint-fix: lint-codespell  ## Lint and automatically fix errors in the python source code. Reports spelling errors. | ||||
| lint-fix:  ## Lint and automatically fix errors in the python source code. Reports spelling errors. | ||||
| 	isort $(PY_SOURCES) | ||||
| 	black $(PY_SOURCES) | ||||
| 	ruff check --fix $(PY_SOURCES) | ||||
|  | ||||
| lint-codespell:  ## Reports spelling errors. | ||||
| 	ruff --fix $(PY_SOURCES) | ||||
| 	codespell -w $(CODESPELL_ARGS) | ||||
|  | ||||
| lint: ## Lint the python and golang sources | ||||
| 	bandit -r $(PY_SOURCES) -x web/node_modules -x tests/wdio/node_modules -x website/node_modules | ||||
| 	bandit -r $(PY_SOURCES) -x node_modules | ||||
| 	./web/node_modules/.bin/pyright $(PY_SOURCES) | ||||
| 	pylint $(PY_SOURCES) | ||||
| 	golangci-lint run -v | ||||
|  | ||||
| core-install: | ||||
| @ -141,10 +140,7 @@ gen-clean-ts:  ## Remove generated API client for Typescript | ||||
| gen-clean-go:  ## Remove generated API client for Go | ||||
| 	rm -rf ./${GEN_API_GO}/ | ||||
|  | ||||
| gen-clean-py:  ## Remove generated API client for Python | ||||
| 	rm -rf ./${GEN_API_PY}/ | ||||
|  | ||||
| gen-clean: gen-clean-ts gen-clean-go gen-clean-py  ## Remove generated API clients | ||||
| gen-clean: gen-clean-ts gen-clean-go  ## Remove generated API clients | ||||
|  | ||||
| gen-client-ts: gen-clean-ts  ## Build and install the authentik API for Typescript into the authentik UI Application | ||||
| 	docker run \ | ||||
| @ -162,20 +158,6 @@ gen-client-ts: gen-clean-ts  ## Build and install the authentik API for Typescri | ||||
| 	cd ./${GEN_API_TS} && npm i | ||||
| 	\cp -rf ./${GEN_API_TS}/* web/node_modules/@goauthentik/api | ||||
|  | ||||
| gen-client-py: gen-clean-py ## Build and install the authentik API for Python | ||||
| 	docker run \ | ||||
| 		--rm -v ${PWD}:/local \ | ||||
| 		--user ${UID}:${GID} \ | ||||
| 		docker.io/openapitools/openapi-generator-cli:v7.4.0 generate \ | ||||
| 		-i /local/schema.yml \ | ||||
| 		-g python \ | ||||
| 		-o /local/${GEN_API_PY} \ | ||||
| 		-c /local/scripts/api-py-config.yaml \ | ||||
| 		--additional-properties=packageVersion=${NPM_VERSION} \ | ||||
| 		--git-repo-id authentik \ | ||||
| 		--git-user-id goauthentik | ||||
| 	pip install ./${GEN_API_PY} | ||||
|  | ||||
| gen-client-go: gen-clean-go  ## Build and install the authentik API for Golang | ||||
| 	mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates | ||||
| 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./${GEN_API_GO}/config.yaml | ||||
| @ -241,7 +223,7 @@ website: website-lint-fix website-build  ## Automatically fix formatting issues | ||||
| website-install: | ||||
| 	cd website && npm ci | ||||
|  | ||||
| website-lint-fix: lint-codespell | ||||
| website-lint-fix: | ||||
| 	cd website && npm run prettier | ||||
|  | ||||
| website-build: | ||||
| @ -255,7 +237,6 @@ website-watch:  ## Build and watch the documentation website, updating automatic | ||||
| ######################### | ||||
|  | ||||
| docker:  ## Build a docker image of the current source tree | ||||
| 	mkdir -p ${GEN_API_TS} | ||||
| 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} | ||||
|  | ||||
| ######################### | ||||
| @ -268,6 +249,9 @@ ci--meta-debug: | ||||
| 	python -V | ||||
| 	node --version | ||||
|  | ||||
| ci-pylint: ci--meta-debug | ||||
| 	pylint $(PY_SOURCES) | ||||
|  | ||||
| ci-black: ci--meta-debug | ||||
| 	black --check $(PY_SOURCES) | ||||
|  | ||||
| @ -277,8 +261,14 @@ ci-ruff: ci--meta-debug | ||||
| ci-codespell: ci--meta-debug | ||||
| 	codespell $(CODESPELL_ARGS) -s | ||||
|  | ||||
| ci-isort: ci--meta-debug | ||||
| 	isort --check $(PY_SOURCES) | ||||
|  | ||||
| ci-bandit: ci--meta-debug | ||||
| 	bandit -r $(PY_SOURCES) | ||||
|  | ||||
| ci-pyright: ci--meta-debug | ||||
| 	./web/node_modules/.bin/pyright $(PY_SOURCES) | ||||
|  | ||||
| ci-pending-migrations: ci--meta-debug | ||||
| 	ak makemigrations --check | ||||
|  | ||||
| @ -25,10 +25,10 @@ For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/h | ||||
|  | ||||
| ## Screenshots | ||||
|  | ||||
| | Light                                                       | Dark                                                       | | ||||
| | ----------------------------------------------------------- | ---------------------------------------------------------- | | ||||
| |   |   | | ||||
| |  |  | | ||||
| | Light                                                  | Dark                                                  | | ||||
| | ------------------------------------------------------ | ----------------------------------------------------- | | ||||
| |   |   | | ||||
| |  |  | | ||||
|  | ||||
| ## Development | ||||
|  | ||||
|  | ||||
SECURITY.md (20 changed lines)
							| @ -18,10 +18,10 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni | ||||
|  | ||||
| (.x being the latest patch release for each version) | ||||
|  | ||||
| | Version  | Supported | | ||||
| | -------- | --------- | | ||||
| | 2024.4.x | ✅        | | ||||
| | 2024.6.x | ✅        | | ||||
| | Version | Supported | | ||||
| | --- | --- | | ||||
| | 2023.6.x | ✅ | | ||||
| | 2023.8.x | ✅ | | ||||
|  | ||||
| ## Reporting a Vulnerability | ||||
|  | ||||
| @ -31,12 +31,12 @@ To report a vulnerability, send an email to [security@goauthentik.io](mailto:se | ||||
|  | ||||
| authentik reserves the right to reclassify CVSS as necessary. To determine severity, we will use the CVSS calculator from NVD (https://nvd.nist.gov/vuln-metrics/cvss/v3-calculator). The calculated CVSS score will then be translated into one of the following categories: | ||||
|  | ||||
| | Score      | Severity | | ||||
| | ---------- | -------- | | ||||
| | 0.0        | None     | | ||||
| | 0.1 – 3.9  | Low      | | ||||
| | 4.0 – 6.9  | Medium   | | ||||
| | 7.0 – 8.9  | High     | | ||||
| | Score | Severity | | ||||
| | --- | --- | | ||||
| | 0.0 | None | | ||||
| | 0.1 – 3.9 | Low | | ||||
| | 4.0 – 6.9 | Medium | | ||||
| | 7.0 – 8.9 | High | | ||||
| | 9.0 – 10.0 | Critical | | ||||
|  | ||||
| ## Disclosure process | ||||
|  | ||||
| @ -1,12 +1,13 @@ | ||||
| """authentik root module""" | ||||
|  | ||||
| from os import environ | ||||
| from typing import Optional | ||||
|  | ||||
| __version__ = "2024.6.1" | ||||
| __version__ = "2024.2.1" | ||||
| ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ||||
|  | ||||
|  | ||||
| def get_build_hash(fallback: str | None = None) -> str: | ||||
| def get_build_hash(fallback: Optional[str] = None) -> str: | ||||
|     """Get build hash""" | ||||
|     build_hash = environ.get(ENV_GIT_HASH_KEY, fallback if fallback else "") | ||||
|     return fallback if build_hash == "" and fallback else build_hash | ||||
|  | ||||
| @ -2,21 +2,18 @@ | ||||
|  | ||||
| import platform | ||||
| from datetime import datetime | ||||
| from ssl import OPENSSL_VERSION | ||||
| from sys import version as python_version | ||||
| from typing import TypedDict | ||||
|  | ||||
| from cryptography.hazmat.backends.openssl.backend import backend | ||||
| from django.utils.timezone import now | ||||
| from drf_spectacular.utils import extend_schema | ||||
| from gunicorn import version_info as gunicorn_version | ||||
| from rest_framework.fields import SerializerMethodField | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.views import APIView | ||||
|  | ||||
| from authentik import get_full_version | ||||
| from authentik.core.api.utils import PassiveSerializer | ||||
| from authentik.enterprise.license import LicenseKey | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.lib.utils.reflection import get_env | ||||
| from authentik.outposts.apps import MANAGED_OUTPOST | ||||
| @ -28,13 +25,11 @@ class RuntimeDict(TypedDict): | ||||
|     """Runtime information""" | ||||
|  | ||||
|     python_version: str | ||||
|     gunicorn_version: str | ||||
|     environment: str | ||||
|     architecture: str | ||||
|     platform: str | ||||
|     uname: str | ||||
|     openssl_version: str | ||||
|     openssl_fips_enabled: bool | None | ||||
|     authentik_version: str | ||||
|  | ||||
|  | ||||
| class SystemInfoSerializer(PassiveSerializer): | ||||
| @ -69,15 +64,11 @@ class SystemInfoSerializer(PassiveSerializer): | ||||
|     def get_runtime(self, request: Request) -> RuntimeDict: | ||||
|         """Get versions""" | ||||
|         return { | ||||
|             "architecture": platform.machine(), | ||||
|             "authentik_version": get_full_version(), | ||||
|             "environment": get_env(), | ||||
|             "openssl_fips_enabled": ( | ||||
|                 backend._fips_enabled if LicenseKey.get_total().is_valid() else None | ||||
|             ), | ||||
|             "openssl_version": OPENSSL_VERSION, | ||||
|             "platform": platform.platform(), | ||||
|             "python_version": python_version, | ||||
|             "gunicorn_version": ".".join(str(x) for x in gunicorn_version), | ||||
|             "environment": get_env(), | ||||
|             "architecture": platform.machine(), | ||||
|             "platform": platform.platform(), | ||||
|             "uname": " ".join(platform.uname()), | ||||
|         } | ||||
|  | ||||
|  | ||||
| @ -10,7 +10,7 @@ from rest_framework.response import Response | ||||
| from rest_framework.views import APIView | ||||
|  | ||||
| from authentik import __version__, get_build_hash | ||||
| from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version | ||||
| from authentik.admin.tasks import VERSION_CACHE_KEY, update_latest_version | ||||
| from authentik.core.api.utils import PassiveSerializer | ||||
|  | ||||
|  | ||||
| @ -19,7 +19,6 @@ class VersionSerializer(PassiveSerializer): | ||||
|  | ||||
|     version_current = SerializerMethodField() | ||||
|     version_latest = SerializerMethodField() | ||||
|     version_latest_valid = SerializerMethodField() | ||||
|     build_hash = SerializerMethodField() | ||||
|     outdated = SerializerMethodField() | ||||
|  | ||||
| @ -39,10 +38,6 @@ class VersionSerializer(PassiveSerializer): | ||||
|             return __version__ | ||||
|         return version_in_cache | ||||
|  | ||||
|     def get_version_latest_valid(self, _) -> bool: | ||||
|         """Check if latest version is valid""" | ||||
|         return cache.get(VERSION_CACHE_KEY) != VERSION_NULL | ||||
|  | ||||
|     def get_outdated(self, instance) -> bool: | ||||
|         """Check if we're running the latest version""" | ||||
|         return parse(self.get_version_current(instance)) < parse(self.get_version_latest(instance)) | ||||
|  | ||||
| @ -18,7 +18,6 @@ from authentik.lib.utils.http import get_http_session | ||||
| from authentik.root.celery import CELERY_APP | ||||
|  | ||||
| LOGGER = get_logger() | ||||
| VERSION_NULL = "0.0.0" | ||||
| VERSION_CACHE_KEY = "authentik_latest_version" | ||||
| VERSION_CACHE_TIMEOUT = 8 * 60 * 60  # 8 hours | ||||
| # Chop off the first ^ because we want to search the entire string | ||||
| @ -56,7 +55,7 @@ def clear_update_notifications(): | ||||
| def update_latest_version(self: SystemTask): | ||||
|     """Update latest version info""" | ||||
|     if CONFIG.get_bool("disable_update_check"): | ||||
|         cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT) | ||||
|         cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT) | ||||
|         self.set_status(TaskStatus.WARNING, "Version check disabled.") | ||||
|         return | ||||
|     try: | ||||
| @ -83,7 +82,7 @@ def update_latest_version(self: SystemTask): | ||||
|                 event_dict["message"] = f"Changelog: {match.group()}" | ||||
|             Event.new(EventAction.UPDATE_AVAILABLE, **event_dict).save() | ||||
|     except (RequestException, IndexError) as exc: | ||||
|         cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT) | ||||
|         cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT) | ||||
|         self.set_error(exc) | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -10,3 +10,26 @@ class AuthentikAPIConfig(AppConfig): | ||||
|     label = "authentik_api" | ||||
|     mountpoint = "api/" | ||||
|     verbose_name = "authentik API" | ||||
|  | ||||
|     def ready(self) -> None: | ||||
|         from drf_spectacular.extensions import OpenApiAuthenticationExtension | ||||
|  | ||||
|         from authentik.api.authentication import TokenAuthentication | ||||
|  | ||||
|         # Class is defined here as it needs to be created early enough that drf-spectacular will | ||||
|         # find it, but also won't cause any import issues | ||||
|         # pylint: disable=unused-variable | ||||
|         class TokenSchema(OpenApiAuthenticationExtension): | ||||
|             """Auth schema""" | ||||
|  | ||||
|             target_class = TokenAuthentication | ||||
|             name = "authentik" | ||||
|  | ||||
|             def get_security_definition(self, auto_schema): | ||||
|                 """Auth schema""" | ||||
|                 return { | ||||
|                     "type": "apiKey", | ||||
|                     "in": "header", | ||||
|                     "name": "Authorization", | ||||
|                     "scheme": "bearer", | ||||
|                 } | ||||
|  | ||||
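For context on the security definition above: API clients authenticate by sending a token in the `Authorization` header with the `Bearer` prefix that `bearer_auth` (next hunk) parses. A minimal sketch of such a request follows; the host, endpoint, and token value are placeholders, not taken from this diff:

```python
# Minimal illustration of calling the authentik API with a bearer token.
# Host, endpoint, and token are hypothetical placeholders.
import requests

API_URL = "https://authentik.example.com/api/v3/core/users/me/"
TOKEN = "REPLACE_WITH_AN_API_TOKEN"

response = requests.get(
    API_URL,
    # This header format is what bearer_auth()/validate_auth() below expect.
    headers={"Authorization": f"Bearer {TOKEN}"},
    timeout=10,
)
response.raise_for_status()
print(response.json())
```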
| @ -1,10 +1,9 @@ | ||||
| """API Authentication""" | ||||
|  | ||||
| from hmac import compare_digest | ||||
| from typing import Any | ||||
| from typing import Any, Optional | ||||
|  | ||||
| from django.conf import settings | ||||
| from drf_spectacular.extensions import OpenApiAuthenticationExtension | ||||
| from rest_framework.authentication import BaseAuthentication, get_authorization_header | ||||
| from rest_framework.exceptions import AuthenticationFailed | ||||
| from rest_framework.request import Request | ||||
| @ -18,7 +17,7 @@ from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| def validate_auth(header: bytes) -> str | None: | ||||
| def validate_auth(header: bytes) -> Optional[str]: | ||||
|     """Validate that the header is in a correct format, | ||||
|     returns type and credentials""" | ||||
|     auth_credentials = header.decode().strip() | ||||
| @ -33,7 +32,7 @@ def validate_auth(header: bytes) -> str | None: | ||||
|     return auth_credentials | ||||
|  | ||||
|  | ||||
| def bearer_auth(raw_header: bytes) -> User | None: | ||||
| def bearer_auth(raw_header: bytes) -> Optional[User]: | ||||
|     """raw_header in the Format of `Bearer ....`""" | ||||
|     user = auth_user_lookup(raw_header) | ||||
|     if not user: | ||||
| @ -43,7 +42,7 @@ def bearer_auth(raw_header: bytes) -> User | None: | ||||
|     return user | ||||
|  | ||||
|  | ||||
| def auth_user_lookup(raw_header: bytes) -> User | None: | ||||
| def auth_user_lookup(raw_header: bytes) -> Optional[User]: | ||||
|     """raw_header in the Format of `Bearer ....`""" | ||||
|     from authentik.providers.oauth2.models import AccessToken | ||||
|  | ||||
| @ -76,7 +75,7 @@ def auth_user_lookup(raw_header: bytes) -> User | None: | ||||
|     raise AuthenticationFailed("Token invalid/expired") | ||||
|  | ||||
|  | ||||
| def token_secret_key(value: str) -> User | None: | ||||
| def token_secret_key(value: str) -> Optional[User]: | ||||
|     """Check if the token is the secret key | ||||
|     and return the service account for the managed outpost""" | ||||
|     from authentik.outposts.apps import MANAGED_OUTPOST | ||||
| @ -103,14 +102,3 @@ class TokenAuthentication(BaseAuthentication): | ||||
|             return None | ||||
|  | ||||
|         return (user, None)  # pragma: no cover | ||||
|  | ||||
|  | ||||
| class TokenSchema(OpenApiAuthenticationExtension): | ||||
|     """Auth schema""" | ||||
|  | ||||
|     target_class = TokenAuthentication | ||||
|     name = "authentik" | ||||
|  | ||||
|     def get_security_definition(self, auto_schema): | ||||
|         """Auth schema""" | ||||
|         return {"type": "http", "scheme": "bearer"} | ||||
|  | ||||
| @ -12,7 +12,6 @@ from drf_spectacular.settings import spectacular_settings | ||||
| from drf_spectacular.types import OpenApiTypes | ||||
| from rest_framework.settings import api_settings | ||||
|  | ||||
| from authentik.api.apps import AuthentikAPIConfig | ||||
| from authentik.api.pagination import PAGINATION_COMPONENT_NAME, PAGINATION_SCHEMA | ||||
|  | ||||
|  | ||||
| @ -102,12 +101,3 @@ def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs): | ||||
|             comp = result["components"]["schemas"][component] | ||||
|             comp["additionalProperties"] = {} | ||||
|     return result | ||||
|  | ||||
|  | ||||
| def preprocess_schema_exclude_non_api(endpoints, **kwargs): | ||||
|     """Filter out all API Views which are not mounted under /api""" | ||||
|     return [ | ||||
|         (path, path_regex, method, callback) | ||||
|         for path, path_regex, method, callback in endpoints | ||||
|         if path.startswith("/" + AuthentikAPIConfig.mountpoint) | ||||
|     ] | ||||
|  | ||||
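The removed `preprocess_schema_exclude_non_api` hook (and the `postprocess_schema_responses` hook kept above) are the kind of functions drf-spectacular loads from its settings. As a rough illustration only, assuming the module lives at `authentik.api.schema` (the exact settings block is not shown in this diff):

```python
# Hypothetical Django settings excerpt showing how drf-spectacular pre-/post-processing
# hooks like the ones in this module are typically registered. Paths are assumed.
SPECTACULAR_SETTINGS = {
    "PREPROCESSING_HOOKS": ["authentik.api.schema.preprocess_schema_exclude_non_api"],
    "POSTPROCESSING_HOOKS": ["authentik.api.schema.postprocess_schema_responses"],
}
```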
| @ -1,13 +1,13 @@ | ||||
| {% extends "base/skeleton.html" %} | ||||
|  | ||||
| {% load authentik_core %} | ||||
| {% load static %} | ||||
|  | ||||
| {% block title %} | ||||
| API Browser - {{ brand.branding_title }} | ||||
| {% endblock %} | ||||
|  | ||||
| {% block head %} | ||||
| {% versioned_script "dist/standalone/api-browser/index-%v.js" %} | ||||
| <script src="{% static 'dist/standalone/api-browser/index.js' %}?version={{ version }}" type="module"></script> | ||||
| <meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)"> | ||||
| <meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)"> | ||||
| {% endblock %} | ||||
|  | ||||
| @ -25,17 +25,17 @@ class TestAPIAuth(TestCase): | ||||
|     def test_invalid_type(self): | ||||
|         """Test invalid type""" | ||||
|         with self.assertRaises(AuthenticationFailed): | ||||
|             bearer_auth(b"foo bar") | ||||
|             bearer_auth("foo bar".encode()) | ||||
|  | ||||
|     def test_invalid_empty(self): | ||||
|         """Test invalid type""" | ||||
|         self.assertIsNone(bearer_auth(b"Bearer ")) | ||||
|         self.assertIsNone(bearer_auth(b"")) | ||||
|         self.assertIsNone(bearer_auth("Bearer ".encode())) | ||||
|         self.assertIsNone(bearer_auth("".encode())) | ||||
|  | ||||
|     def test_invalid_no_token(self): | ||||
|         """Test invalid with no token""" | ||||
|         with self.assertRaises(AuthenticationFailed): | ||||
|             auth = b64encode(b":abc").decode() | ||||
|             auth = b64encode(":abc".encode()).decode() | ||||
|             self.assertIsNone(bearer_auth(f"Basic :{auth}".encode())) | ||||
|  | ||||
|     def test_bearer_valid(self): | ||||
|  | ||||
| @ -1,6 +1,6 @@ | ||||
| """authentik API Modelviewset tests""" | ||||
|  | ||||
| from collections.abc import Callable | ||||
| from typing import Callable | ||||
|  | ||||
| from django.test import TestCase | ||||
| from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet | ||||
| @ -26,6 +26,6 @@ def viewset_tester_factory(test_viewset: type[ModelViewSet]) -> Callable: | ||||
|  | ||||
|  | ||||
| for _, viewset, _ in router.registry: | ||||
|     if not issubclass(viewset, ModelViewSet | ReadOnlyModelViewSet): | ||||
|     if not issubclass(viewset, (ModelViewSet, ReadOnlyModelViewSet)): | ||||
|         continue | ||||
|     setattr(TestModelViewSets, f"test_viewset_{viewset.__name__}", viewset_tester_factory(viewset)) | ||||
|  | ||||
| @ -68,11 +68,7 @@ class ConfigView(APIView): | ||||
|         """Get all capabilities this server instance supports""" | ||||
|         caps = [] | ||||
|         deb_test = settings.DEBUG or settings.TEST | ||||
|         if ( | ||||
|             CONFIG.get("storage.media.backend", "file") == "s3" | ||||
|             or Path(settings.STORAGES["default"]["OPTIONS"]["location"]).is_mount() | ||||
|             or deb_test | ||||
|         ): | ||||
|         if Path(settings.MEDIA_ROOT).is_mount() or deb_test: | ||||
|             caps.append(Capabilities.CAN_SAVE_MEDIA) | ||||
|         for processor in get_context_processors(): | ||||
|             if cap := processor.capability(): | ||||
|  | ||||
| @ -33,7 +33,7 @@ for _authentik_app in get_apps(): | ||||
|             app_name=_authentik_app.name, | ||||
|         ) | ||||
|         continue | ||||
|     urls: list = api_urls.api_urlpatterns | ||||
|     urls: list = getattr(api_urls, "api_urlpatterns") | ||||
|     for url in urls: | ||||
|         if isinstance(url, URLPattern): | ||||
|             _other_urls.append(url) | ||||
|  | ||||
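The loop above collects URL patterns from each app's `api_urls` module. A minimal sketch of what such a module could expose; the view and route below are made up for illustration:

```python
# Hypothetical example of the per-app api_urls convention consumed by the loop above:
# the module exposes an api_urlpatterns list of URLPattern entries.
from django.urls import path
from rest_framework.response import Response
from rest_framework.views import APIView


class ExampleAPIView(APIView):
    """Placeholder view used only for this illustration"""

    def get(self, request):
        return Response({"status": "ok"})


api_urlpatterns = [
    path("example/info/", ExampleAPIView.as_view(), name="example-info"),
]
```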
| @ -52,9 +52,7 @@ class BlueprintInstanceSerializer(ModelSerializer): | ||||
|         valid, logs = Importer.from_string(content, context).validate() | ||||
|         if not valid: | ||||
|             text_logs = "\n".join([x["event"] for x in logs]) | ||||
|             raise ValidationError( | ||||
|                 _("Failed to validate blueprint: {logs}".format_map({"logs": text_logs})) | ||||
|             ) | ||||
|             raise ValidationError(_("Failed to validate blueprint: %(logs)s" % {"logs": text_logs})) | ||||
|         return content | ||||
|  | ||||
|     def validate(self, attrs: dict) -> dict: | ||||
|  | ||||
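The serializer above validates blueprint content via `Importer.from_string(...)`. A rough sketch of the same call used directly; the YAML document is a minimal placeholder, not a blueprint from this repository:

```python
# Sketch of validating a blueprint document programmatically, mirroring the
# serializer logic above. The YAML content is a minimal placeholder.
from authentik.blueprints.v1.importer import Importer

BLUEPRINT_YAML = """
version: 1
entries: []
"""

valid, logs = Importer.from_string(BLUEPRINT_YAML, {}).validate()
if not valid:
    # Log entry shape differs between the two sides of this diff
    # (plain dicts vs. LogEvent objects), so print generically.
    print("\n".join(str(log) for log in logs))
```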
| @ -1,6 +1,5 @@ | ||||
| """authentik Blueprints app""" | ||||
|  | ||||
| from collections.abc import Callable | ||||
| from importlib import import_module | ||||
| from inspect import ismethod | ||||
|  | ||||
| @ -8,16 +7,14 @@ from django.apps import AppConfig | ||||
| from django.db import DatabaseError, InternalError, ProgrammingError | ||||
| from structlog.stdlib import BoundLogger, get_logger | ||||
|  | ||||
| from authentik.root.signals import startup | ||||
|  | ||||
|  | ||||
| class ManagedAppConfig(AppConfig): | ||||
|     """Basic reconciliation logic for apps""" | ||||
|  | ||||
|     logger: BoundLogger | ||||
|  | ||||
|     RECONCILE_GLOBAL_CATEGORY: str = "global" | ||||
|     RECONCILE_TENANT_CATEGORY: str = "tenant" | ||||
|     RECONCILE_GLOBAL_PREFIX: str = "reconcile_global_" | ||||
|     RECONCILE_TENANT_PREFIX: str = "reconcile_tenant_" | ||||
|  | ||||
|     def __init__(self, app_name: str, *args, **kwargs) -> None: | ||||
|         super().__init__(app_name, *args, **kwargs) | ||||
| @ -25,13 +22,10 @@ class ManagedAppConfig(AppConfig): | ||||
|  | ||||
|     def ready(self) -> None: | ||||
|         self.import_related() | ||||
|         startup.connect(self._on_startup_callback, dispatch_uid=self.label) | ||||
|         self.reconcile_global() | ||||
|         self.reconcile_tenant() | ||||
|         return super().ready() | ||||
|  | ||||
|     def _on_startup_callback(self, sender, **_): | ||||
|         self._reconcile_global() | ||||
|         self._reconcile_tenant() | ||||
|  | ||||
|     def import_related(self): | ||||
|         """Automatically import related modules which rely on just being imported | ||||
|         to register themselves (mainly django signals and celery tasks)""" | ||||
| @ -57,8 +51,7 @@ class ManagedAppConfig(AppConfig): | ||||
|             meth = getattr(self, meth_name) | ||||
|             if not ismethod(meth): | ||||
|                 continue | ||||
|             category = getattr(meth, "_authentik_managed_reconcile", None) | ||||
|             if category != prefix: | ||||
|             if not meth_name.startswith(prefix): | ||||
|                 continue | ||||
|             name = meth_name.replace(prefix, "") | ||||
|             try: | ||||
| @ -68,19 +61,7 @@ class ManagedAppConfig(AppConfig): | ||||
|             except (DatabaseError, ProgrammingError, InternalError) as exc: | ||||
|                 self.logger.warning("Failed to run reconcile", name=name, exc=exc) | ||||
|  | ||||
|     @staticmethod | ||||
|     def reconcile_tenant(func: Callable): | ||||
|         """Mark a function to be called on startup (for each tenant)""" | ||||
|         func._authentik_managed_reconcile = ManagedAppConfig.RECONCILE_TENANT_CATEGORY | ||||
|         return func | ||||
|  | ||||
|     @staticmethod | ||||
|     def reconcile_global(func: Callable): | ||||
|         """Mark a function to be called on startup (globally)""" | ||||
|         func._authentik_managed_reconcile = ManagedAppConfig.RECONCILE_GLOBAL_CATEGORY | ||||
|         return func | ||||
|  | ||||
|     def _reconcile_tenant(self) -> None: | ||||
|     def reconcile_tenant(self) -> None: | ||||
|         """reconcile ourselves for tenanted methods""" | ||||
|         from authentik.tenants.models import Tenant | ||||
|  | ||||
| @ -91,9 +72,9 @@ class ManagedAppConfig(AppConfig): | ||||
|             return | ||||
|         for tenant in tenants: | ||||
|             with tenant: | ||||
|                 self._reconcile(self.RECONCILE_TENANT_CATEGORY) | ||||
|                 self._reconcile(self.RECONCILE_TENANT_PREFIX) | ||||
|  | ||||
|     def _reconcile_global(self) -> None: | ||||
|     def reconcile_global(self) -> None: | ||||
|         """ | ||||
|         reconcile ourselves for global methods. | ||||
|         Used for signals, tasks, etc. Database queries should not be made in here. | ||||
| @ -101,7 +82,7 @@ class ManagedAppConfig(AppConfig): | ||||
|         from django_tenants.utils import get_public_schema_name, schema_context | ||||
|  | ||||
|         with schema_context(get_public_schema_name()): | ||||
|             self._reconcile(self.RECONCILE_GLOBAL_CATEGORY) | ||||
|             self._reconcile(self.RECONCILE_GLOBAL_PREFIX) | ||||
|  | ||||
|  | ||||
| class AuthentikBlueprintsConfig(ManagedAppConfig): | ||||
| @ -112,13 +93,11 @@ class AuthentikBlueprintsConfig(ManagedAppConfig): | ||||
|     verbose_name = "authentik Blueprints" | ||||
|     default = True | ||||
|  | ||||
|     @ManagedAppConfig.reconcile_global | ||||
|     def load_blueprints_v1_tasks(self): | ||||
|     def reconcile_global_load_blueprints_v1_tasks(self): | ||||
|         """Load v1 tasks""" | ||||
|         self.import_module("authentik.blueprints.v1.tasks") | ||||
|  | ||||
|     @ManagedAppConfig.reconcile_tenant | ||||
|     def blueprints_discovery(self): | ||||
|     def reconcile_tenant_blueprints_discovery(self): | ||||
|         """Run blueprint discovery""" | ||||
|         from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints | ||||
|  | ||||
|  | ||||
| @ -4,14 +4,12 @@ from json import dumps | ||||
| from typing import Any | ||||
|  | ||||
| from django.core.management.base import BaseCommand, no_translations | ||||
| from django.db.models import Model, fields | ||||
| from drf_jsonschema_serializer.convert import converter, field_to_converter | ||||
| from django.db.models import Model | ||||
| from drf_jsonschema_serializer.convert import field_to_converter | ||||
| from rest_framework.fields import Field, JSONField, UUIDField | ||||
| from rest_framework.relations import PrimaryKeyRelatedField | ||||
| from rest_framework.serializers import Serializer | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik import __version__ | ||||
| from authentik.blueprints.v1.common import BlueprintEntryDesiredState | ||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT, is_model_allowed | ||||
| from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry | ||||
| @ -20,23 +18,6 @@ from authentik.lib.models import SerializerModel | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| @converter | ||||
| class PrimaryKeyRelatedFieldConverter: | ||||
|     """Custom primary key field converter which is aware of non-integer based PKs | ||||
|  | ||||
|     This is not an exhaustive fix for other non-int PKs, however in authentik we either | ||||
|     use UUIDs or ints""" | ||||
|  | ||||
|     field_class = PrimaryKeyRelatedField | ||||
|  | ||||
|     def convert(self, field: PrimaryKeyRelatedField): | ||||
|         model: Model = field.queryset.model | ||||
|         pk_field = model._meta.pk | ||||
|         if isinstance(pk_field, fields.UUIDField): | ||||
|             return {"type": "string", "format": "uuid"} | ||||
|         return {"type": "integer"} | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     """Generate JSON Schema for blueprints""" | ||||
|  | ||||
| @ -48,7 +29,7 @@ class Command(BaseCommand): | ||||
|             "$schema": "http://json-schema.org/draft-07/schema", | ||||
|             "$id": "https://goauthentik.io/blueprints/schema.json", | ||||
|             "type": "object", | ||||
|             "title": f"authentik {__version__} Blueprint schema", | ||||
|             "title": "authentik Blueprint schema", | ||||
|             "required": ["version", "entries"], | ||||
|             "properties": { | ||||
|                 "version": { | ||||
|  | ||||
| @ -71,19 +71,6 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel): | ||||
|     enabled = models.BooleanField(default=True) | ||||
|     managed_models = ArrayField(models.TextField(), default=list) | ||||
|  | ||||
|     class Meta: | ||||
|         verbose_name = _("Blueprint Instance") | ||||
|         verbose_name_plural = _("Blueprint Instances") | ||||
|         unique_together = ( | ||||
|             ( | ||||
|                 "name", | ||||
|                 "path", | ||||
|             ), | ||||
|         ) | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         return f"Blueprint Instance {self.name}" | ||||
|  | ||||
|     def retrieve_oci(self) -> str: | ||||
|         """Get blueprint from an OCI registry""" | ||||
|         client = BlueprintOCIClient(self.path.replace(OCI_PREFIX, "https://")) | ||||
| @ -102,7 +89,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel): | ||||
|                 raise BlueprintRetrievalFailed("Invalid blueprint path") | ||||
|             with full_path.open("r", encoding="utf-8") as _file: | ||||
|                 return _file.read() | ||||
|         except OSError as exc: | ||||
|         except (IOError, OSError) as exc: | ||||
|             raise BlueprintRetrievalFailed(exc) from exc | ||||
|  | ||||
|     def retrieve(self) -> str: | ||||
| @ -118,3 +105,16 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel): | ||||
|         from authentik.blueprints.api import BlueprintInstanceSerializer | ||||
|  | ||||
|         return BlueprintInstanceSerializer | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         return f"Blueprint Instance {self.name}" | ||||
|  | ||||
|     class Meta: | ||||
|         verbose_name = _("Blueprint Instance") | ||||
|         verbose_name_plural = _("Blueprint Instances") | ||||
|         unique_together = ( | ||||
|             ( | ||||
|                 "name", | ||||
|                 "path", | ||||
|             ), | ||||
|         ) | ||||
|  | ||||
| @ -1,7 +1,7 @@ | ||||
| """Blueprint helpers""" | ||||
|  | ||||
| from collections.abc import Callable | ||||
| from functools import wraps | ||||
| from typing import Callable | ||||
|  | ||||
| from django.apps import apps | ||||
|  | ||||
| @ -39,7 +39,7 @@ def reconcile_app(app_name: str): | ||||
|         def wrapper(*args, **kwargs): | ||||
|             config = apps.get_app_config(app_name) | ||||
|             if isinstance(config, ManagedAppConfig): | ||||
|                 config._on_startup_callback(None) | ||||
|                 config.ready() | ||||
|             return func(*args, **kwargs) | ||||
|  | ||||
|         return wrapper | ||||
|  | ||||
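For reference, the `reconcile_app` helper above is typically applied as a test decorator so the named app's reconciliation runs before the test body. A sketch under assumptions: the import path and app label are illustrative, not taken from this diff:

```python
# Hypothetical usage of the reconcile_app decorator shown above.
# Import path and app label are assumed for illustration.
from django.test import TestCase

from authentik.blueprints.tests import reconcile_app


class TestOutpostBootstrap(TestCase):
    @reconcile_app("authentik_outposts")
    def test_managed_outpost_exists(self):
        # By the time the test body runs, the app's reconcile hooks have executed.
        ...
```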
| @ -1,7 +1,7 @@ | ||||
| """test packaged blueprints""" | ||||
|  | ||||
| from collections.abc import Callable | ||||
| from pathlib import Path | ||||
| from typing import Callable | ||||
|  | ||||
| from django.test import TransactionTestCase | ||||
|  | ||||
|  | ||||
| @ -1,6 +1,6 @@ | ||||
| """authentik managed models tests""" | ||||
|  | ||||
| from collections.abc import Callable | ||||
| from typing import Callable, Type | ||||
|  | ||||
| from django.apps import apps | ||||
| from django.test import TestCase | ||||
| @ -14,7 +14,7 @@ class TestModels(TestCase): | ||||
|     """Test Models""" | ||||
|  | ||||
|  | ||||
| def serializer_tester_factory(test_model: type[SerializerModel]) -> Callable: | ||||
| def serializer_tester_factory(test_model: Type[SerializerModel]) -> Callable: | ||||
|     """Test serializer""" | ||||
|  | ||||
|     def tester(self: TestModels): | ||||
|  | ||||
| @ -54,7 +54,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase): | ||||
|             file.seek(0) | ||||
|             file_hash = sha512(file.read().encode()).hexdigest() | ||||
|             file.flush() | ||||
|             blueprints_discovery() | ||||
|             blueprints_discovery()  # pylint: disable=no-value-for-parameter | ||||
|             instance = BlueprintInstance.objects.filter(name=blueprint_id).first() | ||||
|             self.assertEqual(instance.last_applied_hash, file_hash) | ||||
|             self.assertEqual( | ||||
| @ -82,7 +82,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase): | ||||
|                 ) | ||||
|             ) | ||||
|             file.flush() | ||||
|             blueprints_discovery() | ||||
|             blueprints_discovery()  # pylint: disable=no-value-for-parameter | ||||
|             blueprint = BlueprintInstance.objects.filter(name="foo").first() | ||||
|             self.assertEqual( | ||||
|                 blueprint.last_applied_hash, | ||||
| @ -107,7 +107,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase): | ||||
|                 ) | ||||
|             ) | ||||
|             file.flush() | ||||
|             blueprints_discovery() | ||||
|             blueprints_discovery()  # pylint: disable=no-value-for-parameter | ||||
|             blueprint.refresh_from_db() | ||||
|             self.assertEqual( | ||||
|                 blueprint.last_applied_hash, | ||||
| @ -149,7 +149,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase): | ||||
|                 instance.status, | ||||
|                 BlueprintInstanceStatus.UNKNOWN, | ||||
|             ) | ||||
|             apply_blueprint(instance.pk) | ||||
|             apply_blueprint(instance.pk)  # pylint: disable=no-value-for-parameter | ||||
|             instance.refresh_from_db() | ||||
|             self.assertEqual(instance.last_applied_hash, "") | ||||
|             self.assertEqual( | ||||
|  | ||||
| @ -1,14 +1,13 @@ | ||||
| """transfer common classes""" | ||||
|  | ||||
| from collections import OrderedDict | ||||
| from collections.abc import Iterable, Mapping | ||||
| from copy import copy | ||||
| from dataclasses import asdict, dataclass, field, is_dataclass | ||||
| from enum import Enum | ||||
| from functools import reduce | ||||
| from operator import ixor | ||||
| from os import getenv | ||||
| from typing import Any, Literal, Union | ||||
| from typing import Any, Iterable, Literal, Mapping, Optional, Union | ||||
| from uuid import UUID | ||||
|  | ||||
| from deepmerge import always_merger | ||||
| @ -46,7 +45,7 @@ def get_attrs(obj: SerializerModel) -> dict[str, Any]: | ||||
| class BlueprintEntryState: | ||||
|     """State of a single instance""" | ||||
|  | ||||
|     instance: Model | None = None | ||||
|     instance: Optional[Model] = None | ||||
|  | ||||
|  | ||||
| class BlueprintEntryDesiredState(Enum): | ||||
| @ -68,14 +67,14 @@ class BlueprintEntry: | ||||
|     ) | ||||
|     conditions: list[Any] = field(default_factory=list) | ||||
|     identifiers: dict[str, Any] = field(default_factory=dict) | ||||
|     attrs: dict[str, Any] | None = field(default_factory=dict) | ||||
|     attrs: Optional[dict[str, Any]] = field(default_factory=dict) | ||||
|  | ||||
|     id: str | None = None | ||||
|     id: Optional[str] = None | ||||
|  | ||||
|     _state: BlueprintEntryState = field(default_factory=BlueprintEntryState) | ||||
|  | ||||
|     def __post_init__(self, *args, **kwargs) -> None: | ||||
|         self.__tag_contexts: list[YAMLTagContext] = [] | ||||
|         self.__tag_contexts: list["YAMLTagContext"] = [] | ||||
|  | ||||
|     @staticmethod | ||||
|     def from_model(model: SerializerModel, *extra_identifier_names: str) -> "BlueprintEntry": | ||||
| @ -93,10 +92,10 @@ class BlueprintEntry: | ||||
|             attrs=all_attrs, | ||||
|         ) | ||||
|  | ||||
|     def get_tag_context( | ||||
|     def _get_tag_context( | ||||
|         self, | ||||
|         depth: int = 0, | ||||
|         context_tag_type: type["YAMLTagContext"] | tuple["YAMLTagContext", ...] | None = None, | ||||
|         context_tag_type: Optional[type["YAMLTagContext"] | tuple["YAMLTagContext", ...]] = None, | ||||
|     ) -> "YAMLTagContext": | ||||
|         """Get a YAMLTagContext object located at a certain depth in the tag tree""" | ||||
|         if depth < 0: | ||||
| @ -109,8 +108,8 @@ class BlueprintEntry: | ||||
|  | ||||
|         try: | ||||
|             return contexts[-(depth + 1)] | ||||
|         except IndexError as exc: | ||||
|             raise ValueError(f"invalid depth: {depth}. Max depth: {len(contexts) - 1}") from exc | ||||
|         except IndexError: | ||||
|             raise ValueError(f"invalid depth: {depth}. Max depth: {len(contexts) - 1}") | ||||
|  | ||||
|     def tag_resolver(self, value: Any, blueprint: "Blueprint") -> Any: | ||||
|         """Check if we have any special tags that need handling""" | ||||
| @ -171,7 +170,7 @@ class Blueprint: | ||||
|     entries: list[BlueprintEntry] = field(default_factory=list) | ||||
|     context: dict = field(default_factory=dict) | ||||
|  | ||||
|     metadata: BlueprintMetadata | None = field(default=None) | ||||
|     metadata: Optional[BlueprintMetadata] = field(default=None) | ||||
|  | ||||
|  | ||||
| class YAMLTag: | ||||
| @ -219,7 +218,7 @@ class Env(YAMLTag): | ||||
|     """Lookup environment variable with optional default""" | ||||
|  | ||||
|     key: str | ||||
|     default: Any | None | ||||
|     default: Optional[Any] | ||||
|  | ||||
|     def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None: | ||||
|         super().__init__() | ||||
| @ -238,7 +237,7 @@ class Context(YAMLTag): | ||||
|     """Lookup key from instance context""" | ||||
|  | ||||
|     key: str | ||||
|     default: Any | None | ||||
|     default: Optional[Any] | ||||
|  | ||||
|     def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None: | ||||
|         super().__init__() | ||||
| @ -282,7 +281,7 @@ class Format(YAMLTag): | ||||
|         try: | ||||
|             return self.format_string % tuple(args) | ||||
|         except TypeError as exc: | ||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc | ||||
|             raise EntryInvalidError.from_entry(exc, entry) | ||||
|  | ||||
|  | ||||
| class Find(YAMLTag): | ||||
| @ -367,7 +366,7 @@ class Condition(YAMLTag): | ||||
|             comparator = self._COMPARATORS[self.mode.upper()] | ||||
|             return comparator(tuple(bool(x) for x in args)) | ||||
|         except (TypeError, KeyError) as exc: | ||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc | ||||
|             raise EntryInvalidError.from_entry(exc, entry) | ||||
|  | ||||
|  | ||||
| class If(YAMLTag): | ||||
| @ -399,7 +398,7 @@ class If(YAMLTag): | ||||
|                 blueprint, | ||||
|             ) | ||||
|         except TypeError as exc: | ||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc | ||||
|             raise EntryInvalidError.from_entry(exc, entry) | ||||
|  | ||||
|  | ||||
| class Enumerate(YAMLTag, YAMLTagContext): | ||||
| @ -413,7 +412,9 @@ class Enumerate(YAMLTag, YAMLTagContext): | ||||
|         "SEQ": (list, lambda a, b: [*a, b]), | ||||
|         "MAP": ( | ||||
|             dict, | ||||
|             lambda a, b: always_merger.merge(a, {b[0]: b[1]} if isinstance(b, tuple | list) else b), | ||||
|             lambda a, b: always_merger.merge( | ||||
|                 a, {b[0]: b[1]} if isinstance(b, (tuple, list)) else b | ||||
|             ), | ||||
|         ), | ||||
|     } | ||||
|  | ||||
| @ -455,7 +456,7 @@ class Enumerate(YAMLTag, YAMLTagContext): | ||||
|         try: | ||||
|             output_class, add_fn = self._OUTPUT_BODIES[self.output_body.upper()] | ||||
|         except KeyError as exc: | ||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc | ||||
|             raise EntryInvalidError.from_entry(exc, entry) | ||||
|  | ||||
|         result = output_class() | ||||
|  | ||||
| @ -483,13 +484,13 @@ class EnumeratedItem(YAMLTag): | ||||
|  | ||||
|     _SUPPORTED_CONTEXT_TAGS = (Enumerate,) | ||||
|  | ||||
|     def __init__(self, _loader: "BlueprintLoader", node: ScalarNode) -> None: | ||||
|     def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None: | ||||
|         super().__init__() | ||||
|         self.depth = int(node.value) | ||||
|  | ||||
|     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: | ||||
|         try: | ||||
|             context_tag: Enumerate = entry.get_tag_context( | ||||
|             context_tag: Enumerate = entry._get_tag_context( | ||||
|                 depth=self.depth, | ||||
|                 context_tag_type=EnumeratedItem._SUPPORTED_CONTEXT_TAGS, | ||||
|             ) | ||||
| @ -499,11 +500,9 @@ class EnumeratedItem(YAMLTag): | ||||
|                     f"{self.__class__.__name__} tags are only usable " | ||||
|                     f"inside an {Enumerate.__name__} tag", | ||||
|                     entry, | ||||
|                 ) from exc | ||||
|                 ) | ||||
|  | ||||
|             raise EntryInvalidError.from_entry( | ||||
|                 f"{self.__class__.__name__} tag: {exc}", entry | ||||
|             ) from exc | ||||
|             raise EntryInvalidError.from_entry(f"{self.__class__.__name__} tag: {exc}", entry) | ||||
|  | ||||
|         return context_tag.get_context(entry, blueprint) | ||||
|  | ||||
| @ -516,8 +515,8 @@ class Index(EnumeratedItem): | ||||
|  | ||||
|         try: | ||||
|             return context[0] | ||||
|         except IndexError as exc:  # pragma: no cover | ||||
|             raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry) from exc | ||||
|         except IndexError:  # pragma: no cover | ||||
|             raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry) | ||||
|  | ||||
|  | ||||
| class Value(EnumeratedItem): | ||||
| @ -528,8 +527,8 @@ class Value(EnumeratedItem): | ||||
|  | ||||
|         try: | ||||
|             return context[1] | ||||
|         except IndexError as exc:  # pragma: no cover | ||||
|             raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry) from exc | ||||
|         except IndexError:  # pragma: no cover | ||||
|             raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry) | ||||
|  | ||||
|  | ||||
| class BlueprintDumper(SafeDumper): | ||||
| @ -556,11 +555,7 @@ class BlueprintDumper(SafeDumper): | ||||
|  | ||||
|             def factory(items): | ||||
|                 final_dict = dict(items) | ||||
|                 # Remove internal state variables | ||||
|                 final_dict.pop("_state", None) | ||||
|                 # Future-proof to only remove the ID if we don't set a value | ||||
|                 if "id" in final_dict and final_dict.get("id") is None: | ||||
|                     final_dict.pop("id") | ||||
|                 return final_dict | ||||
|  | ||||
|             data = asdict(data, dict_factory=factory) | ||||
| @ -587,13 +582,13 @@ class BlueprintLoader(SafeLoader): | ||||
| class EntryInvalidError(SentryIgnoredException): | ||||
|     """Error raised when an entry is invalid""" | ||||
|  | ||||
|     entry_model: str | None | ||||
|     entry_id: str | None | ||||
|     validation_error: ValidationError | None | ||||
|     serializer: Serializer | None = None | ||||
|     entry_model: Optional[str] | ||||
|     entry_id: Optional[str] | ||||
|     validation_error: Optional[ValidationError] | ||||
|     serializer: Optional[Serializer] = None | ||||
|  | ||||
|     def __init__( | ||||
|         self, *args: object, validation_error: ValidationError | None = None, **kwargs | ||||
|         self, *args: object, validation_error: Optional[ValidationError] = None, **kwargs | ||||
|     ) -> None: | ||||
|         super().__init__(*args) | ||||
|         self.entry_model = None | ||||
|  | ||||
| @ -1,6 +1,6 @@ | ||||
| """Blueprint exporter""" | ||||
|  | ||||
| from collections.abc import Iterable | ||||
| from typing import Iterable | ||||
| from uuid import UUID | ||||
|  | ||||
| from django.apps import apps | ||||
| @ -59,7 +59,7 @@ class Exporter: | ||||
|         blueprint = Blueprint() | ||||
|         self._pre_export(blueprint) | ||||
|         blueprint.metadata = BlueprintMetadata( | ||||
|             name=_("authentik Export - {date}".format_map({"date": str(now())})), | ||||
|             name=_("authentik Export - %(date)s" % {"date": str(now())}), | ||||
|             labels={ | ||||
|                 LABEL_AUTHENTIK_GENERATED: "true", | ||||
|             }, | ||||
|  | ||||
| @ -2,7 +2,7 @@ | ||||
|  | ||||
| from contextlib import contextmanager | ||||
| from copy import deepcopy | ||||
| from typing import Any | ||||
| from typing import Any, Optional | ||||
|  | ||||
| from dacite.config import Config | ||||
| from dacite.core import from_dict | ||||
| @ -19,6 +19,8 @@ from guardian.models import UserObjectPermission | ||||
| from rest_framework.exceptions import ValidationError | ||||
| from rest_framework.serializers import BaseSerializer, Serializer | ||||
| from structlog.stdlib import BoundLogger, get_logger | ||||
| from structlog.testing import capture_logs | ||||
| from structlog.types import EventDict | ||||
| from yaml import load | ||||
|  | ||||
| from authentik.blueprints.v1.common import ( | ||||
| @ -39,16 +41,7 @@ from authentik.core.models import ( | ||||
| ) | ||||
| from authentik.enterprise.license import LicenseKey | ||||
| from authentik.enterprise.models import LicenseUsage | ||||
| from authentik.enterprise.providers.google_workspace.models import ( | ||||
|     GoogleWorkspaceProviderGroup, | ||||
|     GoogleWorkspaceProviderUser, | ||||
| ) | ||||
| from authentik.enterprise.providers.microsoft_entra.models import ( | ||||
|     MicrosoftEntraProviderGroup, | ||||
|     MicrosoftEntraProviderUser, | ||||
| ) | ||||
| from authentik.enterprise.providers.rac.models import ConnectionToken | ||||
| from authentik.events.logs import LogEvent, capture_logs | ||||
| from authentik.events.models import SystemTask | ||||
| from authentik.events.utils import cleanse_dict | ||||
| from authentik.flows.models import FlowToken, Stage | ||||
| @ -58,9 +51,7 @@ from authentik.outposts.models import OutpostServiceConnection | ||||
| from authentik.policies.models import Policy, PolicyBindingModel | ||||
| from authentik.policies.reputation.models import Reputation | ||||
| from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken | ||||
| from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser | ||||
| from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser | ||||
| from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType | ||||
| from authentik.providers.scim.models import SCIMGroup, SCIMUser | ||||
| from authentik.tenants.models import Tenant | ||||
|  | ||||
| # Context set when the serializer is created in a blueprint context | ||||
| @ -71,7 +62,7 @@ SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry" | ||||
| def excluded_models() -> list[type[Model]]: | ||||
|     """Return a list of all excluded models that shouldn't be exposed via API | ||||
|     or other means (internal only, base classes, non-used objects, etc)""" | ||||
|  | ||||
|     # pylint: disable=imported-auth-user | ||||
|     from django.contrib.auth.models import Group as DjangoGroup | ||||
|     from django.contrib.auth.models import User as DjangoUser | ||||
|  | ||||
| @ -94,11 +85,10 @@ def excluded_models() -> list[type[Model]]: | ||||
|         # Classes that have other dependencies | ||||
|         AuthenticatedSession, | ||||
|         # Classes which are only internally managed | ||||
|         # FIXME: these shouldn't need to be explicitly listed, but rather based off of a mixin | ||||
|         FlowToken, | ||||
|         LicenseUsage, | ||||
|         SCIMProviderGroup, | ||||
|         SCIMProviderUser, | ||||
|         SCIMGroup, | ||||
|         SCIMUser, | ||||
|         Tenant, | ||||
|         SystemTask, | ||||
|         ConnectionToken, | ||||
| @ -106,19 +96,12 @@ def excluded_models() -> list[type[Model]]: | ||||
|         AccessToken, | ||||
|         RefreshToken, | ||||
|         Reputation, | ||||
|         WebAuthnDeviceType, | ||||
|         SCIMSourceUser, | ||||
|         SCIMSourceGroup, | ||||
|         GoogleWorkspaceProviderUser, | ||||
|         GoogleWorkspaceProviderGroup, | ||||
|         MicrosoftEntraProviderUser, | ||||
|         MicrosoftEntraProviderGroup, | ||||
|     ) | ||||
|  | ||||
|  | ||||
| def is_model_allowed(model: type[Model]) -> bool: | ||||
|     """Check if model is allowed""" | ||||
|     return model not in excluded_models() and issubclass(model, SerializerModel | BaseMetaModel) | ||||
|     return model not in excluded_models() and issubclass(model, (SerializerModel, BaseMetaModel)) | ||||
|  | ||||
|  | ||||
| class DoRollback(SentryIgnoredException): | ||||
| @ -142,7 +125,7 @@ class Importer: | ||||
|     logger: BoundLogger | ||||
|     _import: Blueprint | ||||
|  | ||||
|     def __init__(self, blueprint: Blueprint, context: dict | None = None): | ||||
|     def __init__(self, blueprint: Blueprint, context: Optional[dict] = None): | ||||
|         self.__pk_map: dict[Any, Model] = {} | ||||
|         self._import = blueprint | ||||
|         self.logger = get_logger() | ||||
| @ -178,14 +161,14 @@ class Importer: | ||||
|  | ||||
|         def updater(value) -> Any: | ||||
|             if value in self.__pk_map: | ||||
|                 self.logger.debug("Updating reference in entry", value=value) | ||||
|                 self.logger.debug("updating reference in entry", value=value) | ||||
|                 return self.__pk_map[value] | ||||
|             return value | ||||
|  | ||||
|         for key, value in attrs.items(): | ||||
|             try: | ||||
|                 if isinstance(value, dict): | ||||
|                     for _, _inner_key in enumerate(value): | ||||
|                     for idx, _inner_key in enumerate(value): | ||||
|                         value[_inner_key] = updater(value[_inner_key]) | ||||
|                 elif isinstance(value, list): | ||||
|                     for idx, _inner_value in enumerate(value): | ||||
| @ -214,7 +197,8 @@ class Importer: | ||||
|  | ||||
|         return main_query | sub_query | ||||
|  | ||||
|     def _validate_single(self, entry: BlueprintEntry) -> BaseSerializer | None: | ||||
|     # pylint: disable-msg=too-many-locals | ||||
|     def _validate_single(self, entry: BlueprintEntry) -> Optional[BaseSerializer]: | ||||
|         """Validate a single entry""" | ||||
|         if not entry.check_all_conditions_match(self._import): | ||||
|             self.logger.debug("One or more conditions of this entry are not fulfilled, skipping") | ||||
| @ -267,7 +251,7 @@ class Importer: | ||||
|         model_instance = existing_models.first() | ||||
|         if not isinstance(model(), BaseMetaModel) and model_instance: | ||||
|             self.logger.debug( | ||||
|                 "Initialise serializer with instance", | ||||
|                 "initialise serializer with instance", | ||||
|                 model=model, | ||||
|                 instance=model_instance, | ||||
|                 pk=model_instance.pk, | ||||
| @ -277,14 +261,14 @@ class Importer: | ||||
|         elif model_instance and entry.state == BlueprintEntryDesiredState.MUST_CREATED: | ||||
|             raise EntryInvalidError.from_entry( | ||||
|                 ( | ||||
|                     f"State is set to {BlueprintEntryDesiredState.MUST_CREATED} " | ||||
|                     f"state is set to {BlueprintEntryDesiredState.MUST_CREATED} " | ||||
|                     "and object exists already", | ||||
|                 ), | ||||
|                 entry, | ||||
|             ) | ||||
|         else: | ||||
|             self.logger.debug( | ||||
|                 "Initialised new serializer instance", | ||||
|                 "initialised new serializer instance", | ||||
|                 model=model, | ||||
|                 **cleanse_dict(updated_identifiers), | ||||
|             ) | ||||
| @ -341,7 +325,7 @@ class Importer: | ||||
|                 model: type[SerializerModel] = registry.get_model(model_app_label, model_name) | ||||
|             except LookupError: | ||||
|                 self.logger.warning( | ||||
|                     "App or Model does not exist", app=model_app_label, model=model_name | ||||
|                     "app or model does not exist", app=model_app_label, model=model_name | ||||
|                 ) | ||||
|                 return False | ||||
|             # Validate each single entry | ||||
| @ -353,7 +337,7 @@ class Importer: | ||||
|                 if entry.get_state(self._import) == BlueprintEntryDesiredState.ABSENT: | ||||
|                     serializer = exc.serializer | ||||
|                 else: | ||||
|                     self.logger.warning(f"Entry invalid: {exc}", entry=entry, error=exc) | ||||
|                     self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc) | ||||
|                     if raise_errors: | ||||
|                         raise exc | ||||
|                     return False | ||||
| @ -373,27 +357,27 @@ class Importer: | ||||
|                     and state == BlueprintEntryDesiredState.CREATED | ||||
|                 ): | ||||
|                     self.logger.debug( | ||||
|                         "Instance exists, skipping", | ||||
|                         "instance exists, skipping", | ||||
|                         model=model, | ||||
|                         instance=instance, | ||||
|                         pk=instance.pk, | ||||
|                     ) | ||||
|                 else: | ||||
|                     instance = serializer.save() | ||||
|                     self.logger.debug("Updated model", model=instance) | ||||
|                     self.logger.debug("updated model", model=instance) | ||||
|                 if "pk" in entry.identifiers: | ||||
|                     self.__pk_map[entry.identifiers["pk"]] = instance.pk | ||||
|                 entry._state = BlueprintEntryState(instance) | ||||
|             elif state == BlueprintEntryDesiredState.ABSENT: | ||||
|                 instance: Model | None = serializer.instance | ||||
|                 instance: Optional[Model] = serializer.instance | ||||
|                 if instance.pk: | ||||
|                     instance.delete() | ||||
|                     self.logger.debug("Deleted model", mode=instance) | ||||
|                     self.logger.debug("deleted model", mode=instance) | ||||
|                     continue | ||||
|                 self.logger.debug("Entry to delete with no instance, skipping") | ||||
|                 self.logger.debug("entry to delete with no instance, skipping") | ||||
|         return True | ||||
|  | ||||
|     def validate(self, raise_validation_errors=False) -> tuple[bool, list[LogEvent]]: | ||||
|     def validate(self, raise_validation_errors=False) -> tuple[bool, list[EventDict]]: | ||||
|         """Validate loaded blueprint export, ensure all models are allowed | ||||
|         and serializers have no errors""" | ||||
|         self.logger.debug("Starting blueprint import validation") | ||||
| @ -407,7 +391,9 @@ class Importer: | ||||
|         ): | ||||
|             successful = self._apply_models(raise_errors=raise_validation_errors) | ||||
|             if not successful: | ||||
|                 self.logger.warning("Blueprint validation failed") | ||||
|                 self.logger.debug("Blueprint validation failed") | ||||
|         for log in logs: | ||||
|             getattr(self.logger, log.get("log_level"))(**log) | ||||
|         self.logger.debug("Finished blueprint import validation") | ||||
|         self._import = orig_import | ||||
|         return successful, logs | ||||
|  | ||||
| @ -43,7 +43,7 @@ class ApplyBlueprintMetaSerializer(PassiveSerializer): | ||||
|             LOGGER.info("Blueprint does not exist, but not required") | ||||
|             return MetaResult() | ||||
|         LOGGER.debug("Applying blueprint from meta model", blueprint=self.blueprint_instance) | ||||
|  | ||||
|         # pylint: disable=no-value-for-parameter | ||||
|         apply_blueprint(str(self.blueprint_instance.pk)) | ||||
|         return MetaResult() | ||||
|  | ||||
|  | ||||
| @ -8,15 +8,15 @@ from rest_framework.serializers import Serializer | ||||
| class BaseMetaModel(Model): | ||||
|     """Base models""" | ||||
|  | ||||
|     class Meta: | ||||
|         abstract = True | ||||
|  | ||||
|     @staticmethod | ||||
|     def serializer() -> Serializer: | ||||
|         """Serializer similar to SerializerModel, but as a static method since | ||||
|         this is an abstract model""" | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     class Meta: | ||||
|         abstract = True | ||||
|  | ||||
|  | ||||
| class MetaResult: | ||||
|     """Result returned by Meta Models' serializers. Empty class but we can't return none as | ||||
|  | ||||
| @ -4,6 +4,7 @@ from dataclasses import asdict, dataclass, field | ||||
| from hashlib import sha512 | ||||
| from pathlib import Path | ||||
| from sys import platform | ||||
| from typing import Optional | ||||
|  | ||||
| from dacite.core import from_dict | ||||
| from django.db import DatabaseError, InternalError, ProgrammingError | ||||
| @ -30,7 +31,6 @@ from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata, E | ||||
| from authentik.blueprints.v1.importer import Importer | ||||
| from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE | ||||
| from authentik.blueprints.v1.oci import OCI_PREFIX | ||||
| from authentik.events.logs import capture_logs | ||||
| from authentik.events.models import TaskStatus | ||||
| from authentik.events.system_tasks import SystemTask, prefill_task | ||||
| from authentik.events.utils import sanitize_dict | ||||
| @ -50,14 +50,14 @@ class BlueprintFile: | ||||
|     version: int | ||||
|     hash: str | ||||
|     last_m: int | ||||
|     meta: BlueprintMetadata | None = field(default=None) | ||||
|     meta: Optional[BlueprintMetadata] = field(default=None) | ||||
|  | ||||
|  | ||||
| def start_blueprint_watcher(): | ||||
|     """Start blueprint watcher, if it's not running already.""" | ||||
|     # This function might be called twice since it's called on celery startup | ||||
|  | ||||
|     global _file_watcher_started  # noqa: PLW0603 | ||||
|     # pylint: disable=global-statement | ||||
|     global _file_watcher_started | ||||
|     if _file_watcher_started: | ||||
|         return | ||||
|     observer = Observer() | ||||
| @ -126,7 +126,7 @@ def blueprints_find() -> list[BlueprintFile]: | ||||
|         # Check if any part in the path starts with a dot and assume a hidden file | ||||
|         if any(part for part in path.parts if part.startswith(".")): | ||||
|             continue | ||||
|         with open(path, encoding="utf-8") as blueprint_file: | ||||
|         with open(path, "r", encoding="utf-8") as blueprint_file: | ||||
|             try: | ||||
|                 raw_blueprint = load(blueprint_file.read(), BlueprintLoader) | ||||
|             except YAMLError as exc: | ||||
| @ -150,7 +150,7 @@ def blueprints_find() -> list[BlueprintFile]: | ||||
|     throws=(DatabaseError, ProgrammingError, InternalError), base=SystemTask, bind=True | ||||
| ) | ||||
| @prefill_task | ||||
| def blueprints_discovery(self: SystemTask, path: str | None = None): | ||||
| def blueprints_discovery(self: SystemTask, path: Optional[str] = None): | ||||
|     """Find blueprints and check if they need to be created in the database""" | ||||
|     count = 0 | ||||
|     for blueprint in blueprints_find(): | ||||
| @ -197,7 +197,7 @@ def check_blueprint_v1_file(blueprint: BlueprintFile): | ||||
| def apply_blueprint(self: SystemTask, instance_pk: str): | ||||
|     """Apply single blueprint""" | ||||
|     self.save_on_success = False | ||||
|     instance: BlueprintInstance | None = None | ||||
|     instance: Optional[BlueprintInstance] = None | ||||
|     try: | ||||
|         instance: BlueprintInstance = BlueprintInstance.objects.filter(pk=instance_pk).first() | ||||
|         if not instance or not instance.enabled: | ||||
| @ -212,24 +212,23 @@ def apply_blueprint(self: SystemTask, instance_pk: str): | ||||
|         if not valid: | ||||
|             instance.status = BlueprintInstanceStatus.ERROR | ||||
|             instance.save() | ||||
|             self.set_status(TaskStatus.ERROR, *logs) | ||||
|             self.set_status(TaskStatus.ERROR, *[x["event"] for x in logs]) | ||||
|             return | ||||
|         applied = importer.apply() | ||||
|         if not applied: | ||||
|             instance.status = BlueprintInstanceStatus.ERROR | ||||
|             instance.save() | ||||
|             self.set_status(TaskStatus.ERROR, "Failed to apply") | ||||
|             return | ||||
|         with capture_logs() as logs: | ||||
|             applied = importer.apply() | ||||
|             if not applied: | ||||
|                 instance.status = BlueprintInstanceStatus.ERROR | ||||
|                 instance.save() | ||||
|                 self.set_status(TaskStatus.ERROR, *logs) | ||||
|                 return | ||||
|         instance.status = BlueprintInstanceStatus.SUCCESSFUL | ||||
|         instance.last_applied_hash = file_hash | ||||
|         instance.last_applied = now() | ||||
|         self.set_status(TaskStatus.SUCCESSFUL) | ||||
|     except ( | ||||
|         OSError, | ||||
|         DatabaseError, | ||||
|         ProgrammingError, | ||||
|         InternalError, | ||||
|         IOError, | ||||
|         BlueprintRetrievalFailed, | ||||
|         EntryInvalidError, | ||||
|     ) as exc: | ||||
|  | ||||
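| Note: `set_status(TaskStatus.ERROR, *[x["event"] for x in logs])` above works because `structlog.testing.capture_logs` collects plain dicts whose `event` key holds the log message, unlike the LogEvent objects used on the newer branch. A short self-contained sketch: | ||||
|  | ||||
| ```python | ||||
| from structlog import get_logger | ||||
| from structlog.testing import capture_logs | ||||
|  | ||||
| # capture_logs() swaps in structlog's LogCapture processor; every log call | ||||
| # inside the block is collected as a dict with "event" and "log_level" keys. | ||||
| with capture_logs() as logs: | ||||
|     get_logger().warning("failed to apply entry", entry="example") | ||||
|  | ||||
| assert logs[0]["event"] == "failed to apply entry" | ||||
| assert logs[0]["log_level"] == "warning" | ||||
| ``` | ||||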
| @ -11,20 +11,21 @@ from rest_framework.filters import OrderingFilter, SearchFilter | ||||
| from rest_framework.permissions import AllowAny | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.serializers import ModelSerializer | ||||
| from rest_framework.validators import UniqueValidator | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
|  | ||||
| from authentik.api.authorization import SecretKeyFilter | ||||
| from authentik.brands.models import Brand | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import ModelSerializer, PassiveSerializer | ||||
| from authentik.core.api.utils import PassiveSerializer | ||||
| from authentik.tenants.utils import get_current_tenant | ||||
|  | ||||
|  | ||||
| class FooterLinkSerializer(PassiveSerializer): | ||||
|     """Links returned in Config API""" | ||||
|  | ||||
|     href = CharField(read_only=True, allow_null=True) | ||||
|     href = CharField(read_only=True) | ||||
|     name = CharField(read_only=True) | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -1,6 +1,6 @@ | ||||
| """Inject brand into current request""" | ||||
|  | ||||
| from collections.abc import Callable | ||||
| from typing import Callable | ||||
|  | ||||
| from django.http.request import HttpRequest | ||||
| from django.http.response import HttpResponse | ||||
| @ -20,7 +20,7 @@ class BrandMiddleware: | ||||
|     def __call__(self, request: HttpRequest) -> HttpResponse: | ||||
|         if not hasattr(request, "brand"): | ||||
|             brand = get_brand_for_request(request) | ||||
|             request.brand = brand | ||||
|             setattr(request, "brand", brand) | ||||
|             locale = brand.default_locale | ||||
|             if locale != "": | ||||
|                 activate(locale) | ||||
|  | ||||
| @ -1,21 +0,0 @@ | ||||
| # Generated by Django 5.0.4 on 2024-04-18 18:56 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_brands", "0005_tenantuuid_to_branduuid"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddIndex( | ||||
|             model_name="brand", | ||||
|             index=models.Index(fields=["domain"], name="authentik_b_domain_b9b24a_idx"), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="brand", | ||||
|             index=models.Index(fields=["default"], name="authentik_b_default_3ccf12_idx"), | ||||
|         ), | ||||
|     ] | ||||
| @ -71,7 +71,7 @@ class Brand(SerializerModel): | ||||
|         """Get default locale""" | ||||
|         try: | ||||
|             return self.attributes.get("settings", {}).get("locale", "") | ||||
|  | ||||
|         # pylint: disable=broad-except | ||||
|         except Exception as exc: | ||||
|             LOGGER.warning("Failed to get default locale", exc=exc) | ||||
|             return "" | ||||
| @ -84,7 +84,3 @@ class Brand(SerializerModel): | ||||
|     class Meta: | ||||
|         verbose_name = _("Brand") | ||||
|         verbose_name_plural = _("Brands") | ||||
|         indexes = [ | ||||
|             models.Index(fields=["domain"]), | ||||
|             models.Index(fields=["default"]), | ||||
|         ] | ||||
|  | ||||
| @ -1,8 +1,8 @@ | ||||
| """Application API Views""" | ||||
|  | ||||
| from collections.abc import Iterator | ||||
| from copy import copy | ||||
| from datetime import timedelta | ||||
| from typing import Iterator, Optional | ||||
|  | ||||
| from django.core.cache import cache | ||||
| from django.db.models import QuerySet | ||||
| @ -17,18 +17,18 @@ from rest_framework.fields import CharField, ReadOnlyField, SerializerMethodFiel | ||||
| from rest_framework.parsers import MultiPartParser | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.serializers import ModelSerializer | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
| from structlog.stdlib import get_logger | ||||
| from structlog.testing import capture_logs | ||||
|  | ||||
| from authentik.admin.api.metrics import CoordinateSerializer | ||||
| from authentik.api.pagination import Pagination | ||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | ||||
| from authentik.core.api.providers import ProviderSerializer | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import ModelSerializer | ||||
| from authentik.core.models import Application, User | ||||
| from authentik.events.logs import LogEventSerializer, capture_logs | ||||
| from authentik.events.models import EventAction | ||||
| from authentik.events.utils import sanitize_dict | ||||
| from authentik.lib.utils.file import ( | ||||
|     FilePathSerializer, | ||||
|     FileUploadSerializer, | ||||
| @ -37,19 +37,16 @@ from authentik.lib.utils.file import ( | ||||
| ) | ||||
| from authentik.policies.api.exec import PolicyTestResultSerializer | ||||
| from authentik.policies.engine import PolicyEngine | ||||
| from authentik.policies.types import CACHE_PREFIX, PolicyResult | ||||
| from authentik.policies.types import PolicyResult | ||||
| from authentik.rbac.decorators import permission_required | ||||
| from authentik.rbac.filters import ObjectFilter | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| def user_app_cache_key(user_pk: str, page_number: int | None = None) -> str: | ||||
| def user_app_cache_key(user_pk: str) -> str: | ||||
|     """Cache key where application list for user is saved""" | ||||
|     key = f"{CACHE_PREFIX}/app_access/{user_pk}" | ||||
|     if page_number: | ||||
|         key += f"/{page_number}" | ||||
|     return key | ||||
|     return f"goauthentik.io/core/app_access/{user_pk}" | ||||
|  | ||||
|  | ||||
| class ApplicationSerializer(ModelSerializer): | ||||
| @ -63,7 +60,7 @@ class ApplicationSerializer(ModelSerializer): | ||||
|  | ||||
|     meta_icon = ReadOnlyField(source="get_meta_icon") | ||||
|  | ||||
|     def get_launch_url(self, app: Application) -> str | None: | ||||
|     def get_launch_url(self, app: Application) -> Optional[str]: | ||||
|         """Allow formatting of launch URL""" | ||||
|         user = None | ||||
|         if "request" in self.context: | ||||
| @ -103,7 +100,8 @@ class ApplicationSerializer(ModelSerializer): | ||||
| class ApplicationViewSet(UsedByMixin, ModelViewSet): | ||||
|     """Application Viewset""" | ||||
|  | ||||
|     queryset = Application.objects.all().prefetch_related("provider").prefetch_related("policies") | ||||
|     # pylint: disable=no-member | ||||
|     queryset = Application.objects.all().prefetch_related("provider") | ||||
|     serializer_class = ApplicationSerializer | ||||
|     search_fields = [ | ||||
|         "name", | ||||
| @ -133,7 +131,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet): | ||||
|         return queryset | ||||
|  | ||||
|     def _get_allowed_applications( | ||||
|         self, pagined_apps: Iterator[Application], user: User | None = None | ||||
|         self, pagined_apps: Iterator[Application], user: Optional[User] = None | ||||
|     ) -> list[Application]: | ||||
|         applications = [] | ||||
|         request = self.request._request | ||||
| @ -171,7 +169,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet): | ||||
|             try: | ||||
|                 for_user = User.objects.filter(pk=request.query_params.get("for_user")).first() | ||||
|             except ValueError: | ||||
|                 raise ValidationError({"for_user": "for_user must be numerical"}) from None | ||||
|                 raise ValidationError({"for_user": "for_user must be numerical"}) | ||||
|             if not for_user: | ||||
|                 raise ValidationError({"for_user": "User not found"}) | ||||
|         engine = PolicyEngine(application, for_user, request) | ||||
| @ -185,9 +183,9 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet): | ||||
|         if request.user.is_superuser: | ||||
|             log_messages = [] | ||||
|             for log in logs: | ||||
|                 if log.attributes.get("process", "") == "PolicyProcess": | ||||
|                 if log.get("process", "") == "PolicyProcess": | ||||
|                     continue | ||||
|                 log_messages.append(LogEventSerializer(log).data) | ||||
|                 log_messages.append(sanitize_dict(log)) | ||||
|             result.log_messages = log_messages | ||||
|             response = PolicyTestResultSerializer(result) | ||||
|         return Response(response.data) | ||||
| @ -217,8 +215,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet): | ||||
|             return super().list(request) | ||||
|  | ||||
|         queryset = self._filter_queryset_for_list(self.get_queryset()) | ||||
|         paginator: Pagination = self.paginator | ||||
|         paginated_apps = paginator.paginate_queryset(queryset, request) | ||||
|         pagined_apps = self.paginate_queryset(queryset) | ||||
|  | ||||
|         if "for_user" in request.query_params: | ||||
|             try: | ||||
| @ -232,22 +229,20 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet): | ||||
|                     raise ValidationError({"for_user": "User not found"}) | ||||
|             except ValueError as exc: | ||||
|                 raise ValidationError from exc | ||||
|             allowed_applications = self._get_allowed_applications(paginated_apps, user=for_user) | ||||
|             allowed_applications = self._get_allowed_applications(pagined_apps, user=for_user) | ||||
|             serializer = self.get_serializer(allowed_applications, many=True) | ||||
|             return self.get_paginated_response(serializer.data) | ||||
|  | ||||
|         allowed_applications = [] | ||||
|         if not should_cache: | ||||
|             allowed_applications = self._get_allowed_applications(paginated_apps) | ||||
|             allowed_applications = self._get_allowed_applications(pagined_apps) | ||||
|         if should_cache: | ||||
|             allowed_applications = cache.get( | ||||
|                 user_app_cache_key(self.request.user.pk, paginator.page.number) | ||||
|             ) | ||||
|             allowed_applications = cache.get(user_app_cache_key(self.request.user.pk)) | ||||
|             if not allowed_applications: | ||||
|                 LOGGER.debug("Caching allowed application list", page=paginator.page.number) | ||||
|                 allowed_applications = self._get_allowed_applications(paginated_apps) | ||||
|                 LOGGER.debug("Caching allowed application list") | ||||
|                 allowed_applications = self._get_allowed_applications(pagined_apps) | ||||
|                 cache.set( | ||||
|                     user_app_cache_key(self.request.user.pk, paginator.page.number), | ||||
|                     user_app_cache_key(self.request.user.pk), | ||||
|                     allowed_applications, | ||||
|                     timeout=86400, | ||||
|                 ) | ||||
|  | ||||
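| Note: the per-user application cache above uses Django's low-level cache API with a one-day timeout; a rough usage sketch (key and values are illustrative, and a configured Django settings module is assumed): | ||||
|  | ||||
| ```python | ||||
| from django.core.cache import cache  # needs DJANGO_SETTINGS_MODULE configured | ||||
|  | ||||
| key = "goauthentik.io/core/app_access/42"  # same key shape as above, pk illustrative | ||||
| apps = cache.get(key) | ||||
| if apps is None: | ||||
|     apps = ["app-a", "app-b"]  # stand-in for the policy-filtered application list | ||||
|     cache.set(key, apps, timeout=86400)  # cache for 24 hours, as in the hunk above | ||||
| ``` | ||||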
| @ -1,6 +1,6 @@ | ||||
| """AuthenticatedSessions API Viewset""" | ||||
|  | ||||
| from typing import TypedDict | ||||
| from typing import Optional, TypedDict | ||||
|  | ||||
| from django_filters.rest_framework import DjangoFilterBackend | ||||
| from guardian.utils import get_anonymous_user | ||||
| @ -8,12 +8,12 @@ from rest_framework import mixins | ||||
| from rest_framework.fields import SerializerMethodField | ||||
| from rest_framework.filters import OrderingFilter, SearchFilter | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.serializers import ModelSerializer | ||||
| from rest_framework.viewsets import GenericViewSet | ||||
| from ua_parser import user_agent_parser | ||||
|  | ||||
| from authentik.api.authorization import OwnerSuperuserPermissions | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import ModelSerializer | ||||
| from authentik.core.models import AuthenticatedSession | ||||
| from authentik.events.context_processors.asn import ASN_CONTEXT_PROCESSOR, ASNDict | ||||
| from authentik.events.context_processors.geoip import GEOIP_CONTEXT_PROCESSOR, GeoIPDict | ||||
| @ -72,11 +72,11 @@ class AuthenticatedSessionSerializer(ModelSerializer): | ||||
|         """Get parsed user agent""" | ||||
|         return user_agent_parser.Parse(instance.last_user_agent) | ||||
|  | ||||
|     def get_geo_ip(self, instance: AuthenticatedSession) -> GeoIPDict | None:  # pragma: no cover | ||||
|     def get_geo_ip(self, instance: AuthenticatedSession) -> Optional[GeoIPDict]:  # pragma: no cover | ||||
|         """Get GeoIP Data""" | ||||
|         return GEOIP_CONTEXT_PROCESSOR.city_dict(instance.last_ip) | ||||
|  | ||||
|     def get_asn(self, instance: AuthenticatedSession) -> ASNDict | None:  # pragma: no cover | ||||
|     def get_asn(self, instance: AuthenticatedSession) -> Optional[ASNDict]:  # pragma: no cover | ||||
|         """Get ASN Data""" | ||||
|         return ASN_CONTEXT_PROCESSOR.asn_dict(instance.last_ip) | ||||
|  | ||||
|  | ||||
| @ -1,28 +1,22 @@ | ||||
| """Groups API Viewset""" | ||||
|  | ||||
| from json import loads | ||||
| from typing import Optional | ||||
|  | ||||
| from django.db.models import Prefetch | ||||
| from django.http import Http404 | ||||
| from django_filters.filters import CharFilter, ModelMultipleChoiceFilter | ||||
| from django_filters.filterset import FilterSet | ||||
| from drf_spectacular.utils import ( | ||||
|     OpenApiParameter, | ||||
|     OpenApiResponse, | ||||
|     extend_schema, | ||||
|     extend_schema_field, | ||||
| ) | ||||
| from drf_spectacular.utils import OpenApiResponse, extend_schema | ||||
| from guardian.shortcuts import get_objects_for_user | ||||
| from rest_framework.decorators import action | ||||
| from rest_framework.fields import CharField, IntegerField, SerializerMethodField | ||||
| from rest_framework.fields import CharField, IntegerField | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.serializers import ListSerializer, ValidationError | ||||
| from rest_framework.validators import UniqueValidator | ||||
| from rest_framework.serializers import ListSerializer, ModelSerializer, ValidationError | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
|  | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer | ||||
| from authentik.core.api.utils import JSONDictField, PassiveSerializer | ||||
| from authentik.core.models import Group, User | ||||
| from authentik.rbac.api.roles import RoleSerializer | ||||
| from authentik.rbac.decorators import permission_required | ||||
| @ -52,7 +46,9 @@ class GroupSerializer(ModelSerializer): | ||||
|     """Group Serializer""" | ||||
|  | ||||
|     attributes = JSONDictField(required=False) | ||||
|     users_obj = SerializerMethodField(allow_null=True) | ||||
|     users_obj = ListSerializer( | ||||
|         child=GroupMemberSerializer(), read_only=True, source="users", required=False | ||||
|     ) | ||||
|     roles_obj = ListSerializer( | ||||
|         child=RoleSerializer(), | ||||
|         read_only=True, | ||||
| @ -63,20 +59,7 @@ class GroupSerializer(ModelSerializer): | ||||
|  | ||||
|     num_pk = IntegerField(read_only=True) | ||||
|  | ||||
|     @property | ||||
|     def _should_include_users(self) -> bool: | ||||
|         request: Request = self.context.get("request", None) | ||||
|         if not request: | ||||
|             return True | ||||
|         return str(request.query_params.get("include_users", "true")).lower() == "true" | ||||
|  | ||||
|     @extend_schema_field(GroupMemberSerializer(many=True)) | ||||
|     def get_users_obj(self, instance: Group) -> list[GroupMemberSerializer] | None: | ||||
|         if not self._should_include_users: | ||||
|             return None | ||||
|         return GroupMemberSerializer(instance.users, many=True).data | ||||
|  | ||||
|     def validate_parent(self, parent: Group | None): | ||||
|     def validate_parent(self, parent: Optional[Group]): | ||||
|         """Validate group parent (if set), ensuring the parent isn't itself""" | ||||
|         if not self.instance or not parent: | ||||
|             return parent | ||||
| @ -102,10 +85,7 @@ class GroupSerializer(ModelSerializer): | ||||
|         extra_kwargs = { | ||||
|             "users": { | ||||
|                 "default": list, | ||||
|             }, | ||||
|             # TODO: This field isn't unique on the database which is hard to backport | ||||
|             # hence we just validate the uniqueness here | ||||
|             "name": {"validators": [UniqueValidator(Group.objects.all())]}, | ||||
|             } | ||||
|         } | ||||
|  | ||||
|  | ||||
| @ -134,7 +114,7 @@ class GroupFilter(FilterSet): | ||||
|         try: | ||||
|             value = loads(value) | ||||
|         except ValueError: | ||||
|             raise ValidationError(detail="filter: failed to parse JSON") from None | ||||
|             raise ValidationError(detail="filter: failed to parse JSON") | ||||
|         if not isinstance(value, dict): | ||||
|             raise ValidationError(detail="filter: value must be key:value mapping") | ||||
|         qs = {} | ||||
| @ -151,49 +131,23 @@ class GroupFilter(FilterSet): | ||||
|         fields = ["name", "is_superuser", "members_by_pk", "attributes", "members_by_username"] | ||||
|  | ||||
|  | ||||
| class UserAccountSerializer(PassiveSerializer): | ||||
|     """Account adding/removing operations""" | ||||
|  | ||||
|     pk = IntegerField(required=True) | ||||
|  | ||||
|  | ||||
| class GroupViewSet(UsedByMixin, ModelViewSet): | ||||
|     """Group Viewset""" | ||||
|  | ||||
|     class UserAccountSerializer(PassiveSerializer): | ||||
|         """Account adding/removing operations""" | ||||
|  | ||||
|         pk = IntegerField(required=True) | ||||
|  | ||||
|     queryset = Group.objects.none() | ||||
|     # pylint: disable=no-member | ||||
|     queryset = Group.objects.all().select_related("parent").prefetch_related("users") | ||||
|     serializer_class = GroupSerializer | ||||
|     search_fields = ["name", "is_superuser"] | ||||
|     filterset_class = GroupFilter | ||||
|     ordering = ["name"] | ||||
|  | ||||
|     def get_queryset(self): | ||||
|         base_qs = Group.objects.all().select_related("parent").prefetch_related("roles") | ||||
|  | ||||
|         if self.serializer_class(context={"request": self.request})._should_include_users: | ||||
|             base_qs = base_qs.prefetch_related("users") | ||||
|         else: | ||||
|             base_qs = base_qs.prefetch_related( | ||||
|                 Prefetch("users", queryset=User.objects.all().only("id")) | ||||
|             ) | ||||
|  | ||||
|         return base_qs | ||||
|  | ||||
|     @extend_schema( | ||||
|         parameters=[ | ||||
|             OpenApiParameter("include_users", bool, default=True), | ||||
|         ] | ||||
|     ) | ||||
|     def list(self, request, *args, **kwargs): | ||||
|         return super().list(request, *args, **kwargs) | ||||
|  | ||||
|     @extend_schema( | ||||
|         parameters=[ | ||||
|             OpenApiParameter("include_users", bool, default=True), | ||||
|         ] | ||||
|     ) | ||||
|     def retrieve(self, request, *args, **kwargs): | ||||
|         return super().retrieve(request, *args, **kwargs) | ||||
|  | ||||
|     @permission_required("authentik_core.add_user_to_group") | ||||
|     @permission_required(None, ["authentik_core.add_user"]) | ||||
|     @extend_schema( | ||||
|         request=UserAccountSerializer, | ||||
|         responses={ | ||||
| @ -201,13 +155,7 @@ class GroupViewSet(UsedByMixin, ModelViewSet): | ||||
|             404: OpenApiResponse(description="User not found"), | ||||
|         }, | ||||
|     ) | ||||
|     @action( | ||||
|         detail=True, | ||||
|         methods=["POST"], | ||||
|         pagination_class=None, | ||||
|         filter_backends=[], | ||||
|         permission_classes=[], | ||||
|     ) | ||||
|     @action(detail=True, methods=["POST"], pagination_class=None, filter_backends=[]) | ||||
|     def add_user(self, request: Request, pk: str) -> Response: | ||||
|         """Add user to group""" | ||||
|         group: Group = self.get_object() | ||||
| @ -223,7 +171,7 @@ class GroupViewSet(UsedByMixin, ModelViewSet): | ||||
|         group.users.add(user) | ||||
|         return Response(status=204) | ||||
|  | ||||
|     @permission_required("authentik_core.remove_user_from_group") | ||||
|     @permission_required(None, ["authentik_core.add_user"]) | ||||
|     @extend_schema( | ||||
|         request=UserAccountSerializer, | ||||
|         responses={ | ||||
| @ -231,13 +179,7 @@ class GroupViewSet(UsedByMixin, ModelViewSet): | ||||
|             404: OpenApiResponse(description="User not found"), | ||||
|         }, | ||||
|     ) | ||||
|     @action( | ||||
|         detail=True, | ||||
|         methods=["POST"], | ||||
|         pagination_class=None, | ||||
|         filter_backends=[], | ||||
|         permission_classes=[], | ||||
|     ) | ||||
|     @action(detail=True, methods=["POST"], pagination_class=None, filter_backends=[]) | ||||
|     def remove_user(self, request: Request, pk: str) -> Response: | ||||
|         """Remove user from group""" | ||||
|         group: Group = self.get_object() | ||||
|  | ||||
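| Note: the `include_users` handling removed in the hunks above defaults to returning group members unless the caller explicitly passes `false`; the toggle logic, restated as a standalone helper (names illustrative): | ||||
|  | ||||
| ```python | ||||
| def should_include(params: dict[str, str], key: str = "include_users") -> bool: | ||||
|     """Anything other than an explicit "false" keeps the expensive field included.""" | ||||
|     return str(params.get(key, "true")).lower() == "true" | ||||
|  | ||||
|  | ||||
| assert should_include({}) is True | ||||
| assert should_include({"include_users": "FALSE"}) is False | ||||
| ``` | ||||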
| @ -1,79 +0,0 @@ | ||||
| """API Utilities""" | ||||
|  | ||||
| from drf_spectacular.utils import extend_schema | ||||
| from rest_framework.decorators import action | ||||
| from rest_framework.fields import ( | ||||
|     BooleanField, | ||||
|     CharField, | ||||
| ) | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
|  | ||||
| from authentik.core.api.utils import PassiveSerializer | ||||
| from authentik.enterprise.apps import EnterpriseConfig | ||||
| from authentik.lib.utils.reflection import all_subclasses | ||||
|  | ||||
|  | ||||
| class TypeCreateSerializer(PassiveSerializer): | ||||
|     """Types of an object that can be created""" | ||||
|  | ||||
|     name = CharField(required=True) | ||||
|     description = CharField(required=True) | ||||
|     component = CharField(required=True) | ||||
|     model_name = CharField(required=True) | ||||
|  | ||||
|     icon_url = CharField(required=False) | ||||
|     requires_enterprise = BooleanField(default=False) | ||||
|  | ||||
|  | ||||
| class CreatableType: | ||||
|     """Class to inherit from to mark a model as creatable, even if the model itself is marked | ||||
|     as abstract""" | ||||
|  | ||||
|  | ||||
| class NonCreatableType: | ||||
|     """Class to inherit from to mark a model as non-creatable even if it is not abstract""" | ||||
|  | ||||
|  | ||||
| class TypesMixin: | ||||
|     """Mixin which adds an API endpoint to list all possible types that can be created""" | ||||
|  | ||||
|     @extend_schema(responses={200: TypeCreateSerializer(many=True)}) | ||||
|     @action(detail=False, pagination_class=None, filter_backends=[]) | ||||
|     def types(self, request: Request, additional: list[dict] | None = None) -> Response: | ||||
|         """Get all creatable types""" | ||||
|         data = [] | ||||
|         for subclass in all_subclasses(self.queryset.model): | ||||
|             instance = None | ||||
|             if subclass._meta.abstract: | ||||
|                 if not issubclass(subclass, CreatableType): | ||||
|                     continue | ||||
|                 # Circumvent the django protection for not being able to instantiate | ||||
|                 # abstract models. We need a model instance to access .component | ||||
|                 # and further down .icon_url | ||||
|                 instance = subclass.__new__(subclass) | ||||
|                 # Django re-sets abstract = False so we need to override that | ||||
|                 instance.Meta.abstract = True | ||||
|             else: | ||||
|                 if issubclass(subclass, NonCreatableType): | ||||
|                     continue | ||||
|                 instance = subclass() | ||||
|             try: | ||||
|                 data.append( | ||||
|                     { | ||||
|                         "name": subclass._meta.verbose_name, | ||||
|                         "description": subclass.__doc__, | ||||
|                         "component": instance.component, | ||||
|                         "model_name": subclass._meta.model_name, | ||||
|                         "icon_url": getattr(instance, "icon_url", None), | ||||
|                         "requires_enterprise": isinstance( | ||||
|                             subclass._meta.app_config, EnterpriseConfig | ||||
|                         ), | ||||
|                     } | ||||
|                 ) | ||||
|             except NotImplementedError: | ||||
|                 continue | ||||
|         if additional: | ||||
|             data.extend(additional) | ||||
|         data = sorted(data, key=lambda x: x["name"]) | ||||
|         return Response(TypeCreateSerializer(data, many=True).data) | ||||
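|  | ||||
| Note: `types()` above builds an instance of an abstract model via `subclass.__new__(subclass)` because, as the inline comment explains, `__new__` allocates the object without running `__init__`, which is where Django's protection against instantiating abstract models would otherwise apply. A plain-Python illustration of the same bypass: | ||||
|  | ||||
| ```python | ||||
| class AbstractLike: | ||||
|     """Stand-in for an abstract model; the instantiation guard lives in __init__.""" | ||||
|  | ||||
|     component = "ak-example-form" | ||||
|  | ||||
|     def __init__(self): | ||||
|         raise TypeError("abstract; cannot be instantiated") | ||||
|  | ||||
|  | ||||
| # __new__ allocates the instance without ever calling __init__, so the guard | ||||
| # above never fires while class attributes stay reachable. | ||||
| instance = AbstractLike.__new__(AbstractLike) | ||||
| print(instance.component)  # ak-example-form | ||||
| ``` | ||||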
| @ -8,23 +8,19 @@ from guardian.shortcuts import get_objects_for_user | ||||
| from rest_framework import mixins | ||||
| from rest_framework.decorators import action | ||||
| from rest_framework.exceptions import PermissionDenied | ||||
| from rest_framework.fields import BooleanField, CharField, SerializerMethodField | ||||
| from rest_framework.relations import PrimaryKeyRelatedField | ||||
| from rest_framework.fields import BooleanField, CharField | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.serializers import ModelSerializer, SerializerMethodField | ||||
| from rest_framework.viewsets import GenericViewSet | ||||
|  | ||||
| from authentik.blueprints.api import ManagedSerializer | ||||
| from authentik.core.api.object_types import TypesMixin | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import ( | ||||
|     MetaNameSerializer, | ||||
|     ModelSerializer, | ||||
|     PassiveSerializer, | ||||
| ) | ||||
| from authentik.core.api.utils import MetaNameSerializer, PassiveSerializer, TypeCreateSerializer | ||||
| from authentik.core.expression.evaluator import PropertyMappingEvaluator | ||||
| from authentik.core.models import Group, PropertyMapping, User | ||||
| from authentik.core.models import PropertyMapping | ||||
| from authentik.events.utils import sanitize_item | ||||
| from authentik.lib.utils.reflection import all_subclasses | ||||
| from authentik.policies.api.exec import PolicyTestSerializer | ||||
| from authentik.rbac.decorators import permission_required | ||||
|  | ||||
| @ -68,7 +64,6 @@ class PropertyMappingSerializer(ManagedSerializer, ModelSerializer, MetaNameSeri | ||||
|  | ||||
|  | ||||
| class PropertyMappingViewSet( | ||||
|     TypesMixin, | ||||
|     mixins.RetrieveModelMixin, | ||||
|     mixins.DestroyModelMixin, | ||||
|     UsedByMixin, | ||||
| @ -77,15 +72,7 @@ class PropertyMappingViewSet( | ||||
| ): | ||||
|     """PropertyMapping Viewset""" | ||||
|  | ||||
|     class PropertyMappingTestSerializer(PolicyTestSerializer): | ||||
|         """Test property mapping execution for a user/group with context""" | ||||
|  | ||||
|         user = PrimaryKeyRelatedField(queryset=User.objects.all(), required=False, allow_null=True) | ||||
|         group = PrimaryKeyRelatedField( | ||||
|             queryset=Group.objects.all(), required=False, allow_null=True | ||||
|         ) | ||||
|  | ||||
|     queryset = PropertyMapping.objects.select_subclasses() | ||||
|     queryset = PropertyMapping.objects.none() | ||||
|     serializer_class = PropertyMappingSerializer | ||||
|     search_fields = [ | ||||
|         "name", | ||||
| @ -93,9 +80,29 @@ class PropertyMappingViewSet( | ||||
|     filterset_fields = {"managed": ["isnull"]} | ||||
|     ordering = ["name"] | ||||
|  | ||||
|     def get_queryset(self):  # pragma: no cover | ||||
|         return PropertyMapping.objects.select_subclasses() | ||||
|  | ||||
|     @extend_schema(responses={200: TypeCreateSerializer(many=True)}) | ||||
|     @action(detail=False, pagination_class=None, filter_backends=[]) | ||||
|     def types(self, request: Request) -> Response: | ||||
|         """Get all creatable property-mapping types""" | ||||
|         data = [] | ||||
|         for subclass in all_subclasses(self.queryset.model): | ||||
|             subclass: PropertyMapping | ||||
|             data.append( | ||||
|                 { | ||||
|                     "name": subclass._meta.verbose_name, | ||||
|                     "description": subclass.__doc__, | ||||
|                     "component": subclass().component, | ||||
|                     "model_name": subclass._meta.model_name, | ||||
|                 } | ||||
|             ) | ||||
|         return Response(TypeCreateSerializer(data, many=True).data) | ||||
|  | ||||
|     @permission_required("authentik_core.view_propertymapping") | ||||
|     @extend_schema( | ||||
|         request=PropertyMappingTestSerializer(), | ||||
|         request=PolicyTestSerializer(), | ||||
|         responses={ | ||||
|             200: PropertyMappingTestResultSerializer, | ||||
|             400: OpenApiResponse(description="Invalid parameters"), | ||||
| @ -113,43 +120,33 @@ class PropertyMappingViewSet( | ||||
|         """Test Property Mapping""" | ||||
|         _mapping: PropertyMapping = self.get_object() | ||||
|         # Use `get_subclass` to get correct class and correct `.evaluate` implementation | ||||
|         mapping: PropertyMapping = PropertyMapping.objects.get_subclass(pk=_mapping.pk) | ||||
|         mapping = PropertyMapping.objects.get_subclass(pk=_mapping.pk) | ||||
|         # FIXME: when we separate policy mappings between ones for sources | ||||
|         # and ones for providers, we need to make the user field optional for the source mapping | ||||
|         test_params = self.PropertyMappingTestSerializer(data=request.data) | ||||
|         test_params = PolicyTestSerializer(data=request.data) | ||||
|         if not test_params.is_valid(): | ||||
|             return Response(test_params.errors, status=400) | ||||
|  | ||||
|         format_result = str(request.GET.get("format_result", "false")).lower() == "true" | ||||
|  | ||||
|         context: dict = test_params.validated_data.get("context", {}) | ||||
|         context.setdefault("user", None) | ||||
|  | ||||
|         if user := test_params.validated_data.get("user"): | ||||
|             # User permission check, only allow mapping testing for users that are readable | ||||
|             users = get_objects_for_user(request.user, "authentik_core.view_user").filter( | ||||
|                 pk=user.pk | ||||
|             ) | ||||
|             if not users.exists(): | ||||
|                 raise PermissionDenied() | ||||
|             context["user"] = user | ||||
|         if group := test_params.validated_data.get("group"): | ||||
|             # Group permission check, only allow mapping testing for groups that are readable | ||||
|             groups = get_objects_for_user(request.user, "authentik_core.view_group").filter( | ||||
|                 pk=group.pk | ||||
|             ) | ||||
|             if not groups.exists(): | ||||
|                 raise PermissionDenied() | ||||
|             context["group"] = group | ||||
|         context["request"] = self.request | ||||
|         # User permission check, only allow mapping testing for users that are readable | ||||
|         users = get_objects_for_user(request.user, "authentik_core.view_user").filter( | ||||
|             pk=test_params.validated_data["user"].pk | ||||
|         ) | ||||
|         if not users.exists(): | ||||
|             raise PermissionDenied() | ||||
|  | ||||
|         response_data = {"successful": True, "result": ""} | ||||
|         try: | ||||
|             result = mapping.evaluate(**context) | ||||
|             result = mapping.evaluate( | ||||
|                 users.first(), | ||||
|                 self.request, | ||||
|                 **test_params.validated_data.get("context", {}), | ||||
|             ) | ||||
|             response_data["result"] = dumps( | ||||
|                 sanitize_item(result), indent=(4 if format_result else None) | ||||
|             ) | ||||
|         except Exception as exc: | ||||
|         except Exception as exc:  # pylint: disable=broad-except | ||||
|             response_data["result"] = str(exc) | ||||
|             response_data["successful"] = False | ||||
|         response = PropertyMappingTestResultSerializer(response_data) | ||||
| @ -5,14 +5,20 @@ from django.db.models.query import Q | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from django_filters.filters import BooleanFilter | ||||
| from django_filters.filterset import FilterSet | ||||
| from drf_spectacular.utils import extend_schema | ||||
| from rest_framework import mixins | ||||
| from rest_framework.fields import ReadOnlyField, SerializerMethodField | ||||
| from rest_framework.decorators import action | ||||
| from rest_framework.fields import ReadOnlyField | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.serializers import ModelSerializer, SerializerMethodField | ||||
| from rest_framework.viewsets import GenericViewSet | ||||
|  | ||||
| from authentik.core.api.object_types import TypesMixin | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import MetaNameSerializer, ModelSerializer | ||||
| from authentik.core.api.utils import MetaNameSerializer, TypeCreateSerializer | ||||
| from authentik.core.models import Provider | ||||
| from authentik.enterprise.apps import EnterpriseConfig | ||||
| from authentik.lib.utils.reflection import all_subclasses | ||||
|  | ||||
|  | ||||
| class ProviderSerializer(ModelSerializer, MetaNameSerializer): | ||||
| @ -57,12 +63,8 @@ class ProviderFilter(FilterSet): | ||||
|     """Filter for providers""" | ||||
|  | ||||
|     application__isnull = BooleanFilter(method="filter_application__isnull") | ||||
|     backchannel = BooleanFilter( | ||||
|         method="filter_backchannel", | ||||
|         label=_( | ||||
|             "When not set all providers are returned. When set to true, only backchannel " | ||||
|             "providers are returned. When set to false, backchannel providers are excluded" | ||||
|         ), | ||||
|     backchannel_only = BooleanFilter( | ||||
|         method="filter_backchannel_only", | ||||
|     ) | ||||
|  | ||||
|     def filter_application__isnull(self, queryset: QuerySet, name, value): | ||||
| @ -73,14 +75,12 @@ class ProviderFilter(FilterSet): | ||||
|             | Q(application__isnull=value) | ||||
|         ) | ||||
|  | ||||
|     def filter_backchannel(self, queryset: QuerySet, name, value): | ||||
|         """By default all providers are returned. When set to true, only backchannel providers are | ||||
|         returned. When set to false, backchannel providers are excluded""" | ||||
|     def filter_backchannel_only(self, queryset: QuerySet, name, value): | ||||
|         """Only return backchannel providers""" | ||||
|         return queryset.filter(is_backchannel=value) | ||||
|  | ||||
|  | ||||
| class ProviderViewSet( | ||||
|     TypesMixin, | ||||
|     mixins.RetrieveModelMixin, | ||||
|     mixins.DestroyModelMixin, | ||||
|     UsedByMixin, | ||||
| @ -99,3 +99,31 @@ class ProviderViewSet( | ||||
|  | ||||
|     def get_queryset(self):  # pragma: no cover | ||||
|         return Provider.objects.select_subclasses() | ||||
|  | ||||
|     @extend_schema(responses={200: TypeCreateSerializer(many=True)}) | ||||
|     @action(detail=False, pagination_class=None, filter_backends=[]) | ||||
|     def types(self, request: Request) -> Response: | ||||
|         """Get all creatable provider types""" | ||||
|         data = [] | ||||
|         for subclass in all_subclasses(self.queryset.model): | ||||
|             subclass: Provider | ||||
|             if subclass._meta.abstract: | ||||
|                 continue | ||||
|             data.append( | ||||
|                 { | ||||
|                     "name": subclass._meta.verbose_name, | ||||
|                     "description": subclass.__doc__, | ||||
|                     "component": subclass().component, | ||||
|                     "model_name": subclass._meta.model_name, | ||||
|                     "requires_enterprise": isinstance(subclass._meta.app_config, EnterpriseConfig), | ||||
|                 } | ||||
|             ) | ||||
|         data.append( | ||||
|             { | ||||
|                 "name": _("SAML Provider from Metadata"), | ||||
|                 "description": _("Create a SAML Provider by importing its Metadata."), | ||||
|                 "component": "ak-provider-saml-import-form", | ||||
|                 "model_name": "", | ||||
|             } | ||||
|         ) | ||||
|         return Response(TypeCreateSerializer(data, many=True).data) | ||||
|  | ||||
| @ -1,6 +1,6 @@ | ||||
| """Source API Views""" | ||||
|  | ||||
| from collections.abc import Iterable | ||||
| from typing import Iterable | ||||
|  | ||||
| from django_filters.rest_framework import DjangoFilterBackend | ||||
| from drf_spectacular.utils import OpenApiResponse, extend_schema | ||||
| @ -11,14 +11,14 @@ from rest_framework.filters import OrderingFilter, SearchFilter | ||||
| from rest_framework.parsers import MultiPartParser | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.serializers import ModelSerializer | ||||
| from rest_framework.viewsets import GenericViewSet | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions | ||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | ||||
| from authentik.core.api.object_types import TypesMixin | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import MetaNameSerializer, ModelSerializer | ||||
| from authentik.core.api.utils import MetaNameSerializer, TypeCreateSerializer | ||||
| from authentik.core.models import Source, UserSourceConnection | ||||
| from authentik.core.types import UserSettingSerializer | ||||
| from authentik.lib.utils.file import ( | ||||
| @ -27,6 +27,7 @@ from authentik.lib.utils.file import ( | ||||
|     set_file, | ||||
|     set_file_url, | ||||
| ) | ||||
| from authentik.lib.utils.reflection import all_subclasses | ||||
| from authentik.policies.engine import PolicyEngine | ||||
| from authentik.rbac.decorators import permission_required | ||||
|  | ||||
| @ -73,7 +74,6 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer): | ||||
|  | ||||
|  | ||||
| class SourceViewSet( | ||||
|     TypesMixin, | ||||
|     mixins.RetrieveModelMixin, | ||||
|     mixins.DestroyModelMixin, | ||||
|     UsedByMixin, | ||||
| @ -132,6 +132,30 @@ class SourceViewSet( | ||||
|         source: Source = self.get_object() | ||||
|         return set_file_url(request, source, "icon") | ||||
|  | ||||
|     @extend_schema(responses={200: TypeCreateSerializer(many=True)}) | ||||
|     @action(detail=False, pagination_class=None, filter_backends=[]) | ||||
|     def types(self, request: Request) -> Response: | ||||
|         """Get all creatable source types""" | ||||
|         data = [] | ||||
|         for subclass in all_subclasses(self.queryset.model): | ||||
|             subclass: Source | ||||
|             component = "" | ||||
|             if len(subclass.__subclasses__()) > 0: | ||||
|                 continue | ||||
|             if subclass._meta.abstract: | ||||
|                 component = subclass.__bases__[0]().component | ||||
|             else: | ||||
|                 component = subclass().component | ||||
|             data.append( | ||||
|                 { | ||||
|                     "name": subclass._meta.verbose_name, | ||||
|                     "description": subclass.__doc__, | ||||
|                     "component": component, | ||||
|                     "model_name": subclass._meta.model_name, | ||||
|                 } | ||||
|             ) | ||||
|         return Response(TypeCreateSerializer(data, many=True).data) | ||||
|  | ||||
|     @extend_schema(responses={200: UserSettingSerializer(many=True)}) | ||||
|     @action(detail=False, pagination_class=None, filter_backends=[]) | ||||
|     def user_settings(self, request: Request) -> Response: | ||||
|  | ||||
| @ -2,7 +2,6 @@ | ||||
|  | ||||
| from typing import Any | ||||
|  | ||||
| from django.utils.timezone import now | ||||
| from django_filters.rest_framework import DjangoFilterBackend | ||||
| from drf_spectacular.utils import OpenApiResponse, extend_schema, inline_serializer | ||||
| from guardian.shortcuts import assign_perm, get_anonymous_user | ||||
| @ -12,6 +11,7 @@ from rest_framework.fields import CharField | ||||
| from rest_framework.filters import OrderingFilter, SearchFilter | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.serializers import ModelSerializer | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
|  | ||||
| from authentik.api.authorization import OwnerSuperuserPermissions | ||||
| @ -19,18 +19,10 @@ from authentik.blueprints.api import ManagedSerializer | ||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.users import UserSerializer | ||||
| from authentik.core.api.utils import ModelSerializer, PassiveSerializer | ||||
| from authentik.core.models import ( | ||||
|     USER_ATTRIBUTE_TOKEN_EXPIRING, | ||||
|     USER_ATTRIBUTE_TOKEN_MAXIMUM_LIFETIME, | ||||
|     Token, | ||||
|     TokenIntents, | ||||
|     User, | ||||
|     default_token_duration, | ||||
| ) | ||||
| from authentik.core.api.utils import PassiveSerializer | ||||
| from authentik.core.models import USER_ATTRIBUTE_TOKEN_EXPIRING, Token, TokenIntents | ||||
| from authentik.events.models import Event, EventAction | ||||
| from authentik.events.utils import model_to_dict | ||||
| from authentik.lib.utils.time import timedelta_from_string | ||||
| from authentik.rbac.decorators import permission_required | ||||
|  | ||||
|  | ||||
| @ -44,13 +36,6 @@ class TokenSerializer(ManagedSerializer, ModelSerializer): | ||||
|         if SERIALIZER_CONTEXT_BLUEPRINT in self.context: | ||||
|             self.fields["key"] = CharField(required=False) | ||||
|  | ||||
|     def validate_user(self, user: User): | ||||
|         """Ensure user of token cannot be changed""" | ||||
|         if self.instance and self.instance.user_id: | ||||
|             if user.pk != self.instance.user_id: | ||||
|                 raise ValidationError("User cannot be changed") | ||||
|         return user | ||||
|  | ||||
|     def validate(self, attrs: dict[Any, str]) -> dict[Any, str]: | ||||
|         """Ensure only API or App password tokens are created.""" | ||||
|         request: Request = self.context.get("request") | ||||
| @ -64,32 +49,6 @@ class TokenSerializer(ManagedSerializer, ModelSerializer): | ||||
|         attrs.setdefault("intent", TokenIntents.INTENT_API) | ||||
|         if attrs.get("intent") not in [TokenIntents.INTENT_API, TokenIntents.INTENT_APP_PASSWORD]: | ||||
|             raise ValidationError({"intent": f"Invalid intent {attrs.get('intent')}"}) | ||||
|  | ||||
|         if attrs.get("intent") == TokenIntents.INTENT_APP_PASSWORD: | ||||
|             # user IS in attrs | ||||
|             user: User = attrs.get("user") | ||||
|             max_token_lifetime = user.group_attributes(request).get( | ||||
|                 USER_ATTRIBUTE_TOKEN_MAXIMUM_LIFETIME, | ||||
|             ) | ||||
|             max_token_lifetime_dt = default_token_duration() | ||||
|             if max_token_lifetime is not None: | ||||
|                 try: | ||||
|                     max_token_lifetime_dt = now() + timedelta_from_string(max_token_lifetime) | ||||
|                 except ValueError: | ||||
|                     pass | ||||
|  | ||||
|             if "expires" in attrs and attrs.get("expires") > max_token_lifetime_dt: | ||||
|                 raise ValidationError( | ||||
|                     { | ||||
|                         "expires": ( | ||||
|                             f"Token expires exceeds maximum lifetime ({max_token_lifetime_dt} UTC)." | ||||
|                         ) | ||||
|                     } | ||||
|                 ) | ||||
|         elif attrs.get("intent") == TokenIntents.INTENT_API: | ||||
|             # For API tokens, expires cannot be overridden | ||||
|             attrs["expires"] = default_token_duration() | ||||
|  | ||||
|         return attrs | ||||
|  | ||||
|     class Meta: | ||||
|  | ||||
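| Note: the removed validation above clamps an app-password token's `expires` to a per-user maximum lifetime and forces the default duration for API tokens. A rough standalone restatement of the clamp (the 30-minute fallback and the helper name are assumptions, not authentik's exact defaults): | ||||
|  | ||||
| ```python | ||||
| from datetime import datetime, timedelta, timezone | ||||
| from typing import Optional | ||||
|  | ||||
|  | ||||
| def clamp_expiry(requested: datetime, max_lifetime: Optional[timedelta]) -> datetime: | ||||
|     """Reject an expiry later than now() plus the user's maximum token lifetime.""" | ||||
|     ceiling = datetime.now(timezone.utc) + (max_lifetime or timedelta(minutes=30)) | ||||
|     if requested > ceiling: | ||||
|         raise ValueError(f"Token expires exceeds maximum lifetime ({ceiling} UTC).") | ||||
|     return requested | ||||
| ``` | ||||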
| @ -65,7 +65,7 @@ class TransactionApplicationSerializer(PassiveSerializer): | ||||
|                 raise ValidationError("Invalid provider model") | ||||
|             self._provider_model = model | ||||
|         except LookupError: | ||||
|             raise ValidationError("Invalid provider model") from None | ||||
|             raise ValidationError("Invalid provider model") | ||||
|         return fq_model_name | ||||
|  | ||||
|     def validate(self, attrs: dict) -> dict: | ||||
| @ -106,7 +106,7 @@ class TransactionApplicationSerializer(PassiveSerializer): | ||||
|                 { | ||||
|                     exc.entry_id: exc.validation_error.detail, | ||||
|                 } | ||||
|             ) from None | ||||
|             ) | ||||
|         return blueprint | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -39,12 +39,12 @@ def get_delete_action(manager: Manager) -> str: | ||||
|     """Get the delete action from the Foreign key, falls back to cascade""" | ||||
|     if hasattr(manager, "field"): | ||||
|         if manager.field.remote_field.on_delete.__name__ == SET_NULL.__name__: | ||||
|             return DeleteAction.SET_NULL.value | ||||
|             return DeleteAction.SET_NULL.name | ||||
|         if manager.field.remote_field.on_delete.__name__ == SET_DEFAULT.__name__: | ||||
|             return DeleteAction.SET_DEFAULT.value | ||||
|             return DeleteAction.SET_DEFAULT.name | ||||
|     if hasattr(manager, "source_field"): | ||||
|         return DeleteAction.CASCADE_MANY.value | ||||
|     return DeleteAction.CASCADE.value | ||||
|         return DeleteAction.CASCADE_MANY.name | ||||
|     return DeleteAction.CASCADE.name | ||||
|  | ||||
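| Note: the hunk above changes which side of the enum member `get_delete_action` serializes (`.value` vs `.name`); for reference, with an illustrative enum rather than the real `DeleteAction`: | ||||
|  | ||||
| ```python | ||||
| from enum import Enum | ||||
|  | ||||
|  | ||||
| class DeleteAction(Enum):  # illustrative stand-in, not the real enum | ||||
|     SET_NULL = "set_null" | ||||
|  | ||||
|  | ||||
| # .name is the member's identifier, .value its assigned payload. | ||||
| assert DeleteAction.SET_NULL.name == "SET_NULL" | ||||
| assert DeleteAction.SET_NULL.value == "set_null" | ||||
| ``` | ||||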
|  | ||||
| class UsedByMixin: | ||||
| @ -54,6 +54,7 @@ class UsedByMixin: | ||||
|         responses={200: UsedBySerializer(many=True)}, | ||||
|     ) | ||||
|     @action(detail=True, pagination_class=None, filter_backends=[]) | ||||
|     # pylint: disable=too-many-locals | ||||
|     def used_by(self, request: Request, *args, **kwargs) -> Response: | ||||
|         """Get a list of all objects that use this object""" | ||||
|         model: Model = self.get_object() | ||||
|  | ||||
| @ -2,7 +2,7 @@ | ||||
|  | ||||
| from datetime import timedelta | ||||
| from json import loads | ||||
| from typing import Any | ||||
| from typing import Any, Optional | ||||
|  | ||||
| from django.contrib.auth import update_session_auth_hash | ||||
| from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||
| @ -40,6 +40,7 @@ from rest_framework.serializers import ( | ||||
|     BooleanField, | ||||
|     DateTimeField, | ||||
|     ListSerializer, | ||||
|     ModelSerializer, | ||||
|     PrimaryKeyRelatedField, | ||||
|     ValidationError, | ||||
| ) | ||||
| @ -51,12 +52,7 @@ from authentik.admin.api.metrics import CoordinateSerializer | ||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT | ||||
| from authentik.brands.models import Brand | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import ( | ||||
|     JSONDictField, | ||||
|     LinkSerializer, | ||||
|     ModelSerializer, | ||||
|     PassiveSerializer, | ||||
| ) | ||||
| from authentik.core.api.utils import JSONDictField, LinkSerializer, PassiveSerializer | ||||
| from authentik.core.middleware import ( | ||||
|     SESSION_KEY_IMPERSONATE_ORIGINAL_USER, | ||||
|     SESSION_KEY_IMPERSONATE_USER, | ||||
| @ -89,7 +85,7 @@ class UserGroupSerializer(ModelSerializer): | ||||
|     """Simplified Group Serializer for user's groups""" | ||||
|  | ||||
|     attributes = JSONDictField(required=False) | ||||
|     parent_name = CharField(source="parent.name", read_only=True, allow_null=True) | ||||
|     parent_name = CharField(source="parent.name", read_only=True) | ||||
|  | ||||
|     class Meta: | ||||
|         model = Group | ||||
| @ -117,26 +113,13 @@ class UserSerializer(ModelSerializer): | ||||
|         queryset=Group.objects.all().order_by("name"), | ||||
|         default=list, | ||||
|     ) | ||||
|     groups_obj = SerializerMethodField(allow_null=True) | ||||
|     groups_obj = ListSerializer(child=UserGroupSerializer(), read_only=True, source="ak_groups") | ||||
|     uid = CharField(read_only=True) | ||||
|     username = CharField( | ||||
|         max_length=150, | ||||
|         validators=[UniqueValidator(queryset=User.objects.all().order_by("username"))], | ||||
|     ) | ||||
|  | ||||
|     @property | ||||
|     def _should_include_groups(self) -> bool: | ||||
|         request: Request = self.context.get("request", None) | ||||
|         if not request: | ||||
|             return True | ||||
|         return str(request.query_params.get("include_groups", "true")).lower() == "true" | ||||
|  | ||||
|     @extend_schema_field(UserGroupSerializer(many=True)) | ||||
|     def get_groups_obj(self, instance: User) -> list[UserGroupSerializer] | None: | ||||
|         if not self._should_include_groups: | ||||
|             return None | ||||
|         return UserGroupSerializer(instance.ak_groups, many=True).data | ||||
|  | ||||
|     def __init__(self, *args, **kwargs): | ||||
|         super().__init__(*args, **kwargs) | ||||
|         if SERIALIZER_CONTEXT_BLUEPRINT in self.context: | ||||
| @ -159,7 +142,7 @@ class UserSerializer(ModelSerializer): | ||||
|         self._set_password(instance, password) | ||||
|         return instance | ||||
|  | ||||
|     def _set_password(self, instance: User, password: str | None): | ||||
|     def _set_password(self, instance: User, password: Optional[str]): | ||||
|         """Set password of user if we're in a blueprint context, and if it's an empty | ||||
|         string then use an unusable password""" | ||||
|         if SERIALIZER_CONTEXT_BLUEPRINT in self.context and password: | ||||
| @ -171,7 +154,7 @@ class UserSerializer(ModelSerializer): | ||||
|  | ||||
|     def get_avatar(self, user: User) -> str: | ||||
|         """User's avatar, either a http/https URL or a data URI""" | ||||
|         return get_avatar(user, self.context.get("request")) | ||||
|         return get_avatar(user, self.context["request"]) | ||||
|  | ||||
|     def validate_path(self, path: str) -> str: | ||||
|         """Validate path""" | ||||
| @ -235,7 +218,7 @@ class UserSelfSerializer(ModelSerializer): | ||||
|  | ||||
|     def get_avatar(self, user: User) -> str: | ||||
|         """User's avatar, either a http/https URL or a data URI""" | ||||
|         return get_avatar(user, self.context.get("request")) | ||||
|         return get_avatar(user, self.context["request"]) | ||||
|  | ||||
|     @extend_schema_field( | ||||
|         ListSerializer( | ||||
| @ -375,7 +358,7 @@ class UsersFilter(FilterSet): | ||||
|         try: | ||||
|             value = loads(value) | ||||
|         except ValueError: | ||||
|             raise ValidationError(detail="filter: failed to parse JSON") from None | ||||
|             raise ValidationError(detail="filter: failed to parse JSON") | ||||
|         if not isinstance(value, dict): | ||||
|             raise ValidationError(detail="filter: value must be key:value mapping") | ||||
|         qs = {} | ||||
| @ -411,28 +394,18 @@ class UserViewSet(UsedByMixin, ModelViewSet): | ||||
|     search_fields = ["username", "name", "is_active", "email", "uuid"] | ||||
|     filterset_class = UsersFilter | ||||
|  | ||||
|     def get_queryset(self): | ||||
|         base_qs = User.objects.all().exclude_anonymous() | ||||
|         if self.serializer_class(context={"request": self.request})._should_include_groups: | ||||
|             base_qs = base_qs.prefetch_related("ak_groups") | ||||
|         return base_qs | ||||
|     def get_queryset(self):  # pragma: no cover | ||||
|         return User.objects.all().exclude_anonymous().prefetch_related("ak_groups") | ||||
|  | ||||
|     @extend_schema( | ||||
|         parameters=[ | ||||
|             OpenApiParameter("include_groups", bool, default=True), | ||||
|         ] | ||||
|     ) | ||||
|     def list(self, request, *args, **kwargs): | ||||
|         return super().list(request, *args, **kwargs) | ||||
|  | ||||
|     def _create_recovery_link(self) -> tuple[str, Token]: | ||||
|     def _create_recovery_link(self) -> tuple[Optional[str], Optional[Token]]: | ||||
|         """Create a recovery link (when the current brand has a recovery flow set), | ||||
|         that can either be shown to an admin or sent to the user directly""" | ||||
|         brand: Brand = self.request._request.brand | ||||
|         # Check that there is a recovery flow, if not return an error | ||||
|         flow = brand.flow_recovery | ||||
|         if not flow: | ||||
|             raise ValidationError({"non_field_errors": "No recovery flow set."}) | ||||
|             LOGGER.debug("No recovery flow set") | ||||
|             return None, None | ||||
|         user: User = self.get_object() | ||||
|         planner = FlowPlanner(flow) | ||||
|         planner.allow_empty_flows = True | ||||
| @ -444,9 +417,8 @@ class UserViewSet(UsedByMixin, ModelViewSet): | ||||
|                 }, | ||||
|             ) | ||||
|         except FlowNonApplicableException: | ||||
|             raise ValidationError( | ||||
|                 {"non_field_errors": "Recovery flow not applicable to user"} | ||||
|             ) from None | ||||
|             LOGGER.warning("Recovery flow not applicable to user") | ||||
|             return None, None | ||||
|         token, __ = FlowToken.objects.update_or_create( | ||||
|             identifier=f"{user.uid}-password-reset", | ||||
|             defaults={ | ||||
| @ -591,13 +563,16 @@ class UserViewSet(UsedByMixin, ModelViewSet): | ||||
|     @extend_schema( | ||||
|         responses={ | ||||
|             "200": LinkSerializer(many=False), | ||||
|             "404": LinkSerializer(many=False), | ||||
|         }, | ||||
|         request=None, | ||||
|     ) | ||||
|     @action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"]) | ||||
|     @action(detail=True, pagination_class=None, filter_backends=[]) | ||||
|     def recovery(self, request: Request, pk: int) -> Response: | ||||
|         """Create a temporary link that a user can use to recover their accounts""" | ||||
|         link, _ = self._create_recovery_link() | ||||
|         if not link: | ||||
|             LOGGER.debug("Couldn't create token") | ||||
|             return Response({"link": ""}, status=404) | ||||
|         return Response({"link": link}) | ||||
|  | ||||
|     @permission_required("authentik_core.reset_user_password") | ||||
| @ -612,28 +587,31 @@ class UserViewSet(UsedByMixin, ModelViewSet): | ||||
|         ], | ||||
|         responses={ | ||||
|             "204": OpenApiResponse(description="Successfully sent recover email"), | ||||
|             "404": OpenApiResponse(description="Bad request"), | ||||
|         }, | ||||
|         request=None, | ||||
|     ) | ||||
|     @action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"]) | ||||
|     @action(detail=True, pagination_class=None, filter_backends=[]) | ||||
|     def recovery_email(self, request: Request, pk: int) -> Response: | ||||
|         """Create a temporary link that a user can use to recover their accounts""" | ||||
|         for_user: User = self.get_object() | ||||
|         if for_user.email == "": | ||||
|             LOGGER.debug("User doesn't have an email address") | ||||
|             raise ValidationError({"non_field_errors": "User does not have an email address set."}) | ||||
|             return Response(status=404) | ||||
|         link, token = self._create_recovery_link() | ||||
|         if not link: | ||||
|             LOGGER.debug("Couldn't create token") | ||||
|             return Response(status=404) | ||||
|         # Lookup the email stage to assure the current user can access it | ||||
|         stages = get_objects_for_user( | ||||
|             request.user, "authentik_stages_email.view_emailstage" | ||||
|         ).filter(pk=request.query_params.get("email_stage")) | ||||
|         if not stages.exists(): | ||||
|             LOGGER.debug("Email stage does not exist/user has no permissions") | ||||
|             raise ValidationError({"non_field_errors": "Email stage does not exist."}) | ||||
|             return Response(status=404) | ||||
|         email_stage: EmailStage = stages.first() | ||||
|         message = TemplateEmailMessage( | ||||
|             subject=_(email_stage.subject), | ||||
|             to=[(for_user.name, for_user.email)], | ||||
|             to=[for_user.email], | ||||
|             template_name=email_stage.template, | ||||
|             language=for_user.locale(request), | ||||
|             template_context={ | ||||
|  | ||||
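The recipient change above swaps a `(name, email)` tuple for a bare address. For reference, a display-name recipient is conventionally rendered with the standard library as shown below; this is illustrative only and not part of `TemplateEmailMessage`:

```python
from email.utils import formataddr


def recipient(name: str, address: str) -> str:
    # Produces 'Jane Doe <jane@example.com>'; falls back to the bare address
    # when no display name is available.
    return formataddr((name, address)) if name else address


print(recipient("Jane Doe", "jane@example.com"))
```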
| @ -6,19 +6,8 @@ from django.db.models import Model | ||||
| from drf_spectacular.extensions import OpenApiSerializerFieldExtension | ||||
| from drf_spectacular.plumbing import build_basic_type | ||||
| from drf_spectacular.types import OpenApiTypes | ||||
| from rest_framework.fields import ( | ||||
|     CharField, | ||||
|     IntegerField, | ||||
|     JSONField, | ||||
|     SerializerMethodField, | ||||
| ) | ||||
| from rest_framework.serializers import ModelSerializer as BaseModelSerializer | ||||
| from rest_framework.serializers import ( | ||||
|     Serializer, | ||||
|     ValidationError, | ||||
|     model_meta, | ||||
|     raise_errors_on_nested_writes, | ||||
| ) | ||||
| from rest_framework.fields import BooleanField, CharField, IntegerField, JSONField | ||||
| from rest_framework.serializers import Serializer, SerializerMethodField, ValidationError | ||||
|  | ||||
|  | ||||
| def is_dict(value: Any): | ||||
| @ -28,39 +17,6 @@ def is_dict(value: Any): | ||||
|     raise ValidationError("Value must be a dictionary, and not have any duplicate keys.") | ||||
|  | ||||
|  | ||||
| class ModelSerializer(BaseModelSerializer): | ||||
|  | ||||
|     def update(self, instance: Model, validated_data): | ||||
|         raise_errors_on_nested_writes("update", self, validated_data) | ||||
|         info = model_meta.get_field_info(instance) | ||||
|  | ||||
|         # Simply set each attribute on the instance, and then save it. | ||||
|         # Note that unlike `.create()` we don't need to treat many-to-many | ||||
|         # relationships as being a special case. During updates we already | ||||
|         # have an instance pk for the relationships to be associated with. | ||||
|         m2m_fields = [] | ||||
|         for attr, value in validated_data.items(): | ||||
|             if attr in info.relations and info.relations[attr].to_many: | ||||
|                 m2m_fields.append((attr, value)) | ||||
|             else: | ||||
|                 setattr(instance, attr, value) | ||||
|  | ||||
|         instance.save() | ||||
|  | ||||
|         # Note that many-to-many fields are set after updating instance. | ||||
|         # Setting m2m fields triggers signals which could potentially change | ||||
|         # updated instance and we do not want it to collide with .update() | ||||
|         for attr, value in m2m_fields: | ||||
|             field = getattr(instance, attr) | ||||
|             # We can't check for inheritance here as m2m managers are generated dynamically | ||||
|             if field.__class__.__name__ == "RelatedManager": | ||||
|                 field.set(value, bulk=False) | ||||
|             else: | ||||
|                 field.set(value) | ||||
|  | ||||
|         return instance | ||||
|  | ||||
|  | ||||
| class JSONDictField(JSONField): | ||||
|     """JSON Field which only allows dictionaries""" | ||||
|  | ||||
| @ -112,6 +68,16 @@ class MetaNameSerializer(PassiveSerializer): | ||||
|         return f"{obj._meta.app_label}.{obj._meta.model_name}" | ||||
|  | ||||
|  | ||||
| class TypeCreateSerializer(PassiveSerializer): | ||||
|     """Types of an object that can be created""" | ||||
|  | ||||
|     name = CharField(required=True) | ||||
|     description = CharField(required=True) | ||||
|     component = CharField(required=True) | ||||
|     model_name = CharField(required=True) | ||||
|     requires_enterprise = BooleanField(default=False) | ||||
|  | ||||
|  | ||||
| class CacheSerializer(PassiveSerializer): | ||||
|     """Generic cache stats for an object""" | ||||
|  | ||||
|  | ||||
| @ -14,16 +14,14 @@ class AuthentikCoreConfig(ManagedAppConfig): | ||||
|     mountpoint = "" | ||||
|     default = True | ||||
|  | ||||
|     @ManagedAppConfig.reconcile_global | ||||
|     def debug_worker_hook(self): | ||||
|     def reconcile_global_debug_worker_hook(self): | ||||
|         """Dispatch startup tasks inline when debugging""" | ||||
|         if settings.DEBUG: | ||||
|             from authentik.root.celery import worker_ready_hook | ||||
|  | ||||
|             worker_ready_hook() | ||||
|  | ||||
|     @ManagedAppConfig.reconcile_tenant | ||||
|     def source_inbuilt(self): | ||||
|     def reconcile_tenant_source_inbuilt(self): | ||||
|         """Reconcile inbuilt source""" | ||||
|         from authentik.core.models import Source | ||||
|  | ||||
|  | ||||
| @ -1,6 +1,6 @@ | ||||
| """Authenticate with tokens""" | ||||
|  | ||||
| from typing import Any | ||||
| from typing import Any, Optional | ||||
|  | ||||
| from django.contrib.auth.backends import ModelBackend | ||||
| from django.http.request import HttpRequest | ||||
| @ -16,24 +16,23 @@ class InbuiltBackend(ModelBackend): | ||||
|     """Inbuilt backend""" | ||||
|  | ||||
|     def authenticate( | ||||
|         self, request: HttpRequest, username: str | None, password: str | None, **kwargs: Any | ||||
|     ) -> User | None: | ||||
|         self, request: HttpRequest, username: Optional[str], password: Optional[str], **kwargs: Any | ||||
|     ) -> Optional[User]: | ||||
|         user = super().authenticate(request, username=username, password=password, **kwargs) | ||||
|         if not user: | ||||
|             return None | ||||
|         self.set_method("password", request) | ||||
|         return user | ||||
|  | ||||
|     def set_method(self, method: str, request: HttpRequest | None, **kwargs): | ||||
|     def set_method(self, method: str, request: Optional[HttpRequest], **kwargs): | ||||
|         """Set method data on current flow, if possible""" | ||||
|         if not request: | ||||
|             return | ||||
|         # Since we can't directly pass other variables to signals, and we want to log the method | ||||
|         # and the token used, we assume we're running in a flow and set a variable in the context | ||||
|         flow_plan: FlowPlan = request.session.get(SESSION_KEY_PLAN, FlowPlan("")) | ||||
|         flow_plan.context.setdefault(PLAN_CONTEXT_METHOD, method) | ||||
|         flow_plan.context.setdefault(PLAN_CONTEXT_METHOD_ARGS, {}) | ||||
|         flow_plan.context[PLAN_CONTEXT_METHOD_ARGS].update(cleanse_dict(sanitize_dict(kwargs))) | ||||
|         flow_plan.context[PLAN_CONTEXT_METHOD] = method | ||||
|         flow_plan.context[PLAN_CONTEXT_METHOD_ARGS] = cleanse_dict(sanitize_dict(kwargs)) | ||||
|         request.session[SESSION_KEY_PLAN] = flow_plan | ||||
|  | ||||
|  | ||||
| @ -41,18 +40,18 @@ class TokenBackend(InbuiltBackend): | ||||
|     """Authenticate with token""" | ||||
|  | ||||
|     def authenticate( | ||||
|         self, request: HttpRequest, username: str | None, password: str | None, **kwargs: Any | ||||
|     ) -> User | None: | ||||
|         self, request: HttpRequest, username: Optional[str], password: Optional[str], **kwargs: Any | ||||
|     ) -> Optional[User]: | ||||
|         try: | ||||
|  | ||||
|             # pylint: disable=no-member | ||||
|             user = User._default_manager.get_by_natural_key(username) | ||||
|  | ||||
|         # pylint: disable=no-member | ||||
|         except User.DoesNotExist: | ||||
|             # Run the default password hasher once to reduce the timing | ||||
|             # difference between an existing and a nonexistent user (#20760). | ||||
|             User().set_password(password) | ||||
|             return None | ||||
|  | ||||
|         # pylint: disable=no-member | ||||
|         tokens = Token.filter_not_expired( | ||||
|             user=user, key=password, intent=TokenIntents.INTENT_APP_PASSWORD | ||||
|         ) | ||||
|  | ||||
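The `except User.DoesNotExist` branch above runs the default password hasher on a throwaway `User()` so that a lookup for a nonexistent user costs roughly as much as a real password check. A minimal sketch of the same timing-equalization idea, assuming a configured Django project (`verify` and `_DUMMY_HASH` are illustrative names, not authentik code):

```python
from django.contrib.auth.hashers import check_password, make_password

# Hash computed once; used only to burn comparable CPU time when no stored
# hash exists for the supplied username.
_DUMMY_HASH = make_password("not-a-real-password")


def verify(stored_hash: str | None, candidate: str) -> bool:
    """Check a candidate password without revealing, via timing, whether the user exists."""
    if stored_hash is None:
        # Same cost as a genuine check; the result is intentionally discarded.
        check_password(candidate, _DUMMY_HASH)
        return False
    return check_password(candidate, stored_hash)
```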
| @ -38,6 +38,6 @@ class TokenOutpostMiddleware: | ||||
|                 raise DenyConnection() | ||||
|         except AuthenticationFailed as exc: | ||||
|             LOGGER.warning("Failed to authenticate", exc=exc) | ||||
|             raise DenyConnection() from None | ||||
|             raise DenyConnection() | ||||
|  | ||||
|         scope["user"] = user | ||||
|  | ||||
							
								
								
									
authentik/core/exceptions.py (new file, 7 lines)
							| @ -0,0 +1,7 @@ | ||||
| """authentik core exceptions""" | ||||
|  | ||||
| from authentik.lib.sentry import SentryIgnoredException | ||||
|  | ||||
|  | ||||
| class PropertyMappingExpressionException(SentryIgnoredException): | ||||
|     """Error when a PropertyMapping expression could not be parsed or evaluated.""" | ||||
| @ -1,13 +1,11 @@ | ||||
| """Property Mapping Evaluator""" | ||||
|  | ||||
| from types import CodeType | ||||
| from typing import Any | ||||
| from typing import Any, Optional | ||||
|  | ||||
| from django.db.models import Model | ||||
| from django.http import HttpRequest | ||||
| from prometheus_client import Histogram | ||||
|  | ||||
| from authentik.core.expression.exceptions import SkipObjectException | ||||
| from authentik.core.models import User | ||||
| from authentik.events.models import Event, EventAction | ||||
| from authentik.lib.expression.evaluator import BaseEvaluator | ||||
| @ -25,43 +23,31 @@ class PropertyMappingEvaluator(BaseEvaluator): | ||||
|     """Custom Evaluator that adds some different context variables.""" | ||||
|  | ||||
|     dry_run: bool | ||||
|     model: Model | ||||
|     _compiled: CodeType | None = None | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         model: Model, | ||||
|         user: User | None = None, | ||||
|         request: HttpRequest | None = None, | ||||
|         dry_run: bool | None = False, | ||||
|         user: Optional[User] = None, | ||||
|         request: Optional[HttpRequest] = None, | ||||
|         dry_run: Optional[bool] = False, | ||||
|         **kwargs, | ||||
|     ): | ||||
|         self.model = model | ||||
|         if hasattr(model, "name"): | ||||
|             _filename = model.name | ||||
|         else: | ||||
|             _filename = str(model) | ||||
|         super().__init__(filename=_filename) | ||||
|         self.dry_run = dry_run | ||||
|         self.set_context(user, request, **kwargs) | ||||
|  | ||||
|     def set_context( | ||||
|         self, | ||||
|         user: User | None = None, | ||||
|         request: HttpRequest | None = None, | ||||
|         **kwargs, | ||||
|     ): | ||||
|         req = PolicyRequest(user=User()) | ||||
|         req.obj = self.model | ||||
|         req.obj = model | ||||
|         if user: | ||||
|             req.user = user | ||||
|             self._context["user"] = user | ||||
|         if request: | ||||
|             req.http_request = request | ||||
|         req.context.update(**kwargs) | ||||
|         self._context["request"] = req | ||||
|         req.context.update(**kwargs) | ||||
|         self._context.update(**kwargs) | ||||
|         self._globals["SkipObject"] = SkipObjectException | ||||
|         self.dry_run = dry_run | ||||
|  | ||||
|     def handle_error(self, exc: Exception, expression_source: str): | ||||
|         """Exception Handler""" | ||||
| @ -76,19 +62,10 @@ class PropertyMappingEvaluator(BaseEvaluator): | ||||
|         ) | ||||
|         if "request" in self._context: | ||||
|             req: PolicyRequest = self._context["request"] | ||||
|             if req.http_request: | ||||
|                 event.from_http(req.http_request, req.user) | ||||
|                 return | ||||
|             elif req.user: | ||||
|                 event.set_user(req.user) | ||||
|             event.from_http(req.http_request, req.user) | ||||
|             return | ||||
|         event.save() | ||||
|  | ||||
|     def evaluate(self, *args, **kwargs) -> Any: | ||||
|         with PROPERTY_MAPPING_TIME.labels(mapping_name=self._filename).time(): | ||||
|             return super().evaluate(*args, **kwargs) | ||||
|  | ||||
|     def compile(self, expression: str | None = None) -> Any: | ||||
|         if not self._compiled: | ||||
|             compiled = super().compile(expression or self.model.expression) | ||||
|             self._compiled = compiled | ||||
|         return self._compiled | ||||
|  | ||||
| @ -1,19 +0,0 @@ | ||||
| """authentik core exceptions""" | ||||
|  | ||||
| from authentik.lib.expression.exceptions import ControlFlowException | ||||
| from authentik.lib.sentry import SentryIgnoredException | ||||
|  | ||||
|  | ||||
| class PropertyMappingExpressionException(SentryIgnoredException): | ||||
|     """Error when a PropertyMapping expression could not be parsed or evaluated.""" | ||||
|  | ||||
|     def __init__(self, exc: Exception, mapping) -> None: | ||||
|         super().__init__() | ||||
|         self.exc = exc | ||||
|         self.mapping = mapping | ||||
|  | ||||
|  | ||||
| class SkipObjectException(ControlFlowException): | ||||
|     """Exception which can be raised in a property mapping to skip syncing an object. | ||||
|     Only applies to Property mappings which sync objects, and not on mappings which transitively | ||||
|     apply to a single user""" | ||||
| @ -1,34 +1,10 @@ | ||||
| """custom runserver command""" | ||||
|  | ||||
| from typing import TextIO | ||||
|  | ||||
| from daphne.management.commands.runserver import Command as RunServer | ||||
| from daphne.server import Server | ||||
|  | ||||
| from authentik.root.signals import post_startup, pre_startup, startup | ||||
|  | ||||
|  | ||||
| class SignalServer(Server): | ||||
|     """Server which signals back to authentik when it finished starting up""" | ||||
|  | ||||
|     def __init__(self, *args, **kwargs): | ||||
|         super().__init__(*args, **kwargs) | ||||
|  | ||||
|         def ready_callable(): | ||||
|             pre_startup.send(sender=self) | ||||
|             startup.send(sender=self) | ||||
|             post_startup.send(sender=self) | ||||
|  | ||||
|         self.ready_callable = ready_callable | ||||
|  | ||||
|  | ||||
| class Command(RunServer): | ||||
|     """custom runserver command, which doesn't show the misleading django startup message""" | ||||
|  | ||||
|     server_cls = SignalServer | ||||
|  | ||||
|     def __init__(self, *args, **kwargs): | ||||
|         super().__init__(*args, **kwargs) | ||||
|         # Redirect standard stdout banner from Daphne into the void | ||||
|         # as there are a couple more steps that happen before startup is fully done | ||||
|         self.stdout = TextIO() | ||||
|     def on_bind(self, server_port): | ||||
|         pass | ||||
|  | ||||
| @ -16,8 +16,13 @@ from authentik.events.middleware import should_log_model | ||||
| from authentik.events.models import Event, EventAction | ||||
| from authentik.events.utils import model_to_dict | ||||
|  | ||||
| BANNER_TEXT = f"""### authentik shell ({get_full_version()}) | ||||
| ### Node {platform.node()} | Arch {platform.machine()} | Python {platform.python_version()} """ | ||||
| BANNER_TEXT = """### authentik shell ({authentik}) | ||||
| ### Node {node} | Arch {arch} | Python {python} """.format( | ||||
|     node=platform.node(), | ||||
|     python=platform.python_version(), | ||||
|     arch=platform.machine(), | ||||
|     authentik=get_full_version(), | ||||
| ) | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
| @ -81,7 +86,7 @@ class Command(BaseCommand): | ||||
|  | ||||
|         # If Python code has been passed, execute it and exit. | ||||
|         if options["command"]: | ||||
|  | ||||
|             # pylint: disable=exec-used | ||||
|             exec(options["command"], namespace)  # nosec # noqa | ||||
|             return | ||||
|  | ||||
| @ -94,7 +99,7 @@ class Command(BaseCommand): | ||||
|         else: | ||||
|             try: | ||||
|                 hook() | ||||
|             except Exception: | ||||
|             except Exception:  # pylint: disable=broad-except | ||||
|                 # Match the behavior of the cpython shell where an error in | ||||
|                 # sys.__interactivehook__ prints a warning and the exception | ||||
|                 # and continues. | ||||
|  | ||||
| @ -1,7 +1,7 @@ | ||||
| """authentik admin Middleware to impersonate users""" | ||||
|  | ||||
| from collections.abc import Callable | ||||
| from contextvars import ContextVar | ||||
| from typing import Callable, Optional | ||||
| from uuid import uuid4 | ||||
|  | ||||
| from django.http import HttpRequest, HttpResponse | ||||
| @ -15,9 +15,9 @@ RESPONSE_HEADER_ID = "X-authentik-id" | ||||
| KEY_AUTH_VIA = "auth_via" | ||||
| KEY_USER = "user" | ||||
|  | ||||
| CTX_REQUEST_ID = ContextVar[str | None](STRUCTLOG_KEY_PREFIX + "request_id", default=None) | ||||
| CTX_HOST = ContextVar[str | None](STRUCTLOG_KEY_PREFIX + "host", default=None) | ||||
| CTX_AUTH_VIA = ContextVar[str | None](STRUCTLOG_KEY_PREFIX + KEY_AUTH_VIA, default=None) | ||||
| CTX_REQUEST_ID = ContextVar[Optional[str]](STRUCTLOG_KEY_PREFIX + "request_id", default=None) | ||||
| CTX_HOST = ContextVar[Optional[str]](STRUCTLOG_KEY_PREFIX + "host", default=None) | ||||
| CTX_AUTH_VIA = ContextVar[Optional[str]](STRUCTLOG_KEY_PREFIX + KEY_AUTH_VIA, default=None) | ||||
|  | ||||
|  | ||||
| class ImpersonateMiddleware: | ||||
| @ -55,7 +55,7 @@ class RequestIDMiddleware: | ||||
|     def __call__(self, request: HttpRequest) -> HttpResponse: | ||||
|         if not hasattr(request, "request_id"): | ||||
|             request_id = uuid4().hex | ||||
|             request.request_id = request_id | ||||
|             setattr(request, "request_id", request_id) | ||||
|             CTX_REQUEST_ID.set(request_id) | ||||
|             CTX_HOST.set(request.get_host()) | ||||
|             set_tag("authentik.request_id", request_id) | ||||
| @ -67,7 +67,7 @@ class RequestIDMiddleware: | ||||
|         response = self.get_response(request) | ||||
|  | ||||
|         response[RESPONSE_HEADER_ID] = request.request_id | ||||
|         response.ak_context = {} | ||||
|         setattr(response, "ak_context", {}) | ||||
|         response.ak_context["request_id"] = CTX_REQUEST_ID.get() | ||||
|         response.ak_context["host"] = CTX_HOST.get() | ||||
|         response.ak_context[KEY_AUTH_VIA] = CTX_AUTH_VIA.get() | ||||
|  | ||||
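The middleware above keeps the request id, host, and auth method in module-level `ContextVar`s so that anything running in the same request context (for example structlog processors) can read them without passing the request around. A minimal, self-contained sketch of that pattern, with illustrative names rather than authentik's:

```python
from contextvars import ContextVar
from typing import Optional
from uuid import uuid4

CTX_REQUEST_ID: ContextVar[Optional[str]] = ContextVar("request_id", default=None)


def handle_request() -> str:
    # Set once at the start of request handling ...
    CTX_REQUEST_ID.set(uuid4().hex)
    return do_work()


def do_work() -> str:
    # ... and read anywhere downstream in the same context, with a fallback.
    return f"request_id={CTX_REQUEST_ID.get() or '-'}"


if __name__ == "__main__":
    print(handle_request())
```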
| @ -5,7 +5,6 @@ from django.db import migrations, models | ||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||
|  | ||||
| import authentik.core.models | ||||
| from authentik.lib.generators import generate_id | ||||
|  | ||||
|  | ||||
| def set_default_token_key(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||
| @ -17,10 +16,6 @@ def set_default_token_key(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||
|         token.save() | ||||
|  | ||||
|  | ||||
| def default_token_key(): | ||||
|     return generate_id(60) | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|     replaces = [ | ||||
|         ("authentik_core", "0012_auto_20201003_1737"), | ||||
| @ -67,7 +62,7 @@ class Migration(migrations.Migration): | ||||
|         migrations.AddField( | ||||
|             model_name="token", | ||||
|             name="key", | ||||
|             field=models.TextField(default=default_token_key), | ||||
|             field=models.TextField(default=authentik.core.models.default_token_key), | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="token", | ||||
|  | ||||
| @ -7,13 +7,11 @@ from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||
|  | ||||
|  | ||||
| def backport_is_backchannel(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||
|     db_alias = schema_editor.connection.alias | ||||
|     from authentik.providers.ldap.models import LDAPProvider | ||||
|     from authentik.providers.scim.models import SCIMProvider | ||||
|     from authentik.core.models import BackchannelProvider | ||||
|  | ||||
|     for model in [LDAPProvider, SCIMProvider]: | ||||
|     for model in BackchannelProvider.__subclasses__(): | ||||
|         try: | ||||
|             for obj in model.objects.using(db_alias).only("is_backchannel"): | ||||
|             for obj in model.objects.only("is_backchannel"): | ||||
|                 obj.is_backchannel = True | ||||
|                 obj.save() | ||||
|         except (DatabaseError, InternalError, ProgrammingError): | ||||
|  | ||||
| @ -1,31 +0,0 @@ | ||||
| # Generated by Django 5.0.2 on 2024-02-29 10:15 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
| import authentik.core.models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_core", "0033_alter_user_options"), | ||||
|         ("authentik_tenants", "0002_tenant_default_token_duration_and_more"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name="authenticatedsession", | ||||
|             name="expires", | ||||
|             field=models.DateTimeField(default=None, null=True), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="token", | ||||
|             name="expires", | ||||
|             field=models.DateTimeField(default=None, null=True), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="token", | ||||
|             name="key", | ||||
|             field=models.TextField(default=authentik.core.models.default_token_key), | ||||
|         ), | ||||
|     ] | ||||
| @ -1,52 +0,0 @@ | ||||
| # Generated by Django 5.0.4 on 2024-04-15 11:28 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("auth", "0012_alter_user_first_name_max_length"), | ||||
|         ("authentik_core", "0034_alter_authenticatedsession_expires_and_more"), | ||||
|         ("authentik_rbac", "0003_alter_systempermission_options"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterModelOptions( | ||||
|             name="group", | ||||
|             options={ | ||||
|                 "permissions": [ | ||||
|                     ("add_user_to_group", "Add user to group"), | ||||
|                     ("remove_user_from_group", "Remove user from group"), | ||||
|                 ], | ||||
|                 "verbose_name": "Group", | ||||
|                 "verbose_name_plural": "Groups", | ||||
|             }, | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="group", | ||||
|             index=models.Index(fields=["name"], name="authentik_c_name_9ba8e4_idx"), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="user", | ||||
|             index=models.Index(fields=["last_login"], name="authentik_c_last_lo_f0179a_idx"), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="user", | ||||
|             index=models.Index( | ||||
|                 fields=["password_change_date"], name="authentik_c_passwor_eec915_idx" | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="user", | ||||
|             index=models.Index(fields=["uuid"], name="authentik_c_uuid_3dae2f_idx"), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="user", | ||||
|             index=models.Index(fields=["path"], name="authentik_c_path_b1f502_idx"), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="user", | ||||
|             index=models.Index(fields=["type"], name="authentik_c_type_ecf60d_idx"), | ||||
|         ), | ||||
|     ] | ||||
| @ -1,6 +1,6 @@ | ||||
| """authentik core models""" | ||||
|  | ||||
| from datetime import datetime | ||||
| from datetime import timedelta | ||||
| from hashlib import sha256 | ||||
| from typing import Any, Optional, Self | ||||
| from uuid import uuid4 | ||||
| @ -15,7 +15,6 @@ from django.http import HttpRequest | ||||
| from django.utils.functional import SimpleLazyObject, cached_property | ||||
| from django.utils.timezone import now | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from django_cte import CTEQuerySet, With | ||||
| from guardian.conf import settings | ||||
| from guardian.mixins import GuardianUserMixin | ||||
| from model_utils.managers import InheritanceManager | ||||
| @ -23,20 +22,18 @@ from rest_framework.serializers import Serializer | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.blueprints.models import ManagedModel | ||||
| from authentik.core.expression.exceptions import PropertyMappingExpressionException | ||||
| from authentik.core.exceptions import PropertyMappingExpressionException | ||||
| from authentik.core.types import UILoginButton, UserSettingSerializer | ||||
| from authentik.lib.avatars import get_avatar | ||||
| from authentik.lib.expression.exceptions import ControlFlowException | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.lib.models import ( | ||||
|     CreatedUpdatedModel, | ||||
|     DomainlessFormattedURLValidator, | ||||
|     SerializerModel, | ||||
| ) | ||||
| from authentik.lib.utils.time import timedelta_from_string | ||||
| from authentik.policies.models import PolicyBindingModel | ||||
| from authentik.tenants.models import DEFAULT_TOKEN_DURATION, DEFAULT_TOKEN_LENGTH | ||||
| from authentik.tenants.utils import get_current_tenant, get_unique_identifier | ||||
| from authentik.root.install_id import get_install_id | ||||
|  | ||||
| LOGGER = get_logger() | ||||
| USER_ATTRIBUTE_DEBUG = "goauthentik.io/user/debug" | ||||
| @ -45,44 +42,33 @@ USER_ATTRIBUTE_EXPIRES = "goauthentik.io/user/expires" | ||||
| USER_ATTRIBUTE_DELETE_ON_LOGOUT = "goauthentik.io/user/delete-on-logout" | ||||
| USER_ATTRIBUTE_SOURCES = "goauthentik.io/user/sources" | ||||
| USER_ATTRIBUTE_TOKEN_EXPIRING = "goauthentik.io/user/token-expires"  # nosec | ||||
| USER_ATTRIBUTE_TOKEN_MAXIMUM_LIFETIME = "goauthentik.io/user/token-maximum-lifetime"  # nosec | ||||
| USER_ATTRIBUTE_CHANGE_USERNAME = "goauthentik.io/user/can-change-username" | ||||
| USER_ATTRIBUTE_CHANGE_NAME = "goauthentik.io/user/can-change-name" | ||||
| USER_ATTRIBUTE_CHANGE_EMAIL = "goauthentik.io/user/can-change-email" | ||||
| USER_PATH_SYSTEM_PREFIX = "goauthentik.io" | ||||
| USER_PATH_SERVICE_ACCOUNT = USER_PATH_SYSTEM_PREFIX + "/service-accounts" | ||||
|  | ||||
|  | ||||
| options.DEFAULT_NAMES = options.DEFAULT_NAMES + ( | ||||
|     # used_by API that allows models to specify if they shadow an object | ||||
|     # for example the proxy provider which is built on top of an oauth provider | ||||
|     "authentik_used_by_shadows", | ||||
|     # List fields for which changes are not logged (due to them having dedicated objects) | ||||
|     # for example user's password and last_login | ||||
|     "authentik_signals_ignored_fields", | ||||
| ) | ||||
|  | ||||
| GROUP_RECURSION_LIMIT = 20 | ||||
|  | ||||
|  | ||||
| def default_token_duration() -> datetime: | ||||
| def default_token_duration(): | ||||
|     """Default duration a Token is valid""" | ||||
|     current_tenant = get_current_tenant() | ||||
|     token_duration = ( | ||||
|         current_tenant.default_token_duration | ||||
|         if hasattr(current_tenant, "default_token_duration") | ||||
|         else DEFAULT_TOKEN_DURATION | ||||
|     ) | ||||
|     return now() + timedelta_from_string(token_duration) | ||||
|     return now() + timedelta(minutes=30) | ||||
|  | ||||
|  | ||||
| def default_token_key() -> str: | ||||
| def default_token_key(): | ||||
|     """Default token key""" | ||||
|     current_tenant = get_current_tenant() | ||||
|     token_length = ( | ||||
|         current_tenant.default_token_length | ||||
|         if hasattr(current_tenant, "default_token_length") | ||||
|         else DEFAULT_TOKEN_LENGTH | ||||
|     ) | ||||
|     # We use generate_id since the chars in the key should be easy | ||||
|     # to use in Emails (for verification) and URLs (for recovery) | ||||
|     return generate_id(token_length) | ||||
|     return generate_id(CONFIG.get_int("default_token_length")) | ||||
|  | ||||
|  | ||||
| class UserTypes(models.TextChoices): | ||||
| @ -100,40 +86,6 @@ class UserTypes(models.TextChoices): | ||||
|     INTERNAL_SERVICE_ACCOUNT = "internal_service_account" | ||||
|  | ||||
|  | ||||
| class GroupQuerySet(CTEQuerySet): | ||||
|     def with_children_recursive(self): | ||||
|         """Recursively get all groups that have the current queryset as parents | ||||
|         or are indirectly related.""" | ||||
|  | ||||
|         def make_cte(cte): | ||||
|             """Build the query that ends up in WITH RECURSIVE""" | ||||
|             # Start from self, aka the current query | ||||
|             # Add a depth attribute to limit the recursion | ||||
|             return self.annotate( | ||||
|                 relative_depth=models.Value(0, output_field=models.IntegerField()) | ||||
|             ).union( | ||||
|                 # Here is the recursive part of the query. cte refers to the previous iteration | ||||
|                 # Only select groups for which the parent is part of the previous iteration | ||||
|                 # and increase the depth | ||||
|                 # Finally, limit the depth | ||||
|                 cte.join(Group, group_uuid=cte.col.parent_id) | ||||
|                 .annotate( | ||||
|                     relative_depth=models.ExpressionWrapper( | ||||
|                         cte.col.relative_depth | ||||
|                         + models.Value(1, output_field=models.IntegerField()), | ||||
|                         output_field=models.IntegerField(), | ||||
|                     ) | ||||
|                 ) | ||||
|                 .filter(relative_depth__lt=GROUP_RECURSION_LIMIT), | ||||
|                 all=True, | ||||
|             ) | ||||
|  | ||||
|         # Build the recursive query, see above | ||||
|         cte = With.recursive(make_cte) | ||||
|         # Return the result, as a usable queryset for Group. | ||||
|         return cte.join(Group, group_uuid=cte.col.group_uuid).with_cte(cte) | ||||
|  | ||||
|  | ||||
| class Group(SerializerModel): | ||||
|     """Group model which supports a basic hierarchy and has attributes""" | ||||
|  | ||||
| @ -156,8 +108,6 @@ class Group(SerializerModel): | ||||
|     ) | ||||
|     attributes = models.JSONField(default=dict, blank=True) | ||||
|  | ||||
|     objects = GroupQuerySet.as_manager() | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> Serializer: | ||||
|         from authentik.core.api.groups import GroupSerializer | ||||
| @ -176,11 +126,36 @@ class Group(SerializerModel): | ||||
|         return user.all_groups().filter(group_uuid=self.group_uuid).exists() | ||||
|  | ||||
|     def children_recursive(self: Self | QuerySet["Group"]) -> QuerySet["Group"]: | ||||
|         """Compatibility layer for Group.objects.with_children_recursive()""" | ||||
|         qs = self | ||||
|         if not isinstance(self, QuerySet): | ||||
|             qs = Group.objects.filter(group_uuid=self.group_uuid) | ||||
|         return qs.with_children_recursive() | ||||
|         """Recursively get all groups that have this as parent or are indirectly related""" | ||||
|         direct_groups = [] | ||||
|         if isinstance(self, QuerySet): | ||||
|             direct_groups = list(x for x in self.all().values_list("pk", flat=True).iterator()) | ||||
|         else: | ||||
|             direct_groups = [self.pk] | ||||
|         if len(direct_groups) < 1: | ||||
|             return Group.objects.none() | ||||
|         query = """ | ||||
|         WITH RECURSIVE parents AS ( | ||||
|             SELECT authentik_core_group.*, 0 AS relative_depth | ||||
|             FROM authentik_core_group | ||||
|             WHERE authentik_core_group.group_uuid = ANY(%s) | ||||
|  | ||||
|             UNION ALL | ||||
|  | ||||
|             SELECT authentik_core_group.*, parents.relative_depth + 1 | ||||
|             FROM authentik_core_group, parents | ||||
|             WHERE ( | ||||
|                 authentik_core_group.group_uuid = parents.parent_id and | ||||
|                 parents.relative_depth < 20 | ||||
|             ) | ||||
|         ) | ||||
|         SELECT group_uuid | ||||
|         FROM parents | ||||
|         GROUP BY group_uuid, name | ||||
|         ORDER BY name; | ||||
|         """ | ||||
|         group_pks = [group.pk for group in Group.objects.raw(query, [direct_groups]).iterator()] | ||||
|         return Group.objects.filter(pk__in=group_pks) | ||||
|  | ||||
|     def __str__(self): | ||||
|         return f"Group {self.name}" | ||||
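Both variants above resolve nested group membership with a recursive query: the removed implementation builds the CTE through `django-cte`, the retained one issues the `WITH RECURSIVE` SQL directly, and each caps recursion at a depth of 20 before joining the result back onto `Group`. A minimal sketch of that recursive-CTE pattern against a toy SQLite table (illustrative schema, not authentik's), here following the `parent_id` link from one group up to its ancestors:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript(
    """
    CREATE TABLE grp (id INTEGER PRIMARY KEY, name TEXT, parent_id INTEGER);
    INSERT INTO grp VALUES (1, 'root', NULL), (2, 'child', 1), (3, 'grandchild', 2);
    """
)

rows = conn.execute(
    """
    WITH RECURSIVE parents AS (
        -- anchor: the starting group, at depth 0
        SELECT id, name, parent_id, 0 AS depth FROM grp WHERE id = ?
        UNION ALL
        -- recursive step: add the parent of every row found so far,
        -- stopping once the depth limit is reached
        SELECT g.id, g.name, g.parent_id, parents.depth + 1
        FROM grp AS g JOIN parents ON g.id = parents.parent_id
        WHERE parents.depth < 20
    )
    SELECT id, name FROM parents ORDER BY id;
    """,
    (3,),
).fetchall()

print(rows)  # [(1, 'root'), (2, 'child'), (3, 'grandchild')]
```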
| @ -192,13 +167,8 @@ class Group(SerializerModel): | ||||
|                 "parent", | ||||
|             ), | ||||
|         ) | ||||
|         indexes = [models.Index(fields=["name"])] | ||||
|         verbose_name = _("Group") | ||||
|         verbose_name_plural = _("Groups") | ||||
|         permissions = [ | ||||
|             ("add_user_to_group", _("Add user to group")), | ||||
|             ("remove_user_from_group", _("Remove user from group")), | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class UserQuerySet(models.QuerySet): | ||||
| @ -247,10 +217,12 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser): | ||||
|         return User._meta.get_field("path").default | ||||
|  | ||||
|     def all_groups(self) -> QuerySet[Group]: | ||||
|         """Recursively get all groups this user is a member of.""" | ||||
|         return self.ak_groups.all().with_children_recursive() | ||||
|         """Recursively get all groups this user is a member of. | ||||
|         At least one query is done to get the direct groups of the user; with nested | ||||
|         groups, at most 3 queries are done""" | ||||
|         return Group.children_recursive(self.ak_groups.all()) | ||||
|  | ||||
|     def group_attributes(self, request: HttpRequest | None = None) -> dict[str, Any]: | ||||
|     def group_attributes(self, request: Optional[HttpRequest] = None) -> dict[str, Any]: | ||||
|         """Get a dictionary containing the attributes from all groups the user belongs to, | ||||
|         including the users attributes""" | ||||
|         final_attributes = {} | ||||
| @ -304,13 +276,13 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser): | ||||
|     @property | ||||
|     def uid(self) -> str: | ||||
|         """Generate a globally unique UID, based on the user ID and the hashed secret key""" | ||||
|         return sha256(f"{self.id}-{get_unique_identifier()}".encode("ascii")).hexdigest() | ||||
|         return sha256(f"{self.id}-{get_install_id()}".encode("ascii")).hexdigest() | ||||
|  | ||||
|     def locale(self, request: HttpRequest | None = None) -> str: | ||||
|     def locale(self, request: Optional[HttpRequest] = None) -> str: | ||||
|         """Get the locale the user has configured""" | ||||
|         try: | ||||
|             return self.attributes.get("settings", {}).get("locale", "") | ||||
|  | ||||
|         # pylint: disable=broad-except | ||||
|         except Exception as exc: | ||||
|             LOGGER.warning("Failed to get default locale", exc=exc) | ||||
|         if request: | ||||
| @ -333,12 +305,13 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser): | ||||
|             ("preview_user", _("Can preview user data sent to providers")), | ||||
|             ("view_user_applications", _("View applications the user has access to")), | ||||
|         ] | ||||
|         indexes = [ | ||||
|             models.Index(fields=["last_login"]), | ||||
|             models.Index(fields=["password_change_date"]), | ||||
|             models.Index(fields=["uuid"]), | ||||
|             models.Index(fields=["path"]), | ||||
|             models.Index(fields=["type"]), | ||||
|         authentik_signals_ignored_fields = [ | ||||
|             # Logged by the events `password_set` | ||||
|             # the `password_set` action/signal doesn't currently convey which user | ||||
|             # initiated the password change, so for now we'll log two actions | ||||
|             # ("password", "password_change_date"), | ||||
|             # Logged by `login` | ||||
|             ("last_login",), | ||||
|         ] | ||||
|  | ||||
|  | ||||
| @ -385,15 +358,11 @@ class Provider(SerializerModel): | ||||
|     objects = InheritanceManager() | ||||
|  | ||||
|     @property | ||||
|     def launch_url(self) -> str | None: | ||||
|     def launch_url(self) -> Optional[str]: | ||||
|         """URL to this provider and initiate authorization for the user. | ||||
|         Can return None for providers that are not URL-based""" | ||||
|         return None | ||||
|  | ||||
|     @property | ||||
|     def icon_url(self) -> str | None: | ||||
|         return None | ||||
|  | ||||
|     @property | ||||
|     def component(self) -> str: | ||||
|         """Return component used to edit this object""" | ||||
| @ -466,7 +435,7 @@ class Application(SerializerModel, PolicyBindingModel): | ||||
|         return ApplicationSerializer | ||||
|  | ||||
|     @property | ||||
|     def get_meta_icon(self) -> str | None: | ||||
|     def get_meta_icon(self) -> Optional[str]: | ||||
|         """Get the URL to the App Icon image. If the name is /static or starts with http | ||||
|         it is returned as-is""" | ||||
|         if not self.meta_icon: | ||||
| @ -475,7 +444,7 @@ class Application(SerializerModel, PolicyBindingModel): | ||||
|             return self.meta_icon.name | ||||
|         return self.meta_icon.url | ||||
|  | ||||
|     def get_launch_url(self, user: Optional["User"] = None) -> str | None: | ||||
|     def get_launch_url(self, user: Optional["User"] = None) -> Optional[str]: | ||||
|         """Get launch URL if set, otherwise attempt to get launch URL based on provider.""" | ||||
|         url = None | ||||
|         if self.meta_launch_url: | ||||
| @ -488,13 +457,13 @@ class Application(SerializerModel, PolicyBindingModel): | ||||
|                 user = user._wrapped | ||||
|             try: | ||||
|                 return url % user.__dict__ | ||||
|  | ||||
|             # pylint: disable=broad-except | ||||
|             except Exception as exc: | ||||
|                 LOGGER.warning("Failed to format launch url", exc=exc) | ||||
|                 return url | ||||
|         return url | ||||
|  | ||||
|     def get_provider(self) -> Provider | None: | ||||
|     def get_provider(self) -> Optional[Provider]: | ||||
|         """Get casted provider instance""" | ||||
|         if not self.provider: | ||||
|             return None | ||||
| @ -582,7 +551,7 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | ||||
|     objects = InheritanceManager() | ||||
|  | ||||
|     @property | ||||
|     def icon_url(self) -> str | None: | ||||
|     def icon_url(self) -> Optional[str]: | ||||
|         """Get the URL to the Icon. If the name is /static or | ||||
|         starts with http it is returned as-is""" | ||||
|         if not self.icon: | ||||
| @ -597,7 +566,7 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | ||||
|             return self.user_path_template % { | ||||
|                 "slug": self.slug, | ||||
|             } | ||||
|  | ||||
|         # pylint: disable=broad-except | ||||
|         except Exception as exc: | ||||
|             LOGGER.warning("Failed to template user path", exc=exc, source=self) | ||||
|             return User.default_path() | ||||
| @ -607,12 +576,12 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | ||||
|         """Return component used to edit this object""" | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     def ui_login_button(self, request: HttpRequest) -> UILoginButton | None: | ||||
|     def ui_login_button(self, request: HttpRequest) -> Optional[UILoginButton]: | ||||
|         """If source uses a http-based flow, return UI Information about the login | ||||
|         button. If source doesn't use http-based flow, return None.""" | ||||
|         return None | ||||
|  | ||||
|     def ui_user_settings(self) -> UserSettingSerializer | None: | ||||
|     def ui_user_settings(self) -> Optional[UserSettingSerializer]: | ||||
|         """Entrypoint to integrate with User settings. Can either return None if no | ||||
|         user settings are available, or UserSettingSerializer.""" | ||||
|         return None | ||||
| @ -648,9 +617,6 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel): | ||||
|         """Get serializer for this model""" | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         return f"User-source connection (user={self.user_id}, source={self.source_id})" | ||||
|  | ||||
|     class Meta: | ||||
|         unique_together = (("user", "source"),) | ||||
|  | ||||
| @ -658,12 +624,9 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel): | ||||
| class ExpiringModel(models.Model): | ||||
|     """Base Model which can expire, and is automatically cleaned up.""" | ||||
|  | ||||
|     expires = models.DateTimeField(default=None, null=True) | ||||
|     expires = models.DateTimeField(default=default_token_duration) | ||||
|     expiring = models.BooleanField(default=True) | ||||
|  | ||||
|     class Meta: | ||||
|         abstract = True | ||||
|  | ||||
|     def expire_action(self, *args, **kwargs): | ||||
|         """Handler which is called when this object is expired. By | ||||
|         default the object is deleted. This is less efficient compared | ||||
| @ -672,7 +635,7 @@ class ExpiringModel(models.Model): | ||||
|         return self.delete(*args, **kwargs) | ||||
|  | ||||
|     @classmethod | ||||
|     def filter_not_expired(cls, **kwargs) -> QuerySet["Token"]: | ||||
|     def filter_not_expired(cls, **kwargs) -> QuerySet: | ||||
|         """Filter for tokens which are not expired yet or are not expiring, | ||||
|         and match filters in `kwargs`""" | ||||
|         for obj in cls.objects.filter(**kwargs).filter(Q(expires__lt=now(), expiring=True)): | ||||
| @ -686,6 +649,9 @@ class ExpiringModel(models.Model): | ||||
|             return False | ||||
|         return now() > self.expires | ||||
|  | ||||
|     class Meta: | ||||
|         abstract = True | ||||
|  | ||||
|  | ||||
| class TokenIntents(models.TextChoices): | ||||
|     """Intents a Token can be created for.""" | ||||
| @ -715,21 +681,6 @@ class Token(SerializerModel, ManagedModel, ExpiringModel): | ||||
|     user = models.ForeignKey("User", on_delete=models.CASCADE, related_name="+") | ||||
|     description = models.TextField(default="", blank=True) | ||||
|  | ||||
|     class Meta: | ||||
|         verbose_name = _("Token") | ||||
|         verbose_name_plural = _("Tokens") | ||||
|         indexes = [ | ||||
|             models.Index(fields=["identifier"]), | ||||
|             models.Index(fields=["key"]), | ||||
|         ] | ||||
|         permissions = [("view_token_key", _("View token's key"))] | ||||
|  | ||||
|     def __str__(self): | ||||
|         description = f"{self.identifier}" | ||||
|         if self.expiring: | ||||
|             description += f" (expires={self.expires})" | ||||
|         return description | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> type[Serializer]: | ||||
|         from authentik.core.api.tokens import TokenSerializer | ||||
| @ -757,6 +708,21 @@ class Token(SerializerModel, ManagedModel, ExpiringModel): | ||||
|             message=f"Token {self.identifier}'s secret was rotated.", | ||||
|         ).save() | ||||
|  | ||||
|     def __str__(self): | ||||
|         description = f"{self.identifier}" | ||||
|         if self.expiring: | ||||
|             description += f" (expires={self.expires})" | ||||
|         return description | ||||
|  | ||||
|     class Meta: | ||||
|         verbose_name = _("Token") | ||||
|         verbose_name_plural = _("Tokens") | ||||
|         indexes = [ | ||||
|             models.Index(fields=["identifier"]), | ||||
|             models.Index(fields=["key"]), | ||||
|         ] | ||||
|         permissions = [("view_token_key", _("View token's key"))] | ||||
|  | ||||
|  | ||||
| class PropertyMapping(SerializerModel, ManagedModel): | ||||
|     """User-defined key -> x mapping which can be used by providers to expose extra data.""" | ||||
| @ -777,17 +743,15 @@ class PropertyMapping(SerializerModel, ManagedModel): | ||||
|         """Get serializer for this model""" | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     def evaluate(self, user: User | None, request: HttpRequest | None, **kwargs) -> Any: | ||||
|     def evaluate(self, user: Optional[User], request: Optional[HttpRequest], **kwargs) -> Any: | ||||
|         """Evaluate `self.expression` using `**kwargs` as Context.""" | ||||
|         from authentik.core.expression.evaluator import PropertyMappingEvaluator | ||||
|  | ||||
|         evaluator = PropertyMappingEvaluator(self, user, request, **kwargs) | ||||
|         try: | ||||
|             return evaluator.evaluate(self.expression) | ||||
|         except ControlFlowException as exc: | ||||
|             raise exc | ||||
|         except Exception as exc: | ||||
|             raise PropertyMappingExpressionException(self, exc) from exc | ||||
|             raise PropertyMappingExpressionException(exc) from exc | ||||
|  | ||||
|     def __str__(self): | ||||
|         return f"Property Mapping {self.name}" | ||||
| @ -815,13 +779,6 @@ class AuthenticatedSession(ExpiringModel): | ||||
|     last_user_agent = models.TextField(blank=True) | ||||
|     last_used = models.DateTimeField(auto_now=True) | ||||
|  | ||||
|     class Meta: | ||||
|         verbose_name = _("Authenticated Session") | ||||
|         verbose_name_plural = _("Authenticated Sessions") | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         return f"Authenticated Session {self.session_key[:10]}" | ||||
|  | ||||
|     @staticmethod | ||||
|     def from_request(request: HttpRequest, user: User) -> Optional["AuthenticatedSession"]: | ||||
|         """Create a new session from a http request""" | ||||
| @ -836,3 +793,7 @@ class AuthenticatedSession(ExpiringModel): | ||||
|             last_user_agent=request.META.get("HTTP_USER_AGENT", ""), | ||||
|             expires=request.session.get_expiry_date(), | ||||
|         ) | ||||
|  | ||||
|     class Meta: | ||||
|         verbose_name = _("Authenticated Session") | ||||
|         verbose_name_plural = _("Authenticated Sessions") | ||||
|  | ||||
| @ -10,14 +10,7 @@ from django.dispatch import receiver | ||||
| from django.http.request import HttpRequest | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.core.models import ( | ||||
|     Application, | ||||
|     AuthenticatedSession, | ||||
|     BackchannelProvider, | ||||
|     ExpiringModel, | ||||
|     User, | ||||
|     default_token_duration, | ||||
| ) | ||||
| from authentik.core.models import Application, AuthenticatedSession, BackchannelProvider, User | ||||
|  | ||||
| # Arguments: user: User, password: str | ||||
| password_changed = Signal() | ||||
| @ -68,12 +61,3 @@ def backchannel_provider_pre_save(sender: type[Model], instance: Model, **_): | ||||
|     if not isinstance(instance, BackchannelProvider): | ||||
|         return | ||||
|     instance.is_backchannel = True | ||||
|  | ||||
|  | ||||
| @receiver(pre_save) | ||||
| def expiring_model_pre_save(sender: type[Model], instance: Model, **_): | ||||
|     """Ensure expires is set on ExpiringModels that are set to expire""" | ||||
|     if not issubclass(sender, ExpiringModel): | ||||
|         return | ||||
|     if instance.expiring and instance.expires is None: | ||||
|         instance.expires = default_token_duration() | ||||
|  | ||||
| @ -1,7 +1,7 @@ | ||||
| """Source decision helper""" | ||||
|  | ||||
| from enum import Enum | ||||
| from typing import Any | ||||
| from typing import Any, Optional | ||||
|  | ||||
| from django.contrib import messages | ||||
| from django.db import IntegrityError | ||||
| @ -13,12 +13,11 @@ from django.utils.translation import gettext as _ | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.core.models import Source, SourceUserMatchingModes, User, UserSourceConnection | ||||
| from authentik.core.sources.stage import PLAN_CONTEXT_SOURCES_CONNECTION, PostSourceStage | ||||
| from authentik.core.sources.stage import PLAN_CONTEXT_SOURCES_CONNECTION, PostUserEnrollmentStage | ||||
| from authentik.events.models import Event, EventAction | ||||
| from authentik.flows.exceptions import FlowNonApplicableException | ||||
| from authentik.flows.models import Flow, FlowToken, Stage, in_memory_stage | ||||
| from authentik.flows.models import Flow, Stage, in_memory_stage | ||||
| from authentik.flows.planner import ( | ||||
|     PLAN_CONTEXT_IS_RESTORED, | ||||
|     PLAN_CONTEXT_PENDING_USER, | ||||
|     PLAN_CONTEXT_REDIRECT, | ||||
|     PLAN_CONTEXT_SOURCE, | ||||
| @ -36,8 +35,6 @@ from authentik.stages.password.stage import PLAN_CONTEXT_AUTHENTICATION_BACKEND | ||||
| from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT | ||||
| from authentik.stages.user_write.stage import PLAN_CONTEXT_USER_PATH | ||||
|  | ||||
| SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token"  # nosec | ||||
|  | ||||
|  | ||||
| class Action(Enum): | ||||
|     """Actions that can be decided based on the request | ||||
| @ -93,13 +90,16 @@ class SourceFlowManager: | ||||
|         self._logger = get_logger().bind(source=source, identifier=identifier) | ||||
|         self.policy_context = {} | ||||
|  | ||||
|     def get_action(self, **kwargs) -> tuple[Action, UserSourceConnection | None]:  # noqa: PLR0911 | ||||
|     # pylint: disable=too-many-return-statements | ||||
|     def get_action(self, **kwargs) -> tuple[Action, Optional[UserSourceConnection]]: | ||||
|         """decide which action should be taken""" | ||||
|         new_connection = self.connection_type(source=self.source, identifier=self.identifier) | ||||
|         # When request is authenticated, always link | ||||
|         if self.request.user.is_authenticated: | ||||
|             new_connection.user = self.request.user | ||||
|             new_connection = self.update_connection(new_connection, **kwargs) | ||||
|             # pylint: disable=no-member | ||||
|             new_connection.save() | ||||
|             return Action.LINK, new_connection | ||||
|  | ||||
|         existing_connections = self.connection_type.objects.filter( | ||||
| @ -146,6 +146,7 @@ class SourceFlowManager: | ||||
|         ]: | ||||
|             new_connection.user = user | ||||
|             new_connection = self.update_connection(new_connection, **kwargs) | ||||
|             new_connection.save() | ||||
|             return Action.LINK, new_connection | ||||
|         if self.source.user_matching_mode in [ | ||||
|             SourceUserMatchingModes.EMAIL_DENY, | ||||
| @ -187,10 +188,8 @@ class SourceFlowManager: | ||||
|         # Default case, assume deny | ||||
|         error = Exception( | ||||
|             _( | ||||
|                 "Request to authenticate with {source} has been denied. Please authenticate " | ||||
|                 "with the source you've previously signed up with.".format_map( | ||||
|                     {"source": self.source.name} | ||||
|                 ) | ||||
|                 "Request to authenticate with %(source)s has been denied. Please authenticate " | ||||
|                 "with the source you've previously signed up with." % {"source": self.source.name} | ||||
|             ), | ||||
|         ) | ||||
|         return self.error_handler(error) | ||||
| @ -206,55 +205,38 @@ class SourceFlowManager: | ||||
|  | ||||
|     def get_stages_to_append(self, flow: Flow) -> list[Stage]: | ||||
|         """Hook to override stages which are appended to the flow""" | ||||
|         return [ | ||||
|             in_memory_stage(PostSourceStage), | ||||
|         ] | ||||
|         if not self.source.enrollment_flow: | ||||
|             return [] | ||||
|         if flow.slug == self.source.enrollment_flow.slug: | ||||
|             return [ | ||||
|                 in_memory_stage(PostUserEnrollmentStage), | ||||
|             ] | ||||
|         return [] | ||||
|  | ||||
|     def _prepare_flow( | ||||
|         self, | ||||
|         flow: Flow | None, | ||||
|         flow: Flow, | ||||
|         connection: UserSourceConnection, | ||||
|         stages: list[StageView] | None = None, | ||||
|         stages: Optional[list[StageView]] = None, | ||||
|         **kwargs, | ||||
|     ) -> HttpResponse: | ||||
|         """Prepare Authentication Plan, redirect user FlowExecutor""" | ||||
|         # Ensure redirect is carried through when user was trying to | ||||
|         # authorize application | ||||
|         final_redirect = self.request.session.get(SESSION_KEY_GET, {}).get( | ||||
|             NEXT_ARG_NAME, "authentik_core:if-user" | ||||
|         ) | ||||
|         kwargs.update( | ||||
|             { | ||||
|                 # Since we authenticate the user by their token, they have no backend set | ||||
|                 PLAN_CONTEXT_AUTHENTICATION_BACKEND: BACKEND_INBUILT, | ||||
|                 PLAN_CONTEXT_SSO: True, | ||||
|                 PLAN_CONTEXT_SOURCE: self.source, | ||||
|                 PLAN_CONTEXT_REDIRECT: final_redirect, | ||||
|                 PLAN_CONTEXT_SOURCES_CONNECTION: connection, | ||||
|             } | ||||
|         ) | ||||
|         kwargs.update(self.policy_context) | ||||
|         if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session: | ||||
|             token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN) | ||||
|             self._logger.info("Replacing source flow with overridden flow", flow=token.flow.slug) | ||||
|             plan = token.plan | ||||
|             plan.context[PLAN_CONTEXT_IS_RESTORED] = token | ||||
|             plan.context.update(kwargs) | ||||
|             for stage in self.get_stages_to_append(flow): | ||||
|                 plan.append_stage(stage) | ||||
|             if stages: | ||||
|                 for stage in stages: | ||||
|                     plan.append_stage(stage) | ||||
|             self.request.session[SESSION_KEY_PLAN] = plan | ||||
|             flow_slug = token.flow.slug | ||||
|             token.delete() | ||||
|             return redirect_with_qs( | ||||
|                 "authentik_core:if-flow", | ||||
|                 self.request.GET, | ||||
|                 flow_slug=flow_slug, | ||||
|             ) | ||||
|         # Ensure redirect is carried through when user was trying to | ||||
|         # authorize application | ||||
|         final_redirect = self.request.session.get(SESSION_KEY_GET, {}).get( | ||||
|             NEXT_ARG_NAME, "authentik_core:if-user" | ||||
|         ) | ||||
|         if PLAN_CONTEXT_REDIRECT not in kwargs: | ||||
|             kwargs[PLAN_CONTEXT_REDIRECT] = final_redirect | ||||
|  | ||||
|         if not flow: | ||||
|             return bad_request_message( | ||||
|                 self.request, | ||||
| @ -262,9 +244,6 @@ class SourceFlowManager: | ||||
|             ) | ||||
|         # We run the Flow planner here so we can pass the Pending user in the context | ||||
|         planner = FlowPlanner(flow) | ||||
|         # We append some stages so the initial flow we get might be empty | ||||
|         planner.allow_empty_flows = True | ||||
|         planner.use_cache = False | ||||
|         plan = planner.plan(self.request, kwargs) | ||||
|         for stage in self.get_stages_to_append(flow): | ||||
|             plan.append_stage(stage) | ||||
| @ -291,9 +270,7 @@ class SourceFlowManager: | ||||
|                 in_memory_stage( | ||||
|                     MessageStage, | ||||
|                     message=_( | ||||
|                         "Successfully authenticated with {source}!".format_map( | ||||
|                             {"source": self.source.name} | ||||
|                         ) | ||||
|                         "Successfully authenticated with %(source)s!" % {"source": self.source.name} | ||||
|                     ), | ||||
|                 ) | ||||
|             ], | ||||
| @ -309,9 +286,7 @@ class SourceFlowManager: | ||||
|         # When request isn't authenticated we jump straight to auth | ||||
|         if not self.request.user.is_authenticated: | ||||
|             return self.handle_auth(connection) | ||||
|         if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session: | ||||
|             return self._prepare_flow(None, connection) | ||||
|         connection.save() | ||||
|         # Connection has already been saved | ||||
|         Event.new( | ||||
|             EventAction.SOURCE_LINKED, | ||||
|             message="Linked Source", | ||||
| @ -319,13 +294,13 @@ class SourceFlowManager: | ||||
|         ).from_http(self.request) | ||||
|         messages.success( | ||||
|             self.request, | ||||
|             _("Successfully linked {source}!".format_map({"source": self.source.name})), | ||||
|             _("Successfully linked %(source)s!" % {"source": self.source.name}), | ||||
|         ) | ||||
|         return redirect( | ||||
|             reverse( | ||||
|                 "authentik_core:if-user", | ||||
|             ) | ||||
|             + "#/settings;page-sources" | ||||
|             + f"#/settings;page-{self.source.slug}" | ||||
|         ) | ||||
|  | ||||
|     def handle_enroll( | ||||
| @ -347,9 +322,7 @@ class SourceFlowManager: | ||||
|                 in_memory_stage( | ||||
|                     MessageStage, | ||||
|                     message=_( | ||||
|                         "Successfully authenticated with {source}!".format_map( | ||||
|                             {"source": self.source.name} | ||||
|                         ) | ||||
|                         "Successfully authenticated with %(source)s!" % {"source": self.source.name} | ||||
|                     ), | ||||
|                 ) | ||||
|             ], | ||||
|  | ||||
| @ -10,7 +10,7 @@ from authentik.flows.stage import StageView | ||||
| PLAN_CONTEXT_SOURCES_CONNECTION = "goauthentik.io/sources/connection" | ||||
|  | ||||
|  | ||||
| class PostSourceStage(StageView): | ||||
| class PostUserEnrollmentStage(StageView): | ||||
|     """Dynamically injected stage which saves the Connection after | ||||
|     the user has been enrolled.""" | ||||
|  | ||||
| @ -21,12 +21,10 @@ class PostSourceStage(StageView): | ||||
|         ] | ||||
|         user: User = self.executor.plan.context[PLAN_CONTEXT_PENDING_USER] | ||||
|         connection.user = user | ||||
|         linked = connection.pk is None | ||||
|         connection.save() | ||||
|         if linked: | ||||
|             Event.new( | ||||
|                 EventAction.SOURCE_LINKED, | ||||
|                 message="Linked Source", | ||||
|                 source=connection.source, | ||||
|             ).from_http(self.request) | ||||
|         Event.new( | ||||
|             EventAction.SOURCE_LINKED, | ||||
|             message="Linked Source", | ||||
|             source=connection.source, | ||||
|         ).from_http(self.request) | ||||
|         return self.executor.stage_ok() | ||||
|  | ||||
| @ -2,9 +2,7 @@ | ||||
|  | ||||
| from datetime import datetime, timedelta | ||||
|  | ||||
| from django.conf import ImproperlyConfigured | ||||
| from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||
| from django.contrib.sessions.backends.db import SessionStore as DBSessionStore | ||||
| from django.core.cache import cache | ||||
| from django.utils.timezone import now | ||||
| from structlog.stdlib import get_logger | ||||
| @ -17,7 +15,6 @@ from authentik.core.models import ( | ||||
|     User, | ||||
| ) | ||||
| from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.root.celery import CELERY_APP | ||||
|  | ||||
| LOGGER = get_logger() | ||||
| @ -40,35 +37,20 @@ def clean_expired_models(self: SystemTask): | ||||
|         messages.append(f"Expired {amount} {cls._meta.verbose_name_plural}") | ||||
|     # Special case | ||||
|     amount = 0 | ||||
|  | ||||
|     # pylint: disable=no-member | ||||
|     for session in AuthenticatedSession.objects.all(): | ||||
|         match CONFIG.get("session_storage", "cache"): | ||||
|             case "cache": | ||||
|                 cache_key = f"{KEY_PREFIX}{session.session_key}" | ||||
|                 value = None | ||||
|                 try: | ||||
|                     value = cache.get(cache_key) | ||||
|  | ||||
|                 except Exception as exc: | ||||
|                     LOGGER.debug("Failed to get session from cache", exc=exc) | ||||
|                 if not value: | ||||
|                     session.delete() | ||||
|                     amount += 1 | ||||
|             case "db": | ||||
|                 if not ( | ||||
|                     DBSessionStore.get_model_class() | ||||
|                     .objects.filter(session_key=session.session_key, expire_date__gt=now()) | ||||
|                     .exists() | ||||
|                 ): | ||||
|                     session.delete() | ||||
|                     amount += 1 | ||||
|             case _: | ||||
|                 # Should never happen, as we check for other values in authentik/root/settings.py | ||||
|                 raise ImproperlyConfigured( | ||||
|                     "Invalid session_storage setting, allowed values are db and cache" | ||||
|                 ) | ||||
|         cache_key = f"{KEY_PREFIX}{session.session_key}" | ||||
|         value = None | ||||
|         try: | ||||
|             value = cache.get(cache_key) | ||||
|         # pylint: disable=broad-except | ||||
|         except Exception as exc: | ||||
|             LOGGER.debug("Failed to get session from cache", exc=exc) | ||||
|         if not value: | ||||
|             session.delete() | ||||
|             amount += 1 | ||||
|     LOGGER.debug("Expired sessions", model=AuthenticatedSession, amount=amount) | ||||
|  | ||||
|     # pylint: disable=no-member | ||||
|     messages.append(f"Expired {amount} {AuthenticatedSession._meta.verbose_name_plural}") | ||||
|     self.set_status(TaskStatus.SUCCESSFUL, *messages) | ||||
|  | ||||
|  | ||||
| @ -10,7 +10,7 @@ | ||||
|         versionSubdomain: "{{ version_subdomain }}", | ||||
|         build: "{{ build }}", | ||||
|     }; | ||||
|     window.addEventListener("DOMContentLoaded", function () { | ||||
|     window.addEventListener("DOMContentLoaded", () => { | ||||
|         {% for message in messages %} | ||||
|         window.dispatchEvent( | ||||
|             new CustomEvent("ak-message", { | ||||
|  | ||||
| @ -1,6 +1,5 @@ | ||||
| {% load static %} | ||||
| {% load i18n %} | ||||
| {% load authentik_core %} | ||||
|  | ||||
| <!DOCTYPE html> | ||||
|  | ||||
| @ -15,8 +14,8 @@ | ||||
|         {% endblock %} | ||||
|         <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}"> | ||||
|         <link rel="stylesheet" type="text/css" href="{% static 'dist/custom.css' %}" data-inject> | ||||
|         {% versioned_script "dist/poly-%v.js" %} | ||||
|         {% versioned_script "dist/standalone/loading/index-%v.js" %} | ||||
|         <script src="{% static 'dist/poly.js' %}?version={{ version }}" type="module"></script> | ||||
|         <script src="{% static 'dist/standalone/loading/index.js' %}?version={{ version }}" type="module"></script> | ||||
|         {% block head %} | ||||
|         {% endblock %} | ||||
|         <meta name="sentry-trace" content="{{ sentry_trace }}" /> | ||||
|  | ||||
| @ -1,9 +1,9 @@ | ||||
| {% extends "base/skeleton.html" %} | ||||
|  | ||||
| {% load authentik_core %} | ||||
| {% load static %} | ||||
|  | ||||
| {% block head %} | ||||
| {% versioned_script "dist/admin/AdminInterface-%v.js" %} | ||||
| <script src="{% static 'dist/admin/AdminInterface.js' %}?version={{ version }}" type="module"></script> | ||||
| <meta name="theme-color" content="#18191a" media="(prefers-color-scheme: dark)"> | ||||
| <meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)"> | ||||
| {% include "base/header_js.html" %} | ||||
|  | ||||
| @ -1,7 +1,6 @@ | ||||
| {% extends "base/skeleton.html" %} | ||||
|  | ||||
| {% load static %} | ||||
| {% load authentik_core %} | ||||
|  | ||||
| {% block head_before %} | ||||
| {{ block.super }} | ||||
| @ -18,7 +17,7 @@ window.authentik.flow = { | ||||
| {% endblock %} | ||||
|  | ||||
| {% block head %} | ||||
| {% versioned_script "dist/flow/FlowInterface-%v.js" %} | ||||
| <script src="{% static 'dist/flow/FlowInterface.js' %}?version={{ version }}" type="module"></script> | ||||
| <style> | ||||
| :root { | ||||
|     --ak-flow-background: url("{{ flow.background_url }}"); | ||||
| @ -1,9 +1,9 @@ | ||||
| {% extends "base/skeleton.html" %} | ||||
|  | ||||
| {% load authentik_core %} | ||||
| {% load static %} | ||||
|  | ||||
| {% block head %} | ||||
| {% versioned_script "dist/user/UserInterface-%v.js" %} | ||||
| <script src="{% static 'dist/user/UserInterface.js' %}?version={{ version }}" type="module"></script> | ||||
| <meta name="theme-color" content="#1c1e21" media="(prefers-color-scheme: light)"> | ||||
| <meta name="theme-color" content="#1c1e21" media="(prefers-color-scheme: dark)"> | ||||
| {% include "base/header_js.html" %} | ||||
|  | ||||
| @ -71,9 +71,9 @@ | ||||
|                 </li> | ||||
|                 {% endfor %} | ||||
|                 <li> | ||||
|                     <span> | ||||
|                     <a href="https://goauthentik.io?utm_source=authentik"> | ||||
|                         {% trans 'Powered by authentik' %} | ||||
|                     </span> | ||||
|                     </a> | ||||
|                 </li> | ||||
|             </ul> | ||||
|         </footer> | ||||
|  | ||||
| @ -1,21 +0,0 @@ | ||||
| """authentik core tags""" | ||||
|  | ||||
| from django import template | ||||
| from django.templatetags.static import static as static_loader | ||||
| from django.utils.safestring import mark_safe | ||||
|  | ||||
| from authentik import get_full_version | ||||
|  | ||||
| register = template.Library() | ||||
|  | ||||
|  | ||||
| @register.simple_tag() | ||||
| def versioned_script(path: str) -> str: | ||||
|     """Wrapper around {% static %} tag that supports setting the version""" | ||||
|     returned_lines = [ | ||||
|         ( | ||||
|             f'<script src="{static_loader(path.replace("%v", get_full_version()))}' | ||||
|             '" type="module"></script>' | ||||
|         ), | ||||
|     ] | ||||
|     return mark_safe("".join(returned_lines))  # nosec | ||||
| @ -1,11 +1,10 @@ | ||||
| """Test Groups API""" | ||||
|  | ||||
| from django.urls.base import reverse | ||||
| from guardian.shortcuts import assign_perm | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import Group, User | ||||
| from authentik.core.tests.utils import create_test_admin_user, create_test_user | ||||
| from authentik.core.tests.utils import create_test_admin_user | ||||
| from authentik.lib.generators import generate_id | ||||
|  | ||||
|  | ||||
| @ -13,33 +12,13 @@ class TestGroupsAPI(APITestCase): | ||||
|     """Test Groups API""" | ||||
|  | ||||
|     def setUp(self) -> None: | ||||
|         self.login_user = create_test_user() | ||||
|         self.admin = create_test_admin_user() | ||||
|         self.user = User.objects.create(username="test-user") | ||||
|  | ||||
|     def test_list_with_users(self): | ||||
|         """Test listing with users""" | ||||
|         admin = create_test_admin_user() | ||||
|         self.client.force_login(admin) | ||||
|         response = self.client.get(reverse("authentik_api:group-list"), {"include_users": "true"}) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|  | ||||
|     def test_retrieve_with_users(self): | ||||
|         """Test retrieve with users""" | ||||
|         admin = create_test_admin_user() | ||||
|         group = Group.objects.create(name=generate_id()) | ||||
|         self.client.force_login(admin) | ||||
|         response = self.client.get( | ||||
|             reverse("authentik_api:group-detail", kwargs={"pk": group.pk}), | ||||
|             {"include_users": "true"}, | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|  | ||||
|     def test_add_user(self): | ||||
|         """Test add_user""" | ||||
|         group = Group.objects.create(name=generate_id()) | ||||
|         assign_perm("authentik_core.add_user_to_group", self.login_user, group) | ||||
|         assign_perm("authentik_core.view_user", self.login_user) | ||||
|         self.client.force_login(self.login_user) | ||||
|         self.client.force_login(self.admin) | ||||
|         res = self.client.post( | ||||
|             reverse("authentik_api:group-add-user", kwargs={"pk": group.pk}), | ||||
|             data={ | ||||
| @ -53,9 +32,7 @@ class TestGroupsAPI(APITestCase): | ||||
|     def test_add_user_404(self): | ||||
|         """Test add_user""" | ||||
|         group = Group.objects.create(name=generate_id()) | ||||
|         assign_perm("authentik_core.add_user_to_group", self.login_user, group) | ||||
|         assign_perm("authentik_core.view_user", self.login_user) | ||||
|         self.client.force_login(self.login_user) | ||||
|         self.client.force_login(self.admin) | ||||
|         res = self.client.post( | ||||
|             reverse("authentik_api:group-add-user", kwargs={"pk": group.pk}), | ||||
|             data={ | ||||
| @ -67,10 +44,8 @@ class TestGroupsAPI(APITestCase): | ||||
|     def test_remove_user(self): | ||||
|         """Test remove_user""" | ||||
|         group = Group.objects.create(name=generate_id()) | ||||
|         assign_perm("authentik_core.remove_user_from_group", self.login_user, group) | ||||
|         assign_perm("authentik_core.view_user", self.login_user) | ||||
|         group.users.add(self.user) | ||||
|         self.client.force_login(self.login_user) | ||||
|         self.client.force_login(self.admin) | ||||
|         res = self.client.post( | ||||
|             reverse("authentik_api:group-remove-user", kwargs={"pk": group.pk}), | ||||
|             data={ | ||||
| @ -84,10 +59,8 @@ class TestGroupsAPI(APITestCase): | ||||
|     def test_remove_user_404(self): | ||||
|         """Test remove_user""" | ||||
|         group = Group.objects.create(name=generate_id()) | ||||
|         assign_perm("authentik_core.remove_user_from_group", self.login_user, group) | ||||
|         assign_perm("authentik_core.view_user", self.login_user) | ||||
|         group.users.add(self.user) | ||||
|         self.client.force_login(self.login_user) | ||||
|         self.client.force_login(self.admin) | ||||
|         res = self.client.post( | ||||
|             reverse("authentik_api:group-remove-user", kwargs={"pk": group.pk}), | ||||
|             data={ | ||||
| @ -99,12 +72,11 @@ class TestGroupsAPI(APITestCase): | ||||
|     def test_parent_self(self): | ||||
|         """Test parent""" | ||||
|         group = Group.objects.create(name=generate_id()) | ||||
|         assign_perm("view_group", self.login_user, group) | ||||
|         assign_perm("change_group", self.login_user, group) | ||||
|         self.client.force_login(self.login_user) | ||||
|         self.client.force_login(self.admin) | ||||
|         res = self.client.patch( | ||||
|             reverse("authentik_api:group-detail", kwargs={"pk": group.pk}), | ||||
|             data={ | ||||
|                 "pk": self.user.pk + 3, | ||||
|                 "parent": group.pk, | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
| @ -1,14 +1,14 @@ | ||||
| """authentik core models tests""" | ||||
|  | ||||
| from collections.abc import Callable | ||||
| from datetime import timedelta | ||||
| from time import sleep | ||||
| from typing import Callable | ||||
|  | ||||
| from django.test import RequestFactory, TestCase | ||||
| from django.utils.timezone import now | ||||
| from freezegun import freeze_time | ||||
| from guardian.shortcuts import get_anonymous_user | ||||
|  | ||||
| from authentik.core.models import Provider, Source, Token | ||||
| from authentik.flows.models import Stage | ||||
| from authentik.lib.utils.reflection import all_subclasses | ||||
|  | ||||
|  | ||||
| @ -17,20 +17,18 @@ class TestModels(TestCase): | ||||
|  | ||||
|     def test_token_expire(self): | ||||
|         """Test token expiring""" | ||||
|         with freeze_time() as freeze: | ||||
|             token = Token.objects.create(expires=now(), user=get_anonymous_user()) | ||||
|             freeze.tick(timedelta(seconds=1)) | ||||
|             self.assertTrue(token.is_expired) | ||||
|         token = Token.objects.create(expires=now(), user=get_anonymous_user()) | ||||
|         sleep(0.5) | ||||
|         self.assertTrue(token.is_expired) | ||||
|  | ||||
|     def test_token_expire_no_expire(self): | ||||
|         """Test token expiring with "expiring" set""" | ||||
|         with freeze_time() as freeze: | ||||
|             token = Token.objects.create(expires=now(), user=get_anonymous_user(), expiring=False) | ||||
|             freeze.tick(timedelta(seconds=1)) | ||||
|             self.assertFalse(token.is_expired) | ||||
|         token = Token.objects.create(expires=now(), user=get_anonymous_user(), expiring=False) | ||||
|         sleep(0.5) | ||||
|         self.assertFalse(token.is_expired) | ||||
|  | ||||
|  | ||||
| def source_tester_factory(test_model: type[Source]) -> Callable: | ||||
| def source_tester_factory(test_model: type[Stage]) -> Callable: | ||||
|     """Test source""" | ||||
|  | ||||
|     factory = RequestFactory() | ||||
| @ -38,19 +36,19 @@ def source_tester_factory(test_model: type[Source]) -> Callable: | ||||
|  | ||||
|     def tester(self: TestModels): | ||||
|         model_class = None | ||||
|         if test_model._meta.abstract: | ||||
|             model_class = [x for x in test_model.__bases__ if issubclass(x, Source)][0]() | ||||
|         if test_model._meta.abstract:  # pragma: no cover | ||||
|             model_class = test_model.__bases__[0]() | ||||
|         else: | ||||
|             model_class = test_model() | ||||
|         model_class.slug = "test" | ||||
|         self.assertIsNotNone(model_class.component) | ||||
|         model_class.ui_login_button(request) | ||||
|         model_class.ui_user_settings() | ||||
|         _ = model_class.ui_login_button(request) | ||||
|         _ = model_class.ui_user_settings() | ||||
|  | ||||
|     return tester | ||||
|  | ||||
|  | ||||
| def provider_tester_factory(test_model: type[Provider]) -> Callable: | ||||
| def provider_tester_factory(test_model: type[Stage]) -> Callable: | ||||
|     """Test provider""" | ||||
|  | ||||
|     def tester(self: TestModels): | ||||
|  | ||||
| @ -3,10 +3,7 @@ | ||||
| from django.test import RequestFactory, TestCase | ||||
| from guardian.shortcuts import get_anonymous_user | ||||
|  | ||||
| from authentik.core.expression.exceptions import ( | ||||
|     PropertyMappingExpressionException, | ||||
|     SkipObjectException, | ||||
| ) | ||||
| from authentik.core.exceptions import PropertyMappingExpressionException | ||||
| from authentik.core.models import PropertyMapping | ||||
| from authentik.core.tests.utils import create_test_admin_user | ||||
| from authentik.events.models import Event, EventAction | ||||
| @ -45,17 +42,6 @@ class TestPropertyMappings(TestCase): | ||||
|         self.assertTrue(events.exists()) | ||||
|         self.assertEqual(len(events), 1) | ||||
|  | ||||
|     def test_expression_skip(self): | ||||
|         """Test expression error""" | ||||
|         expr = "raise SkipObject" | ||||
|         mapping = PropertyMapping.objects.create(name=generate_id(), expression=expr) | ||||
|         with self.assertRaises(SkipObjectException): | ||||
|             mapping.evaluate(None, None) | ||||
|         events = Event.objects.filter( | ||||
|             action=EventAction.PROPERTY_MAPPING_EXCEPTION, context__expression=expr | ||||
|         ) | ||||
|         self.assertFalse(events.exists()) | ||||
|  | ||||
|     def test_expression_error_extended(self): | ||||
|         """Test expression error (with user and http request""" | ||||
|         expr = "return aaa" | ||||
| @ -80,11 +66,14 @@ class TestPropertyMappings(TestCase): | ||||
|             expression="return request.http_request.path", | ||||
|         ) | ||||
|         http_request = self.factory.get("/") | ||||
|         tmpl = f""" | ||||
|         res = ak_call_policy('{expr.name}') | ||||
|         tmpl = ( | ||||
|             """ | ||||
|         res = ak_call_policy('%s') | ||||
|         result = [request.http_request.path, res.raw_result] | ||||
|         return result | ||||
|         """ | ||||
|             % expr.name | ||||
|         ) | ||||
|         evaluator = PropertyMapping(expression=tmpl, name=generate_id()) | ||||
|         res = evaluator.evaluate(self.user, http_request) | ||||
|         self.assertEqual(res, ["/", "/"]) | ||||
|  | ||||
| @ -6,10 +6,9 @@ from django.urls import reverse | ||||
| from rest_framework.serializers import ValidationError | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.api.property_mappings import PropertyMappingSerializer | ||||
| from authentik.core.models import Group, PropertyMapping | ||||
| from authentik.core.api.propertymappings import PropertyMappingSerializer | ||||
| from authentik.core.models import PropertyMapping | ||||
| from authentik.core.tests.utils import create_test_admin_user | ||||
| from authentik.lib.generators import generate_id | ||||
|  | ||||
|  | ||||
| class TestPropertyMappingAPI(APITestCase): | ||||
| @ -17,40 +16,23 @@ class TestPropertyMappingAPI(APITestCase): | ||||
|  | ||||
|     def setUp(self) -> None: | ||||
|         super().setUp() | ||||
|         self.mapping = PropertyMapping.objects.create( | ||||
|             name="dummy", expression="""return {'foo': 'bar'}""" | ||||
|         ) | ||||
|         self.user = create_test_admin_user() | ||||
|         self.client.force_login(self.user) | ||||
|  | ||||
|     def test_test_call(self): | ||||
|         """Test PropertyMappings's test endpoint""" | ||||
|         mapping = PropertyMapping.objects.create( | ||||
|             name="dummy", expression="""return {'foo': 'bar', 'baz': user.username}""" | ||||
|         ) | ||||
|         """Test PropertMappings's test endpoint""" | ||||
|         response = self.client.post( | ||||
|             reverse("authentik_api:propertymapping-test", kwargs={"pk": mapping.pk}), | ||||
|             reverse("authentik_api:propertymapping-test", kwargs={"pk": self.mapping.pk}), | ||||
|             data={ | ||||
|                 "user": self.user.pk, | ||||
|             }, | ||||
|         ) | ||||
|         self.assertJSONEqual( | ||||
|             response.content.decode(), | ||||
|             {"result": dumps({"foo": "bar", "baz": self.user.username}), "successful": True}, | ||||
|         ) | ||||
|  | ||||
|     def test_test_call_group(self): | ||||
|         """Test PropertyMappings's test endpoint""" | ||||
|         mapping = PropertyMapping.objects.create( | ||||
|             name="dummy", expression="""return {'foo': 'bar', 'baz': group.name}""" | ||||
|         ) | ||||
|         group = Group.objects.create(name=generate_id()) | ||||
|         response = self.client.post( | ||||
|             reverse("authentik_api:propertymapping-test", kwargs={"pk": mapping.pk}), | ||||
|             data={ | ||||
|                 "group": group.pk, | ||||
|             }, | ||||
|         ) | ||||
|         self.assertJSONEqual( | ||||
|             response.content.decode(), | ||||
|             {"result": dumps({"foo": "bar", "baz": group.name}), "successful": True}, | ||||
|             {"result": dumps({"foo": "bar"}), "successful": True}, | ||||
|         ) | ||||
|  | ||||
|     def test_validate(self): | ||||
|  | ||||
| @ -2,15 +2,11 @@ | ||||
|  | ||||
| from django.contrib.auth.models import AnonymousUser | ||||
| from django.test import TestCase | ||||
| from django.urls import reverse | ||||
| from guardian.utils import get_anonymous_user | ||||
|  | ||||
| from authentik.core.models import SourceUserMatchingModes, User | ||||
| from authentik.core.sources.flow_manager import Action | ||||
| from authentik.core.sources.stage import PostSourceStage | ||||
| from authentik.core.tests.utils import create_test_flow | ||||
| from authentik.flows.planner import FlowPlan | ||||
| from authentik.flows.views.executor import SESSION_KEY_PLAN | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.lib.tests.utils import get_request | ||||
| from authentik.policies.denied import AccessDeniedResponse | ||||
| @ -25,62 +21,42 @@ class TestSourceFlowManager(TestCase): | ||||
|  | ||||
|     def setUp(self) -> None: | ||||
|         super().setUp() | ||||
|         self.authentication_flow = create_test_flow() | ||||
|         self.enrollment_flow = create_test_flow() | ||||
|         self.source: OAuthSource = OAuthSource.objects.create( | ||||
|             name=generate_id(), | ||||
|             slug=generate_id(), | ||||
|             authentication_flow=self.authentication_flow, | ||||
|             enrollment_flow=self.enrollment_flow, | ||||
|         ) | ||||
|         self.source: OAuthSource = OAuthSource.objects.create(name="test") | ||||
|         self.identifier = generate_id() | ||||
|  | ||||
|     def test_unauthenticated_enroll(self): | ||||
|         """Test un-authenticated user enrolling""" | ||||
|         request = get_request("/", user=AnonymousUser()) | ||||
|         flow_manager = OAuthSourceFlowManager(self.source, request, self.identifier, {}) | ||||
|         flow_manager = OAuthSourceFlowManager( | ||||
|             self.source, get_request("/", user=AnonymousUser()), self.identifier, {} | ||||
|         ) | ||||
|         action, _ = flow_manager.get_action() | ||||
|         self.assertEqual(action, Action.ENROLL) | ||||
|         response = flow_manager.get_flow() | ||||
|         self.assertEqual(response.status_code, 302) | ||||
|         flow_plan: FlowPlan = request.session[SESSION_KEY_PLAN] | ||||
|         self.assertEqual(flow_plan.bindings[0].stage.view, PostSourceStage) | ||||
|         flow_manager.get_flow() | ||||
|  | ||||
|     def test_unauthenticated_auth(self): | ||||
|         """Test un-authenticated user authenticating""" | ||||
|         UserOAuthSourceConnection.objects.create( | ||||
|             user=get_anonymous_user(), source=self.source, identifier=self.identifier | ||||
|         ) | ||||
|         request = get_request("/", user=AnonymousUser()) | ||||
|         flow_manager = OAuthSourceFlowManager(self.source, request, self.identifier, {}) | ||||
|  | ||||
|         flow_manager = OAuthSourceFlowManager( | ||||
|             self.source, get_request("/", user=AnonymousUser()), self.identifier, {} | ||||
|         ) | ||||
|         action, _ = flow_manager.get_action() | ||||
|         self.assertEqual(action, Action.AUTH) | ||||
|         response = flow_manager.get_flow() | ||||
|         self.assertEqual(response.status_code, 302) | ||||
|         flow_plan: FlowPlan = request.session[SESSION_KEY_PLAN] | ||||
|         self.assertEqual(flow_plan.bindings[0].stage.view, PostSourceStage) | ||||
|         flow_manager.get_flow() | ||||
|  | ||||
|     def test_authenticated_link(self): | ||||
|         """Test authenticated user linking""" | ||||
|         user = User.objects.create(username="foo", email="foo@bar.baz") | ||||
|         request = get_request("/", user=user) | ||||
|         flow_manager = OAuthSourceFlowManager(self.source, request, self.identifier, {}) | ||||
|         action, connection = flow_manager.get_action() | ||||
|         self.assertEqual(action, Action.LINK) | ||||
|         self.assertIsNone(connection.pk) | ||||
|         response = flow_manager.get_flow() | ||||
|         self.assertEqual(response.status_code, 302) | ||||
|         self.assertEqual( | ||||
|             response.url, | ||||
|             reverse("authentik_core:if-user") + "#/settings;page-sources", | ||||
|         UserOAuthSourceConnection.objects.create( | ||||
|             user=get_anonymous_user(), source=self.source, identifier=self.identifier | ||||
|         ) | ||||
|  | ||||
|     def test_unauthenticated_link(self): | ||||
|         """Test un-authenticated user linking""" | ||||
|         flow_manager = OAuthSourceFlowManager(self.source, get_request("/"), self.identifier, {}) | ||||
|         action, connection = flow_manager.get_action() | ||||
|         user = User.objects.create(username="foo", email="foo@bar.baz") | ||||
|         flow_manager = OAuthSourceFlowManager( | ||||
|             self.source, get_request("/", user=user), self.identifier, {} | ||||
|         ) | ||||
|         action, _ = flow_manager.get_action() | ||||
|         self.assertEqual(action, Action.LINK) | ||||
|         self.assertIsNone(connection.pk) | ||||
|         flow_manager.get_flow() | ||||
|  | ||||
|     def test_unauthenticated_enroll_email(self): | ||||
| @ -197,5 +173,5 @@ class TestSourceFlowManager(TestCase): | ||||
|         self.assertEqual(action, Action.ENROLL) | ||||
|         response = flow_manager.get_flow() | ||||
|         self.assertIsInstance(response, AccessDeniedResponse) | ||||
|  | ||||
|         # pylint: disable=no-member | ||||
|         self.assertEqual(response.error_message, "foo") | ||||
|  | ||||
| @ -1,6 +1,5 @@ | ||||
| """Test token API""" | ||||
|  | ||||
| from datetime import datetime, timedelta | ||||
| from json import loads | ||||
|  | ||||
| from django.urls.base import reverse | ||||
| @ -8,13 +7,8 @@ from guardian.shortcuts import get_anonymous_user | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.api.tokens import TokenSerializer | ||||
| from authentik.core.models import ( | ||||
|     USER_ATTRIBUTE_TOKEN_EXPIRING, | ||||
|     USER_ATTRIBUTE_TOKEN_MAXIMUM_LIFETIME, | ||||
|     Token, | ||||
|     TokenIntents, | ||||
| ) | ||||
| from authentik.core.tests.utils import create_test_admin_user, create_test_user | ||||
| from authentik.core.models import USER_ATTRIBUTE_TOKEN_EXPIRING, Token, TokenIntents, User | ||||
| from authentik.core.tests.utils import create_test_admin_user | ||||
| from authentik.lib.generators import generate_id | ||||
|  | ||||
|  | ||||
| @ -23,7 +17,7 @@ class TestTokenAPI(APITestCase): | ||||
|  | ||||
|     def setUp(self) -> None: | ||||
|         super().setUp() | ||||
|         self.user = create_test_user() | ||||
|         self.user = User.objects.create(username="testuser") | ||||
|         self.admin = create_test_admin_user() | ||||
|         self.client.force_login(self.user) | ||||
|  | ||||
| @ -82,95 +76,6 @@ class TestTokenAPI(APITestCase): | ||||
|         self.assertEqual(token.intent, TokenIntents.INTENT_API) | ||||
|         self.assertEqual(token.expiring, False) | ||||
|  | ||||
|     def test_token_create_expiring(self): | ||||
|         """Test token creation endpoint""" | ||||
|         self.user.attributes[USER_ATTRIBUTE_TOKEN_EXPIRING] = True | ||||
|         self.user.save() | ||||
|         response = self.client.post( | ||||
|             reverse("authentik_api:token-list"), {"identifier": "test-token"} | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 201) | ||||
|         token = Token.objects.get(identifier="test-token") | ||||
|         self.assertEqual(token.user, self.user) | ||||
|         self.assertEqual(token.intent, TokenIntents.INTENT_API) | ||||
|         self.assertEqual(token.expiring, True) | ||||
|  | ||||
|     def test_token_create_expiring_custom_ok(self): | ||||
|         """Test token creation endpoint""" | ||||
|         self.user.attributes[USER_ATTRIBUTE_TOKEN_EXPIRING] = True | ||||
|         self.user.attributes[USER_ATTRIBUTE_TOKEN_MAXIMUM_LIFETIME] = "hours=2" | ||||
|         self.user.save() | ||||
|         expires = datetime.now() + timedelta(hours=1) | ||||
|         response = self.client.post( | ||||
|             reverse("authentik_api:token-list"), | ||||
|             { | ||||
|                 "identifier": "test-token", | ||||
|                 "expires": expires, | ||||
|                 "intent": TokenIntents.INTENT_APP_PASSWORD, | ||||
|             }, | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 201) | ||||
|         token = Token.objects.get(identifier="test-token") | ||||
|         self.assertEqual(token.user, self.user) | ||||
|         self.assertEqual(token.intent, TokenIntents.INTENT_APP_PASSWORD) | ||||
|         self.assertEqual(token.expiring, True) | ||||
|         self.assertEqual(token.expires.timestamp(), expires.timestamp()) | ||||
|  | ||||
|     def test_token_create_expiring_custom_nok(self): | ||||
|         """Test token creation endpoint""" | ||||
|         self.user.attributes[USER_ATTRIBUTE_TOKEN_EXPIRING] = True | ||||
|         self.user.attributes[USER_ATTRIBUTE_TOKEN_MAXIMUM_LIFETIME] = "hours=2" | ||||
|         self.user.save() | ||||
|         expires = datetime.now() + timedelta(hours=3) | ||||
|         response = self.client.post( | ||||
|             reverse("authentik_api:token-list"), | ||||
|             { | ||||
|                 "identifier": "test-token", | ||||
|                 "expires": expires, | ||||
|                 "intent": TokenIntents.INTENT_APP_PASSWORD, | ||||
|             }, | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 400) | ||||
|  | ||||
|     def test_token_create_expiring_custom_api(self): | ||||
|         """Test token creation endpoint""" | ||||
|         self.user.attributes[USER_ATTRIBUTE_TOKEN_EXPIRING] = True | ||||
|         self.user.attributes[USER_ATTRIBUTE_TOKEN_MAXIMUM_LIFETIME] = "hours=2" | ||||
|         self.user.save() | ||||
|         expires = datetime.now() + timedelta(seconds=3) | ||||
|         response = self.client.post( | ||||
|             reverse("authentik_api:token-list"), | ||||
|             { | ||||
|                 "identifier": "test-token", | ||||
|                 "expires": expires, | ||||
|                 "intent": TokenIntents.INTENT_API, | ||||
|             }, | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 201) | ||||
|         token = Token.objects.get(identifier="test-token") | ||||
|         self.assertEqual(token.user, self.user) | ||||
|         self.assertEqual(token.intent, TokenIntents.INTENT_API) | ||||
|         self.assertEqual(token.expiring, True) | ||||
|         self.assertNotEqual(token.expires.timestamp(), expires.timestamp()) | ||||
|  | ||||
|     def test_token_change_user(self): | ||||
|         """Test creating a token and then changing the user""" | ||||
|         ident = generate_id() | ||||
|         response = self.client.post(reverse("authentik_api:token-list"), {"identifier": ident}) | ||||
|         self.assertEqual(response.status_code, 201) | ||||
|         token = Token.objects.get(identifier=ident) | ||||
|         self.assertEqual(token.user, self.user) | ||||
|         self.assertEqual(token.intent, TokenIntents.INTENT_API) | ||||
|         self.assertEqual(token.expiring, True) | ||||
|         self.assertTrue(self.user.has_perm("authentik_core.view_token_key", token)) | ||||
|         response = self.client.put( | ||||
|             reverse("authentik_api:token-detail", kwargs={"identifier": ident}), | ||||
|             data={"identifier": "user_token_poc_v3", "intent": "api", "user": self.admin.pk}, | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 400) | ||||
|         token.refresh_from_db() | ||||
|         self.assertEqual(token.user, self.user) | ||||
|  | ||||
|     def test_list(self): | ||||
|         """Test Token List (Test normal authentication)""" | ||||
|         Token.objects.all().delete() | ||||
|  | ||||
| @ -41,12 +41,6 @@ class TestUsersAPI(APITestCase): | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|  | ||||
|     def test_list_with_groups(self): | ||||
|         """Test listing with groups""" | ||||
|         self.client.force_login(self.admin) | ||||
|         response = self.client.get(reverse("authentik_api:user-list"), {"include_groups": "true"}) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|  | ||||
|     def test_metrics(self): | ||||
|         """Test user's metrics""" | ||||
|         self.client.force_login(self.admin) | ||||
| @ -66,11 +60,10 @@ class TestUsersAPI(APITestCase): | ||||
|     def test_recovery_no_flow(self): | ||||
|         """Test user recovery link (no recovery flow set)""" | ||||
|         self.client.force_login(self.admin) | ||||
|         response = self.client.post( | ||||
|         response = self.client.get( | ||||
|             reverse("authentik_api:user-recovery", kwargs={"pk": self.user.pk}) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 400) | ||||
|         self.assertJSONEqual(response.content, {"non_field_errors": "No recovery flow set."}) | ||||
|         self.assertEqual(response.status_code, 404) | ||||
|  | ||||
|     def test_set_password(self): | ||||
|         """Test Direct password set""" | ||||
| @ -91,7 +84,7 @@ class TestUsersAPI(APITestCase): | ||||
|         brand.flow_recovery = flow | ||||
|         brand.save() | ||||
|         self.client.force_login(self.admin) | ||||
|         response = self.client.post( | ||||
|         response = self.client.get( | ||||
|             reverse("authentik_api:user-recovery", kwargs={"pk": self.user.pk}) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
| @ -99,20 +92,16 @@ class TestUsersAPI(APITestCase): | ||||
|     def test_recovery_email_no_flow(self): | ||||
|         """Test user recovery link (no recovery flow set)""" | ||||
|         self.client.force_login(self.admin) | ||||
|         response = self.client.post( | ||||
|         response = self.client.get( | ||||
|             reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk}) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 400) | ||||
|         self.assertJSONEqual( | ||||
|             response.content, {"non_field_errors": "User does not have an email address set."} | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 404) | ||||
|         self.user.email = "foo@bar.baz" | ||||
|         self.user.save() | ||||
|         response = self.client.post( | ||||
|         response = self.client.get( | ||||
|             reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk}) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 400) | ||||
|         self.assertJSONEqual(response.content, {"non_field_errors": "No recovery flow set."}) | ||||
|         self.assertEqual(response.status_code, 404) | ||||
|  | ||||
|     def test_recovery_email_no_stage(self): | ||||
|         """Test user recovery link (no email stage)""" | ||||
| @ -123,11 +112,10 @@ class TestUsersAPI(APITestCase): | ||||
|         brand.flow_recovery = flow | ||||
|         brand.save() | ||||
|         self.client.force_login(self.admin) | ||||
|         response = self.client.post( | ||||
|         response = self.client.get( | ||||
|             reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk}) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 400) | ||||
|         self.assertJSONEqual(response.content, {"non_field_errors": "Email stage does not exist."}) | ||||
|         self.assertEqual(response.status_code, 404) | ||||
|  | ||||
|     def test_recovery_email(self): | ||||
|         """Test user recovery link""" | ||||
| @ -141,7 +129,7 @@ class TestUsersAPI(APITestCase): | ||||
|         stage = EmailStage.objects.create(name="email") | ||||
|  | ||||
|         self.client.force_login(self.admin) | ||||
|         response = self.client.post( | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
|                 "authentik_api:user-recovery-email", | ||||
|                 kwargs={"pk": self.user.pk}, | ||||
|  | ||||
Some files were not shown because too many files have changed in this diff.