Compare commits

Comparing blueprints...web/config (30 commits)
| SHA1 |
|---|
| 79da411f10 |
| ce761c4337 |
| 0d3025794c |
| 79601f6d66 |
| 1ec0623ab6 |
| 4bf151cfc2 |
| 6752d19375 |
| 284c2327c6 |
| 600c3caa62 |
| 366d48eddb |
| e67a290b73 |
| 4456f085d3 |
| 53e982594e |
| def988c3b1 |
| e164661321 |
| 849fea6e91 |
| 24278d0781 |
| 8c6f83b88e |
| fc80596432 |
| 03fde51313 |
| f669222529 |
| 297c29b231 |
| 21b50838db |
| d2a9b2a343 |
| c52fa631b4 |
| 6cf2de8a7c |
| d4b80c17e8 |
| 828b8a83ea |
| 115e2f3dcb |
| 6228931305 |

.bumpversion.cfg
| @ -1,34 +1,20 @@ | ||||
| [bumpversion] | ||||
| current_version = 2024.10.5 | ||||
| current_version = 2023.10.5 | ||||
| tag = True | ||||
| commit = True | ||||
| parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? | ||||
| serialize =  | ||||
| 	{major}.{minor}.{patch}-{rc_t}{rc_n} | ||||
| 	{major}.{minor}.{patch} | ||||
| parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+) | ||||
| serialize = {major}.{minor}.{patch} | ||||
| message = release: {new_version} | ||||
| tag_name = version/{new_version} | ||||
|  | ||||
| [bumpversion:part:rc_t] | ||||
| values =  | ||||
| 	rc | ||||
| 	final | ||||
| optional_value = final | ||||
|  | ||||
| [bumpversion:file:pyproject.toml] | ||||
|  | ||||
| [bumpversion:file:package.json] | ||||
|  | ||||
| [bumpversion:file:docker-compose.yml] | ||||
|  | ||||
| [bumpversion:file:schema.yml] | ||||
|  | ||||
| [bumpversion:file:blueprints/schema.json] | ||||
|  | ||||
| [bumpversion:file:authentik/__init__.py] | ||||
|  | ||||
| [bumpversion:file:internal/constants/constants.go] | ||||
|  | ||||
| [bumpversion:file:web/src/common/constants.ts] | ||||
|  | ||||
| [bumpversion:file:website/docs/install-config/install/aws/template.yaml] | ||||
|  | ||||
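For context on the `.bumpversion.cfg` hunk above: the rc-aware `parse` pattern splits a version string into named groups, and the two `serialize` templates let bumpversion emit either a release-candidate or a final version. A minimal standalone Python sketch of that round trip, using a made-up version string rather than anything from this comparison:

```python
import re

# Same named groups as the rc-aware parse pattern shown in the hunk above.
PARSE = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?"
)

parts = PARSE.match("2024.10.5-rc1").groupdict()
# -> {'major': '2024', 'minor': '10', 'patch': '5', 'rc_t': 'rc', 'rc_n': '1'}

# bumpversion uses the first serialize template whose fields are all present:
if parts["rc_t"] and parts["rc_n"]:
    serialized = "{major}.{minor}.{patch}-{rc_t}{rc_n}".format(**parts)
else:
    serialized = "{major}.{minor}.{patch}".format(**parts)
print(serialized)  # 2024.10.5-rc1
```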

.github/FUNDING.yml (2 lines changed)
| @ -1 +1 @@ | ||||
| custom: https://goauthentik.io/pricing/ | ||||
| github: [BeryJu] | ||||
|  | ||||

.github/ISSUE_TEMPLATE/question.md (2 lines changed)
| @ -9,7 +9,7 @@ assignees: "" | ||||
| **Describe your question/** | ||||
| A clear and concise description of what you're trying to do. | ||||
|  | ||||
| **Relevant info** | ||||
| **Relevant infos** | ||||
| i.e. Version of other software you're using, specifics of your setup | ||||
|  | ||||
| **Screenshots** | ||||
|  | ||||
| @ -9,6 +9,9 @@ inputs: | ||||
| runs: | ||||
|   using: "composite" | ||||
|   steps: | ||||
|     - name: Generate config | ||||
|       id: ev | ||||
|       uses: ./.github/actions/docker-push-variables | ||||
|     - name: Find Comment | ||||
|       uses: peter-evans/find-comment@v2 | ||||
|       id: fc | ||||
| @ -54,10 +57,9 @@ runs: | ||||
|             authentik: | ||||
|                 outposts: | ||||
|                     container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s | ||||
|             global: | ||||
|                 image: | ||||
|                     repository: ghcr.io/goauthentik/dev-server | ||||
|                     tag: ${{ inputs.tag }} | ||||
|             image: | ||||
|                 repository: ghcr.io/goauthentik/dev-server | ||||
|                 tag: ${{ inputs.tag }} | ||||
|             ``` | ||||
|  | ||||
|             For arm64, use these values: | ||||
| @ -66,10 +68,9 @@ runs: | ||||
|             authentik: | ||||
|                 outposts: | ||||
|                     container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s | ||||
|             global: | ||||
|                 image: | ||||
|                     repository: ghcr.io/goauthentik/dev-server | ||||
|                     tag: ${{ inputs.tag }}-arm64 | ||||
|             image: | ||||
|                 repository: ghcr.io/goauthentik/dev-server | ||||
|                 tag: ${{ inputs.tag }}-arm64 | ||||
|             ``` | ||||
|  | ||||
|             Afterwards, run the upgrade commands from the latest release notes. | ||||
|  | ||||

.github/actions/docker-push-variables/action.yml (85 lines changed)
| @ -1,53 +1,64 @@ | ||||
| --- | ||||
| name: "Prepare docker environment variables" | ||||
| description: "Prepare docker environment variables" | ||||
|  | ||||
| inputs: | ||||
|   image-name: | ||||
|     required: true | ||||
|     description: "Docker image prefix" | ||||
|   image-arch: | ||||
|     required: false | ||||
|     description: "Docker image arch" | ||||
|  | ||||
| outputs: | ||||
|   shouldPush: | ||||
|     description: "Whether to push the image or not" | ||||
|     value: ${{ steps.ev.outputs.shouldPush }} | ||||
|  | ||||
|   shouldBuild: | ||||
|     description: "Whether to build image or not" | ||||
|     value: ${{ steps.ev.outputs.shouldBuild }} | ||||
|   branchName: | ||||
|     description: "Branch name" | ||||
|     value: ${{ steps.ev.outputs.branchName }} | ||||
|   branchNameContainer: | ||||
|     description: "Branch name (for containers)" | ||||
|     value: ${{ steps.ev.outputs.branchNameContainer }} | ||||
|   timestamp: | ||||
|     description: "Timestamp" | ||||
|     value: ${{ steps.ev.outputs.timestamp }} | ||||
|   sha: | ||||
|     description: "sha" | ||||
|     value: ${{ steps.ev.outputs.sha }} | ||||
|  | ||||
|   shortHash: | ||||
|     description: "shortHash" | ||||
|     value: ${{ steps.ev.outputs.shortHash }} | ||||
|   version: | ||||
|     description: "Version" | ||||
|     description: "version" | ||||
|     value: ${{ steps.ev.outputs.version }} | ||||
|   prerelease: | ||||
|     description: "Prerelease" | ||||
|     value: ${{ steps.ev.outputs.prerelease }} | ||||
|  | ||||
|   imageTags: | ||||
|     description: "Docker image tags" | ||||
|     value: ${{ steps.ev.outputs.imageTags }} | ||||
|   attestImageNames: | ||||
|     description: "Docker image names used for attestation" | ||||
|     value: ${{ steps.ev.outputs.attestImageNames }} | ||||
|   imageMainTag: | ||||
|     description: "Docker image main tag" | ||||
|     value: ${{ steps.ev.outputs.imageMainTag }} | ||||
|   imageMainName: | ||||
|     description: "Docker image main name" | ||||
|     value: ${{ steps.ev.outputs.imageMainName }} | ||||
|   versionFamily: | ||||
|     description: "versionFamily" | ||||
|     value: ${{ steps.ev.outputs.versionFamily }} | ||||
|  | ||||
| runs: | ||||
|   using: "composite" | ||||
|   steps: | ||||
|     - name: Generate config | ||||
|       id: ev | ||||
|       shell: bash | ||||
|       env: | ||||
|         IMAGE_NAME: ${{ inputs.image-name }} | ||||
|         IMAGE_ARCH: ${{ inputs.image-arch }} | ||||
|         PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }} | ||||
|       shell: python | ||||
|       run: | | ||||
|         python3 ${{ github.action_path }}/push_vars.py | ||||
|         """Helper script to get the actual branch name, docker safe""" | ||||
|         import configparser | ||||
|         import os | ||||
|         from time import time | ||||
|  | ||||
|         parser = configparser.ConfigParser() | ||||
|         parser.read(".bumpversion.cfg") | ||||
|  | ||||
|         branch_name = os.environ["GITHUB_REF"] | ||||
|         if os.environ.get("GITHUB_HEAD_REF", "") != "": | ||||
|             branch_name = os.environ["GITHUB_HEAD_REF"] | ||||
|  | ||||
|         should_build = str(os.environ.get("DOCKER_USERNAME", "") != "").lower() | ||||
|         version = parser.get("bumpversion", "current_version") | ||||
|         version_family = ".".join(version.split(".")[:-1]) | ||||
|         safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-") | ||||
|  | ||||
|         sha = os.environ["GITHUB_SHA"] if not "${{ github.event.pull_request.head.sha }}" else "${{ github.event.pull_request.head.sha }}" | ||||
|  | ||||
|         with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output: | ||||
|             print("branchName=%s" % branch_name, file=_output) | ||||
|             print("branchNameContainer=%s" % safe_branch_name, file=_output) | ||||
|             print("timestamp=%s" % int(time()), file=_output) | ||||
|             print("sha=%s" % sha, file=_output) | ||||
|             print("shortHash=%s" % sha[:7], file=_output) | ||||
|             print("shouldBuild=%s" % should_build, file=_output) | ||||
|             print("version=%s" % version, file=_output) | ||||
|             print("versionFamily=%s" % version_family, file=_output) | ||||
|  | ||||
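Both variants of the docker-push-variables action above (the inline script and the standalone helper removed in the next hunk) hand their results back to the workflow the same way: by appending `key=value` lines to the file named in `GITHUB_OUTPUT`, which GitHub Actions then exposes as `steps.<step-id>.outputs.<key>` (for example `steps.ev.outputs.version` in the workflows further below). A minimal standalone sketch of that mechanism, with made-up values:

```python
import os

# In a workflow run, GITHUB_OUTPUT points at a file provided by the runner; for a
# local dry run it can be redirected, as the removed test script further below does
# with GITHUB_OUTPUT=/dev/stdout. The keys mirror outputs of the action above; the
# values here are invented.
output_path = os.environ.get("GITHUB_OUTPUT", "/dev/stdout")
outputs = {"version": "2024.10.5", "versionFamily": "2024.10", "shouldBuild": "true"}

with open(output_path, "a", encoding="utf-8") as _output:
    for key, value in outputs.items():
        # Each "key=value" line becomes steps.<step-id>.outputs.<key>.
        print(f"{key}={value}", file=_output)
```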
| @ -1,81 +0,0 @@ | ||||
| """Helper script to get the actual branch name, docker safe""" | ||||
|  | ||||
| import configparser | ||||
| import os | ||||
| from time import time | ||||
|  | ||||
| parser = configparser.ConfigParser() | ||||
| parser.read(".bumpversion.cfg") | ||||
|  | ||||
| # Decide if we should push the image or not | ||||
| should_push = True | ||||
| if len(os.environ.get("DOCKER_USERNAME", "")) < 1: | ||||
|     # Don't push if we don't have DOCKER_USERNAME, i.e. no secrets are available | ||||
|     should_push = False | ||||
| if os.environ.get("GITHUB_REPOSITORY").lower() == "goauthentik/authentik-internal": | ||||
|     # Don't push on the internal repo | ||||
|     should_push = False | ||||
|  | ||||
| branch_name = os.environ["GITHUB_REF"] | ||||
| if os.environ.get("GITHUB_HEAD_REF", "") != "": | ||||
|     branch_name = os.environ["GITHUB_HEAD_REF"] | ||||
| safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-").replace("'", "-") | ||||
|  | ||||
| image_names = os.getenv("IMAGE_NAME").split(",") | ||||
| image_arch = os.getenv("IMAGE_ARCH") or None | ||||
|  | ||||
| is_pull_request = bool(os.getenv("PR_HEAD_SHA")) | ||||
| is_release = "dev" not in image_names[0] | ||||
|  | ||||
| sha = os.environ["GITHUB_SHA"] if not is_pull_request else os.getenv("PR_HEAD_SHA") | ||||
|  | ||||
| # 2042.1.0 or 2042.1.0-rc1 | ||||
| version = parser.get("bumpversion", "current_version") | ||||
| # 2042.1 | ||||
| version_family = ".".join(version.split("-", 1)[0].split(".")[:-1]) | ||||
| prerelease = "-" in version | ||||
|  | ||||
| image_tags = [] | ||||
| if is_release: | ||||
|     for name in image_names: | ||||
|         image_tags += [ | ||||
|             f"{name}:{version}", | ||||
|         ] | ||||
|         if not prerelease: | ||||
|             image_tags += [ | ||||
|                 f"{name}:latest", | ||||
|                 f"{name}:{version_family}", | ||||
|             ] | ||||
| else: | ||||
|     suffix = "" | ||||
|     if image_arch and image_arch != "amd64": | ||||
|         suffix = f"-{image_arch}" | ||||
|     for name in image_names: | ||||
|         image_tags += [ | ||||
|             f"{name}:gh-{sha}{suffix}",  # Used for ArgoCD and PR comments | ||||
|             f"{name}:gh-{safe_branch_name}{suffix}",  # For convenience | ||||
|             f"{name}:gh-{safe_branch_name}-{int(time())}-{sha[:7]}{suffix}",  # Use by FluxCD | ||||
|         ] | ||||
|  | ||||
| image_main_tag = image_tags[0].split(":")[-1] | ||||
|  | ||||
|  | ||||
| def get_attest_image_names(image_with_tags: list[str]): | ||||
|     """Attestation only for GHCR""" | ||||
|     image_tags = [] | ||||
|     for image_name in set(name.split(":")[0] for name in image_with_tags): | ||||
|         if not image_name.startswith("ghcr.io"): | ||||
|             continue | ||||
|         image_tags.append(image_name) | ||||
|     return ",".join(set(image_tags)) | ||||
|  | ||||
|  | ||||
| with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output: | ||||
|     print(f"shouldPush={str(should_push).lower()}", file=_output) | ||||
|     print(f"sha={sha}", file=_output) | ||||
|     print(f"version={version}", file=_output) | ||||
|     print(f"prerelease={prerelease}", file=_output) | ||||
|     print(f"imageTags={','.join(image_tags)}", file=_output) | ||||
|     print(f"attestImageNames={get_attest_image_names(image_tags)}", file=_output) | ||||
|     print(f"imageMainTag={image_main_tag}", file=_output) | ||||
|     print(f"imageMainName={image_tags[0]}", file=_output) | ||||
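The comments in the removed helper script above already give the intended mapping (a version like 2042.1.0 or 2042.1.0-rc1 belongs to family 2042.1); a short check of that computation, with the rc string as a hypothetical input:

```python
# Worked example of the version fields computed in the helper script above.
version = "2042.1.0-rc1"
version_family = ".".join(version.split("-", 1)[0].split(".")[:-1])
prerelease = "-" in version
print(version_family, prerelease)  # prints: 2042.1 True
```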
| @ -1,7 +0,0 @@ | ||||
| #!/bin/bash -x | ||||
| SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) | ||||
| GITHUB_OUTPUT=/dev/stdout \ | ||||
|     GITHUB_REF=ref \ | ||||
|     GITHUB_SHA=sha \ | ||||
|     IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \ | ||||
|     python $SCRIPT_DIR/push_vars.py | ||||
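To make the tag scheme from the removed helper script concrete, here is a small standalone sketch of its non-release (dev) branch, which builds the three tag styles; the image name, branch, architecture, and SHA below are invented sample values, not taken from this comparison:

```python
from time import time

# Hypothetical sample inputs, mirroring what the removed script read from its
# environment (IMAGE_NAME, the branch ref, the commit SHA, IMAGE_ARCH).
image_names = ["ghcr.io/goauthentik/dev-server"]
safe_branch_name = "my-feature"  # branch ref with "refs/heads/" stripped and "/" -> "-"
sha = "0123456789abcdef0123456789abcdef01234567"
image_arch = "arm64"             # non-amd64 architectures get a "-<arch>" suffix

suffix = f"-{image_arch}" if image_arch and image_arch != "amd64" else ""
image_tags = []
for name in image_names:
    image_tags += [
        f"{name}:gh-{sha}{suffix}",                                       # ArgoCD / PR comments
        f"{name}:gh-{safe_branch_name}{suffix}",                          # convenience tag
        f"{name}:gh-{safe_branch_name}-{int(time())}-{sha[:7]}{suffix}",  # FluxCD
    ]
print(",".join(image_tags))
```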

.github/actions/setup/action.yml (16 lines changed)
| @ -4,7 +4,7 @@ description: "Setup authentik testing environment" | ||||
| inputs: | ||||
|   postgresql_version: | ||||
|     description: "Optional postgresql image tag" | ||||
|     default: "16" | ||||
|     default: "12" | ||||
|  | ||||
| runs: | ||||
|   using: "composite" | ||||
| @ -14,28 +14,28 @@ runs: | ||||
|       run: | | ||||
|         pipx install poetry || true | ||||
|         sudo apt-get update | ||||
|         sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext libkrb5-dev krb5-kdc krb5-user krb5-admin-server | ||||
|         sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext | ||||
|     - name: Setup python and restore poetry | ||||
|       uses: actions/setup-python@v5 | ||||
|       uses: actions/setup-python@v4 | ||||
|       with: | ||||
|         python-version-file: "pyproject.toml" | ||||
|         python-version-file: 'pyproject.toml' | ||||
|         cache: "poetry" | ||||
|     - name: Setup node | ||||
|       uses: actions/setup-node@v4 | ||||
|       uses: actions/setup-node@v3 | ||||
|       with: | ||||
|         node-version-file: web/package.json | ||||
|         cache: "npm" | ||||
|         cache-dependency-path: web/package-lock.json | ||||
|     - name: Setup go | ||||
|       uses: actions/setup-go@v5 | ||||
|       uses: actions/setup-go@v4 | ||||
|       with: | ||||
|         go-version-file: "go.mod" | ||||
|     - name: Setup dependencies | ||||
|       shell: bash | ||||
|       run: | | ||||
|         export PSQL_TAG=${{ inputs.postgresql_version }} | ||||
|         docker compose -f .github/actions/setup/docker-compose.yml up -d | ||||
|         poetry install --sync | ||||
|         docker-compose -f .github/actions/setup/docker-compose.yml up -d | ||||
|         poetry install | ||||
|         cd web && npm ci | ||||
|     - name: Generate config | ||||
|       shell: poetry run python {0} | ||||
|  | ||||

.github/actions/setup/docker-compose.yml (4 lines changed)
| @ -1,6 +1,8 @@ | ||||
| version: "3.7" | ||||
|  | ||||
| services: | ||||
|   postgresql: | ||||
|     image: docker.io/library/postgres:${PSQL_TAG:-16} | ||||
|     image: docker.io/library/postgres:${PSQL_TAG:-12} | ||||
|     volumes: | ||||
|       - db-data:/var/lib/postgresql/data | ||||
|     environment: | ||||
|  | ||||

.github/codespell-words.txt (2 lines changed)
| @ -3,5 +3,3 @@ keypairs | ||||
| hass | ||||
| warmup | ||||
| ontext | ||||
| singed | ||||
| assertIn | ||||
|  | ||||

.github/dependabot.yml (47 lines changed)
| @ -21,9 +21,7 @@ updates: | ||||
|     labels: | ||||
|       - dependencies | ||||
|   - package-ecosystem: npm | ||||
|     directories: | ||||
|       - "/web" | ||||
|       - "/web/sfe" | ||||
|     directory: "/web" | ||||
|     schedule: | ||||
|       interval: daily | ||||
|       time: "04:00" | ||||
| @ -32,22 +30,20 @@ updates: | ||||
|     open-pull-requests-limit: 10 | ||||
|     commit-message: | ||||
|       prefix: "web:" | ||||
|     # TODO: deduplicate these groups | ||||
|     groups: | ||||
|       sentry: | ||||
|         patterns: | ||||
|           - "@sentry/*" | ||||
|           - "@spotlightjs/*" | ||||
|       babel: | ||||
|         patterns: | ||||
|           - "@babel/*" | ||||
|           - "babel-*" | ||||
|       eslint: | ||||
|         patterns: | ||||
|           - "@eslint/*" | ||||
|           - "@typescript-eslint/*" | ||||
|           - "eslint-*" | ||||
|           - "eslint" | ||||
|           - "typescript-eslint" | ||||
|           - "eslint-*" | ||||
|       storybook: | ||||
|         patterns: | ||||
|           - "@storybook/*" | ||||
| @ -55,16 +51,37 @@ updates: | ||||
|       esbuild: | ||||
|         patterns: | ||||
|           - "@esbuild/*" | ||||
|           - "esbuild*" | ||||
|       rollup: | ||||
|   - package-ecosystem: npm | ||||
|     directory: "/tests/wdio" | ||||
|     schedule: | ||||
|       interval: daily | ||||
|       time: "04:00" | ||||
|     labels: | ||||
|       - dependencies | ||||
|     open-pull-requests-limit: 10 | ||||
|     commit-message: | ||||
|       prefix: "web:" | ||||
|     # TODO: deduplicate these groups | ||||
|     groups: | ||||
|       sentry: | ||||
|         patterns: | ||||
|           - "@rollup/*" | ||||
|           - "rollup-*" | ||||
|           - "rollup*" | ||||
|       swc: | ||||
|           - "@sentry/*" | ||||
|       babel: | ||||
|         patterns: | ||||
|           - "@swc/*" | ||||
|           - "swc-*" | ||||
|           - "@babel/*" | ||||
|           - "babel-*" | ||||
|       eslint: | ||||
|         patterns: | ||||
|           - "@typescript-eslint/*" | ||||
|           - "eslint" | ||||
|           - "eslint-*" | ||||
|       storybook: | ||||
|         patterns: | ||||
|           - "@storybook/*" | ||||
|           - "*storybook*" | ||||
|       esbuild: | ||||
|         patterns: | ||||
|           - "@esbuild/*" | ||||
|       wdio: | ||||
|         patterns: | ||||
|           - "@wdio/*" | ||||
|  | ||||

.github/pull_request_template.md (3 lines changed)
| @ -1,7 +1,7 @@ | ||||
| <!-- | ||||
| 👋 Hi there! Welcome. | ||||
|  | ||||
| Please check the Contributing guidelines: https://docs.goauthentik.io/docs/developer-docs/#how-can-i-contribute | ||||
| Please check the Contributing guidelines: https://goauthentik.io/developer-docs/#how-can-i-contribute | ||||
| --> | ||||
|  | ||||
| ## Details | ||||
| @ -27,6 +27,7 @@ If an API change has been made | ||||
| If changes to the frontend have been made | ||||
|  | ||||
| -   [ ] The code has been formatted (`make web`) | ||||
| -   [ ] The translation files have been updated (`make i18n-extract`) | ||||
|  | ||||
| If applicable | ||||
|  | ||||
|  | ||||

.github/workflows/api-py-publish.yml (66 lines changed)
| @ -1,66 +0,0 @@ | ||||
| name: authentik-api-py-publish | ||||
| on: | ||||
|   push: | ||||
|     branches: [main] | ||||
|     paths: | ||||
|       - "schema.yml" | ||||
|   workflow_dispatch: | ||||
| jobs: | ||||
|   build: | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
|       id-token: write | ||||
|     steps: | ||||
|       - id: generate_token | ||||
|         uses: tibdex/github-app-token@v2 | ||||
|         with: | ||||
|           app_id: ${{ secrets.GH_APP_ID }} | ||||
|           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }} | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
|       - name: Install poetry & deps | ||||
|         shell: bash | ||||
|         run: | | ||||
|           pipx install poetry || true | ||||
|           sudo apt-get update | ||||
|           sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext | ||||
|       - name: Setup python and restore poetry | ||||
|         uses: actions/setup-python@v5 | ||||
|         with: | ||||
|           python-version-file: "pyproject.toml" | ||||
|           cache: "poetry" | ||||
|       - name: Generate API Client | ||||
|         run: make gen-client-py | ||||
|       - name: Publish package | ||||
|         working-directory: gen-py-api/ | ||||
|         run: | | ||||
|           poetry build | ||||
|       - name: Publish package to PyPI | ||||
|         uses: pypa/gh-action-pypi-publish@release/v1 | ||||
|         with: | ||||
|           packages-dir: gen-py-api/dist/ | ||||
|       # We can't easily upgrade the API client being used due to poetry being poetry | ||||
|       # so we'll have to rely on dependabot | ||||
|       # - name: Upgrade / | ||||
|       #   run: | | ||||
|       #     export VERSION=$(cd gen-py-api && poetry version -s) | ||||
|       #     poetry add "authentik_client=$VERSION" --allow-prereleases --lock | ||||
|       # - uses: peter-evans/create-pull-request@v6 | ||||
|       #   id: cpr | ||||
|       #   with: | ||||
|       #     token: ${{ steps.generate_token.outputs.token }} | ||||
|       #     branch: update-root-api-client | ||||
|       #     commit-message: "root: bump API Client version" | ||||
|       #     title: "root: bump API Client version" | ||||
|       #     body: "root: bump API Client version" | ||||
|       #     delete-branch: true | ||||
|       #     signoff: true | ||||
|       #     # ID from https://api.github.com/users/authentik-automation[bot] | ||||
|       #     author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> | ||||
|       # - uses: peter-evans/enable-pull-request-automerge@v3 | ||||
|       #   with: | ||||
|       #     token: ${{ steps.generate_token.outputs.token }} | ||||
|       #     pull-request-number: ${{ steps.cpr.outputs.pull-request-number }} | ||||
|       #     merge-method: squash | ||||

.github/workflows/ci-aws-cfn.yml (46 lines changed)
| @ -1,46 +0,0 @@ | ||||
| name: authentik-ci-aws-cfn | ||||
|  | ||||
| on: | ||||
|   push: | ||||
|     branches: | ||||
|       - main | ||||
|       - next | ||||
|       - version-* | ||||
|   pull_request: | ||||
|     branches: | ||||
|       - main | ||||
|       - version-* | ||||
|  | ||||
| env: | ||||
|   POSTGRES_DB: authentik | ||||
|   POSTGRES_USER: authentik | ||||
|   POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77" | ||||
|  | ||||
| jobs: | ||||
|   check-changes-applied: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Setup authentik env | ||||
|         uses: ./.github/actions/setup | ||||
|       - uses: actions/setup-node@v4 | ||||
|         with: | ||||
|           node-version-file: website/package.json | ||||
|           cache: "npm" | ||||
|           cache-dependency-path: website/package-lock.json | ||||
|       - working-directory: website/ | ||||
|         run: | | ||||
|           npm ci | ||||
|       - name: Check changes have been applied | ||||
|         run: | | ||||
|           poetry run make aws-cfn | ||||
|           git diff --exit-code | ||||
|   ci-aws-cfn-mark: | ||||
|     if: always() | ||||
|     needs: | ||||
|       - check-changes-applied | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: re-actors/alls-green@release/v1 | ||||
|         with: | ||||
|           jobs: ${{ toJSON(needs) }} | ||||

.github/workflows/ci-main.yml (142 lines changed)
| @ -1,4 +1,3 @@ | ||||
| --- | ||||
| name: authentik-ci-main | ||||
|  | ||||
| on: | ||||
| @ -7,6 +6,8 @@ on: | ||||
|       - main | ||||
|       - next | ||||
|       - version-* | ||||
|     paths-ignore: | ||||
|       - website | ||||
|   pull_request: | ||||
|     branches: | ||||
|       - main | ||||
| @ -26,7 +27,10 @@ jobs: | ||||
|           - bandit | ||||
|           - black | ||||
|           - codespell | ||||
|           - isort | ||||
|           - pending-migrations | ||||
|           - pylint | ||||
|           - pyright | ||||
|           - ruff | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
| @ -50,6 +54,7 @@ jobs: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         psql: | ||||
|           - 12-alpine | ||||
|           - 15-alpine | ||||
|           - 16-alpine | ||||
|     steps: | ||||
| @ -64,7 +69,7 @@ jobs: | ||||
|           cp authentik/lib/default.yml local.env.yml | ||||
|           cp -R .github .. | ||||
|           cp -R scripts .. | ||||
|           git checkout $(git tag --sort=version:refname | grep '^version/' | grep -vE -- '-rc[0-9]+$' | tail -n1) | ||||
|           git checkout version/$(python -c "from authentik import __version__; print(__version__)") | ||||
|           rm -rf .github/ scripts/ | ||||
|           mv ../.github ../scripts . | ||||
|       - name: Setup authentik env (stable) | ||||
| @ -103,6 +108,7 @@ jobs: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         psql: | ||||
|           - 12-alpine | ||||
|           - 15-alpine | ||||
|           - 16-alpine | ||||
|     steps: | ||||
| @ -116,16 +122,9 @@ jobs: | ||||
|           poetry run make test | ||||
|           poetry run coverage xml | ||||
|       - if: ${{ always() }} | ||||
|         uses: codecov/codecov-action@v5 | ||||
|         uses: codecov/codecov-action@v3 | ||||
|         with: | ||||
|           flags: unit | ||||
|           token: ${{ secrets.CODECOV_TOKEN }} | ||||
|       - if: ${{ !cancelled() }} | ||||
|         uses: codecov/test-results-action@v1 | ||||
|         with: | ||||
|           flags: unit | ||||
|           file: unittest.xml | ||||
|           token: ${{ secrets.CODECOV_TOKEN }} | ||||
|   test-integration: | ||||
|     runs-on: ubuntu-latest | ||||
|     timeout-minutes: 30 | ||||
| @ -134,22 +133,15 @@ jobs: | ||||
|       - name: Setup authentik env | ||||
|         uses: ./.github/actions/setup | ||||
|       - name: Create k8s Kind Cluster | ||||
|         uses: helm/kind-action@v1.11.0 | ||||
|         uses: helm/kind-action@v1.8.0 | ||||
|       - name: run integration | ||||
|         run: | | ||||
|           poetry run coverage run manage.py test tests/integration | ||||
|           poetry run coverage xml | ||||
|       - if: ${{ always() }} | ||||
|         uses: codecov/codecov-action@v5 | ||||
|         uses: codecov/codecov-action@v3 | ||||
|         with: | ||||
|           flags: integration | ||||
|           token: ${{ secrets.CODECOV_TOKEN }} | ||||
|       - if: ${{ !cancelled() }} | ||||
|         uses: codecov/test-results-action@v1 | ||||
|         with: | ||||
|           flags: integration | ||||
|           file: unittest.xml | ||||
|           token: ${{ secrets.CODECOV_TOKEN }} | ||||
|   test-e2e: | ||||
|     name: test-e2e (${{ matrix.job.name }}) | ||||
|     runs-on: ubuntu-latest | ||||
| @ -170,8 +162,6 @@ jobs: | ||||
|             glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap* | ||||
|           - name: radius | ||||
|             glob: tests/e2e/test_provider_radius* | ||||
|           - name: scim | ||||
|             glob: tests/e2e/test_source_scim* | ||||
|           - name: flows | ||||
|             glob: tests/e2e/test_flows* | ||||
|     steps: | ||||
| @ -180,9 +170,9 @@ jobs: | ||||
|         uses: ./.github/actions/setup | ||||
|       - name: Setup e2e env (chrome, etc) | ||||
|         run: | | ||||
|           docker compose -f tests/e2e/docker-compose.yml up -d --quiet-pull | ||||
|           docker-compose -f tests/e2e/docker-compose.yml up -d | ||||
|       - id: cache-web | ||||
|         uses: actions/cache@v4 | ||||
|         uses: actions/cache@v3 | ||||
|         with: | ||||
|           path: web/dist | ||||
|           key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }} | ||||
| @ -198,18 +188,10 @@ jobs: | ||||
|           poetry run coverage run manage.py test ${{ matrix.job.glob }} | ||||
|           poetry run coverage xml | ||||
|       - if: ${{ always() }} | ||||
|         uses: codecov/codecov-action@v5 | ||||
|         uses: codecov/codecov-action@v3 | ||||
|         with: | ||||
|           flags: e2e | ||||
|           token: ${{ secrets.CODECOV_TOKEN }} | ||||
|       - if: ${{ !cancelled() }} | ||||
|         uses: codecov/test-results-action@v1 | ||||
|         with: | ||||
|           flags: e2e | ||||
|           file: unittest.xml | ||||
|           token: ${{ secrets.CODECOV_TOKEN }} | ||||
|   ci-core-mark: | ||||
|     if: always() | ||||
|     needs: | ||||
|       - lint | ||||
|       - test-migrations | ||||
| @ -219,31 +201,20 @@ jobs: | ||||
|       - test-e2e | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: re-actors/alls-green@release/v1 | ||||
|         with: | ||||
|           jobs: ${{ toJSON(needs) }} | ||||
|       - run: echo mark | ||||
|   build: | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         arch: | ||||
|           - amd64 | ||||
|           - arm64 | ||||
|     needs: ci-core-mark | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
|       # Needed to upload container images to ghcr.io | ||||
|       packages: write | ||||
|       # Needed for attestation | ||||
|       id-token: write | ||||
|       attestations: write | ||||
|     timeout-minutes: 120 | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           ref: ${{ github.event.pull_request.head.sha }} | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.2.0 | ||||
|         uses: docker/setup-qemu-action@v3.0.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
| @ -251,12 +222,9 @@ jobs: | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/dev-server | ||||
|           image-arch: ${{ matrix.arch }} | ||||
|       - name: Login to Container Registry | ||||
|         if: ${{ steps.ev.outputs.shouldPush == 'true' }} | ||||
|         uses: docker/login-action@v3 | ||||
|         if: ${{ steps.ev.outputs.shouldBuild == 'true' }} | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
| @ -264,30 +232,75 @@ jobs: | ||||
|       - name: generate ts client | ||||
|         run: make gen-client-ts | ||||
|       - name: Build Docker Image | ||||
|         uses: docker/build-push-action@v6 | ||||
|         id: push | ||||
|         uses: docker/build-push-action@v5 | ||||
|         with: | ||||
|           context: . | ||||
|           secrets: | | ||||
|             GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }} | ||||
|             GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }} | ||||
|           tags: ${{ steps.ev.outputs.imageTags }} | ||||
|           push: ${{ steps.ev.outputs.shouldPush == 'true' }} | ||||
|           push: ${{ steps.ev.outputs.shouldBuild == 'true' }} | ||||
|           tags: | | ||||
|             ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }} | ||||
|             ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.sha }} | ||||
|             ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }} | ||||
|           build-args: | | ||||
|             GIT_BUILD_HASH=${{ steps.ev.outputs.sha }} | ||||
|           cache-from: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache | ||||
|           cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache,mode=max' || '' }} | ||||
|           platforms: linux/${{ matrix.arch }} | ||||
|       - uses: actions/attest-build-provenance@v2 | ||||
|         id: attest | ||||
|         if: ${{ steps.ev.outputs.shouldPush == 'true' }} | ||||
|             VERSION=${{ steps.ev.outputs.version }} | ||||
|             VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }} | ||||
|           cache-from: type=gha | ||||
|           cache-to: type=gha,mode=max | ||||
|   build-arm64: | ||||
|     needs: ci-core-mark | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
|       # Needed to upload container images to ghcr.io | ||||
|       packages: write | ||||
|     timeout-minutes: 120 | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} | ||||
|           subject-digest: ${{ steps.push.outputs.digest }} | ||||
|           push-to-registry: true | ||||
|           ref: ${{ github.event.pull_request.head.sha }} | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.0.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|       - name: Login to Container Registry | ||||
|         uses: docker/login-action@v3 | ||||
|         if: ${{ steps.ev.outputs.shouldBuild == 'true' }} | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
|           password: ${{ secrets.GITHUB_TOKEN }} | ||||
|       - name: generate ts client | ||||
|         run: make gen-client-ts | ||||
|       - name: Build Docker Image | ||||
|         uses: docker/build-push-action@v5 | ||||
|         with: | ||||
|           context: . | ||||
|           secrets: | | ||||
|             GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }} | ||||
|             GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }} | ||||
|           push: ${{ steps.ev.outputs.shouldBuild == 'true' }} | ||||
|           tags: | | ||||
|             ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-arm64 | ||||
|             ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.sha }}-arm64 | ||||
|             ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}-arm64 | ||||
|           build-args: | | ||||
|             GIT_BUILD_HASH=${{ steps.ev.outputs.sha }} | ||||
|             VERSION=${{ steps.ev.outputs.version }} | ||||
|             VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }} | ||||
|           platforms: linux/arm64 | ||||
|           cache-from: type=gha | ||||
|           cache-to: type=gha,mode=max | ||||
|   pr-comment: | ||||
|     needs: | ||||
|       - build | ||||
|       - build-arm64 | ||||
|     runs-on: ubuntu-latest | ||||
|     if: ${{ github.event_name == 'pull_request' }} | ||||
|     permissions: | ||||
| @ -303,10 +316,7 @@ jobs: | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/dev-server | ||||
|       - name: Comment on PR | ||||
|         if: ${{ steps.ev.outputs.shouldPush == 'true' }} | ||||
|         uses: ./.github/actions/comment-pr-instructions | ||||
|         with: | ||||
|           tag: ${{ steps.ev.outputs.imageMainTag }} | ||||
|           tag: gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }} | ||||
|  | ||||

.github/workflows/ci-outpost.yml (41 lines changed)
| @ -1,4 +1,3 @@ | ||||
| --- | ||||
| name: authentik-ci-outpost | ||||
|  | ||||
| on: | ||||
| @ -29,9 +28,9 @@ jobs: | ||||
|       - name: Generate API | ||||
|         run: make gen-client-go | ||||
|       - name: golangci-lint | ||||
|         uses: golangci/golangci-lint-action@v6 | ||||
|         uses: golangci/golangci-lint-action@v3 | ||||
|         with: | ||||
|           version: latest | ||||
|           version: v1.54.2 | ||||
|           args: --timeout 5000s --verbose | ||||
|           skip-cache: true | ||||
|   test-unittest: | ||||
| @ -49,15 +48,12 @@ jobs: | ||||
|         run: | | ||||
|           go test -timeout 0 -v -race -coverprofile=coverage.out -covermode=atomic -cover ./... | ||||
|   ci-outpost-mark: | ||||
|     if: always() | ||||
|     needs: | ||||
|       - lint-golint | ||||
|       - test-unittest | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: re-actors/alls-green@release/v1 | ||||
|         with: | ||||
|           jobs: ${{ toJSON(needs) }} | ||||
|       - run: echo mark | ||||
|   build-container: | ||||
|     timeout-minutes: 120 | ||||
|     needs: | ||||
| @ -74,15 +70,12 @@ jobs: | ||||
|     permissions: | ||||
|       # Needed to upload container images to ghcr.io | ||||
|       packages: write | ||||
|       # Needed for attestation | ||||
|       id-token: write | ||||
|       attestations: write | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           ref: ${{ github.event.pull_request.head.sha }} | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.2.0 | ||||
|         uses: docker/setup-qemu-action@v3.0.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
| @ -90,11 +83,9 @@ jobs: | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/dev-${{ matrix.type }} | ||||
|       - name: Login to Container Registry | ||||
|         if: ${{ steps.ev.outputs.shouldPush == 'true' }} | ||||
|         uses: docker/login-action@v3 | ||||
|         if: ${{ steps.ev.outputs.shouldBuild == 'true' }} | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
| @ -102,25 +93,21 @@ jobs: | ||||
|       - name: Generate API | ||||
|         run: make gen-client-go | ||||
|       - name: Build Docker Image | ||||
|         id: push | ||||
|         uses: docker/build-push-action@v6 | ||||
|         uses: docker/build-push-action@v5 | ||||
|         with: | ||||
|           tags: ${{ steps.ev.outputs.imageTags }} | ||||
|           push: ${{ steps.ev.outputs.shouldBuild == 'true' }} | ||||
|           tags: | | ||||
|             ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchNameContainer }} | ||||
|             ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.sha }} | ||||
|           file: ${{ matrix.type }}.Dockerfile | ||||
|           push: ${{ steps.ev.outputs.shouldPush == 'true' }} | ||||
|           build-args: | | ||||
|             GIT_BUILD_HASH=${{ steps.ev.outputs.sha }} | ||||
|             VERSION=${{ steps.ev.outputs.version }} | ||||
|             VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }} | ||||
|           platforms: linux/amd64,linux/arm64 | ||||
|           context: . | ||||
|           cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache | ||||
|           cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && format('type=registry,ref=ghcr.io/goauthentik/dev-{0}:buildcache,mode=max', matrix.type) || '' }} | ||||
|       - uses: actions/attest-build-provenance@v2 | ||||
|         id: attest | ||||
|         if: ${{ steps.ev.outputs.shouldPush == 'true' }} | ||||
|         with: | ||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} | ||||
|           subject-digest: ${{ steps.push.outputs.digest }} | ||||
|           push-to-registry: true | ||||
|           cache-from: type=gha | ||||
|           cache-to: type=gha,mode=max | ||||
|   build-binary: | ||||
|     timeout-minutes: 120 | ||||
|     needs: | ||||
|  | ||||

.github/workflows/ci-web.yml (118 lines changed)
| @ -12,23 +12,14 @@ on: | ||||
|       - version-* | ||||
|  | ||||
| jobs: | ||||
|   lint: | ||||
|   lint-eslint: | ||||
|     runs-on: ubuntu-latest | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         command: | ||||
|           - lint | ||||
|           - lint:lockfile | ||||
|           - tsc | ||||
|           - prettier-check | ||||
|         project: | ||||
|           - web | ||||
|         include: | ||||
|           - command: tsc | ||||
|             project: web | ||||
|           - command: lit-analyse | ||||
|             project: web | ||||
|           - tests/wdio | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: actions/setup-node@v4 | ||||
| @ -37,14 +28,83 @@ jobs: | ||||
|           cache: "npm" | ||||
|           cache-dependency-path: ${{ matrix.project }}/package-lock.json | ||||
|       - working-directory: ${{ matrix.project }}/ | ||||
|         run: | | ||||
|           npm ci | ||||
|         run: npm ci | ||||
|       - name: Generate API | ||||
|         run: make gen-client-ts | ||||
|       - name: Lint | ||||
|       - name: Eslint | ||||
|         working-directory: ${{ matrix.project }}/ | ||||
|         run: npm run ${{ matrix.command }} | ||||
|         run: npm run lint | ||||
|   lint-build: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: actions/setup-node@v4 | ||||
|         with: | ||||
|           node-version-file: web/package.json | ||||
|           cache: "npm" | ||||
|           cache-dependency-path: web/package-lock.json | ||||
|       - working-directory: web/ | ||||
|         run: npm ci | ||||
|       - name: Generate API | ||||
|         run: make gen-client-ts | ||||
|       - name: TSC | ||||
|         working-directory: web/ | ||||
|         run: npm run tsc | ||||
|   lint-prettier: | ||||
|     runs-on: ubuntu-latest | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         project: | ||||
|           - web | ||||
|           - tests/wdio | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: actions/setup-node@v4 | ||||
|         with: | ||||
|           node-version-file: ${{ matrix.project }}/package.json | ||||
|           cache: "npm" | ||||
|           cache-dependency-path: ${{ matrix.project }}/package-lock.json | ||||
|       - working-directory: ${{ matrix.project }}/ | ||||
|         run: npm ci | ||||
|       - name: Generate API | ||||
|         run: make gen-client-ts | ||||
|       - name: prettier | ||||
|         working-directory: ${{ matrix.project }}/ | ||||
|         run: npm run prettier-check | ||||
|   lint-lit-analyse: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: actions/setup-node@v4 | ||||
|         with: | ||||
|           node-version-file: web/package.json | ||||
|           cache: "npm" | ||||
|           cache-dependency-path: web/package-lock.json | ||||
|       - working-directory: web/ | ||||
|         run: | | ||||
|           npm ci | ||||
|           # lit-analyse doesn't understand path rewrites, so make it | ||||
|           # believe it's an actual module | ||||
|           cd node_modules/@goauthentik | ||||
|           ln -s ../../src/ web | ||||
|       - name: Generate API | ||||
|         run: make gen-client-ts | ||||
|       - name: lit-analyse | ||||
|         working-directory: web/ | ||||
|         run: npm run lit-analyse | ||||
|   ci-web-mark: | ||||
|     needs: | ||||
|       - lint-eslint | ||||
|       - lint-prettier | ||||
|       - lint-lit-analyse | ||||
|       - lint-build | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - run: echo mark | ||||
|   build: | ||||
|     needs: | ||||
|       - ci-web-mark | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
| @ -60,31 +120,3 @@ jobs: | ||||
|       - name: build | ||||
|         working-directory: web/ | ||||
|         run: npm run build | ||||
|   ci-web-mark: | ||||
|     if: always() | ||||
|     needs: | ||||
|       - build | ||||
|       - lint | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: re-actors/alls-green@release/v1 | ||||
|         with: | ||||
|           jobs: ${{ toJSON(needs) }} | ||||
|   test: | ||||
|     needs: | ||||
|       - ci-web-mark | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: actions/setup-node@v4 | ||||
|         with: | ||||
|           node-version-file: web/package.json | ||||
|           cache: "npm" | ||||
|           cache-dependency-path: web/package-lock.json | ||||
|       - working-directory: web/ | ||||
|         run: npm ci | ||||
|       - name: Generate API | ||||
|         run: make gen-client-ts | ||||
|       - name: test | ||||
|         working-directory: web/ | ||||
|         run: npm run test || exit 0 | ||||
|  | ||||

.github/workflows/ci-website.yml (25 lines changed)
| @ -12,21 +12,20 @@ on: | ||||
|       - version-* | ||||
|  | ||||
| jobs: | ||||
|   lint: | ||||
|   lint-prettier: | ||||
|     runs-on: ubuntu-latest | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         command: | ||||
|           - lint:lockfile | ||||
|           - prettier-check | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: actions/setup-node@v4 | ||||
|         with: | ||||
|           node-version-file: website/package.json | ||||
|           cache: "npm" | ||||
|           cache-dependency-path: website/package-lock.json | ||||
|       - working-directory: website/ | ||||
|         run: npm ci | ||||
|       - name: Lint | ||||
|       - name: prettier | ||||
|         working-directory: website/ | ||||
|         run: npm run ${{ matrix.command }} | ||||
|         run: npm run prettier-check | ||||
|   test: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
| @ -49,6 +48,7 @@ jobs: | ||||
|       matrix: | ||||
|         job: | ||||
|           - build | ||||
|           - build-docs-only | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: actions/setup-node@v4 | ||||
| @ -62,13 +62,10 @@ jobs: | ||||
|         working-directory: website/ | ||||
|         run: npm run ${{ matrix.job }} | ||||
|   ci-website-mark: | ||||
|     if: always() | ||||
|     needs: | ||||
|       - lint | ||||
|       - lint-prettier | ||||
|       - test | ||||
|       - build | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: re-actors/alls-green@release/v1 | ||||
|         with: | ||||
|           jobs: ${{ toJSON(needs) }} | ||||
|       - run: echo mark | ||||
|  | ||||

.github/workflows/gen-update-webauthn-mds.yml (44 lines changed)
| @ -1,44 +0,0 @@ | ||||
| name: authentik-gen-update-webauthn-mds | ||||
| on: | ||||
|   workflow_dispatch: | ||||
|   schedule: | ||||
|     - cron: '30 1 1,15 * *' | ||||
|  | ||||
| env: | ||||
|   POSTGRES_DB: authentik | ||||
|   POSTGRES_USER: authentik | ||||
|   POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77" | ||||
|  | ||||
| jobs: | ||||
|   build: | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - id: generate_token | ||||
|         uses: tibdex/github-app-token@v2 | ||||
|         with: | ||||
|           app_id: ${{ secrets.GH_APP_ID }} | ||||
|           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }} | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
|       - name: Setup authentik env | ||||
|         uses: ./.github/actions/setup | ||||
|       - run: poetry run ak update_webauthn_mds | ||||
|       - uses: peter-evans/create-pull-request@v7 | ||||
|         id: cpr | ||||
|         with: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
|           branch: update-fido-mds-client | ||||
|           commit-message: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs" | ||||
|           title: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs" | ||||
|           body: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs" | ||||
|           delete-branch: true | ||||
|           signoff: true | ||||
|           # ID from https://api.github.com/users/authentik-automation[bot] | ||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> | ||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 | ||||
|         with: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
|           pull-request-number: ${{ steps.cpr.outputs.pull-request-number }} | ||||
|           merge-method: squash | ||||

.github/workflows/ghcr-retention.yml (1 line changed)
| @ -7,7 +7,6 @@ on: | ||||
|  | ||||
| jobs: | ||||
|   clean-ghcr: | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     name: Delete old unused container images | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|  | ||||

.github/workflows/image-compress.yml (2 lines changed)
| @ -42,7 +42,7 @@ jobs: | ||||
|         with: | ||||
|           githubToken: ${{ steps.generate_token.outputs.token }} | ||||
|           compressOnly: ${{ github.event_name != 'pull_request' }} | ||||
|       - uses: peter-evans/create-pull-request@v7 | ||||
|       - uses: peter-evans/create-pull-request@v5 | ||||
|         if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}" | ||||
|         id: cpr | ||||
|         with: | ||||
|  | ||||

.github/workflows/publish-source-docs.yml (1 line changed)
| @ -12,7 +12,6 @@ env: | ||||
|  | ||||
| jobs: | ||||
|   publish-source-docs: | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     runs-on: ubuntu-latest | ||||
|     timeout-minutes: 120 | ||||
|     steps: | ||||
|  | ||||

.github/workflows/release-next-branch.yml (1 line changed)
| @ -11,7 +11,6 @@ permissions: | ||||
|  | ||||
| jobs: | ||||
|   update-next: | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     runs-on: ubuntu-latest | ||||
|     environment: internal-production | ||||
|     steps: | ||||
|  | ||||
							
								
								
									
109  .github/workflows/release-publish.yml  (vendored)
							| @ -1,4 +1,3 @@ | ||||
| --- | ||||
| name: authentik-on-release | ||||
|  | ||||
| on: | ||||
| @ -11,22 +10,15 @@ jobs: | ||||
|     permissions: | ||||
|       # Needed to upload container images to ghcr.io | ||||
|       packages: write | ||||
|       # Needed for attestation | ||||
|       id-token: write | ||||
|       attestations: write | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.2.0 | ||||
|         uses: docker/setup-qemu-action@v3.0.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/server,beryju/authentik | ||||
|       - name: Docker Login Registry | ||||
|         uses: docker/login-action@v3 | ||||
|         with: | ||||
| @ -43,32 +35,29 @@ jobs: | ||||
|           mkdir -p ./gen-ts-api | ||||
|           mkdir -p ./gen-go-api | ||||
|       - name: Build Docker Image | ||||
|         uses: docker/build-push-action@v6 | ||||
|         id: push | ||||
|         uses: docker/build-push-action@v5 | ||||
|         with: | ||||
|           context: . | ||||
|           push: true | ||||
|           push: ${{ github.event_name == 'release' }} | ||||
|           secrets: | | ||||
|             GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }} | ||||
|             GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }} | ||||
|           build-args: | | ||||
|             VERSION=${{ github.ref }} | ||||
|           tags: ${{ steps.ev.outputs.imageTags }} | ||||
|           tags: | | ||||
|             beryju/authentik:${{ steps.ev.outputs.version }}, | ||||
|             beryju/authentik:${{ steps.ev.outputs.versionFamily }}, | ||||
|             beryju/authentik:latest, | ||||
|             ghcr.io/goauthentik/server:${{ steps.ev.outputs.version }}, | ||||
|             ghcr.io/goauthentik/server:${{ steps.ev.outputs.versionFamily }}, | ||||
|             ghcr.io/goauthentik/server:latest | ||||
|           platforms: linux/amd64,linux/arm64 | ||||
|       - uses: actions/attest-build-provenance@v2 | ||||
|         id: attest | ||||
|         with: | ||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} | ||||
|           subject-digest: ${{ steps.push.outputs.digest }} | ||||
|           push-to-registry: true | ||||
|           build-args: | | ||||
|             VERSION=${{ steps.ev.outputs.version }} | ||||
|             VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }} | ||||
|   build-outpost: | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
|       # Needed to upload container images to ghcr.io | ||||
|       packages: write | ||||
|       # Needed for attestation | ||||
|       id-token: write | ||||
|       attestations: write | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
| @ -83,16 +72,12 @@ jobs: | ||||
|         with: | ||||
|           go-version-file: "go.mod" | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v3.2.0 | ||||
|         uses: docker/setup-qemu-action@v3.0.0 | ||||
|       - name: Set up Docker Buildx | ||||
|         uses: docker/setup-buildx-action@v3 | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/${{ matrix.type }},beryju/authentik-${{ matrix.type }} | ||||
|       - name: make empty clients | ||||
|         run: | | ||||
|           mkdir -p ./gen-ts-api | ||||
| @ -109,22 +94,22 @@ jobs: | ||||
|           username: ${{ github.repository_owner }} | ||||
|           password: ${{ secrets.GITHUB_TOKEN }} | ||||
|       - name: Build Docker Image | ||||
|         uses: docker/build-push-action@v6 | ||||
|         id: push | ||||
|         uses: docker/build-push-action@v5 | ||||
|         with: | ||||
|           push: true | ||||
|           build-args: | | ||||
|             VERSION=${{ github.ref }} | ||||
|           tags: ${{ steps.ev.outputs.imageTags }} | ||||
|           push: ${{ github.event_name == 'release' }} | ||||
|           tags: | | ||||
|             beryju/authentik-${{ matrix.type }}:${{ steps.ev.outputs.version }}, | ||||
|             beryju/authentik-${{ matrix.type }}:${{ steps.ev.outputs.versionFamily }}, | ||||
|             beryju/authentik-${{ matrix.type }}:latest, | ||||
|             ghcr.io/goauthentik/${{ matrix.type }}:${{ steps.ev.outputs.version }}, | ||||
|             ghcr.io/goauthentik/${{ matrix.type }}:${{ steps.ev.outputs.versionFamily }}, | ||||
|             ghcr.io/goauthentik/${{ matrix.type }}:latest | ||||
|           file: ${{ matrix.type }}.Dockerfile | ||||
|           platforms: linux/amd64,linux/arm64 | ||||
|           context: . | ||||
|       - uses: actions/attest-build-provenance@v2 | ||||
|         id: attest | ||||
|         with: | ||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} | ||||
|           subject-digest: ${{ steps.push.outputs.digest }} | ||||
|           push-to-registry: true | ||||
|           build-args: | | ||||
|             VERSION=${{ steps.ev.outputs.version }} | ||||
|             VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }} | ||||
|   build-outpost-binary: | ||||
|     timeout-minutes: 120 | ||||
|     runs-on: ubuntu-latest | ||||
| @ -169,27 +154,6 @@ jobs: | ||||
|           file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} | ||||
|           asset_name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} | ||||
|           tag: ${{ github.ref }} | ||||
|   upload-aws-cfn-template: | ||||
|     permissions: | ||||
|       # Needed for AWS login | ||||
|       id-token: write | ||||
|       contents: read | ||||
|     needs: | ||||
|       - build-server | ||||
|       - build-outpost | ||||
|     env: | ||||
|       AWS_REGION: eu-central-1 | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - uses: aws-actions/configure-aws-credentials@v4 | ||||
|         with: | ||||
|           role-to-assume: "arn:aws:iam::016170277896:role/github_goauthentik_authentik" | ||||
|           aws-region: ${{ env.AWS_REGION }} | ||||
|       - name: Upload template | ||||
|         run: | | ||||
|           aws s3 cp website/docs/install-config/install/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.${{ github.ref }}.yaml | ||||
|           aws s3 cp website/docs/install-config/install/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.latest.yaml | ||||
|   test-release: | ||||
|     needs: | ||||
|       - build-server | ||||
| @ -200,12 +164,12 @@ jobs: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Run test suite in final docker images | ||||
|         run: | | ||||
|           echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env | ||||
|           echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env | ||||
|           docker compose pull -q | ||||
|           docker compose up --no-start | ||||
|           docker compose start postgresql redis | ||||
|           docker compose run -u root server test-all | ||||
|           echo "PG_PASS=$(openssl rand -base64 32)" >> .env | ||||
|           echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env | ||||
|           docker-compose pull -q | ||||
|           docker-compose up --no-start | ||||
|           docker-compose start postgresql redis | ||||
|           docker-compose run -u root server test-all | ||||
|   sentry-release: | ||||
|     needs: | ||||
|       - build-server | ||||
| @ -217,18 +181,15 @@ jobs: | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/server | ||||
|       - name: Get static files from docker image | ||||
|         run: | | ||||
|           docker pull ${{ steps.ev.outputs.imageMainName }} | ||||
|           container=$(docker container create ${{ steps.ev.outputs.imageMainName }}) | ||||
|           docker pull ghcr.io/goauthentik/server:latest | ||||
|           container=$(docker container create ghcr.io/goauthentik/server:latest) | ||||
|           docker cp ${container}:web/ . | ||||
|       - name: Create a Sentry.io release | ||||
|         uses: getsentry/action-release@v1 | ||||
|         continue-on-error: true | ||||
|         if: ${{ github.event_name == 'release' }} | ||||
|         env: | ||||
|           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} | ||||
|           SENTRY_ORG: authentik-security-inc | ||||
|  | ||||
							
								
								
									
27  .github/workflows/release-tag.yml  (vendored)
							| @ -1,4 +1,3 @@ | ||||
| --- | ||||
| name: authentik-on-tag | ||||
|  | ||||
| on: | ||||
| @ -14,28 +13,28 @@ jobs: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Pre-release test | ||||
|         run: | | ||||
|           echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env | ||||
|           echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env | ||||
|           echo "PG_PASS=$(openssl rand -base64 32)" >> .env | ||||
|           echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env | ||||
|           docker buildx install | ||||
|           mkdir -p ./gen-ts-api | ||||
|           docker build -t testing:latest . | ||||
|           echo "AUTHENTIK_IMAGE=testing" >> .env | ||||
|           echo "AUTHENTIK_TAG=latest" >> .env | ||||
|           docker compose up --no-start | ||||
|           docker compose start postgresql redis | ||||
|           docker compose run -u root server test-all | ||||
|           docker-compose up --no-start | ||||
|           docker-compose start postgresql redis | ||||
|           docker-compose run -u root server test-all | ||||
|       - id: generate_token | ||||
|         uses: tibdex/github-app-token@v2 | ||||
|         with: | ||||
|           app_id: ${{ secrets.GH_APP_ID }} | ||||
|           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }} | ||||
|       - name: prepare variables | ||||
|         uses: ./.github/actions/docker-push-variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|       - name: Extract version number | ||||
|         id: get_version | ||||
|         uses: actions/github-script@v7 | ||||
|         with: | ||||
|           image-name: ghcr.io/goauthentik/server | ||||
|           github-token: ${{ steps.generate_token.outputs.token }} | ||||
|           script: | | ||||
|             return context.payload.ref.replace(/\/refs\/tags\/version\//, ''); | ||||
|       - name: Create Release | ||||
|         id: create_release | ||||
|         uses: actions/create-release@v1.1.4 | ||||
| @ -43,6 +42,6 @@ jobs: | ||||
|           GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }} | ||||
|         with: | ||||
|           tag_name: ${{ github.ref }} | ||||
|           release_name: Release ${{ steps.ev.outputs.version }} | ||||
|           release_name: Release ${{ steps.get_version.outputs.result }} | ||||
|           draft: true | ||||
|           prerelease: ${{ steps.ev.outputs.prerelease == 'true' }} | ||||
|           prerelease: false | ||||
|  | ||||
							
								
								
									
21  .github/workflows/repo-mirror.yml  (vendored)
							| @ -1,21 +0,0 @@ | ||||
| name: "authentik-repo-mirror" | ||||
|  | ||||
| on: [push, delete] | ||||
|  | ||||
| jobs: | ||||
|   to_internal: | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|         with: | ||||
|           fetch-depth: 0 | ||||
|       - if: ${{ env.MIRROR_KEY != '' }} | ||||
|         uses: pixta-dev/repository-mirroring-action@v1 | ||||
|         with: | ||||
|           target_repo_url: | ||||
|             git@github.com:goauthentik/authentik-internal.git | ||||
|           ssh_private_key: | ||||
|             ${{ secrets.GH_MIRROR_KEY }} | ||||
|         env: | ||||
|           MIRROR_KEY: ${{ secrets.GH_MIRROR_KEY }} | ||||
							
								
								
									
3  .github/workflows/repo-stale.yml  (vendored)
							| @ -11,7 +11,6 @@ permissions: | ||||
|  | ||||
| jobs: | ||||
|   stale: | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - id: generate_token | ||||
| @ -24,7 +23,7 @@ jobs: | ||||
|           repo-token: ${{ steps.generate_token.outputs.token }} | ||||
|           days-before-stale: 60 | ||||
|           days-before-close: 7 | ||||
|           exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing | ||||
|           exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question | ||||
|           stale-issue-label: wontfix | ||||
|           stale-issue-message: > | ||||
|             This issue has been automatically marked as stale because it has not had | ||||
|  | ||||
							
								
								
									
4  .github/workflows/translation-advice.yml  (vendored)
							| @ -19,14 +19,14 @@ jobs: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - name: Find Comment | ||||
|         uses: peter-evans/find-comment@v3 | ||||
|         uses: peter-evans/find-comment@v2 | ||||
|         id: fc | ||||
|         with: | ||||
|           issue-number: ${{ github.event.pull_request.number }} | ||||
|           comment-author: "github-actions[bot]" | ||||
|           body-includes: authentik translations instructions | ||||
|       - name: Create or update comment | ||||
|         uses: peter-evans/create-or-update-comment@v4 | ||||
|         uses: peter-evans/create-or-update-comment@v3 | ||||
|         with: | ||||
|           comment-id: ${{ steps.fc.outputs.comment-id }} | ||||
|           issue-number: ${{ github.event.pull_request.number }} | ||||
|  | ||||
| @ -1,8 +1,9 @@ | ||||
| --- | ||||
| name: authentik-backend-translate-extract-compile | ||||
| name: authentik-backend-translate-compile | ||||
| on: | ||||
|   schedule: | ||||
|     - cron: "0 0 * * *" # every day at midnight | ||||
|   push: | ||||
|     branches: [main] | ||||
|     paths: | ||||
|       - "locale/**" | ||||
|   workflow_dispatch: | ||||
|  | ||||
| env: | ||||
| @ -24,20 +25,16 @@ jobs: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
|       - name: Setup authentik env | ||||
|         uses: ./.github/actions/setup | ||||
|       - name: run extract | ||||
|         run: | | ||||
|           poetry run make i18n-extract | ||||
|       - name: run compile | ||||
|         run: | | ||||
|           poetry run ak compilemessages | ||||
|           make web-check-compile | ||||
|         run: poetry run ak compilemessages | ||||
|       - name: Create Pull Request | ||||
|         uses: peter-evans/create-pull-request@v7 | ||||
|         uses: peter-evans/create-pull-request@v5 | ||||
|         id: cpr | ||||
|         with: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
|           branch: extract-compile-backend-translation | ||||
|           commit-message: "core, web: update translations" | ||||
|           title: "core, web: update translations" | ||||
|           body: "core, web: update translations" | ||||
|           branch: compile-backend-translation | ||||
|           commit-message: "core: compile backend translations" | ||||
|           title: "core: compile backend translations" | ||||
|           body: "core: compile backend translations" | ||||
|           delete-branch: true | ||||
|           signoff: true | ||||
| @ -1,4 +1,4 @@ | ||||
| name: authentik-api-ts-publish | ||||
| name: authentik-web-api-publish | ||||
| on: | ||||
|   push: | ||||
|     branches: [main] | ||||
| @ -7,7 +7,6 @@ on: | ||||
|   workflow_dispatch: | ||||
| jobs: | ||||
|   build: | ||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - id: generate_token | ||||
| @ -32,16 +31,11 @@ jobs: | ||||
|         env: | ||||
|           NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }} | ||||
|       - name: Upgrade /web | ||||
|         working-directory: web | ||||
|         working-directory: web/ | ||||
|         run: | | ||||
|           export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'` | ||||
|           npm i @goauthentik/api@$VERSION | ||||
|       - name: Upgrade /web/packages/sfe | ||||
|         working-directory: web/packages/sfe | ||||
|         run: | | ||||
|           export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'` | ||||
|           npm i @goauthentik/api@$VERSION | ||||
|       - uses: peter-evans/create-pull-request@v7 | ||||
|       - uses: peter-evans/create-pull-request@v5 | ||||
|         id: cpr | ||||
|         with: | ||||
|           token: ${{ steps.generate_token.outputs.token }} | ||||
							
								
								
									
5  .vscode/extensions.json  (vendored)
							| @ -10,12 +10,13 @@ | ||||
|         "Gruntfuggly.todo-tree", | ||||
|         "mechatroner.rainbow-csv", | ||||
|         "ms-python.black-formatter", | ||||
|         "charliermarsh.ruff", | ||||
|         "ms-python.isort", | ||||
|         "ms-python.pylint", | ||||
|         "ms-python.python", | ||||
|         "ms-python.vscode-pylance", | ||||
|         "ms-python.black-formatter", | ||||
|         "redhat.vscode-yaml", | ||||
|         "Tobermory.es6-string-html", | ||||
|         "unifiedjs.vscode-mdx" | ||||
|         "unifiedjs.vscode-mdx", | ||||
|     ] | ||||
| } | ||||
|  | ||||
							
								
								
									
2  .vscode/launch.json  (vendored)
							| @ -22,6 +22,6 @@ | ||||
|             }, | ||||
|             "justMyCode": true, | ||||
|             "django": true | ||||
|         } | ||||
|         }, | ||||
|     ] | ||||
| } | ||||
|  | ||||
							
								
								
									
31  .vscode/settings.json  (vendored)
							| @ -4,38 +4,33 @@ | ||||
|         "asgi", | ||||
|         "authentik", | ||||
|         "authn", | ||||
|         "entra", | ||||
|         "goauthentik", | ||||
|         "jwe", | ||||
|         "jwks", | ||||
|         "kubernetes", | ||||
|         "oidc", | ||||
|         "openid", | ||||
|         "passwordless", | ||||
|         "plex", | ||||
|         "saml", | ||||
|         "scim", | ||||
|         "slo", | ||||
|         "sso", | ||||
|         "totp", | ||||
|         "webauthn", | ||||
|         "traefik", | ||||
|         "webauthn" | ||||
|         "passwordless", | ||||
|         "kubernetes", | ||||
|         "sso", | ||||
|         "slo", | ||||
|         "scim", | ||||
|     ], | ||||
|     "todo-tree.tree.showCountsInTree": true, | ||||
|     "todo-tree.tree.showBadges": true, | ||||
|     "yaml.customTags": [ | ||||
|         "!Condition sequence", | ||||
|         "!Context scalar", | ||||
|         "!Enumerate sequence", | ||||
|         "!Env scalar", | ||||
|         "!Find sequence", | ||||
|         "!FindObject sequence", | ||||
|         "!Format sequence", | ||||
|         "!If sequence", | ||||
|         "!Index scalar", | ||||
|         "!KeyOf scalar", | ||||
|         "!Value scalar", | ||||
|         "!AtIndex scalar" | ||||
|         "!Context scalar", | ||||
|         "!Context sequence", | ||||
|         "!Format sequence", | ||||
|         "!Condition sequence", | ||||
|         "!Env sequence", | ||||
|         "!Env scalar", | ||||
|         "!If sequence" | ||||
|     ], | ||||
|     "typescript.preferences.importModuleSpecifier": "non-relative", | ||||
|     "typescript.preferences.importModuleSpecifierEnding": "index", | ||||
|  | ||||
							
								
								
									
62  .vscode/tasks.json  (vendored)
							| @ -2,67 +2,85 @@ | ||||
|     "version": "2.0.0", | ||||
|     "tasks": [ | ||||
|         { | ||||
|             "label": "authentik/core: make", | ||||
|             "label": "authentik[core]: format & test", | ||||
|             "command": "poetry", | ||||
|             "args": ["run", "make", "lint-fix", "lint"], | ||||
|             "presentation": { | ||||
|                 "panel": "new" | ||||
|             }, | ||||
|             "group": "test" | ||||
|             "args": [ | ||||
|                 "run", | ||||
|                 "make" | ||||
|             ], | ||||
|             "group": "build", | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/core: run", | ||||
|             "label": "authentik[core]: run", | ||||
|             "command": "poetry", | ||||
|             "args": ["run", "ak", "server"], | ||||
|             "args": [ | ||||
|                 "run", | ||||
|                 "make", | ||||
|                 "run", | ||||
|             ], | ||||
|             "group": "build", | ||||
|             "presentation": { | ||||
|                 "panel": "dedicated", | ||||
|                 "group": "running" | ||||
|             } | ||||
|             }, | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/web: make", | ||||
|             "label": "authentik[web]: format", | ||||
|             "command": "make", | ||||
|             "args": ["web"], | ||||
|             "group": "build" | ||||
|             "group": "build", | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/web: watch", | ||||
|             "label": "authentik[web]: watch", | ||||
|             "command": "make", | ||||
|             "args": ["web-watch"], | ||||
|             "group": "build", | ||||
|             "presentation": { | ||||
|                 "panel": "dedicated", | ||||
|                 "group": "running" | ||||
|             } | ||||
|             }, | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik: install", | ||||
|             "command": "make", | ||||
|             "args": ["install", "-j4"], | ||||
|             "group": "build" | ||||
|             "args": ["install"], | ||||
|             "group": "build", | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/website: make", | ||||
|             "label": "authentik: i18n-extract", | ||||
|             "command": "poetry", | ||||
|             "args": [ | ||||
|                 "run", | ||||
|                 "make", | ||||
|                 "i18n-extract" | ||||
|             ], | ||||
|             "group": "build", | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik[website]: format", | ||||
|             "command": "make", | ||||
|             "args": ["website"], | ||||
|             "group": "build" | ||||
|             "group": "build", | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/website: watch", | ||||
|             "label": "authentik[website]: watch", | ||||
|             "command": "make", | ||||
|             "args": ["website-watch"], | ||||
|             "group": "build", | ||||
|             "presentation": { | ||||
|                 "panel": "dedicated", | ||||
|                 "group": "running" | ||||
|             } | ||||
|             }, | ||||
|         }, | ||||
|         { | ||||
|             "label": "authentik/api: generate", | ||||
|             "label": "authentik[api]: generate", | ||||
|             "command": "poetry", | ||||
|             "args": ["run", "make", "gen"], | ||||
|             "args": [ | ||||
|                 "run", | ||||
|                 "make", | ||||
|                 "gen" | ||||
|             ], | ||||
|             "group": "build" | ||||
|         } | ||||
|         }, | ||||
|     ] | ||||
| } | ||||
|  | ||||
							
								
								
									
12  CODEOWNERS
							| @ -11,26 +11,16 @@ scripts/                        @goauthentik/backend | ||||
| tests/                          @goauthentik/backend | ||||
| pyproject.toml                  @goauthentik/backend | ||||
| poetry.lock                     @goauthentik/backend | ||||
| go.mod                          @goauthentik/backend | ||||
| go.sum                          @goauthentik/backend | ||||
| # Infrastructure | ||||
| .github/                        @goauthentik/infrastructure | ||||
| Dockerfile                      @goauthentik/infrastructure | ||||
| *Dockerfile                     @goauthentik/infrastructure | ||||
| .dockerignore                   @goauthentik/infrastructure | ||||
| docker-compose.yml              @goauthentik/infrastructure | ||||
| Makefile                        @goauthentik/infrastructure | ||||
| .editorconfig                   @goauthentik/infrastructure | ||||
| CODEOWNERS                      @goauthentik/infrastructure | ||||
| # Web | ||||
| web/                            @goauthentik/frontend | ||||
| tests/wdio/                     @goauthentik/frontend | ||||
| # Locale | ||||
| locale/                         @goauthentik/backend @goauthentik/frontend | ||||
| web/xliff/                      @goauthentik/backend @goauthentik/frontend | ||||
| # Docs & Website | ||||
| website/                        @goauthentik/docs | ||||
| CODE_OF_CONDUCT.md              @goauthentik/docs | ||||
| # Security | ||||
| SECURITY.md                     @goauthentik/security @goauthentik/docs | ||||
| website/docs/security/          @goauthentik/security @goauthentik/docs | ||||
| website/docs/security/          @goauthentik/security | ||||
|  | ||||
| @ -1 +1 @@ | ||||
| website/docs/developer-docs/index.md | ||||
| website/developer-docs/index.md | ||||
							
								
								
									
61  Dockerfile
							| @ -1,7 +1,7 @@ | ||||
| # syntax=docker/dockerfile:1 | ||||
|  | ||||
| # Stage 1: Build website | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS website-builder | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/node:21 as website-builder | ||||
|  | ||||
| ENV NODE_ENV=production | ||||
|  | ||||
| @ -14,28 +14,22 @@ RUN --mount=type=bind,target=/work/website/package.json,src=./website/package.js | ||||
|  | ||||
| COPY ./website /work/website/ | ||||
| COPY ./blueprints /work/blueprints/ | ||||
| COPY ./schema.yml /work/ | ||||
| COPY ./SECURITY.md /work/ | ||||
|  | ||||
| RUN npm run build-bundled | ||||
| RUN npm run build-docs-only | ||||
|  | ||||
| # Stage 2: Build webui | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS web-builder | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/node:21 as web-builder | ||||
|  | ||||
| ARG GIT_BUILD_HASH | ||||
| ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ||||
| ENV NODE_ENV=production | ||||
|  | ||||
| WORKDIR /work/web | ||||
|  | ||||
| RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \ | ||||
|     --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \ | ||||
|     --mount=type=bind,target=/work/web/packages/sfe/package.json,src=./web/packages/sfe/package.json \ | ||||
|     --mount=type=bind,target=/work/web/scripts,src=./web/scripts \ | ||||
|     --mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \ | ||||
|     npm ci --include=dev | ||||
|  | ||||
| COPY ./package.json /work | ||||
| COPY ./web /work/web/ | ||||
| COPY ./website /work/website/ | ||||
| COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api | ||||
| @ -43,7 +37,7 @@ COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api | ||||
| RUN npm run build | ||||
|  | ||||
| # Stage 3: Build go proxy | ||||
| FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/oss/go/microsoft/golang:1.23-fips-bookworm AS go-builder | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/golang:1.21.5-bookworm AS go-builder | ||||
|  | ||||
| ARG TARGETOS | ||||
| ARG TARGETARCH | ||||
| @ -54,11 +48,6 @@ ARG GOARCH=$TARGETARCH | ||||
|  | ||||
| WORKDIR /go/src/goauthentik.io | ||||
|  | ||||
| RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \ | ||||
|     dpkg --add-architecture arm64 && \ | ||||
|     apt-get update && \ | ||||
|     apt-get install -y --no-install-recommends crossbuild-essential-arm64 gcc-aarch64-linux-gnu | ||||
|  | ||||
| RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \ | ||||
|     --mount=type=bind,target=/go/src/goauthentik.io/go.sum,src=./go.sum \ | ||||
|     --mount=type=cache,target=/go/pkg/mod \ | ||||
| @ -73,17 +62,17 @@ COPY ./internal /go/src/goauthentik.io/internal | ||||
| COPY ./go.mod /go/src/goauthentik.io/go.mod | ||||
| COPY ./go.sum /go/src/goauthentik.io/go.sum | ||||
|  | ||||
| ENV CGO_ENABLED=0 | ||||
|  | ||||
| RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \ | ||||
|     --mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \ | ||||
|     if [ "$TARGETARCH" = "arm64" ]; then export CC=aarch64-linux-gnu-gcc && export CC_FOR_TARGET=gcc-aarch64-linux-gnu; fi && \ | ||||
|     CGO_ENABLED=1 GOEXPERIMENT="systemcrypto" GOFLAGS="-tags=requirefips" GOARM="${TARGETVARIANT#v}" \ | ||||
|     go build -o /go/authentik ./cmd/server | ||||
|     GOARM="${TARGETVARIANT#v}" go build -o /go/authentik ./cmd/server | ||||
|  | ||||
| # Stage 4: MaxMind GeoIP | ||||
| FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip | ||||
| FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v6.0 as geoip | ||||
|  | ||||
| ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN" | ||||
| ENV GEOIPUPDATE_VERBOSE="1" | ||||
| ENV GEOIPUPDATE_VERBOSE="true" | ||||
| ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID" | ||||
| ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY" | ||||
|  | ||||
| @ -94,10 +83,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ | ||||
|     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" | ||||
|  | ||||
| # Stage 5: Python dependencies | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips-full AS python-deps | ||||
|  | ||||
| ARG TARGETARCH | ||||
| ARG TARGETVARIANT | ||||
| FROM docker.io/python:3.12.1-slim-bookworm AS python-deps | ||||
|  | ||||
| WORKDIR /ak-root/poetry | ||||
|  | ||||
| @ -110,38 +96,36 @@ RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloa | ||||
| RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \ | ||||
|     apt-get update && \ | ||||
|     # Required for installing pip packages | ||||
|     apt-get install -y --no-install-recommends build-essential pkg-config libpq-dev libkrb5-dev | ||||
|     apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev libpq-dev | ||||
|  | ||||
| RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \ | ||||
|     --mount=type=bind,target=./poetry.lock,src=./poetry.lock \ | ||||
|     --mount=type=cache,target=/root/.cache/pip \ | ||||
|     --mount=type=cache,target=/root/.cache/pypoetry \ | ||||
|     python -m venv /ak-root/venv/ && \ | ||||
|     bash -c "source ${VENV_PATH}/bin/activate && \ | ||||
|     pip3 install --upgrade pip && \ | ||||
|     pip3 install poetry && \ | ||||
|     poetry install --only=main --no-ansi --no-interaction --no-root && \ | ||||
|     pip install --force-reinstall /wheels/*" | ||||
|     poetry install --only=main --no-ansi --no-interaction | ||||
|  | ||||
| # Stage 6: Run | ||||
| FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips-full AS final-image | ||||
| FROM docker.io/python:3.12.1-slim-bookworm AS final-image | ||||
|  | ||||
| ARG VERSION | ||||
| ARG GIT_BUILD_HASH | ||||
| ARG VERSION | ||||
| ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ||||
|  | ||||
| LABEL org.opencontainers.image.url=https://goauthentik.io | ||||
| LABEL org.opencontainers.image.description="goauthentik.io Main server image, see https://goauthentik.io for more info." | ||||
| LABEL org.opencontainers.image.source=https://github.com/goauthentik/authentik | ||||
| LABEL org.opencontainers.image.version=${VERSION} | ||||
| LABEL org.opencontainers.image.revision=${GIT_BUILD_HASH} | ||||
| LABEL org.opencontainers.image.url https://goauthentik.io | ||||
| LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info. | ||||
| LABEL org.opencontainers.image.source https://github.com/goauthentik/authentik | ||||
| LABEL org.opencontainers.image.version ${VERSION} | ||||
| LABEL org.opencontainers.image.revision ${GIT_BUILD_HASH} | ||||
|  | ||||
| WORKDIR / | ||||
|  | ||||
| # We cannot cache this layer otherwise we'll end up with a bigger image | ||||
| RUN apt-get update && \ | ||||
|     # Required for runtime | ||||
|     apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates libkrb5-3 libkadm5clnt-mit12 libkdb5-10 && \ | ||||
|     apt-get install -y --no-install-recommends libpq5 openssl libxmlsec1-openssl libmaxminddb0 ca-certificates && \ | ||||
|     # Required for bootstrap & healthcheck | ||||
|     apt-get install -y --no-install-recommends runit && \ | ||||
|     apt-get clean && \ | ||||
| @ -161,12 +145,11 @@ COPY ./tests /tests | ||||
| COPY ./manage.py / | ||||
| COPY ./blueprints /blueprints | ||||
| COPY ./lifecycle/ /lifecycle | ||||
| COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf | ||||
| COPY --from=go-builder /go/authentik /bin/authentik | ||||
| COPY --from=python-deps /ak-root/venv /ak-root/venv | ||||
| COPY --from=web-builder /work/web/dist/ /web/dist/ | ||||
| COPY --from=web-builder /work/web/authentik/ /web/authentik/ | ||||
| COPY --from=website-builder /work/website/build/ /website/help/ | ||||
| COPY --from=website-builder /work/website/help/ /website/help/ | ||||
| COPY --from=geoip /usr/share/GeoIP /geoip | ||||
|  | ||||
| USER 1000 | ||||
| @ -178,8 +161,6 @@ ENV TMPDIR=/dev/shm/ \ | ||||
|     VENV_PATH="/ak-root/venv" \ | ||||
|     POETRY_VIRTUALENVS_CREATE=false | ||||
|  | ||||
| ENV GOFIPS=1 | ||||
|  | ||||
| HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ] | ||||
|  | ||||
| ENTRYPOINT [ "dumb-init", "--", "ak" ] | ||||
|  | ||||
							
								
								
									
142  Makefile
							| @ -5,13 +5,9 @@ PWD = $(shell pwd) | ||||
| UID = $(shell id -u) | ||||
| GID = $(shell id -g) | ||||
| NPM_VERSION = $(shell python -m scripts.npm_version) | ||||
| PY_SOURCES = authentik tests scripts lifecycle .github website/docs/install-config/install/aws | ||||
| PY_SOURCES = authentik tests scripts lifecycle | ||||
| DOCKER_IMAGE ?= "authentik:test" | ||||
|  | ||||
| GEN_API_TS = "gen-ts-api" | ||||
| GEN_API_PY = "gen-py-api" | ||||
| GEN_API_GO = "gen-go-api" | ||||
|  | ||||
| pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null) | ||||
| pg_host := $(shell python -m authentik.lib.config postgresql.host 2>/dev/null) | ||||
| pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null) | ||||
| @ -19,13 +15,13 @@ pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null) | ||||
| CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \ | ||||
| 		-I .github/codespell-words.txt \ | ||||
| 		-S 'web/src/locales/**' \ | ||||
| 		-S 'website/docs/developer-docs/api/reference/**' \ | ||||
| 		authentik \ | ||||
| 		internal \ | ||||
| 		cmd \ | ||||
| 		web/src \ | ||||
| 		website/src \ | ||||
| 		website/blog \ | ||||
| 		website/developer-docs \ | ||||
| 		website/docs \ | ||||
| 		website/integrations \ | ||||
| 		website/src | ||||
| @ -42,16 +38,16 @@ help:  ## Show this help | ||||
| 		sort | ||||
| 	@echo "" | ||||
|  | ||||
| go-test: | ||||
| test-go: | ||||
| 	go test -timeout 0 -v -race -cover ./... | ||||
|  | ||||
| test-docker:  ## Run all tests in a docker-compose | ||||
| 	echo "PG_PASS=$(shell openssl rand 32 | base64 -w 0)" >> .env | ||||
| 	echo "AUTHENTIK_SECRET_KEY=$(shell openssl rand 32 | base64 -w 0)" >> .env | ||||
| 	docker compose pull -q | ||||
| 	docker compose up --no-start | ||||
| 	docker compose start postgresql redis | ||||
| 	docker compose run -u root server test-all | ||||
| 	echo "PG_PASS=$(openssl rand -base64 32)" >> .env | ||||
| 	echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env | ||||
| 	docker-compose pull -q | ||||
| 	docker-compose up --no-start | ||||
| 	docker-compose start postgresql redis | ||||
| 	docker-compose run -u root server test-all | ||||
| 	rm -f .env | ||||
|  | ||||
| test: ## Run the server tests and produce a coverage report (locally) | ||||
| @ -59,37 +55,28 @@ test: ## Run the server tests and produce a coverage report (locally) | ||||
| 	coverage html | ||||
| 	coverage report | ||||
|  | ||||
| lint-fix: lint-codespell  ## Lint and automatically fix errors in the python source code. Reports spelling errors. | ||||
| lint-fix:  ## Lint and automatically fix errors in the python source code. Reports spelling errors. | ||||
| 	isort $(PY_SOURCES) | ||||
| 	black $(PY_SOURCES) | ||||
| 	ruff check --fix $(PY_SOURCES) | ||||
|  | ||||
| lint-codespell:  ## Reports spelling errors. | ||||
| 	ruff --fix $(PY_SOURCES) | ||||
| 	codespell -w $(CODESPELL_ARGS) | ||||
|  | ||||
| lint: ## Lint the python and golang sources | ||||
| 	bandit -r $(PY_SOURCES) -x web/node_modules -x tests/wdio/node_modules -x website/node_modules | ||||
| 	bandit -r $(PY_SOURCES) -x node_modules | ||||
| 	./web/node_modules/.bin/pyright $(PY_SOURCES) | ||||
| 	pylint $(PY_SOURCES) | ||||
| 	golangci-lint run -v | ||||
|  | ||||
| core-install: | ||||
| 	poetry install | ||||
|  | ||||
| migrate: ## Run the Authentik Django server's migrations | ||||
| 	python -m lifecycle.migrate | ||||
|  | ||||
| i18n-extract: core-i18n-extract web-i18n-extract  ## Extract strings that require translation into files to send to a translation service | ||||
| i18n-extract: i18n-extract-core web-i18n-extract  ## Extract strings that require translation into files to send to a translation service | ||||
|  | ||||
| core-i18n-extract: | ||||
| 	ak makemessages \ | ||||
| 		--add-location file \ | ||||
| 		--no-obsolete \ | ||||
| 		--ignore web \ | ||||
| 		--ignore internal \ | ||||
| 		--ignore ${GEN_API_TS} \ | ||||
| 		--ignore ${GEN_API_GO} \ | ||||
| 		--ignore website \ | ||||
| 		-l en | ||||
| i18n-extract-core: | ||||
| 	ak makemessages --ignore web --ignore internal --ignore web --ignore web-api --ignore website -l en | ||||
|  | ||||
| install: web-install website-install core-install  ## Install all requires dependencies for `web`, `website` and `core` | ||||
| install: web-install website-install  ## Install all requires dependencies for `web`, `website` and `core` | ||||
| 	poetry install | ||||
|  | ||||
| dev-drop-db: | ||||
| 	dropdb -U ${pg_user} -h ${pg_host} ${pg_name} | ||||
| @ -107,14 +94,8 @@ dev-reset: dev-drop-db dev-create-db migrate  ## Drop and restore the Authentik | ||||
| ######################### | ||||
|  | ||||
| gen-build:  ## Extract the schema from the database | ||||
| 	AUTHENTIK_DEBUG=true \ | ||||
| 		AUTHENTIK_TENANTS__ENABLED=true \ | ||||
| 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | ||||
| 		ak make_blueprint_schema > blueprints/schema.json | ||||
| 	AUTHENTIK_DEBUG=true \ | ||||
| 		AUTHENTIK_TENANTS__ENABLED=true \ | ||||
| 		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ | ||||
| 		ak spectacular --file schema.yml | ||||
| 	AUTHENTIK_DEBUG=true ak make_blueprint_schema > blueprints/schema.json | ||||
| 	AUTHENTIK_DEBUG=true ak spectacular --file schema.yml | ||||
|  | ||||
| gen-changelog:  ## (Release) generate the changelog based from the commits since the last tag | ||||
| 	git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md | ||||
| @ -125,7 +106,7 @@ gen-diff:  ## (Release) generate the changelog diff between the current schema a | ||||
| 	docker run \ | ||||
| 		--rm -v ${PWD}:/local \ | ||||
| 		--user ${UID}:${GID} \ | ||||
| 		docker.io/openapitools/openapi-diff:2.1.0-beta.8 \ | ||||
| 		docker.io/openapitools/openapi-diff:2.1.0-beta.6 \ | ||||
| 		--markdown /local/diff.md \ | ||||
| 		/local/old_schema.yml /local/schema.yml | ||||
| 	rm old_schema.yml | ||||
| @ -133,69 +114,48 @@ gen-diff:  ## (Release) generate the changelog diff between the current schema a | ||||
| 	sed -i 's/}/}/g' diff.md | ||||
| 	npx prettier --write diff.md | ||||
|  | ||||
| gen-clean-ts:  ## Remove generated API client for Typescript | ||||
| 	rm -rf ./${GEN_API_TS}/ | ||||
| 	rm -rf ./web/node_modules/@goauthentik/api/ | ||||
| gen-clean: | ||||
| 	rm -rf gen-go-api/ | ||||
| 	rm -rf gen-ts-api/ | ||||
| 	rm -rf web/node_modules/@goauthentik/api/ | ||||
|  | ||||
| gen-clean-go:  ## Remove generated API client for Go | ||||
| 	rm -rf ./${GEN_API_GO}/ | ||||
|  | ||||
| gen-clean-py:  ## Remove generated API client for Python | ||||
| 	rm -rf ./${GEN_API_PY}/ | ||||
|  | ||||
| gen-clean: gen-clean-ts gen-clean-go gen-clean-py  ## Remove generated API clients | ||||
|  | ||||
| gen-client-ts: gen-clean-ts  ## Build and install the authentik API for Typescript into the authentik UI Application | ||||
| gen-client-ts:  ## Build and install the authentik API for Typescript into the authentik UI Application | ||||
| 	docker run \ | ||||
| 		--rm -v ${PWD}:/local \ | ||||
| 		--user ${UID}:${GID} \ | ||||
| 		docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \ | ||||
| 		-i /local/schema.yml \ | ||||
| 		-g typescript-fetch \ | ||||
| 		-o /local/${GEN_API_TS} \ | ||||
| 		-o /local/gen-ts-api \ | ||||
| 		-c /local/scripts/api-ts-config.yaml \ | ||||
| 		--additional-properties=npmVersion=${NPM_VERSION} \ | ||||
| 		--git-repo-id authentik \ | ||||
| 		--git-user-id goauthentik | ||||
| 	mkdir -p web/node_modules/@goauthentik/api | ||||
| 	cd ./${GEN_API_TS} && npm i | ||||
| 	\cp -rf ./${GEN_API_TS}/* web/node_modules/@goauthentik/api | ||||
| 	cd gen-ts-api && npm i | ||||
| 	\cp -rfv gen-ts-api/* web/node_modules/@goauthentik/api | ||||
|  | ||||
| gen-client-py: gen-clean-py ## Build and install the authentik API for Python | ||||
| gen-client-go:  ## Build and install the authentik API for Golang | ||||
| 	mkdir -p ./gen-go-api ./gen-go-api/templates | ||||
| 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./gen-go-api/config.yaml | ||||
| 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./gen-go-api/templates/README.mustache | ||||
| 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./gen-go-api/templates/go.mod.mustache | ||||
| 	cp schema.yml ./gen-go-api/ | ||||
| 	docker run \ | ||||
| 		--rm -v ${PWD}:/local \ | ||||
| 		--user ${UID}:${GID} \ | ||||
| 		docker.io/openapitools/openapi-generator-cli:v7.4.0 generate \ | ||||
| 		-i /local/schema.yml \ | ||||
| 		-g python \ | ||||
| 		-o /local/${GEN_API_PY} \ | ||||
| 		-c /local/scripts/api-py-config.yaml \ | ||||
| 		--additional-properties=packageVersion=${NPM_VERSION} \ | ||||
| 		--git-repo-id authentik \ | ||||
| 		--git-user-id goauthentik | ||||
| 	pip install ./${GEN_API_PY} | ||||
|  | ||||
| gen-client-go: gen-clean-go  ## Build and install the authentik API for Golang | ||||
| 	mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates | ||||
| 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./${GEN_API_GO}/config.yaml | ||||
| 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./${GEN_API_GO}/templates/README.mustache | ||||
| 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./${GEN_API_GO}/templates/go.mod.mustache | ||||
| 	cp schema.yml ./${GEN_API_GO}/ | ||||
| 	docker run \ | ||||
| 		--rm -v ${PWD}/${GEN_API_GO}:/local \ | ||||
| 		--rm -v ${PWD}/gen-go-api:/local \ | ||||
| 		--user ${UID}:${GID} \ | ||||
| 		docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \ | ||||
| 		-i /local/schema.yml \ | ||||
| 		-g go \ | ||||
| 		-o /local/ \ | ||||
| 		-c /local/config.yaml | ||||
| 	go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO} | ||||
| 	rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/ | ||||
| 	go mod edit -replace goauthentik.io/api/v3=./gen-go-api | ||||
| 	rm -rf ./gen-go-api/config.yaml ./gen-go-api/templates/ | ||||
|  | ||||
| gen-dev-config:  ## Generate a local development config file | ||||
| 	python -m scripts.generate_config | ||||
|  | ||||
| gen: gen-build gen-client-ts | ||||
| gen: gen-build gen-clean gen-client-ts | ||||
|  | ||||
| ######################### | ||||
| ## Web | ||||
| @ -204,14 +164,11 @@ gen: gen-build gen-client-ts | ||||
| web-build: web-install  ## Build the Authentik UI | ||||
| 	cd web && npm run build | ||||
|  | ||||
| web: web-lint-fix web-lint web-check-compile  ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it | ||||
| web: web-lint-fix web-lint web-check-compile web-i18n-extract  ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it | ||||
|  | ||||
| web-install:  ## Install the necessary libraries to build the Authentik UI | ||||
| 	cd web && npm ci | ||||
|  | ||||
| web-test: ## Run tests for the Authentik UI | ||||
| 	cd web && npm run test | ||||
|  | ||||
| web-watch:  ## Build and watch the Authentik UI for changes, updating automatically | ||||
| 	rm -rf web/dist/ | ||||
| 	mkdir web/dist/ | ||||
| @ -243,7 +200,7 @@ website: website-lint-fix website-build  ## Automatically fix formatting issues | ||||
| website-install: | ||||
| 	cd website && npm ci | ||||
|  | ||||
| website-lint-fix: lint-codespell | ||||
| website-lint-fix: | ||||
| 	cd website && npm run prettier | ||||
|  | ||||
| website-build: | ||||
| @ -252,15 +209,11 @@ website-build: | ||||
| website-watch:  ## Build and watch the documentation website, updating automatically | ||||
| 	cd website && npm run watch | ||||
|  | ||||
| aws-cfn: | ||||
| 	cd website && npm run aws-cfn | ||||
|  | ||||
| ######################### | ||||
| ## Docker | ||||
| ######################### | ||||
|  | ||||
| docker:  ## Build a docker image of the current source tree | ||||
| 	mkdir -p ${GEN_API_TS} | ||||
| 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} | ||||
|  | ||||
| ######################### | ||||
| @ -273,6 +226,9 @@ ci--meta-debug: | ||||
| 	python -V | ||||
| 	node --version | ||||
|  | ||||
| ci-pylint: ci--meta-debug | ||||
| 	pylint $(PY_SOURCES) | ||||
|  | ||||
| ci-black: ci--meta-debug | ||||
| 	black --check $(PY_SOURCES) | ||||
|  | ||||
| @ -282,8 +238,14 @@ ci-ruff: ci--meta-debug | ||||
| ci-codespell: ci--meta-debug | ||||
| 	codespell $(CODESPELL_ARGS) -s | ||||
|  | ||||
| ci-isort: ci--meta-debug | ||||
| 	isort --check $(PY_SOURCES) | ||||
|  | ||||
| ci-bandit: ci--meta-debug | ||||
| 	bandit -r $(PY_SOURCES) | ||||
|  | ||||
| ci-pyright: ci--meta-debug | ||||
| 	./web/node_modules/.bin/pyright $(PY_SOURCES) | ||||
|  | ||||
| ci-pending-migrations: ci--meta-debug | ||||
| 	ak makemigrations --check | ||||
|  | ||||
							
								
								
									
14  README.md
							| @ -15,9 +15,7 @@ | ||||
|  | ||||
| ## What is authentik? | ||||
|  | ||||
| authentik is an open-source Identity Provider that emphasizes flexibility and versatility, with support for a wide set of protocols. | ||||
|  | ||||
| Our [enterprise offer](https://goauthentik.io/pricing) can also be used as a self-hosted replacement for large-scale deployments of Okta/Auth0, Entra ID, Ping Identity, or other legacy IdPs for employees and B2B2C use. | ||||
| authentik is an open-source Identity Provider that emphasizes flexibility and versatility. It can be seamlessly integrated into existing environments to support new protocols. authentik is also a great solution for implementing sign-up, recovery, and other similar features in your application, saving you the hassle of dealing with them. | ||||
|  | ||||
| ## Installation | ||||
|  | ||||
| @ -27,14 +25,14 @@ For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/h | ||||
|  | ||||
| ## Screenshots | ||||
|  | ||||
| | Light                                                       | Dark                                                       | | ||||
| | ----------------------------------------------------------- | ---------------------------------------------------------- | | ||||
| |   |   | | ||||
| |  |  | | ||||
| | Light                                                  | Dark                                                  | | ||||
| | ------------------------------------------------------ | ----------------------------------------------------- | | ||||
| |   |   | | ||||
| |  |  | | ||||
|  | ||||
| ## Development | ||||
|  | ||||
| See [Developer Documentation](https://docs.goauthentik.io/docs/developer-docs/?utm_source=github) | ||||
| See [Developer Documentation](https://goauthentik.io/developer-docs/?utm_source=github) | ||||
|  | ||||
| ## Security | ||||
|  | ||||
|  | ||||
							
								
								
									
22  SECURITY.md
							| @ -2,7 +2,7 @@ authentik takes security very seriously. We follow the rules of [responsible di | ||||
|  | ||||
| ## Independent audits and pentests | ||||
|  | ||||
| We are committed to engaging in regular pentesting and security audits of authentik. Defining and adhering to a cadence of external testing ensures a stronger probability that our code base, our features, and our architecture are as secure and non-exploitable as possible. For more details about specific audits and pentests, refer to "Audits and Certificates" in our [Security documentation](https://docs.goauthentik.io/docs/security). | ||||
| In May/June of 2023 [Cure53](https://cure53.de) conducted an audit and pentest. The [results](https://cure53.de/pentest-report_authentik.pdf) are published on the [Cure53 website](https://cure53.de/#publications-2023). For more details about authentik's response to the findings of the audit refer to [2023-06 Cure53 Code audit](https://goauthentik.io/docs/security/2023-06-cure53). | ||||
|  | ||||
| ## What authentik classifies as a CVE | ||||
|  | ||||
| @ -18,10 +18,10 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni | ||||
|  | ||||
| (.x being the latest patch release for each version) | ||||
|  | ||||
| | Version   | Supported | | ||||
| | --------- | --------- | | ||||
| | 2024.8.x  | ✅        | | ||||
| | 2024.10.x | ✅        | | ||||
| | Version | Supported | | ||||
| | --- | --- | | ||||
| | 2023.6.x | ✅ | | ||||
| | 2023.8.x | ✅ | | ||||
|  | ||||
| ## Reporting a Vulnerability | ||||
|  | ||||
| @ -31,12 +31,12 @@ To report a vulnerability, send an email to [security@goauthentik.io](mailto:se | ||||
|  | ||||
| authentik reserves the right to reclassify CVSS as necessary. To determine severity, we will use the CVSS calculator from NVD (https://nvd.nist.gov/vuln-metrics/cvss/v3-calculator). The calculated CVSS score will then be translated into one of the following categories: | ||||
|  | ||||
| | Score      | Severity | | ||||
| | ---------- | -------- | | ||||
| | 0.0        | None     | | ||||
| | 0.1 – 3.9  | Low      | | ||||
| | 4.0 – 6.9  | Medium   | | ||||
| | 7.0 – 8.9  | High     | | ||||
| | Score | Severity | | ||||
| | --- | --- | | ||||
| | 0.0 | None | | ||||
| | 0.1 – 3.9 | Low | | ||||
| | 4.0 – 6.9 | Medium | | ||||
| | 7.0 – 8.9 | High | | ||||
| | 9.0 – 10.0 | Critical | | ||||
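|  | ||||
| As an illustration only (this helper is hypothetical and not part of authentik or its security policy), the table above corresponds to roughly the following mapping of a CVSS 3.x base score to a severity bucket: | ||||
|  | ||||
| ```python | ||||
| # Hypothetical helper: maps a CVSS 3.x base score to the severity buckets above. | ||||
| def classify_cvss(score: float) -> str: | ||||
|     if not 0.0 <= score <= 10.0: | ||||
|         raise ValueError("CVSS base scores range from 0.0 to 10.0") | ||||
|     if score == 0.0: | ||||
|         return "None" | ||||
|     if score <= 3.9: | ||||
|         return "Low" | ||||
|     if score <= 6.9: | ||||
|         return "Medium" | ||||
|     if score <= 8.9: | ||||
|         return "High" | ||||
|     return "Critical" | ||||
| ``` | ||||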
|  | ||||
| ## Disclosure process | ||||
|  | ||||
| @ -1,12 +1,12 @@ | ||||
| """authentik root module""" | ||||
|  | ||||
| from os import environ | ||||
| from typing import Optional | ||||
|  | ||||
| __version__ = "2024.10.5" | ||||
| __version__ = "2023.10.5" | ||||
| ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ||||
|  | ||||
|  | ||||
| def get_build_hash(fallback: str | None = None) -> str: | ||||
| def get_build_hash(fallback: Optional[str] = None) -> str: | ||||
|     """Get build hash""" | ||||
|     build_hash = environ.get(ENV_GIT_HASH_KEY, fallback if fallback else "") | ||||
|     return fallback if build_hash == "" and fallback else build_hash | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """Meta API""" | ||||
|  | ||||
| from drf_spectacular.utils import extend_schema | ||||
| from rest_framework.fields import CharField | ||||
| from rest_framework.permissions import IsAuthenticated | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """authentik administration metrics""" | ||||
|  | ||||
| from datetime import timedelta | ||||
|  | ||||
| from django.db.models.functions import ExtractHour | ||||
|  | ||||
| @ -1,23 +1,18 @@ | ||||
| """authentik administration overview""" | ||||
|  | ||||
| import platform | ||||
| from datetime import datetime | ||||
| from ssl import OPENSSL_VERSION | ||||
| from sys import version as python_version | ||||
| from typing import TypedDict | ||||
|  | ||||
| from cryptography.hazmat.backends.openssl.backend import backend | ||||
| from django.utils.timezone import now | ||||
| from drf_spectacular.utils import extend_schema | ||||
| from gunicorn import version_info as gunicorn_version | ||||
| from rest_framework.fields import SerializerMethodField | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.views import APIView | ||||
|  | ||||
| from authentik import get_full_version | ||||
| from authentik.core.api.utils import PassiveSerializer | ||||
| from authentik.enterprise.license import LicenseKey | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.lib.utils.reflection import get_env | ||||
| from authentik.outposts.apps import MANAGED_OUTPOST | ||||
| from authentik.outposts.models import Outpost | ||||
| @ -28,13 +23,11 @@ class RuntimeDict(TypedDict): | ||||
|     """Runtime information""" | ||||
|  | ||||
|     python_version: str | ||||
|     gunicorn_version: str | ||||
|     environment: str | ||||
|     architecture: str | ||||
|     platform: str | ||||
|     uname: str | ||||
|     openssl_version: str | ||||
|     openssl_fips_enabled: bool | None | ||||
|     authentik_version: str | ||||
|  | ||||
|  | ||||
| class SystemInfoSerializer(PassiveSerializer): | ||||
| @ -44,9 +37,8 @@ class SystemInfoSerializer(PassiveSerializer): | ||||
|     http_host = SerializerMethodField() | ||||
|     http_is_secure = SerializerMethodField() | ||||
|     runtime = SerializerMethodField() | ||||
|     brand = SerializerMethodField() | ||||
|     tenant = SerializerMethodField() | ||||
|     server_time = SerializerMethodField() | ||||
|     embedded_outpost_disabled = SerializerMethodField() | ||||
|     embedded_outpost_host = SerializerMethodField() | ||||
|  | ||||
|     def get_http_headers(self, request: Request) -> dict[str, str]: | ||||
| @ -69,30 +61,22 @@ class SystemInfoSerializer(PassiveSerializer): | ||||
|     def get_runtime(self, request: Request) -> RuntimeDict: | ||||
|         """Get versions""" | ||||
|         return { | ||||
|             "architecture": platform.machine(), | ||||
|             "authentik_version": get_full_version(), | ||||
|             "environment": get_env(), | ||||
|             "openssl_fips_enabled": ( | ||||
|                 backend._fips_enabled if LicenseKey.get_total().status().is_valid else None | ||||
|             ), | ||||
|             "openssl_version": OPENSSL_VERSION, | ||||
|             "platform": platform.platform(), | ||||
|             "python_version": python_version, | ||||
|             "gunicorn_version": ".".join(str(x) for x in gunicorn_version), | ||||
|             "environment": get_env(), | ||||
|             "architecture": platform.machine(), | ||||
|             "platform": platform.platform(), | ||||
|             "uname": " ".join(platform.uname()), | ||||
|         } | ||||
|  | ||||
|     def get_brand(self, request: Request) -> str: | ||||
|         """Currently active brand""" | ||||
|         return str(request._request.brand) | ||||
|     def get_tenant(self, request: Request) -> str: | ||||
|         """Currently active tenant""" | ||||
|         return str(request._request.tenant) | ||||
|  | ||||
|     def get_server_time(self, request: Request) -> datetime: | ||||
|         """Current server time""" | ||||
|         return now() | ||||
|  | ||||
|     def get_embedded_outpost_disabled(self, request: Request) -> bool: | ||||
|         """Whether the embedded outpost is disabled""" | ||||
|         return CONFIG.get_bool("outposts.disable_embedded_outpost", False) | ||||
|  | ||||
|     def get_embedded_outpost_host(self, request: Request) -> str: | ||||
|         """Get the FQDN configured on the embedded outpost""" | ||||
|         outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST) | ||||
|  | ||||
							
								
								
									
authentik/admin/api/tasks.py (new file, 134 lines)
| @ -0,0 +1,134 @@ | ||||
| """Tasks API""" | ||||
| from importlib import import_module | ||||
|  | ||||
| from django.contrib import messages | ||||
| from django.http.response import Http404 | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from drf_spectacular.types import OpenApiTypes | ||||
| from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema | ||||
| from rest_framework.decorators import action | ||||
| from rest_framework.fields import ( | ||||
|     CharField, | ||||
|     ChoiceField, | ||||
|     DateTimeField, | ||||
|     ListField, | ||||
|     SerializerMethodField, | ||||
| ) | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.viewsets import ViewSet | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.api.decorators import permission_required | ||||
| from authentik.core.api.utils import PassiveSerializer | ||||
| from authentik.events.monitored_tasks import TaskInfo, TaskResultStatus | ||||
| from authentik.rbac.permissions import HasPermission | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| class TaskSerializer(PassiveSerializer): | ||||
|     """Serialize TaskInfo and TaskResult""" | ||||
|  | ||||
|     task_name = CharField() | ||||
|     task_description = CharField() | ||||
|     task_finish_timestamp = DateTimeField(source="finish_time") | ||||
|     task_duration = SerializerMethodField() | ||||
|  | ||||
|     status = ChoiceField( | ||||
|         source="result.status.name", | ||||
|         choices=[(x.name, x.name) for x in TaskResultStatus], | ||||
|     ) | ||||
|     messages = ListField(source="result.messages") | ||||
|  | ||||
|     def get_task_duration(self, instance: TaskInfo) -> int: | ||||
|         """Get the duration a task took to run""" | ||||
|         return max(instance.finish_timestamp - instance.start_timestamp, 0) | ||||
|  | ||||
|     def to_representation(self, instance: TaskInfo): | ||||
|         """When a new version of authentik adds fields to TaskInfo, | ||||
|         the API will fail with an AttributeError, as the classes | ||||
|         are pickled in cache. In that case, just delete the info""" | ||||
|         try: | ||||
|             return super().to_representation(instance) | ||||
|         # pylint: disable=broad-except | ||||
|         except Exception:  # pragma: no cover | ||||
|             if isinstance(self.instance, list): | ||||
|                 for inst in self.instance: | ||||
|                     inst.delete() | ||||
|             else: | ||||
|                 self.instance.delete() | ||||
|             return {} | ||||
|  | ||||
|  | ||||
| class TaskViewSet(ViewSet): | ||||
|     """Read-only view set that returns all background tasks""" | ||||
|  | ||||
|     permission_classes = [HasPermission("authentik_rbac.view_system_tasks")] | ||||
|     serializer_class = TaskSerializer | ||||
|  | ||||
|     @extend_schema( | ||||
|         responses={ | ||||
|             200: TaskSerializer(many=False), | ||||
|             404: OpenApiResponse(description="Task not found"), | ||||
|         }, | ||||
|         parameters=[ | ||||
|             OpenApiParameter( | ||||
|                 "id", | ||||
|                 type=OpenApiTypes.STR, | ||||
|                 location=OpenApiParameter.PATH, | ||||
|                 required=True, | ||||
|             ), | ||||
|         ], | ||||
|     ) | ||||
|     def retrieve(self, request: Request, pk=None) -> Response: | ||||
|         """Get a single system task""" | ||||
|         task = TaskInfo.by_name(pk) | ||||
|         if not task: | ||||
|             raise Http404 | ||||
|         return Response(TaskSerializer(task, many=False).data) | ||||
|  | ||||
|     @extend_schema(responses={200: TaskSerializer(many=True)}) | ||||
|     def list(self, request: Request) -> Response: | ||||
|         """List system tasks""" | ||||
|         tasks = sorted(TaskInfo.all().values(), key=lambda task: task.task_name) | ||||
|         return Response(TaskSerializer(tasks, many=True).data) | ||||
|  | ||||
|     @permission_required(None, ["authentik_rbac.run_system_tasks"]) | ||||
|     @extend_schema( | ||||
|         request=OpenApiTypes.NONE, | ||||
|         responses={ | ||||
|             204: OpenApiResponse(description="Task retried successfully"), | ||||
|             404: OpenApiResponse(description="Task not found"), | ||||
|             500: OpenApiResponse(description="Failed to retry task"), | ||||
|         }, | ||||
|         parameters=[ | ||||
|             OpenApiParameter( | ||||
|                 "id", | ||||
|                 type=OpenApiTypes.STR, | ||||
|                 location=OpenApiParameter.PATH, | ||||
|                 required=True, | ||||
|             ), | ||||
|         ], | ||||
|     ) | ||||
|     @action(detail=True, methods=["post"]) | ||||
|     def retry(self, request: Request, pk=None) -> Response: | ||||
|         """Retry task""" | ||||
|         task = TaskInfo.by_name(pk) | ||||
|         if not task: | ||||
|             raise Http404 | ||||
|         try: | ||||
|             task_module = import_module(task.task_call_module) | ||||
|             task_func = getattr(task_module, task.task_call_func) | ||||
|             LOGGER.debug("Running task", task=task_func) | ||||
|             task_func.delay(*task.task_call_args, **task.task_call_kwargs) | ||||
|             messages.success( | ||||
|                 self.request, | ||||
|                 _("Successfully re-scheduled Task %(name)s!" % {"name": task.task_name}), | ||||
|             ) | ||||
|             return Response(status=204) | ||||
|         except (ImportError, AttributeError):  # pragma: no cover | ||||
|             LOGGER.warning("Failed to run task, remove state", task=task) | ||||
|             # if we get an import error, the module path has probably changed | ||||
|             task.delete() | ||||
|             return Response(status=500) | ||||
| @ -1,5 +1,4 @@ | ||||
| """authentik administration overview""" | ||||
|  | ||||
| from django.core.cache import cache | ||||
| from drf_spectacular.utils import extend_schema | ||||
| from packaging.version import parse | ||||
| @ -10,9 +9,8 @@ from rest_framework.response import Response | ||||
| from rest_framework.views import APIView | ||||
|  | ||||
| from authentik import __version__, get_build_hash | ||||
| from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version | ||||
| from authentik.admin.tasks import VERSION_CACHE_KEY, update_latest_version | ||||
| from authentik.core.api.utils import PassiveSerializer | ||||
| from authentik.outposts.models import Outpost | ||||
|  | ||||
|  | ||||
| class VersionSerializer(PassiveSerializer): | ||||
| @ -20,10 +18,8 @@ class VersionSerializer(PassiveSerializer): | ||||
|  | ||||
|     version_current = SerializerMethodField() | ||||
|     version_latest = SerializerMethodField() | ||||
|     version_latest_valid = SerializerMethodField() | ||||
|     build_hash = SerializerMethodField() | ||||
|     outdated = SerializerMethodField() | ||||
|     outpost_outdated = SerializerMethodField() | ||||
|  | ||||
|     def get_build_hash(self, _) -> str: | ||||
|         """Get build hash, if version is not latest or released""" | ||||
| @ -41,23 +37,10 @@ class VersionSerializer(PassiveSerializer): | ||||
|             return __version__ | ||||
|         return version_in_cache | ||||
|  | ||||
|     def get_version_latest_valid(self, _) -> bool: | ||||
|         """Check if latest version is valid""" | ||||
|         return cache.get(VERSION_CACHE_KEY) != VERSION_NULL | ||||
|  | ||||
|     def get_outdated(self, instance) -> bool: | ||||
|         """Check if we're running the latest version""" | ||||
|         return parse(self.get_version_current(instance)) < parse(self.get_version_latest(instance)) | ||||
|  | ||||
|     def get_outpost_outdated(self, _) -> bool: | ||||
|         """Check if any outpost is outdated/has a version mismatch""" | ||||
|         any_outdated = False | ||||
|         for outpost in Outpost.objects.all(): | ||||
|             for state in outpost.state: | ||||
|                 if state.version_outdated: | ||||
|                     any_outdated = True | ||||
|         return any_outdated | ||||
|  | ||||
|  | ||||
| class VersionView(APIView): | ||||
|     """Get running and latest version.""" | ||||
|  | ||||
| @ -1,33 +0,0 @@ | ||||
| from rest_framework.permissions import IsAdminUser | ||||
| from rest_framework.viewsets import ReadOnlyModelViewSet | ||||
|  | ||||
| from authentik.admin.models import VersionHistory | ||||
| from authentik.core.api.utils import ModelSerializer | ||||
|  | ||||
|  | ||||
| class VersionHistorySerializer(ModelSerializer): | ||||
|     """VersionHistory Serializer""" | ||||
|  | ||||
|     class Meta: | ||||
|         model = VersionHistory | ||||
|         fields = [ | ||||
|             "id", | ||||
|             "timestamp", | ||||
|             "version", | ||||
|             "build", | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class VersionHistoryViewSet(ReadOnlyModelViewSet): | ||||
|     """VersionHistory Viewset""" | ||||
|  | ||||
|     queryset = VersionHistory.objects.all() | ||||
|     serializer_class = VersionHistorySerializer | ||||
|     permission_classes = [IsAdminUser] | ||||
|     filterset_fields = [ | ||||
|         "version", | ||||
|         "build", | ||||
|     ] | ||||
|     search_fields = ["version", "build"] | ||||
|     ordering = ["-timestamp"] | ||||
|     pagination_class = None | ||||
| @ -1,5 +1,4 @@ | ||||
| """authentik administration overview""" | ||||
|  | ||||
| from django.conf import settings | ||||
| from drf_spectacular.utils import extend_schema, inline_serializer | ||||
| from rest_framework.fields import IntegerField | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """authentik admin app config""" | ||||
|  | ||||
| from prometheus_client import Gauge, Info | ||||
|  | ||||
| from authentik.blueprints.apps import ManagedAppConfig | ||||
| @ -15,3 +14,7 @@ class AuthentikAdminConfig(ManagedAppConfig): | ||||
|     label = "authentik_admin" | ||||
|     verbose_name = "authentik Admin" | ||||
|     default = True | ||||
|  | ||||
|     def reconcile_load_admin_signals(self): | ||||
|         """Load admin signals""" | ||||
|         self.import_module("authentik.admin.signals") | ||||
|  | ||||
| @ -1,22 +0,0 @@ | ||||
| """authentik admin models""" | ||||
|  | ||||
| from django.db import models | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
|  | ||||
|  | ||||
| class VersionHistory(models.Model): | ||||
|     id = models.BigAutoField(primary_key=True) | ||||
|     timestamp = models.DateTimeField() | ||||
|     version = models.TextField() | ||||
|     build = models.TextField() | ||||
|  | ||||
|     class Meta: | ||||
|         managed = False | ||||
|         db_table = "authentik_version_history" | ||||
|         ordering = ("-timestamp",) | ||||
|         verbose_name = _("Version history") | ||||
|         verbose_name_plural = _("Version history") | ||||
|         default_permissions = [] | ||||
|  | ||||
|     def __str__(self): | ||||
|         return f"{self.version}.{self.build} ({self.timestamp})" | ||||
| @ -1,5 +1,4 @@ | ||||
| """authentik admin settings""" | ||||
|  | ||||
| from celery.schedules import crontab | ||||
|  | ||||
| from authentik.lib.utils.time import fqdn_rand | ||||
|  | ||||
| @ -1,7 +1,7 @@ | ||||
| """admin signals""" | ||||
|  | ||||
| from django.dispatch import receiver | ||||
|  | ||||
| from authentik.admin.api.tasks import TaskInfo | ||||
| from authentik.admin.apps import GAUGE_WORKERS | ||||
| from authentik.root.celery import CELERY_APP | ||||
| from authentik.root.monitoring import monitoring_set | ||||
| @ -12,3 +12,10 @@ def monitoring_set_workers(sender, **kwargs): | ||||
|     """Set worker gauge""" | ||||
|     count = len(CELERY_APP.control.ping(timeout=0.5)) | ||||
|     GAUGE_WORKERS.set(count) | ||||
|  | ||||
|  | ||||
| @receiver(monitoring_set) | ||||
| def monitoring_set_tasks(sender, **kwargs): | ||||
|     """Set task gauges""" | ||||
|     for task in TaskInfo.all().values(): | ||||
|         task.update_metrics() | ||||
|  | ||||
| @ -1,8 +1,9 @@ | ||||
| """authentik admin tasks""" | ||||
| import re | ||||
|  | ||||
| from django.core.cache import cache | ||||
| from django.core.validators import URLValidator | ||||
| from django.db import DatabaseError, InternalError, ProgrammingError | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from packaging.version import parse | ||||
| from requests import RequestException | ||||
| from structlog.stdlib import get_logger | ||||
| @ -10,15 +11,21 @@ from structlog.stdlib import get_logger | ||||
| from authentik import __version__, get_build_hash | ||||
| from authentik.admin.apps import PROM_INFO | ||||
| from authentik.events.models import Event, EventAction, Notification | ||||
| from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task | ||||
| from authentik.events.monitored_tasks import ( | ||||
|     MonitoredTask, | ||||
|     TaskResult, | ||||
|     TaskResultStatus, | ||||
|     prefill_task, | ||||
| ) | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.lib.utils.http import get_http_session | ||||
| from authentik.root.celery import CELERY_APP | ||||
|  | ||||
| LOGGER = get_logger() | ||||
| VERSION_NULL = "0.0.0" | ||||
| VERSION_CACHE_KEY = "authentik_latest_version" | ||||
| VERSION_CACHE_TIMEOUT = 8 * 60 * 60  # 8 hours | ||||
| # Chop off the first ^ because we want to search the entire string | ||||
| URL_FINDER = URLValidator.regex.pattern[1:] | ||||
| LOCAL_VERSION = parse(__version__) | ||||
|  | ||||
|  | ||||
| @ -47,13 +54,13 @@ def clear_update_notifications(): | ||||
|             notification.delete() | ||||
|  | ||||
|  | ||||
| @CELERY_APP.task(bind=True, base=SystemTask) | ||||
| @CELERY_APP.task(bind=True, base=MonitoredTask) | ||||
| @prefill_task | ||||
| def update_latest_version(self: SystemTask): | ||||
| def update_latest_version(self: MonitoredTask): | ||||
|     """Update latest version info""" | ||||
|     if CONFIG.get_bool("disable_update_check"): | ||||
|         cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT) | ||||
|         self.set_status(TaskStatus.WARNING, "Version check disabled.") | ||||
|         cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT) | ||||
|         self.set_status(TaskResult(TaskResultStatus.WARNING, messages=["Version check disabled."])) | ||||
|         return | ||||
|     try: | ||||
|         response = get_http_session().get( | ||||
| @ -63,7 +70,9 @@ def update_latest_version(self: SystemTask): | ||||
|         data = response.json() | ||||
|         upstream_version = data.get("stable", {}).get("version") | ||||
|         cache.set(VERSION_CACHE_KEY, upstream_version, VERSION_CACHE_TIMEOUT) | ||||
|         self.set_status(TaskStatus.SUCCESSFUL, "Successfully updated latest Version") | ||||
|         self.set_status( | ||||
|             TaskResult(TaskResultStatus.SUCCESSFUL, ["Successfully updated latest Version"]) | ||||
|         ) | ||||
|         _set_prom_info() | ||||
|         # Check if upstream version is newer than what we're running, | ||||
|         # and if no event exists yet, create one. | ||||
| @ -74,19 +83,13 @@ def update_latest_version(self: SystemTask): | ||||
|                 context__new_version=upstream_version, | ||||
|             ).exists(): | ||||
|                 return | ||||
|             Event.new( | ||||
|                 EventAction.UPDATE_AVAILABLE, | ||||
|                 message=_( | ||||
|                     "New version {version} available!".format( | ||||
|                         version=upstream_version, | ||||
|                     ) | ||||
|                 ), | ||||
|                 new_version=upstream_version, | ||||
|                 changelog=data.get("stable", {}).get("changelog_url"), | ||||
|             ).save() | ||||
|             event_dict = {"new_version": upstream_version} | ||||
|             if match := re.search(URL_FINDER, data.get("stable", {}).get("changelog", "")): | ||||
|                 event_dict["message"] = f"Changelog: {match.group()}" | ||||
|             Event.new(EventAction.UPDATE_AVAILABLE, **event_dict).save() | ||||
|     except (RequestException, IndexError) as exc: | ||||
|         cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT) | ||||
|         self.set_error(exc) | ||||
|         cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT) | ||||
|         self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc)) | ||||
|  | ||||
|  | ||||
| _set_prom_info() | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """test admin api""" | ||||
|  | ||||
| from json import loads | ||||
|  | ||||
| from django.test import TestCase | ||||
| @ -8,6 +7,8 @@ from django.urls import reverse | ||||
| from authentik import __version__ | ||||
| from authentik.blueprints.tests import reconcile_app | ||||
| from authentik.core.models import Group, User | ||||
| from authentik.core.tasks import clean_expired_models | ||||
| from authentik.events.monitored_tasks import TaskResultStatus | ||||
| from authentik.lib.generators import generate_id | ||||
|  | ||||
|  | ||||
| @ -22,6 +23,53 @@ class TestAdminAPI(TestCase): | ||||
|         self.group.save() | ||||
|         self.client.force_login(self.user) | ||||
|  | ||||
|     def test_tasks(self): | ||||
|         """Test Task API""" | ||||
|         clean_expired_models.delay() | ||||
|         response = self.client.get(reverse("authentik_api:admin_system_tasks-list")) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         body = loads(response.content) | ||||
|         self.assertTrue(any(task["task_name"] == "clean_expired_models" for task in body)) | ||||
|  | ||||
|     def test_tasks_single(self): | ||||
|         """Test Task API (read single)""" | ||||
|         clean_expired_models.delay() | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
|                 "authentik_api:admin_system_tasks-detail", | ||||
|                 kwargs={"pk": "clean_expired_models"}, | ||||
|             ) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         body = loads(response.content) | ||||
|         self.assertEqual(body["status"], TaskResultStatus.SUCCESSFUL.name) | ||||
|         self.assertEqual(body["task_name"], "clean_expired_models") | ||||
|         response = self.client.get( | ||||
|             reverse("authentik_api:admin_system_tasks-detail", kwargs={"pk": "qwerqwer"}) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 404) | ||||
|  | ||||
|     def test_tasks_retry(self): | ||||
|         """Test Task API (retry)""" | ||||
|         clean_expired_models.delay() | ||||
|         response = self.client.post( | ||||
|             reverse( | ||||
|                 "authentik_api:admin_system_tasks-retry", | ||||
|                 kwargs={"pk": "clean_expired_models"}, | ||||
|             ) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 204) | ||||
|  | ||||
|     def test_tasks_retry_404(self): | ||||
|         """Test Task API (retry, 404)""" | ||||
|         response = self.client.post( | ||||
|             reverse( | ||||
|                 "authentik_api:admin_system_tasks-retry", | ||||
|                 kwargs={"pk": "qwerqewrqrqewrqewr"}, | ||||
|             ) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 404) | ||||
|  | ||||
|     def test_version(self): | ||||
|         """Test Version API""" | ||||
|         response = self.client.get(reverse("authentik_api:admin_version")) | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """test admin tasks""" | ||||
|  | ||||
| from django.core.cache import cache | ||||
| from django.test import TestCase | ||||
| from requests_mock import Mocker | ||||
| @ -17,7 +16,6 @@ RESPONSE_VALID = { | ||||
|     "stable": { | ||||
|         "version": "99999999.9999999", | ||||
|         "changelog": "See https://goauthentik.io/test", | ||||
|         "changelog_url": "https://goauthentik.io/test", | ||||
|         "reason": "bugfix", | ||||
|     }, | ||||
| } | ||||
| @ -36,7 +34,7 @@ class TestAdminTasks(TestCase): | ||||
|                 Event.objects.filter( | ||||
|                     action=EventAction.UPDATE_AVAILABLE, | ||||
|                     context__new_version="99999999.9999999", | ||||
|                     context__message="New version 99999999.9999999 available!", | ||||
|                     context__message="Changelog: https://goauthentik.io/test", | ||||
|                 ).exists() | ||||
|             ) | ||||
|             # test that a consecutive check doesn't create a duplicate event | ||||
| @ -46,7 +44,7 @@ class TestAdminTasks(TestCase): | ||||
|                     Event.objects.filter( | ||||
|                         action=EventAction.UPDATE_AVAILABLE, | ||||
|                         context__new_version="99999999.9999999", | ||||
|                         context__message="New version 99999999.9999999 available!", | ||||
|                         context__message="Changelog: https://goauthentik.io/test", | ||||
|                     ) | ||||
|                 ), | ||||
|                 1, | ||||
|  | ||||
| @ -1,15 +1,15 @@ | ||||
| """API URLs""" | ||||
|  | ||||
| from django.urls import path | ||||
|  | ||||
| from authentik.admin.api.meta import AppsViewSet, ModelViewSet | ||||
| from authentik.admin.api.metrics import AdministrationMetricsViewSet | ||||
| from authentik.admin.api.system import SystemView | ||||
| from authentik.admin.api.tasks import TaskViewSet | ||||
| from authentik.admin.api.version import VersionView | ||||
| from authentik.admin.api.version_history import VersionHistoryViewSet | ||||
| from authentik.admin.api.workers import WorkerView | ||||
|  | ||||
| api_urlpatterns = [ | ||||
|     ("admin/system_tasks", TaskViewSet, "admin_system_tasks"), | ||||
|     ("admin/apps", AppsViewSet, "apps"), | ||||
|     ("admin/models", ModelViewSet, "models"), | ||||
|     path( | ||||
| @ -18,7 +18,6 @@ api_urlpatterns = [ | ||||
|         name="admin_metrics", | ||||
|     ), | ||||
|     path("admin/version/", VersionView.as_view(), name="admin_version"), | ||||
|     ("admin/version/history", VersionHistoryViewSet, "version_history"), | ||||
|     path("admin/workers/", WorkerView.as_view(), name="admin_workers"), | ||||
|     path("admin/system/", SystemView.as_view(), name="admin_system"), | ||||
| ] | ||||
|  | ||||
| @ -10,3 +10,26 @@ class AuthentikAPIConfig(AppConfig): | ||||
|     label = "authentik_api" | ||||
|     mountpoint = "api/" | ||||
|     verbose_name = "authentik API" | ||||
|  | ||||
|     def ready(self) -> None: | ||||
|         from drf_spectacular.extensions import OpenApiAuthenticationExtension | ||||
|  | ||||
|         from authentik.api.authentication import TokenAuthentication | ||||
|  | ||||
|         # Class is defined here as it needs to be created early enough that drf-spectacular will | ||||
|         # find it, but also won't cause any import issues | ||||
|         # pylint: disable=unused-variable | ||||
|         class TokenSchema(OpenApiAuthenticationExtension): | ||||
|             """Auth schema""" | ||||
|  | ||||
|             target_class = TokenAuthentication | ||||
|             name = "authentik" | ||||
|  | ||||
|             def get_security_definition(self, auto_schema): | ||||
|                 """Auth schema""" | ||||
|                 return { | ||||
|                     "type": "apiKey", | ||||
|                     "in": "header", | ||||
|                     "name": "Authorization", | ||||
|                     "scheme": "bearer", | ||||
|                 } | ||||
|  | ||||
| @ -1,10 +1,8 @@ | ||||
| """API Authentication""" | ||||
|  | ||||
| from hmac import compare_digest | ||||
| from typing import Any | ||||
| from typing import Any, Optional | ||||
|  | ||||
| from django.conf import settings | ||||
| from drf_spectacular.extensions import OpenApiAuthenticationExtension | ||||
| from rest_framework.authentication import BaseAuthentication, get_authorization_header | ||||
| from rest_framework.exceptions import AuthenticationFailed | ||||
| from rest_framework.request import Request | ||||
| @ -18,7 +16,7 @@ from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| def validate_auth(header: bytes) -> str | None: | ||||
| def validate_auth(header: bytes) -> Optional[str]: | ||||
|     """Validate that the header is in a correct format, | ||||
|     returns type and credentials""" | ||||
|     auth_credentials = header.decode().strip() | ||||
| @ -33,7 +31,7 @@ def validate_auth(header: bytes) -> str | None: | ||||
|     return auth_credentials | ||||
|  | ||||
|  | ||||
| def bearer_auth(raw_header: bytes) -> User | None: | ||||
| def bearer_auth(raw_header: bytes) -> Optional[User]: | ||||
|     """raw_header in the Format of `Bearer ....`""" | ||||
|     user = auth_user_lookup(raw_header) | ||||
|     if not user: | ||||
| @ -43,7 +41,7 @@ def bearer_auth(raw_header: bytes) -> User | None: | ||||
|     return user | ||||
|  | ||||
|  | ||||
| def auth_user_lookup(raw_header: bytes) -> User | None: | ||||
| def auth_user_lookup(raw_header: bytes) -> Optional[User]: | ||||
|     """raw_header in the Format of `Bearer ....`""" | ||||
|     from authentik.providers.oauth2.models import AccessToken | ||||
|  | ||||
| @ -76,7 +74,7 @@ def auth_user_lookup(raw_header: bytes) -> User | None: | ||||
|     raise AuthenticationFailed("Token invalid/expired") | ||||
|  | ||||
|  | ||||
| def token_secret_key(value: str) -> User | None: | ||||
| def token_secret_key(value: str) -> Optional[User]: | ||||
|     """Check if the token is the secret key | ||||
|     and return the service account for the managed outpost""" | ||||
|     from authentik.outposts.apps import MANAGED_OUTPOST | ||||
| @ -103,14 +101,3 @@ class TokenAuthentication(BaseAuthentication): | ||||
|             return None | ||||
|  | ||||
|         return (user, None)  # pragma: no cover | ||||
|  | ||||
|  | ||||
| class TokenSchema(OpenApiAuthenticationExtension): | ||||
|     """Auth schema""" | ||||
|  | ||||
|     target_class = TokenAuthentication | ||||
|     name = "authentik" | ||||
|  | ||||
|     def get_security_definition(self, auto_schema): | ||||
|         """Auth schema""" | ||||
|         return {"type": "http", "scheme": "bearer"} | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """API Authorization""" | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.db.models import Model | ||||
| from django.db.models.query import QuerySet | ||||
|  | ||||
| @ -1,7 +1,6 @@ | ||||
| """API Decorators""" | ||||
|  | ||||
| from collections.abc import Callable | ||||
| from functools import wraps | ||||
| from typing import Callable, Optional | ||||
|  | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| @ -11,26 +10,21 @@ from structlog.stdlib import get_logger | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| def permission_required(obj_perm: str | None = None, global_perms: list[str] | None = None): | ||||
| def permission_required(obj_perm: Optional[str] = None, global_perms: Optional[list[str]] = None): | ||||
|     """Check permissions for a single custom action""" | ||||
|  | ||||
|     def _check_obj_perm(self: ModelViewSet, request: Request): | ||||
|         # Check obj_perm both globally and on the specific object | ||||
|         # Having the global permission has higher priority | ||||
|         if request.user.has_perm(obj_perm): | ||||
|             return | ||||
|         obj = self.get_object() | ||||
|         if not request.user.has_perm(obj_perm, obj): | ||||
|             LOGGER.debug("denying access for object", user=request.user, perm=obj_perm, obj=obj) | ||||
|             self.permission_denied(request) | ||||
|  | ||||
|     def wrapper_outer(func: Callable): | ||||
|     def wrapper_outter(func: Callable): | ||||
|         """Check permissions for a single custom action""" | ||||
|  | ||||
|         @wraps(func) | ||||
|         def wrapper(self: ModelViewSet, request: Request, *args, **kwargs) -> Response: | ||||
|             if obj_perm: | ||||
|                 _check_obj_perm(self, request) | ||||
|                 obj = self.get_object() | ||||
|                 if not request.user.has_perm(obj_perm, obj): | ||||
|                     LOGGER.debug( | ||||
|                         "denying access for object", user=request.user, perm=obj_perm, obj=obj | ||||
|                     ) | ||||
|                     return self.permission_denied(request) | ||||
|             if global_perms: | ||||
|                 for other_perm in global_perms: | ||||
|                     if not request.user.has_perm(other_perm): | ||||
| @ -40,4 +34,4 @@ def permission_required(obj_perm: str | None = None, global_perms: list[str] | N | ||||
|  | ||||
|         return wrapper | ||||
|  | ||||
|     return wrapper_outer | ||||
|     return wrapper_outter | ||||
| @ -1,5 +1,4 @@ | ||||
| """Pagination which includes total pages and current page""" | ||||
|  | ||||
| from rest_framework import pagination | ||||
| from rest_framework.response import Response | ||||
|  | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """Error Response schema, from https://github.com/axnsan12/drf-yasg/issues/224""" | ||||
|  | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from drf_spectacular.generators import SchemaGenerator | ||||
| from drf_spectacular.plumbing import ( | ||||
| @ -12,7 +11,6 @@ from drf_spectacular.settings import spectacular_settings | ||||
| from drf_spectacular.types import OpenApiTypes | ||||
| from rest_framework.settings import api_settings | ||||
|  | ||||
| from authentik.api.apps import AuthentikAPIConfig | ||||
| from authentik.api.pagination import PAGINATION_COMPONENT_NAME, PAGINATION_SCHEMA | ||||
|  | ||||
|  | ||||
| @ -102,12 +100,3 @@ def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs): | ||||
|             comp = result["components"]["schemas"][component] | ||||
|             comp["additionalProperties"] = {} | ||||
|     return result | ||||
|  | ||||
|  | ||||
| def preprocess_schema_exclude_non_api(endpoints, **kwargs): | ||||
|     """Filter out all API Views which are not mounted under /api""" | ||||
|     return [ | ||||
|         (path, path_regex, method, callback) | ||||
|         for path, path_regex, method, callback in endpoints | ||||
|         if path.startswith("/" + AuthentikAPIConfig.mountpoint) | ||||
|     ] | ||||
|  | ||||
| @ -1,13 +1,13 @@ | ||||
| {% extends "base/skeleton.html" %} | ||||
|  | ||||
| {% load authentik_core %} | ||||
| {% load static %} | ||||
|  | ||||
| {% block title %} | ||||
| API Browser - {{ brand.branding_title }} | ||||
| API Browser - {{ tenant.branding_title }} | ||||
| {% endblock %} | ||||
|  | ||||
| {% block head %} | ||||
| <script src="{% versioned_script 'dist/standalone/api-browser/index-%v.js' %}" type="module"></script> | ||||
| <script src="{% static 'dist/standalone/api-browser/index.js' %}?version={{ version }}" type="module"></script> | ||||
| <meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)"> | ||||
| <meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)"> | ||||
| {% endblock %} | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """Test API Authentication""" | ||||
|  | ||||
| import json | ||||
| from base64 import b64encode | ||||
|  | ||||
| @ -25,17 +24,17 @@ class TestAPIAuth(TestCase): | ||||
|     def test_invalid_type(self): | ||||
|         """Test invalid type""" | ||||
|         with self.assertRaises(AuthenticationFailed): | ||||
|             bearer_auth(b"foo bar") | ||||
|             bearer_auth("foo bar".encode()) | ||||
|  | ||||
|     def test_invalid_empty(self): | ||||
|         """Test invalid type""" | ||||
|         self.assertIsNone(bearer_auth(b"Bearer ")) | ||||
|         self.assertIsNone(bearer_auth(b"")) | ||||
|         self.assertIsNone(bearer_auth("Bearer ".encode())) | ||||
|         self.assertIsNone(bearer_auth("".encode())) | ||||
|  | ||||
|     def test_invalid_no_token(self): | ||||
|         """Test invalid with no token""" | ||||
|         with self.assertRaises(AuthenticationFailed): | ||||
|             auth = b64encode(b":abc").decode() | ||||
|             auth = b64encode(":abc".encode()).decode() | ||||
|             self.assertIsNone(bearer_auth(f"Basic :{auth}".encode())) | ||||
|  | ||||
|     def test_bearer_valid(self): | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """Test config API""" | ||||
|  | ||||
| from json import loads | ||||
|  | ||||
| from django.urls import reverse | ||||
|  | ||||
							
								
								
									
authentik/api/tests/test_decorators.py (new file, 34 lines)
| @ -0,0 +1,34 @@ | ||||
| """test decorators api""" | ||||
| from django.urls import reverse | ||||
| from guardian.shortcuts import assign_perm | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import Application, User | ||||
| from authentik.lib.generators import generate_id | ||||
|  | ||||
|  | ||||
| class TestAPIDecorators(APITestCase): | ||||
|     """test decorators api""" | ||||
|  | ||||
|     def setUp(self) -> None: | ||||
|         super().setUp() | ||||
|         self.user = User.objects.create(username="test-user") | ||||
|  | ||||
|     def test_obj_perm_denied(self): | ||||
|         """Test object perm denied""" | ||||
|         self.client.force_login(self.user) | ||||
|         app = Application.objects.create(name=generate_id(), slug=generate_id()) | ||||
|         response = self.client.get( | ||||
|             reverse("authentik_api:application-metrics", kwargs={"slug": app.slug}) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 403) | ||||
|  | ||||
|     def test_other_perm_denied(self): | ||||
|         """Test other perm denied""" | ||||
|         self.client.force_login(self.user) | ||||
|         app = Application.objects.create(name=generate_id(), slug=generate_id()) | ||||
|         assign_perm("authentik_core.view_application", self.user, app) | ||||
|         response = self.client.get( | ||||
|             reverse("authentik_api:application-metrics", kwargs={"slug": app.slug}) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 403) | ||||
| @ -1,5 +1,4 @@ | ||||
| """Schema generation tests""" | ||||
|  | ||||
| from django.urls import reverse | ||||
| from rest_framework.test import APITestCase | ||||
| from yaml import safe_load | ||||
|  | ||||
| @ -1,6 +1,5 @@ | ||||
| """authentik API Modelviewset tests""" | ||||
|  | ||||
| from collections.abc import Callable | ||||
| from typing import Callable | ||||
|  | ||||
| from django.test import TestCase | ||||
| from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet | ||||
| @ -26,6 +25,6 @@ def viewset_tester_factory(test_viewset: type[ModelViewSet]) -> Callable: | ||||
|  | ||||
|  | ||||
| for _, viewset, _ in router.registry: | ||||
|     if not issubclass(viewset, ModelViewSet | ReadOnlyModelViewSet): | ||||
|     if not issubclass(viewset, (ModelViewSet, ReadOnlyModelViewSet)): | ||||
|         continue | ||||
|     setattr(TestModelViewSets, f"test_viewset_{viewset.__name__}", viewset_tester_factory(viewset)) | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """authentik api urls""" | ||||
|  | ||||
| from django.urls import include, path | ||||
|  | ||||
| from authentik.api.v3.urls import urlpatterns as v3_urls | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """core Configs API""" | ||||
|  | ||||
| from pathlib import Path | ||||
|  | ||||
| from django.conf import settings | ||||
| @ -68,16 +67,12 @@ class ConfigView(APIView): | ||||
|         """Get all capabilities this server instance supports""" | ||||
|         caps = [] | ||||
|         deb_test = settings.DEBUG or settings.TEST | ||||
|         if ( | ||||
|             CONFIG.get("storage.media.backend", "file") == "s3" | ||||
|             or Path(settings.STORAGES["default"]["OPTIONS"]["location"]).is_mount() | ||||
|             or deb_test | ||||
|         ): | ||||
|         if Path(settings.MEDIA_ROOT).is_mount() or deb_test: | ||||
|             caps.append(Capabilities.CAN_SAVE_MEDIA) | ||||
|         for processor in get_context_processors(): | ||||
|             if cap := processor.capability(): | ||||
|                 caps.append(cap) | ||||
|         if self.request.tenant.impersonation: | ||||
|         if CONFIG.get_bool("impersonation"): | ||||
|             caps.append(Capabilities.CAN_IMPERSONATE) | ||||
|         if settings.DEBUG:  # pragma: no cover | ||||
|             caps.append(Capabilities.CAN_DEBUG) | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """api v3 urls""" | ||||
|  | ||||
| from importlib import import_module | ||||
|  | ||||
| from django.urls import path | ||||
| @ -33,7 +32,7 @@ for _authentik_app in get_apps(): | ||||
|             app_name=_authentik_app.name, | ||||
|         ) | ||||
|         continue | ||||
|     urls: list = api_urls.api_urlpatterns | ||||
|     urls: list = getattr(api_urls, "api_urlpatterns") | ||||
|     for url in urls: | ||||
|         if isinstance(url, URLPattern): | ||||
|             _other_urls.append(url) | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """General API Views""" | ||||
|  | ||||
| from typing import Any | ||||
|  | ||||
| from django.urls import reverse | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """Serializer mixin for managed models""" | ||||
|  | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from drf_spectacular.utils import extend_schema, inline_serializer | ||||
| from rest_framework.decorators import action | ||||
| @ -10,13 +9,13 @@ from rest_framework.response import Response | ||||
| from rest_framework.serializers import ListSerializer, ModelSerializer | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
|  | ||||
| from authentik.api.decorators import permission_required | ||||
| from authentik.blueprints.models import BlueprintInstance | ||||
| from authentik.blueprints.v1.importer import Importer | ||||
| from authentik.blueprints.v1.oci import OCI_PREFIX | ||||
| from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import JSONDictField, PassiveSerializer | ||||
| from authentik.rbac.decorators import permission_required | ||||
|  | ||||
|  | ||||
| class ManagedSerializer: | ||||
| @ -51,12 +50,8 @@ class BlueprintInstanceSerializer(ModelSerializer): | ||||
|         context = self.instance.context if self.instance else {} | ||||
|         valid, logs = Importer.from_string(content, context).validate() | ||||
|         if not valid: | ||||
|             raise ValidationError( | ||||
|                 [ | ||||
|                     _("Failed to validate blueprint"), | ||||
|                     *[f"- {x.event}" for x in logs], | ||||
|                 ] | ||||
|             ) | ||||
|             text_logs = "\n".join([x["event"] for x in logs]) | ||||
|             raise ValidationError(_("Failed to validate blueprint: %(logs)s" % {"logs": text_logs})) | ||||
|         return content | ||||
|  | ||||
|     def validate(self, attrs: dict) -> dict: | ||||
|  | ||||
| @ -1,6 +1,5 @@ | ||||
| """authentik Blueprints app""" | ||||
|  | ||||
| from collections.abc import Callable | ||||
| from importlib import import_module | ||||
| from inspect import ismethod | ||||
|  | ||||
| @ -8,100 +7,40 @@ from django.apps import AppConfig | ||||
| from django.db import DatabaseError, InternalError, ProgrammingError | ||||
| from structlog.stdlib import BoundLogger, get_logger | ||||
|  | ||||
| from authentik.root.signals import startup | ||||
|  | ||||
|  | ||||
| class ManagedAppConfig(AppConfig): | ||||
|     """Basic reconciliation logic for apps""" | ||||
|  | ||||
|     logger: BoundLogger | ||||
|  | ||||
|     RECONCILE_GLOBAL_CATEGORY: str = "global" | ||||
|     RECONCILE_TENANT_CATEGORY: str = "tenant" | ||||
|     _logger: BoundLogger | ||||
|  | ||||
|     def __init__(self, app_name: str, *args, **kwargs) -> None: | ||||
|         super().__init__(app_name, *args, **kwargs) | ||||
|         self.logger = get_logger().bind(app_name=app_name) | ||||
|         self._logger = get_logger().bind(app_name=app_name) | ||||
|  | ||||
|     def ready(self) -> None: | ||||
|         self.import_related() | ||||
|         startup.connect(self._on_startup_callback, dispatch_uid=self.label) | ||||
|         self.reconcile() | ||||
|         return super().ready() | ||||
|  | ||||
|     def _on_startup_callback(self, sender, **_): | ||||
|         self._reconcile_global() | ||||
|         self._reconcile_tenant() | ||||
|  | ||||
|     def import_related(self): | ||||
|         """Automatically import related modules which rely on just being imported | ||||
|         to register themselves (mainly django signals and celery tasks)""" | ||||
|  | ||||
|         def import_relative(rel_module: str): | ||||
|             try: | ||||
|                 module_name = f"{self.name}.{rel_module}" | ||||
|                 import_module(module_name) | ||||
|                 self.logger.info("Imported related module", module=module_name) | ||||
|             except ModuleNotFoundError: | ||||
|                 pass | ||||
|  | ||||
|         import_relative("checks") | ||||
|         import_relative("tasks") | ||||
|         import_relative("signals") | ||||
|  | ||||
|     def import_module(self, path: str): | ||||
|         """Load module""" | ||||
|         import_module(path) | ||||
|  | ||||
|     def _reconcile(self, prefix: str) -> None: | ||||
|     def reconcile(self) -> None: | ||||
|         """reconcile ourselves""" | ||||
|         prefix = "reconcile_" | ||||
|         for meth_name in dir(self): | ||||
|             meth = getattr(self, meth_name) | ||||
|             if not ismethod(meth): | ||||
|                 continue | ||||
|             category = getattr(meth, "_authentik_managed_reconcile", None) | ||||
|             if category != prefix: | ||||
|             if not meth_name.startswith(prefix): | ||||
|                 continue | ||||
|             name = meth_name.replace(prefix, "") | ||||
|             try: | ||||
|                 self.logger.debug("Starting reconciler", name=name) | ||||
|                 self._logger.debug("Starting reconciler", name=name) | ||||
|                 meth() | ||||
|                 self.logger.debug("Successfully reconciled", name=name) | ||||
|                 self._logger.debug("Successfully reconciled", name=name) | ||||
|             except (DatabaseError, ProgrammingError, InternalError) as exc: | ||||
|                 self.logger.warning("Failed to run reconcile", name=name, exc=exc) | ||||
|  | ||||
|     @staticmethod | ||||
|     def reconcile_tenant(func: Callable): | ||||
|         """Mark a function to be called on startup (for each tenant)""" | ||||
|         func._authentik_managed_reconcile = ManagedAppConfig.RECONCILE_TENANT_CATEGORY | ||||
|         return func | ||||
|  | ||||
|     @staticmethod | ||||
|     def reconcile_global(func: Callable): | ||||
|         """Mark a function to be called on startup (globally)""" | ||||
|         func._authentik_managed_reconcile = ManagedAppConfig.RECONCILE_GLOBAL_CATEGORY | ||||
|         return func | ||||
|  | ||||
|     def _reconcile_tenant(self) -> None: | ||||
|         """reconcile ourselves for tenanted methods""" | ||||
|         from authentik.tenants.models import Tenant | ||||
|  | ||||
|         try: | ||||
|             tenants = list(Tenant.objects.filter(ready=True)) | ||||
|         except (DatabaseError, ProgrammingError, InternalError) as exc: | ||||
|             self.logger.debug("Failed to get tenants to run reconcile", exc=exc) | ||||
|             return | ||||
|         for tenant in tenants: | ||||
|             with tenant: | ||||
|                 self._reconcile(self.RECONCILE_TENANT_CATEGORY) | ||||
|  | ||||
|     def _reconcile_global(self) -> None: | ||||
|         """ | ||||
|         reconcile ourselves for global methods. | ||||
|         Used for signals, tasks, etc. Database queries should not be made in here. | ||||
|         """ | ||||
|         from django_tenants.utils import get_public_schema_name, schema_context | ||||
|  | ||||
|         with schema_context(get_public_schema_name()): | ||||
|             self._reconcile(self.RECONCILE_GLOBAL_CATEGORY) | ||||
|                 self._logger.warning("Failed to run reconcile", name=name, exc=exc) | ||||
|  | ||||
|  | ||||
| class AuthentikBlueprintsConfig(ManagedAppConfig): | ||||
| @ -112,13 +51,11 @@ class AuthentikBlueprintsConfig(ManagedAppConfig): | ||||
|     verbose_name = "authentik Blueprints" | ||||
|     default = True | ||||
|  | ||||
|     @ManagedAppConfig.reconcile_global | ||||
|     def load_blueprints_v1_tasks(self): | ||||
|     def reconcile_load_blueprints_v1_tasks(self): | ||||
|         """Load v1 tasks""" | ||||
|         self.import_module("authentik.blueprints.v1.tasks") | ||||
|  | ||||
|     @ManagedAppConfig.reconcile_tenant | ||||
|     def blueprints_discovery(self): | ||||
|     def reconcile_blueprints_discovery(self): | ||||
|         """Run blueprint discovery""" | ||||
|         from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints | ||||
|  | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """Apply blueprint from commandline""" | ||||
|  | ||||
| from sys import exit as sys_exit | ||||
|  | ||||
| from django.core.management.base import BaseCommand, no_translations | ||||
| @ -7,7 +6,6 @@ from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.blueprints.models import BlueprintInstance | ||||
| from authentik.blueprints.v1.importer import Importer | ||||
| from authentik.tenants.models import Tenant | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
| @ -18,18 +16,14 @@ class Command(BaseCommand): | ||||
|     @no_translations | ||||
|     def handle(self, *args, **options): | ||||
|         """Apply all blueprints in order, abort when one fails to import""" | ||||
|         for tenant in Tenant.objects.filter(ready=True): | ||||
|             with tenant: | ||||
|                 for blueprint_path in options.get("blueprints", []): | ||||
|                     content = BlueprintInstance(path=blueprint_path).retrieve() | ||||
|                     importer = Importer.from_string(content) | ||||
|                     valid, logs = importer.validate() | ||||
|                     if not valid: | ||||
|                         self.stderr.write("Blueprint invalid") | ||||
|                         for log in logs: | ||||
|                             self.stderr.write(f"\t{log.logger}: {log.event}: {log.attributes}") | ||||
|                         sys_exit(1) | ||||
|                     importer.apply() | ||||
|         for blueprint_path in options.get("blueprints", []): | ||||
|             content = BlueprintInstance(path=blueprint_path).retrieve() | ||||
|             importer = Importer.from_string(content) | ||||
|             valid, _ = importer.validate() | ||||
|             if not valid: | ||||
|                 self.stderr.write("blueprint invalid") | ||||
|                 sys_exit(1) | ||||
|             importer.apply() | ||||
|  | ||||
|     def add_arguments(self, parser): | ||||
|         parser.add_argument("blueprints", nargs="+", type=str) | ||||
|  | ||||
| @ -1,19 +1,17 @@ | ||||
| """Export blueprint of current authentik install""" | ||||
|  | ||||
| from django.core.management.base import no_translations | ||||
| from django.core.management.base import BaseCommand, no_translations | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.blueprints.v1.exporter import Exporter | ||||
| from authentik.tenants.management import TenantCommand | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| class Command(TenantCommand): | ||||
| class Command(BaseCommand): | ||||
|     """Export blueprint of current authentik install""" | ||||
|  | ||||
|     @no_translations | ||||
|     def handle_per_tenant(self, *args, **options): | ||||
|     def handle(self, *args, **options): | ||||
|         """Export blueprint of current authentik install""" | ||||
|         exporter = Exporter() | ||||
|         self.stdout.write(exporter.export_to_string()) | ||||
|  | ||||
| @ -1,17 +1,14 @@ | ||||
| """Generate JSON Schema for blueprints""" | ||||
|  | ||||
| from json import dumps | ||||
| from typing import Any | ||||
|  | ||||
| from django.core.management.base import BaseCommand, no_translations | ||||
| from django.db.models import Model, fields | ||||
| from drf_jsonschema_serializer.convert import converter, field_to_converter | ||||
| from django.db.models import Model | ||||
| from drf_jsonschema_serializer.convert import field_to_converter | ||||
| from rest_framework.fields import Field, JSONField, UUIDField | ||||
| from rest_framework.relations import PrimaryKeyRelatedField | ||||
| from rest_framework.serializers import Serializer | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik import __version__ | ||||
| from authentik.blueprints.v1.common import BlueprintEntryDesiredState | ||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT, is_model_allowed | ||||
| from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry | ||||
| @ -20,23 +17,6 @@ from authentik.lib.models import SerializerModel | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| @converter | ||||
| class PrimaryKeyRelatedFieldConverter: | ||||
|     """Custom primary key field converter which is aware of non-integer based PKs | ||||
|  | ||||
|     This is not an exhaustive fix for other non-int PKs, however in authentik we either | ||||
|     use UUIDs or ints""" | ||||
|  | ||||
|     field_class = PrimaryKeyRelatedField | ||||
|  | ||||
|     def convert(self, field: PrimaryKeyRelatedField): | ||||
|         model: Model = field.queryset.model | ||||
|         pk_field = model._meta.pk | ||||
|         if isinstance(pk_field, fields.UUIDField): | ||||
|             return {"type": "string", "format": "uuid"} | ||||
|         return {"type": "integer"} | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     """Generate JSON Schema for blueprints""" | ||||
|  | ||||
| @ -48,7 +28,7 @@ class Command(BaseCommand): | ||||
|             "$schema": "http://json-schema.org/draft-07/schema", | ||||
|             "$id": "https://goauthentik.io/blueprints/schema.json", | ||||
|             "type": "object", | ||||
|             "title": f"authentik {__version__} Blueprint schema", | ||||
|             "title": "authentik Blueprint schema", | ||||
|             "required": ["version", "entries"], | ||||
|             "properties": { | ||||
|                 "version": { | ||||
| @ -113,19 +93,16 @@ class Command(BaseCommand): | ||||
|             ) | ||||
|             model_path = f"{model._meta.app_label}.{model._meta.model_name}" | ||||
|             self.schema["properties"]["entries"]["items"]["oneOf"].append( | ||||
|                 self.template_entry(model_path, model, serializer) | ||||
|                 self.template_entry(model_path, serializer) | ||||
|             ) | ||||
|  | ||||
|     def template_entry(self, model_path: str, model: type[Model], serializer: Serializer) -> dict: | ||||
|     def template_entry(self, model_path: str, serializer: Serializer) -> dict: | ||||
|         """Template entry for a single model""" | ||||
|         model_schema = self.to_jsonschema(serializer) | ||||
|         model_schema["required"] = [] | ||||
|         def_name = f"model_{model_path}" | ||||
|         def_path = f"#/$defs/{def_name}" | ||||
|         self.schema["$defs"][def_name] = model_schema | ||||
|         def_name_perm = f"model_{model_path}_permissions" | ||||
|         def_path_perm = f"#/$defs/{def_name_perm}" | ||||
|         self.schema["$defs"][def_name_perm] = self.model_permissions(model) | ||||
|         return { | ||||
|             "type": "object", | ||||
|             "required": ["model", "identifiers"], | ||||
| @ -138,7 +115,6 @@ class Command(BaseCommand): | ||||
|                     "default": "present", | ||||
|                 }, | ||||
|                 "conditions": {"type": "array", "items": {"type": "boolean"}}, | ||||
|                 "permissions": {"$ref": def_path_perm}, | ||||
|                 "attrs": {"$ref": def_path}, | ||||
|                 "identifiers": {"$ref": def_path}, | ||||
|             }, | ||||
| @ -189,20 +165,3 @@ class Command(BaseCommand): | ||||
|         if required: | ||||
|             result["required"] = required | ||||
|         return result | ||||
|  | ||||
|     def model_permissions(self, model: type[Model]) -> dict: | ||||
|         perms = [x[0] for x in model._meta.permissions] | ||||
|         for action in model._meta.default_permissions: | ||||
|             perms.append(f"{action}_{model._meta.model_name}") | ||||
|         return { | ||||
|             "type": "array", | ||||
|             "items": { | ||||
|                 "type": "object", | ||||
|                 "required": ["permission"], | ||||
|                 "properties": { | ||||
|                     "permission": {"type": "string", "enum": perms}, | ||||
|                     "user": {"type": "integer"}, | ||||
|                     "role": {"type": "string"}, | ||||
|                 }, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
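The `model_permissions` helper in this hunk builds a per-model JSON-schema fragment for the `permissions` key of a blueprint entry. For a model that only carries Django's default permissions, the returned structure is roughly the following sketch (the permission names are illustrative, not taken from a specific model):

# Illustrative shape of the fragment model_permissions() returns (names are examples).
permissions_fragment = {
    "type": "array",
    "items": {
        "type": "object",
        "required": ["permission"],
        "properties": {
            "permission": {
                "type": "string",
                "enum": ["add_flow", "change_flow", "delete_flow", "view_flow"],
            },
            "user": {"type": "integer"},
            "role": {"type": "string"},
        },
    },
}
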
| @ -14,7 +14,7 @@ from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_SYSTEM | ||||
| from authentik.lib.config import CONFIG | ||||
|  | ||||
|  | ||||
| def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path): | ||||
| def check_blueprint_v1_file(BlueprintInstance: type, path: Path): | ||||
|     """Check if blueprint should be imported""" | ||||
|     from authentik.blueprints.models import BlueprintInstanceStatus | ||||
|     from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata | ||||
| @ -29,7 +29,7 @@ def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path): | ||||
|         if version != 1: | ||||
|             return | ||||
|         blueprint_file.seek(0) | ||||
|     instance = BlueprintInstance.objects.using(db_alias).filter(path=path).first() | ||||
|     instance: BlueprintInstance = BlueprintInstance.objects.filter(path=path).first() | ||||
|     rel_path = path.relative_to(Path(CONFIG.get("blueprints_dir"))) | ||||
|     meta = None | ||||
|     if metadata: | ||||
| @ -37,7 +37,7 @@ def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path): | ||||
|         if meta.labels.get(LABEL_AUTHENTIK_INSTANTIATE, "").lower() == "false": | ||||
|             return | ||||
|     if not instance: | ||||
|         BlueprintInstance.objects.using(db_alias).create( | ||||
|         instance = BlueprintInstance( | ||||
|             name=meta.name if meta else str(rel_path), | ||||
|             path=str(rel_path), | ||||
|             context={}, | ||||
| @ -47,6 +47,7 @@ def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path): | ||||
|             last_applied_hash="", | ||||
|             metadata=metadata or {}, | ||||
|         ) | ||||
|         instance.save() | ||||
|  | ||||
|  | ||||
| def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||
| @ -55,7 +56,7 @@ def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEdit | ||||
|  | ||||
|     db_alias = schema_editor.connection.alias | ||||
|     for file in glob(f"{CONFIG.get('blueprints_dir')}/**/*.yaml", recursive=True): | ||||
|         check_blueprint_v1_file(BlueprintInstance, db_alias, Path(file)) | ||||
|         check_blueprint_v1_file(BlueprintInstance, Path(file)) | ||||
|  | ||||
|     for blueprint in BlueprintInstance.objects.using(db_alias).all(): | ||||
|         # If we already have flows (and we should always run before flow migrations) | ||||
|  | ||||
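One behaviour of `check_blueprint_v1_file` worth calling out: a blueprint whose metadata sets the instantiate label to "false" is never registered as an instance. A hedged sketch of such a file, written as a Python string; the literal label key below is an assumption about what `LABEL_AUTHENTIK_INSTANTIATE` resolves to:

# Sketch of a blueprint header that check_blueprint_v1_file() would skip.
# The label key is assumed; in code it comes from LABEL_AUTHENTIK_INSTANTIATE.
SKIPPED_BLUEPRINT = """
version: 1
metadata:
  name: example-blueprint
  labels:
    blueprints.goauthentik.io/instantiate: "false"
entries: []
"""
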
| @ -1,5 +1,4 @@ | ||||
| """blueprint models""" | ||||
|  | ||||
| from pathlib import Path | ||||
| from uuid import uuid4 | ||||
|  | ||||
| @ -71,19 +70,6 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel): | ||||
|     enabled = models.BooleanField(default=True) | ||||
|     managed_models = ArrayField(models.TextField(), default=list) | ||||
|  | ||||
|     class Meta: | ||||
|         verbose_name = _("Blueprint Instance") | ||||
|         verbose_name_plural = _("Blueprint Instances") | ||||
|         unique_together = ( | ||||
|             ( | ||||
|                 "name", | ||||
|                 "path", | ||||
|             ), | ||||
|         ) | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         return f"Blueprint Instance {self.name}" | ||||
|  | ||||
|     def retrieve_oci(self) -> str: | ||||
|         """Get blueprint from an OCI registry""" | ||||
|         client = BlueprintOCIClient(self.path.replace(OCI_PREFIX, "https://")) | ||||
| @ -102,7 +88,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel): | ||||
|                 raise BlueprintRetrievalFailed("Invalid blueprint path") | ||||
|             with full_path.open("r", encoding="utf-8") as _file: | ||||
|                 return _file.read() | ||||
|         except OSError as exc: | ||||
|         except (IOError, OSError) as exc: | ||||
|             raise BlueprintRetrievalFailed(exc) from exc | ||||
|  | ||||
|     def retrieve(self) -> str: | ||||
| @ -118,3 +104,16 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel): | ||||
|         from authentik.blueprints.api import BlueprintInstanceSerializer | ||||
|  | ||||
|         return BlueprintInstanceSerializer | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         return f"Blueprint Instance {self.name}" | ||||
|  | ||||
|     class Meta: | ||||
|         verbose_name = _("Blueprint Instance") | ||||
|         verbose_name_plural = _("Blueprint Instances") | ||||
|         unique_together = ( | ||||
|             ( | ||||
|                 "name", | ||||
|                 "path", | ||||
|             ), | ||||
|         ) | ||||
|  | ||||
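Both retrieval paths (`retrieve_oci` and `retrieve_file`) surface failures as `BlueprintRetrievalFailed`, so callers only need one except clause; a minimal sketch, assuming an initialized authentik environment and an example path relative to the blueprints directory:

# Minimal sketch: retrieving blueprint content and handling retrieval errors.
from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed

instance = BlueprintInstance(path="default/default-brand.yaml")
try:
    content = instance.retrieve()
except BlueprintRetrievalFailed as exc:
    print(f"could not fetch blueprint: {exc}")
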
| @ -1,5 +1,4 @@ | ||||
| """blueprint Settings""" | ||||
|  | ||||
| from celery.schedules import crontab | ||||
|  | ||||
| from authentik.lib.utils.time import fqdn_rand | ||||
|  | ||||
| @ -1,7 +1,6 @@ | ||||
| """Blueprint helpers""" | ||||
|  | ||||
| from collections.abc import Callable | ||||
| from functools import wraps | ||||
| from typing import Callable | ||||
|  | ||||
| from django.apps import apps | ||||
|  | ||||
| @ -39,7 +38,7 @@ def reconcile_app(app_name: str): | ||||
|         def wrapper(*args, **kwargs): | ||||
|             config = apps.get_app_config(app_name) | ||||
|             if isinstance(config, ManagedAppConfig): | ||||
|                 config._on_startup_callback(None) | ||||
|                 config.reconcile() | ||||
|             return func(*args, **kwargs) | ||||
|  | ||||
|         return wrapper | ||||
|  | ||||
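`reconcile_app` in this hunk is a decorator factory, typically used in tests to force an app's reconciliation before the wrapped callable runs; a minimal usage sketch (the import path and app label below are assumptions for illustration):

# Minimal sketch of using the reconcile_app decorator from this hunk.
from authentik.blueprints.tests import reconcile_app  # import path assumed

@reconcile_app("authentik_blueprints")  # example app label
def run_with_reconciled_app():
    # by the time this body runs, the app config's reconciliation has been triggered
    ...
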
| @ -1,24 +0,0 @@ | ||||
| version: 1 | ||||
| entries: | ||||
|   - model: authentik_core.user | ||||
|     id: user | ||||
|     identifiers: | ||||
|       username: "%(id)s" | ||||
|     attrs: | ||||
|       name: "%(id)s" | ||||
|   - model: authentik_rbac.role | ||||
|     id: role | ||||
|     identifiers: | ||||
|       name: "%(id)s" | ||||
|   - model: authentik_flows.flow | ||||
|     identifiers: | ||||
|       slug: "%(id)s" | ||||
|     attrs: | ||||
|       designation: authentication | ||||
|       name: foo | ||||
|       title: foo | ||||
|     permissions: | ||||
|       - permission: view_flow | ||||
|         user: !KeyOf user | ||||
|       - permission: view_flow | ||||
|         role: !KeyOf role | ||||
| @ -1,8 +0,0 @@ | ||||
| version: 1 | ||||
| entries: | ||||
|   - model: authentik_rbac.role | ||||
|     identifiers: | ||||
|       name: "%(id)s" | ||||
|     attrs: | ||||
|       permissions: | ||||
|         - authentik_blueprints.view_blueprintinstance | ||||
| @ -1,9 +0,0 @@ | ||||
| version: 1 | ||||
| entries: | ||||
|   - model: authentik_core.user | ||||
|     identifiers: | ||||
|       username: "%(id)s" | ||||
|     attrs: | ||||
|       name: "%(id)s" | ||||
|       permissions: | ||||
|         - authentik_blueprints.view_blueprintinstance | ||||
| @ -146,11 +146,6 @@ entries: | ||||
|                   ] | ||||
|               ] | ||||
|               nested_context: !Context context2 | ||||
|               at_index_sequence: !AtIndex [!Context sequence, 0] | ||||
|               at_index_sequence_default: !AtIndex [!Context sequence, 100, "non existent"] | ||||
|               at_index_mapping: !AtIndex [!Context mapping, "key2"] | ||||
|               at_index_mapping_default: !AtIndex [!Context mapping, "invalid", "non existent"] | ||||
|               find_object: !AtIndex [!FindObject [authentik_providers_oauth2.scopemapping, [scope_name, openid]], managed] | ||||
|       identifiers: | ||||
|           name: test | ||||
|       conditions: | ||||
|  | ||||
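The removed `!AtIndex` entries above correspond to the `AtIndex` tag that is also dropped further down in this diff; its resolution is roughly equivalent to the following plain-Python sketch (simplified, with the tag's EntryInvalidError reporting collapsed into ordinary exceptions):

# Rough Python equivalent of what the !AtIndex tag resolves to (simplified sketch).
_UNSET = object()

def at_index(obj, attribute, default=_UNSET):
    """Look up attribute in a sequence or mapping, with an optional default."""
    if isinstance(obj, (list, tuple)):
        try:
            return obj[attribute]
        except TypeError:
            raise ValueError(f"Invalid index for list: {attribute}")
        except IndexError:
            if default is _UNSET:
                raise
            return default
    if attribute in obj:
        return obj[attribute]
    if default is _UNSET:
        raise KeyError(f"Key does not exist: {attribute}")
    return default

# Mirrors the fixture values above and the attributes the tests expect:
at_index(["foo", "bar"], 0)                    # "foo"
at_index(["foo", "bar"], 100, "non existent")  # "non existent"
at_index({"key1": 1, "key2": 2}, "key2")       # 2
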
| @ -1,5 +1,4 @@ | ||||
| """authentik managed models tests""" | ||||
|  | ||||
| from django.test import TestCase | ||||
|  | ||||
| from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """Test blueprints OCI""" | ||||
|  | ||||
| from django.test import TransactionTestCase | ||||
| from requests_mock import Mocker | ||||
|  | ||||
|  | ||||
| @ -1,23 +1,22 @@ | ||||
| """test packaged blueprints""" | ||||
|  | ||||
| from collections.abc import Callable | ||||
| from pathlib import Path | ||||
| from typing import Callable | ||||
|  | ||||
| from django.test import TransactionTestCase | ||||
|  | ||||
| from authentik.blueprints.models import BlueprintInstance | ||||
| from authentik.blueprints.tests import apply_blueprint | ||||
| from authentik.blueprints.v1.importer import Importer | ||||
| from authentik.brands.models import Brand | ||||
| from authentik.tenants.models import Tenant | ||||
|  | ||||
|  | ||||
| class TestPackaged(TransactionTestCase): | ||||
|     """Empty class, test methods are added dynamically""" | ||||
|  | ||||
|     @apply_blueprint("default/default-brand.yaml") | ||||
|     @apply_blueprint("default/default-tenant.yaml") | ||||
|     def test_decorator_static(self): | ||||
|         """Test @apply_blueprint decorator""" | ||||
|         self.assertTrue(Brand.objects.filter(domain="authentik-default").exists()) | ||||
|         self.assertTrue(Tenant.objects.filter(domain="authentik-default").exists()) | ||||
|  | ||||
|  | ||||
| def blueprint_tester(file_name: Path) -> Callable: | ||||
| @ -27,8 +26,7 @@ def blueprint_tester(file_name: Path) -> Callable: | ||||
|         base = Path("blueprints/") | ||||
|         rel_path = Path(file_name).relative_to(base) | ||||
|         importer = Importer.from_string(BlueprintInstance(path=str(rel_path)).retrieve()) | ||||
|         validation, logs = importer.validate() | ||||
|         self.assertTrue(validation, logs) | ||||
|         self.assertTrue(importer.validate()[0]) | ||||
|         self.assertTrue(importer.apply()) | ||||
|  | ||||
|     return tester | ||||
|  | ||||
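`blueprint_tester` only builds the test body; the loop that attaches one generated method per packaged blueprint sits outside this hunk. A plausible wiring, shown purely as a hypothetical sketch (the glob pattern and method naming are assumptions):

# Hypothetical sketch of attaching blueprint_tester() results to TestPackaged.
from pathlib import Path

for blueprint_file in Path("blueprints/").glob("**/*.yaml"):
    method_name = f"test_blueprint_{str(blueprint_file).replace('/', '_').replace('.', '_')}"
    setattr(TestPackaged, method_name, blueprint_tester(blueprint_file))
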
| @ -1,6 +1,5 @@ | ||||
| """authentik managed models tests""" | ||||
|  | ||||
| from collections.abc import Callable | ||||
| from typing import Callable, Type | ||||
|  | ||||
| from django.apps import apps | ||||
| from django.test import TestCase | ||||
| @ -14,7 +13,7 @@ class TestModels(TestCase): | ||||
|     """Test Models""" | ||||
|  | ||||
|  | ||||
| def serializer_tester_factory(test_model: type[SerializerModel]) -> Callable: | ||||
| def serializer_tester_factory(test_model: Type[SerializerModel]) -> Callable: | ||||
|     """Test serializer""" | ||||
|  | ||||
|     def tester(self: TestModels): | ||||
|  | ||||
| @ -1,10 +1,8 @@ | ||||
| """Test blueprints v1""" | ||||
|  | ||||
| from os import environ | ||||
|  | ||||
| from django.test import TransactionTestCase | ||||
|  | ||||
| from authentik.blueprints.tests import apply_blueprint | ||||
| from authentik.blueprints.v1.exporter import FlowExporter | ||||
| from authentik.blueprints.v1.importer import Importer, transaction_rollback | ||||
| from authentik.core.models import Group | ||||
| @ -127,7 +125,6 @@ class TestBlueprintsV1(TransactionTestCase): | ||||
|  | ||||
|         self.assertEqual(Prompt.objects.filter(field_key="username").count(), count_before) | ||||
|  | ||||
|     @apply_blueprint("system/providers-oauth2.yaml") | ||||
|     def test_import_yaml_tags(self): | ||||
|         """Test some yaml tags""" | ||||
|         ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").delete() | ||||
| @ -138,93 +135,87 @@ class TestBlueprintsV1(TransactionTestCase): | ||||
|         self.assertTrue(importer.apply()) | ||||
|         policy = ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").first() | ||||
|         self.assertTrue(policy) | ||||
|         group = Group.objects.filter(name="test").first() | ||||
|         self.assertIsNotNone(group) | ||||
|         self.assertEqual( | ||||
|             group.attributes, | ||||
|             { | ||||
|                 "policy_pk1": str(policy.pk) + "-suffix", | ||||
|                 "policy_pk2": str(policy.pk) + "-suffix", | ||||
|                 "boolAnd": True, | ||||
|                 "boolNand": False, | ||||
|                 "boolOr": True, | ||||
|                 "boolNor": False, | ||||
|                 "boolXor": True, | ||||
|                 "boolXnor": False, | ||||
|                 "boolComplex": True, | ||||
|                 "if_true_complex": { | ||||
|                     "dictionary": { | ||||
|                         "with": {"keys": "and_values"}, | ||||
|                         "and_nested_custom_tags": "foo-bar", | ||||
|                     } | ||||
|                 }, | ||||
|                 "if_false_complex": ["list", "with", "items", "foo-bar"], | ||||
|                 "if_true_simple": True, | ||||
|                 "if_short": True, | ||||
|                 "if_false_simple": 2, | ||||
|                 "enumerate_mapping_to_mapping": { | ||||
|                     "prefix-key1": "other-prefix-value", | ||||
|                     "prefix-key2": "other-prefix-2", | ||||
|                 }, | ||||
|                 "enumerate_mapping_to_sequence": [ | ||||
|                     "prefixed-pair-key1-value", | ||||
|                     "prefixed-pair-key2-2", | ||||
|                 ], | ||||
|                 "enumerate_sequence_to_sequence": [ | ||||
|                     "prefixed-items-0-foo", | ||||
|                     "prefixed-items-1-bar", | ||||
|                 ], | ||||
|                 "enumerate_sequence_to_mapping": {"index: 0": "foo", "index: 1": "bar"}, | ||||
|                 "nested_complex_enumeration": { | ||||
|                     "0": { | ||||
|                         "key1": [ | ||||
|                             ["prefixed-f", "prefixed-o", "prefixed-o"], | ||||
|                             { | ||||
|                                 "outer_value": "foo", | ||||
|                                 "outer_index": 0, | ||||
|                                 "middle_value": "value", | ||||
|                                 "middle_index": "key1", | ||||
|                             }, | ||||
|                         ], | ||||
|                         "key2": [ | ||||
|                             ["prefixed-f", "prefixed-o", "prefixed-o"], | ||||
|                             { | ||||
|                                 "outer_value": "foo", | ||||
|                                 "outer_index": 0, | ||||
|                                 "middle_value": 2, | ||||
|                                 "middle_index": "key2", | ||||
|                             }, | ||||
|                         ], | ||||
|         self.assertTrue( | ||||
|             Group.objects.filter( | ||||
|                 attributes={ | ||||
|                     "policy_pk1": str(policy.pk) + "-suffix", | ||||
|                     "policy_pk2": str(policy.pk) + "-suffix", | ||||
|                     "boolAnd": True, | ||||
|                     "boolNand": False, | ||||
|                     "boolOr": True, | ||||
|                     "boolNor": False, | ||||
|                     "boolXor": True, | ||||
|                     "boolXnor": False, | ||||
|                     "boolComplex": True, | ||||
|                     "if_true_complex": { | ||||
|                         "dictionary": { | ||||
|                             "with": {"keys": "and_values"}, | ||||
|                             "and_nested_custom_tags": "foo-bar", | ||||
|                         } | ||||
|                     }, | ||||
|                     "1": { | ||||
|                         "key1": [ | ||||
|                             ["prefixed-b", "prefixed-a", "prefixed-r"], | ||||
|                             { | ||||
|                                 "outer_value": "bar", | ||||
|                                 "outer_index": 1, | ||||
|                                 "middle_value": "value", | ||||
|                                 "middle_index": "key1", | ||||
|                             }, | ||||
|                         ], | ||||
|                         "key2": [ | ||||
|                             ["prefixed-b", "prefixed-a", "prefixed-r"], | ||||
|                             { | ||||
|                                 "outer_value": "bar", | ||||
|                                 "outer_index": 1, | ||||
|                                 "middle_value": 2, | ||||
|                                 "middle_index": "key2", | ||||
|                             }, | ||||
|                         ], | ||||
|                     "if_false_complex": ["list", "with", "items", "foo-bar"], | ||||
|                     "if_true_simple": True, | ||||
|                     "if_short": True, | ||||
|                     "if_false_simple": 2, | ||||
|                     "enumerate_mapping_to_mapping": { | ||||
|                         "prefix-key1": "other-prefix-value", | ||||
|                         "prefix-key2": "other-prefix-2", | ||||
|                     }, | ||||
|                 }, | ||||
|                 "nested_context": "context-nested-value", | ||||
|                 "env_null": None, | ||||
|                 "at_index_sequence": "foo", | ||||
|                 "at_index_sequence_default": "non existent", | ||||
|                 "at_index_mapping": 2, | ||||
|                 "at_index_mapping_default": "non existent", | ||||
|                 "find_object": "goauthentik.io/providers/oauth2/scope-openid", | ||||
|             }, | ||||
|                     "enumerate_mapping_to_sequence": [ | ||||
|                         "prefixed-pair-key1-value", | ||||
|                         "prefixed-pair-key2-2", | ||||
|                     ], | ||||
|                     "enumerate_sequence_to_sequence": [ | ||||
|                         "prefixed-items-0-foo", | ||||
|                         "prefixed-items-1-bar", | ||||
|                     ], | ||||
|                     "enumerate_sequence_to_mapping": {"index: 0": "foo", "index: 1": "bar"}, | ||||
|                     "nested_complex_enumeration": { | ||||
|                         "0": { | ||||
|                             "key1": [ | ||||
|                                 ["prefixed-f", "prefixed-o", "prefixed-o"], | ||||
|                                 { | ||||
|                                     "outer_value": "foo", | ||||
|                                     "outer_index": 0, | ||||
|                                     "middle_value": "value", | ||||
|                                     "middle_index": "key1", | ||||
|                                 }, | ||||
|                             ], | ||||
|                             "key2": [ | ||||
|                                 ["prefixed-f", "prefixed-o", "prefixed-o"], | ||||
|                                 { | ||||
|                                     "outer_value": "foo", | ||||
|                                     "outer_index": 0, | ||||
|                                     "middle_value": 2, | ||||
|                                     "middle_index": "key2", | ||||
|                                 }, | ||||
|                             ], | ||||
|                         }, | ||||
|                         "1": { | ||||
|                             "key1": [ | ||||
|                                 ["prefixed-b", "prefixed-a", "prefixed-r"], | ||||
|                                 { | ||||
|                                     "outer_value": "bar", | ||||
|                                     "outer_index": 1, | ||||
|                                     "middle_value": "value", | ||||
|                                     "middle_index": "key1", | ||||
|                                 }, | ||||
|                             ], | ||||
|                             "key2": [ | ||||
|                                 ["prefixed-b", "prefixed-a", "prefixed-r"], | ||||
|                                 { | ||||
|                                     "outer_value": "bar", | ||||
|                                     "outer_index": 1, | ||||
|                                     "middle_value": 2, | ||||
|                                     "middle_index": "key2", | ||||
|                                 }, | ||||
|                             ], | ||||
|                         }, | ||||
|                     }, | ||||
|                     "nested_context": "context-nested-value", | ||||
|                     "env_null": None, | ||||
|                 } | ||||
|             ).exists() | ||||
|         ) | ||||
|         self.assertTrue( | ||||
|             OAuthSource.objects.filter( | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """Test blueprints v1 api""" | ||||
|  | ||||
| from json import loads | ||||
| from tempfile import NamedTemporaryFile, mkdtemp | ||||
|  | ||||
| @ -78,5 +77,5 @@ class TestBlueprintsV1API(APITestCase): | ||||
|         self.assertEqual(res.status_code, 400) | ||||
|         self.assertJSONEqual( | ||||
|             res.content.decode(), | ||||
|             {"content": ["Failed to validate blueprint", "- Invalid blueprint version"]}, | ||||
|             {"content": ["Failed to validate blueprint: Invalid blueprint version"]}, | ||||
|         ) | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """Test blueprints v1""" | ||||
|  | ||||
| from django.test import TransactionTestCase | ||||
|  | ||||
| from authentik.blueprints.v1.importer import Importer | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """Test blueprints v1""" | ||||
|  | ||||
| from django.test import TransactionTestCase | ||||
|  | ||||
| from authentik.blueprints.v1.importer import Importer | ||||
|  | ||||
| @ -1,57 +0,0 @@ | ||||
| """Test blueprints v1""" | ||||
|  | ||||
| from django.test import TransactionTestCase | ||||
| from guardian.shortcuts import get_perms | ||||
|  | ||||
| from authentik.blueprints.v1.importer import Importer | ||||
| from authentik.core.models import User | ||||
| from authentik.flows.models import Flow | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.lib.tests.utils import load_fixture | ||||
| from authentik.rbac.models import Role | ||||
|  | ||||
|  | ||||
| class TestBlueprintsV1RBAC(TransactionTestCase): | ||||
|     """Test Blueprints rbac attribute""" | ||||
|  | ||||
|     def test_user_permission(self): | ||||
|         """Test permissions""" | ||||
|         uid = generate_id() | ||||
|         import_yaml = load_fixture("fixtures/rbac_user.yaml", id=uid) | ||||
|  | ||||
|         importer = Importer.from_string(import_yaml) | ||||
|         self.assertTrue(importer.validate()[0]) | ||||
|         self.assertTrue(importer.apply()) | ||||
|         user = User.objects.filter(username=uid).first() | ||||
|         self.assertIsNotNone(user) | ||||
|         self.assertTrue(user.has_perms(["authentik_blueprints.view_blueprintinstance"])) | ||||
|  | ||||
|     def test_role_permission(self): | ||||
|         """Test permissions""" | ||||
|         uid = generate_id() | ||||
|         import_yaml = load_fixture("fixtures/rbac_role.yaml", id=uid) | ||||
|  | ||||
|         importer = Importer.from_string(import_yaml) | ||||
|         self.assertTrue(importer.validate()[0]) | ||||
|         self.assertTrue(importer.apply()) | ||||
|         role = Role.objects.filter(name=uid).first() | ||||
|         self.assertIsNotNone(role) | ||||
|         self.assertEqual( | ||||
|             list(role.group.permissions.all().values_list("codename", flat=True)), | ||||
|             ["view_blueprintinstance"], | ||||
|         ) | ||||
|  | ||||
|     def test_object_permission(self): | ||||
|         """Test permissions""" | ||||
|         uid = generate_id() | ||||
|         import_yaml = load_fixture("fixtures/rbac_object.yaml", id=uid) | ||||
|  | ||||
|         importer = Importer.from_string(import_yaml) | ||||
|         self.assertTrue(importer.validate()[0]) | ||||
|         self.assertTrue(importer.apply()) | ||||
|         flow = Flow.objects.filter(slug=uid).first() | ||||
|         user = User.objects.filter(username=uid).first() | ||||
|         role = Role.objects.filter(name=uid).first() | ||||
|         self.assertIsNotNone(flow) | ||||
|         self.assertEqual(get_perms(user, flow), ["view_flow"]) | ||||
|         self.assertEqual(get_perms(role.group, flow), ["view_flow"]) | ||||
| @ -1,5 +1,4 @@ | ||||
| """Test blueprints v1""" | ||||
|  | ||||
| from django.test import TransactionTestCase | ||||
|  | ||||
| from authentik.blueprints.v1.importer import Importer | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """Test blueprints v1 tasks""" | ||||
|  | ||||
| from hashlib import sha512 | ||||
| from tempfile import NamedTemporaryFile, mkdtemp | ||||
|  | ||||
| @ -54,7 +53,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase): | ||||
|             file.seek(0) | ||||
|             file_hash = sha512(file.read().encode()).hexdigest() | ||||
|             file.flush() | ||||
|             blueprints_discovery() | ||||
|             blueprints_discovery()  # pylint: disable=no-value-for-parameter | ||||
|             instance = BlueprintInstance.objects.filter(name=blueprint_id).first() | ||||
|             self.assertEqual(instance.last_applied_hash, file_hash) | ||||
|             self.assertEqual( | ||||
| @ -82,7 +81,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase): | ||||
|                 ) | ||||
|             ) | ||||
|             file.flush() | ||||
|             blueprints_discovery() | ||||
|             blueprints_discovery()  # pylint: disable=no-value-for-parameter | ||||
|             blueprint = BlueprintInstance.objects.filter(name="foo").first() | ||||
|             self.assertEqual( | ||||
|                 blueprint.last_applied_hash, | ||||
| @ -107,7 +106,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase): | ||||
|                 ) | ||||
|             ) | ||||
|             file.flush() | ||||
|             blueprints_discovery() | ||||
|             blueprints_discovery()  # pylint: disable=no-value-for-parameter | ||||
|             blueprint.refresh_from_db() | ||||
|             self.assertEqual( | ||||
|                 blueprint.last_applied_hash, | ||||
| @ -149,7 +148,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase): | ||||
|                 instance.status, | ||||
|                 BlueprintInstanceStatus.UNKNOWN, | ||||
|             ) | ||||
|             apply_blueprint(instance.pk) | ||||
|             apply_blueprint(instance.pk)  # pylint: disable=no-value-for-parameter | ||||
|             instance.refresh_from_db() | ||||
|             self.assertEqual(instance.last_applied_hash, "") | ||||
|             self.assertEqual( | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """API URLs""" | ||||
|  | ||||
| from authentik.blueprints.api import BlueprintInstanceViewSet | ||||
|  | ||||
| api_urlpatterns = [ | ||||
|  | ||||
| @ -1,14 +1,12 @@ | ||||
| """transfer common classes""" | ||||
|  | ||||
| from collections import OrderedDict | ||||
| from collections.abc import Generator, Iterable, Mapping | ||||
| from copy import copy | ||||
| from dataclasses import asdict, dataclass, field, is_dataclass | ||||
| from enum import Enum | ||||
| from functools import reduce | ||||
| from operator import ixor | ||||
| from os import getenv | ||||
| from typing import Any, Literal, Union | ||||
| from typing import Any, Iterable, Literal, Mapping, Optional, Union | ||||
| from uuid import UUID | ||||
|  | ||||
| from deepmerge import always_merger | ||||
| @ -24,10 +22,6 @@ from authentik.lib.sentry import SentryIgnoredException | ||||
| from authentik.policies.models import PolicyBindingModel | ||||
|  | ||||
|  | ||||
| class UNSET: | ||||
|     """Used to test whether a key has not been set.""" | ||||
|  | ||||
|  | ||||
| def get_attrs(obj: SerializerModel) -> dict[str, Any]: | ||||
|     """Get object's attributes via their serializer, and convert it to a normal dict""" | ||||
|     serializer: Serializer = obj.serializer(obj) | ||||
| @ -50,7 +44,7 @@ def get_attrs(obj: SerializerModel) -> dict[str, Any]: | ||||
| class BlueprintEntryState: | ||||
|     """State of a single instance""" | ||||
|  | ||||
|     instance: Model | None = None | ||||
|     instance: Optional[Model] = None | ||||
|  | ||||
|  | ||||
| class BlueprintEntryDesiredState(Enum): | ||||
| @ -62,15 +56,6 @@ class BlueprintEntryDesiredState(Enum): | ||||
|     MUST_CREATED = "must_created" | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
| class BlueprintEntryPermission: | ||||
|     """Describe object-level permissions""" | ||||
|  | ||||
|     permission: Union[str, "YAMLTag"] | ||||
|     user: Union[int, "YAMLTag", None] = field(default=None) | ||||
|     role: Union[str, "YAMLTag", None] = field(default=None) | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
| class BlueprintEntry: | ||||
|     """Single entry of a blueprint""" | ||||
| @ -81,15 +66,14 @@ class BlueprintEntry: | ||||
|     ) | ||||
|     conditions: list[Any] = field(default_factory=list) | ||||
|     identifiers: dict[str, Any] = field(default_factory=dict) | ||||
|     attrs: dict[str, Any] | None = field(default_factory=dict) | ||||
|     permissions: list[BlueprintEntryPermission] = field(default_factory=list) | ||||
|     attrs: Optional[dict[str, Any]] = field(default_factory=dict) | ||||
|  | ||||
|     id: str | None = None | ||||
|     id: Optional[str] = None | ||||
|  | ||||
|     _state: BlueprintEntryState = field(default_factory=BlueprintEntryState) | ||||
|  | ||||
|     def __post_init__(self, *args, **kwargs) -> None: | ||||
|         self.__tag_contexts: list[YAMLTagContext] = [] | ||||
|         self.__tag_contexts: list["YAMLTagContext"] = [] | ||||
|  | ||||
|     @staticmethod | ||||
|     def from_model(model: SerializerModel, *extra_identifier_names: str) -> "BlueprintEntry": | ||||
| @ -107,10 +91,10 @@ class BlueprintEntry: | ||||
|             attrs=all_attrs, | ||||
|         ) | ||||
|  | ||||
|     def get_tag_context( | ||||
|     def _get_tag_context( | ||||
|         self, | ||||
|         depth: int = 0, | ||||
|         context_tag_type: type["YAMLTagContext"] | tuple["YAMLTagContext", ...] | None = None, | ||||
|         context_tag_type: Optional[type["YAMLTagContext"] | tuple["YAMLTagContext", ...]] = None, | ||||
|     ) -> "YAMLTagContext": | ||||
|         """Get a YAMLTagContext object located at a certain depth in the tag tree""" | ||||
|         if depth < 0: | ||||
| @ -123,8 +107,8 @@ class BlueprintEntry: | ||||
|  | ||||
|         try: | ||||
|             return contexts[-(depth + 1)] | ||||
|         except IndexError as exc: | ||||
|             raise ValueError(f"invalid depth: {depth}. Max depth: {len(contexts) - 1}") from exc | ||||
|         except IndexError: | ||||
|             raise ValueError(f"invalid depth: {depth}. Max depth: {len(contexts) - 1}") | ||||
|  | ||||
|     def tag_resolver(self, value: Any, blueprint: "Blueprint") -> Any: | ||||
|         """Check if we have any special tags that need handling""" | ||||
| @ -164,17 +148,6 @@ class BlueprintEntry: | ||||
|         """Get the blueprint model, with yaml tags resolved if present""" | ||||
|         return str(self.tag_resolver(self.model, blueprint)) | ||||
|  | ||||
|     def get_permissions( | ||||
|         self, blueprint: "Blueprint" | ||||
|     ) -> Generator[BlueprintEntryPermission, None, None]: | ||||
|         """Get permissions of this entry, with all yaml tags resolved""" | ||||
|         for perm in self.permissions: | ||||
|             yield BlueprintEntryPermission( | ||||
|                 permission=self.tag_resolver(perm.permission, blueprint), | ||||
|                 user=self.tag_resolver(perm.user, blueprint), | ||||
|                 role=self.tag_resolver(perm.role, blueprint), | ||||
|             ) | ||||
|  | ||||
|     def check_all_conditions_match(self, blueprint: "Blueprint") -> bool: | ||||
|         """Check all conditions of this entry match (evaluate to True)""" | ||||
|         return all(self.tag_resolver(self.conditions, blueprint)) | ||||
| @ -196,7 +169,7 @@ class Blueprint: | ||||
|     entries: list[BlueprintEntry] = field(default_factory=list) | ||||
|     context: dict = field(default_factory=dict) | ||||
|  | ||||
|     metadata: BlueprintMetadata | None = field(default=None) | ||||
|     metadata: Optional[BlueprintMetadata] = field(default=None) | ||||
|  | ||||
|  | ||||
| class YAMLTag: | ||||
| @ -244,7 +217,7 @@ class Env(YAMLTag): | ||||
|     """Lookup environment variable with optional default""" | ||||
|  | ||||
|     key: str | ||||
|     default: Any | None | ||||
|     default: Optional[Any] | ||||
|  | ||||
|     def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None: | ||||
|         super().__init__() | ||||
| @ -263,7 +236,7 @@ class Context(YAMLTag): | ||||
|     """Lookup key from instance context""" | ||||
|  | ||||
|     key: str | ||||
|     default: Any | None | ||||
|     default: Optional[Any] | ||||
|  | ||||
|     def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None: | ||||
|         super().__init__() | ||||
| @ -307,11 +280,11 @@ class Format(YAMLTag): | ||||
|         try: | ||||
|             return self.format_string % tuple(args) | ||||
|         except TypeError as exc: | ||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc | ||||
|             raise EntryInvalidError.from_entry(exc, entry) | ||||
|  | ||||
|  | ||||
| class Find(YAMLTag): | ||||
|     """Find any object primary key""" | ||||
|     """Find any object""" | ||||
|  | ||||
|     model_name: str | YAMLTag | ||||
|     conditions: list[list] | ||||
| @ -326,16 +299,13 @@ class Find(YAMLTag): | ||||
|                 values.append(loader.construct_object(node_values)) | ||||
|             self.conditions.append(values) | ||||
|  | ||||
|     def _get_instance(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: | ||||
|     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: | ||||
|         if isinstance(self.model_name, YAMLTag): | ||||
|             model_name = self.model_name.resolve(entry, blueprint) | ||||
|         else: | ||||
|             model_name = self.model_name | ||||
|  | ||||
|         try: | ||||
|             model_class = apps.get_model(*model_name.split(".")) | ||||
|         except LookupError as exc: | ||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc | ||||
|         model_class = apps.get_model(*model_name.split(".")) | ||||
|  | ||||
|         query = Q() | ||||
|         for cond in self.conditions: | ||||
| @ -348,29 +318,12 @@ class Find(YAMLTag): | ||||
|             else: | ||||
|                 query_value = cond[1] | ||||
|             query &= Q(**{query_key: query_value}) | ||||
|         return model_class.objects.filter(query).first() | ||||
|  | ||||
|     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: | ||||
|         instance = self._get_instance(entry, blueprint) | ||||
|         instance = model_class.objects.filter(query).first() | ||||
|         if instance: | ||||
|             return instance.pk | ||||
|         return None | ||||
|  | ||||
|  | ||||
| class FindObject(Find): | ||||
|     """Find any object""" | ||||
|  | ||||
|     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: | ||||
|         instance = self._get_instance(entry, blueprint) | ||||
|         if not instance: | ||||
|             return None | ||||
|         if not isinstance(instance, SerializerModel): | ||||
|             raise EntryInvalidError.from_entry( | ||||
|                 f"Model {self.model_name} is not resolvable through FindObject", entry | ||||
|             ) | ||||
|         return instance.serializer(instance=instance).data | ||||
|  | ||||
|  | ||||
| class Condition(YAMLTag): | ||||
|     """Convert all values to a single boolean""" | ||||
|  | ||||
| @ -412,7 +365,7 @@ class Condition(YAMLTag): | ||||
|             comparator = self._COMPARATORS[self.mode.upper()] | ||||
|             return comparator(tuple(bool(x) for x in args)) | ||||
|         except (TypeError, KeyError) as exc: | ||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc | ||||
|             raise EntryInvalidError.from_entry(exc, entry) | ||||
|  | ||||
|  | ||||
| class If(YAMLTag): | ||||
| @ -444,7 +397,7 @@ class If(YAMLTag): | ||||
|                 blueprint, | ||||
|             ) | ||||
|         except TypeError as exc: | ||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc | ||||
|             raise EntryInvalidError.from_entry(exc, entry) | ||||
|  | ||||
|  | ||||
| class Enumerate(YAMLTag, YAMLTagContext): | ||||
| @ -458,7 +411,9 @@ class Enumerate(YAMLTag, YAMLTagContext): | ||||
|         "SEQ": (list, lambda a, b: [*a, b]), | ||||
|         "MAP": ( | ||||
|             dict, | ||||
|             lambda a, b: always_merger.merge(a, {b[0]: b[1]} if isinstance(b, tuple | list) else b), | ||||
|             lambda a, b: always_merger.merge( | ||||
|                 a, {b[0]: b[1]} if isinstance(b, (tuple, list)) else b | ||||
|             ), | ||||
|         ), | ||||
|     } | ||||
|  | ||||
| @ -500,7 +455,7 @@ class Enumerate(YAMLTag, YAMLTagContext): | ||||
|         try: | ||||
|             output_class, add_fn = self._OUTPUT_BODIES[self.output_body.upper()] | ||||
|         except KeyError as exc: | ||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc | ||||
|             raise EntryInvalidError.from_entry(exc, entry) | ||||
|  | ||||
|         result = output_class() | ||||
|  | ||||
| @ -528,13 +483,13 @@ class EnumeratedItem(YAMLTag): | ||||
|  | ||||
|     _SUPPORTED_CONTEXT_TAGS = (Enumerate,) | ||||
|  | ||||
|     def __init__(self, _loader: "BlueprintLoader", node: ScalarNode) -> None: | ||||
|     def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None: | ||||
|         super().__init__() | ||||
|         self.depth = int(node.value) | ||||
|  | ||||
|     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: | ||||
|         try: | ||||
|             context_tag: Enumerate = entry.get_tag_context( | ||||
|             context_tag: Enumerate = entry._get_tag_context( | ||||
|                 depth=self.depth, | ||||
|                 context_tag_type=EnumeratedItem._SUPPORTED_CONTEXT_TAGS, | ||||
|             ) | ||||
| @ -544,11 +499,9 @@ class EnumeratedItem(YAMLTag): | ||||
|                     f"{self.__class__.__name__} tags are only usable " | ||||
|                     f"inside an {Enumerate.__name__} tag", | ||||
|                     entry, | ||||
|                 ) from exc | ||||
|                 ) | ||||
|  | ||||
|             raise EntryInvalidError.from_entry( | ||||
|                 f"{self.__class__.__name__} tag: {exc}", entry | ||||
|             ) from exc | ||||
|             raise EntryInvalidError.from_entry(f"{self.__class__.__name__} tag: {exc}", entry) | ||||
|  | ||||
|         return context_tag.get_context(entry, blueprint) | ||||
|  | ||||
| @ -561,8 +514,8 @@ class Index(EnumeratedItem): | ||||
|  | ||||
|         try: | ||||
|             return context[0] | ||||
|         except IndexError as exc:  # pragma: no cover | ||||
|             raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry) from exc | ||||
|         except IndexError:  # pragma: no cover | ||||
|             raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry) | ||||
|  | ||||
|  | ||||
| class Value(EnumeratedItem): | ||||
| @ -573,55 +526,8 @@ class Value(EnumeratedItem): | ||||
|  | ||||
|         try: | ||||
|             return context[1] | ||||
|         except IndexError as exc:  # pragma: no cover | ||||
|             raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry) from exc | ||||
|  | ||||
|  | ||||
| class AtIndex(YAMLTag): | ||||
|     """Get value at index of a sequence or mapping""" | ||||
|  | ||||
|     obj: YAMLTag | dict | list | tuple | ||||
|     attribute: int | str | YAMLTag | ||||
|     default: Any | UNSET | ||||
|  | ||||
|     def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None: | ||||
|         super().__init__() | ||||
|         self.obj = loader.construct_object(node.value[0]) | ||||
|         self.attribute = loader.construct_object(node.value[1]) | ||||
|         if len(node.value) == 2:  # noqa: PLR2004 | ||||
|             self.default = UNSET | ||||
|         else: | ||||
|             self.default = loader.construct_object(node.value[2]) | ||||
|  | ||||
|     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: | ||||
|         if isinstance(self.obj, YAMLTag): | ||||
|             obj = self.obj.resolve(entry, blueprint) | ||||
|         else: | ||||
|             obj = self.obj | ||||
|         if isinstance(self.attribute, YAMLTag): | ||||
|             attribute = self.attribute.resolve(entry, blueprint) | ||||
|         else: | ||||
|             attribute = self.attribute | ||||
|  | ||||
|         if isinstance(obj, list | tuple): | ||||
|             try: | ||||
|                 return obj[attribute] | ||||
|             except TypeError as exc: | ||||
|                 raise EntryInvalidError.from_entry( | ||||
|                     f"Invalid index for list: {attribute}", entry | ||||
|                 ) from exc | ||||
|             except IndexError as exc: | ||||
|                 if self.default is UNSET: | ||||
|                     raise EntryInvalidError.from_entry( | ||||
|                         f"Index out of range: {attribute}", entry | ||||
|                     ) from exc | ||||
|                 return self.default | ||||
|         if attribute in obj: | ||||
|             return obj[attribute] | ||||
|         else: | ||||
|             if self.default is UNSET: | ||||
|                 raise EntryInvalidError.from_entry(f"Key does not exist: {attribute}", entry) | ||||
|             return self.default | ||||
|         except IndexError:  # pragma: no cover | ||||
|             raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry) | ||||
|  | ||||
|  | ||||
| class BlueprintDumper(SafeDumper): | ||||
| @ -648,11 +554,7 @@ class BlueprintDumper(SafeDumper): | ||||
|  | ||||
|             def factory(items): | ||||
|                 final_dict = dict(items) | ||||
|                 # Remove internal state variables | ||||
|                 final_dict.pop("_state", None) | ||||
|                 # Future-proof to only remove the ID if we don't set a value | ||||
|                 if "id" in final_dict and final_dict.get("id") is None: | ||||
|                     final_dict.pop("id") | ||||
|                 return final_dict | ||||
|  | ||||
|             data = asdict(data, dict_factory=factory) | ||||
| @ -666,7 +568,6 @@ class BlueprintLoader(SafeLoader): | ||||
|         super().__init__(*args, **kwargs) | ||||
|         self.add_constructor("!KeyOf", KeyOf) | ||||
|         self.add_constructor("!Find", Find) | ||||
|         self.add_constructor("!FindObject", FindObject) | ||||
|         self.add_constructor("!Context", Context) | ||||
|         self.add_constructor("!Format", Format) | ||||
|         self.add_constructor("!Condition", Condition) | ||||
| @ -675,19 +576,18 @@ class BlueprintLoader(SafeLoader): | ||||
|         self.add_constructor("!Enumerate", Enumerate) | ||||
|         self.add_constructor("!Value", Value) | ||||
|         self.add_constructor("!Index", Index) | ||||
|         self.add_constructor("!AtIndex", AtIndex) | ||||
|  | ||||
|  | ||||
| class EntryInvalidError(SentryIgnoredException): | ||||
|     """Error raised when an entry is invalid""" | ||||
|  | ||||
|     entry_model: str | None | ||||
|     entry_id: str | None | ||||
|     validation_error: ValidationError | None | ||||
|     serializer: Serializer | None = None | ||||
|     entry_model: Optional[str] | ||||
|     entry_id: Optional[str] | ||||
|     validation_error: Optional[ValidationError] | ||||
|     serializer: Optional[Serializer] = None | ||||
|  | ||||
|     def __init__( | ||||
|         self, *args: object, validation_error: ValidationError | None = None, **kwargs | ||||
|         self, *args: object, validation_error: Optional[ValidationError] = None, **kwargs | ||||
|     ) -> None: | ||||
|         super().__init__(*args) | ||||
|         self.entry_model = None | ||||
|  | ||||
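`BlueprintLoader` wires each tag up with a plain `add_constructor` call, so adding another tag is mostly a matter of subclassing `YAMLTag`. A hypothetical sketch; the `!Upper` tag below does not exist in authentik and is shown only to illustrate the pattern in this hunk:

# Hypothetical example tag, not part of authentik: uppercases a scalar value.
from typing import Any

from yaml.nodes import ScalarNode

from authentik.blueprints.v1.common import (
    Blueprint,
    BlueprintEntry,
    BlueprintLoader,
    YAMLTag,
)

class Upper(YAMLTag):
    """Uppercase a scalar value"""

    def __init__(self, loader: BlueprintLoader, node: ScalarNode) -> None:
        super().__init__()
        self.value = node.value

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        return str(self.value).upper()

# Built-in tags are registered inside BlueprintLoader.__init__; PyYAML also
# allows class-level registration, for example:
BlueprintLoader.add_constructor("!Upper", Upper)
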
| @ -1,6 +1,5 @@ | ||||
| """Blueprint exporter""" | ||||
|  | ||||
| from collections.abc import Iterable | ||||
| from typing import Iterable | ||||
| from uuid import UUID | ||||
|  | ||||
| from django.apps import apps | ||||
| @ -8,6 +7,7 @@ from django.contrib.auth import get_user_model | ||||
| from django.db.models import Model, Q, QuerySet | ||||
| from django.utils.timezone import now | ||||
| from django.utils.translation import gettext as _ | ||||
| from guardian.shortcuts import get_anonymous_user | ||||
| from yaml import dump | ||||
|  | ||||
| from authentik.blueprints.v1.common import ( | ||||
| @ -48,7 +48,7 @@ class Exporter: | ||||
|         """Return a queryset for `model`. Can be used to filter some | ||||
|         objects on some models""" | ||||
|         if model == get_user_model(): | ||||
|             return model.objects.exclude_anonymous() | ||||
|             return model.objects.exclude(pk=get_anonymous_user().pk) | ||||
|         return model.objects.all() | ||||
|  | ||||
|     def _pre_export(self, blueprint: Blueprint): | ||||
| @ -59,7 +59,7 @@ class Exporter: | ||||
|         blueprint = Blueprint() | ||||
|         self._pre_export(blueprint) | ||||
|         blueprint.metadata = BlueprintMetadata( | ||||
|             name=_("authentik Export - {date}".format_map({"date": str(now())})), | ||||
|             name=_("authentik Export - %(date)s" % {"date": str(now())}), | ||||
|             labels={ | ||||
|                 LABEL_AUTHENTIK_GENERATED: "true", | ||||
|             }, | ||||
| @ -74,7 +74,7 @@ class Exporter: | ||||
|  | ||||
|  | ||||
| class FlowExporter(Exporter): | ||||
|     """Exporter customized to only return objects related to `flow`""" | ||||
|     """Exporter customised to only return objects related to `flow`""" | ||||
|  | ||||
|     flow: Flow | ||||
|     with_policies: bool | ||||
|  | ||||
| @ -1,25 +1,22 @@ | ||||
| """Blueprint importer""" | ||||
|  | ||||
| from contextlib import contextmanager | ||||
| from copy import deepcopy | ||||
| from typing import Any | ||||
| from typing import Any, Optional | ||||
|  | ||||
| from dacite.config import Config | ||||
| from dacite.core import from_dict | ||||
| from dacite.exceptions import DaciteError | ||||
| from deepmerge import always_merger | ||||
| from django.contrib.auth.models import Permission | ||||
| from django.contrib.contenttypes.models import ContentType | ||||
| from django.core.exceptions import FieldError | ||||
| from django.db.models import Model | ||||
| from django.db.models.query_utils import Q | ||||
| from django.db.transaction import atomic | ||||
| from django.db.utils import IntegrityError | ||||
| from guardian.models import UserObjectPermission | ||||
| from guardian.shortcuts import assign_perm | ||||
| from rest_framework.exceptions import ValidationError | ||||
| from rest_framework.serializers import BaseSerializer, Serializer | ||||
| from structlog.stdlib import BoundLogger, get_logger | ||||
| from structlog.testing import capture_logs | ||||
| from structlog.types import EventDict | ||||
| from yaml import load | ||||
|  | ||||
| from authentik.blueprints.v1.common import ( | ||||
| @ -33,75 +30,40 @@ from authentik.blueprints.v1.common import ( | ||||
| from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry | ||||
| from authentik.core.models import ( | ||||
|     AuthenticatedSession, | ||||
|     GroupSourceConnection, | ||||
|     PropertyMapping, | ||||
|     Provider, | ||||
|     Source, | ||||
|     User, | ||||
|     UserSourceConnection, | ||||
| ) | ||||
| from authentik.enterprise.license import LicenseKey | ||||
| from authentik.enterprise.models import LicenseUsage | ||||
| from authentik.enterprise.providers.google_workspace.models import ( | ||||
|     GoogleWorkspaceProviderGroup, | ||||
|     GoogleWorkspaceProviderUser, | ||||
| ) | ||||
| from authentik.enterprise.providers.microsoft_entra.models import ( | ||||
|     MicrosoftEntraProviderGroup, | ||||
|     MicrosoftEntraProviderUser, | ||||
| ) | ||||
| from authentik.enterprise.providers.rac.models import ConnectionToken | ||||
| from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import ( | ||||
|     EndpointDevice, | ||||
|     EndpointDeviceConnection, | ||||
| ) | ||||
| from authentik.events.logs import LogEvent, capture_logs | ||||
| from authentik.events.models import SystemTask | ||||
| from authentik.events.utils import cleanse_dict | ||||
| from authentik.flows.models import FlowToken, Stage | ||||
| from authentik.lib.models import SerializerModel | ||||
| from authentik.lib.sentry import SentryIgnoredException | ||||
| from authentik.lib.utils.reflection import get_apps | ||||
| from authentik.outposts.models import OutpostServiceConnection | ||||
| from authentik.policies.models import Policy, PolicyBindingModel | ||||
| from authentik.policies.reputation.models import Reputation | ||||
| from authentik.providers.oauth2.models import ( | ||||
|     AccessToken, | ||||
|     AuthorizationCode, | ||||
|     DeviceToken, | ||||
|     RefreshToken, | ||||
| ) | ||||
| from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser | ||||
| from authentik.rbac.models import Role | ||||
| from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser | ||||
| from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType | ||||
| from authentik.tenants.models import Tenant | ||||
| from authentik.providers.scim.models import SCIMGroup, SCIMUser | ||||
|  | ||||
| # Context set when the serializer is created in a blueprint context | ||||
| # Update website/docs/customize/blueprints/v1/models.md when used | ||||
| # Update website/developer-docs/blueprints/v1/models.md when used | ||||
| SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry" | ||||
|  | ||||
|  | ||||
| def excluded_models() -> list[type[Model]]: | ||||
|     """Return a list of all excluded models that shouldn't be exposed via API | ||||
|     or other means (internal only, base classes, non-used objects, etc)""" | ||||
|  | ||||
|     # pylint: disable=imported-auth-user | ||||
|     from django.contrib.auth.models import Group as DjangoGroup | ||||
|     from django.contrib.auth.models import User as DjangoUser | ||||
|  | ||||
|     return ( | ||||
|         # Django only classes | ||||
|         DjangoUser, | ||||
|         DjangoGroup, | ||||
|         ContentType, | ||||
|         Permission, | ||||
|         UserObjectPermission, | ||||
|         # Base classes | ||||
|         Provider, | ||||
|         Source, | ||||
|         PropertyMapping, | ||||
|         UserSourceConnection, | ||||
|         GroupSourceConnection, | ||||
|         Stage, | ||||
|         OutpostServiceConnection, | ||||
|         Policy, | ||||
| @ -109,34 +71,16 @@ def excluded_models() -> list[type[Model]]: | ||||
|         # Classes that have other dependencies | ||||
|         AuthenticatedSession, | ||||
|         # Classes which are only internally managed | ||||
|         # FIXME: these shouldn't need to be explicitly listed, but rather based off of a mixin | ||||
|         FlowToken, | ||||
|         LicenseUsage, | ||||
|         SCIMProviderGroup, | ||||
|         SCIMProviderUser, | ||||
|         Tenant, | ||||
|         SystemTask, | ||||
|         ConnectionToken, | ||||
|         AuthorizationCode, | ||||
|         AccessToken, | ||||
|         RefreshToken, | ||||
|         Reputation, | ||||
|         WebAuthnDeviceType, | ||||
|         SCIMSourceUser, | ||||
|         SCIMSourceGroup, | ||||
|         GoogleWorkspaceProviderUser, | ||||
|         GoogleWorkspaceProviderGroup, | ||||
|         MicrosoftEntraProviderUser, | ||||
|         MicrosoftEntraProviderGroup, | ||||
|         EndpointDevice, | ||||
|         EndpointDeviceConnection, | ||||
|         DeviceToken, | ||||
|         SCIMGroup, | ||||
|         SCIMUser, | ||||
|     ) | ||||
|  | ||||
|  | ||||
| def is_model_allowed(model: type[Model]) -> bool: | ||||
|     """Check if model is allowed""" | ||||
|     return model not in excluded_models() and issubclass(model, SerializerModel | BaseMetaModel) | ||||
|     return model not in excluded_models() and issubclass(model, (SerializerModel, BaseMetaModel)) | ||||
|  | ||||
|  | ||||
| class DoRollback(SentryIgnoredException): | ||||
| @ -154,39 +98,22 @@ def transaction_rollback(): | ||||
|         pass | ||||
|  | ||||
|  | ||||
| def rbac_models() -> dict: | ||||
|     models = {} | ||||
|     for app in get_apps(): | ||||
|         for model in app.get_models(): | ||||
|             if not is_model_allowed(model): | ||||
|                 continue | ||||
|             models[model._meta.model_name] = app.label | ||||
|     return models | ||||
|  | ||||
|  | ||||
| class Importer: | ||||
|     """Import Blueprint from raw dict or YAML/JSON""" | ||||
|  | ||||
|     logger: BoundLogger | ||||
|     _import: Blueprint | ||||
|  | ||||
|     def __init__(self, blueprint: Blueprint, context: dict | None = None): | ||||
|     def __init__(self, blueprint: Blueprint, context: Optional[dict] = None): | ||||
|         self.__pk_map: dict[Any, Model] = {} | ||||
|         self._import = blueprint | ||||
|         self.logger = get_logger() | ||||
|         ctx = self.default_context() | ||||
|         ctx = {} | ||||
|         always_merger.merge(ctx, self._import.context) | ||||
|         if context: | ||||
|             always_merger.merge(ctx, context) | ||||
|         self._import.context = ctx | ||||
|  | ||||
|     def default_context(self): | ||||
|         """Default context""" | ||||
|         return { | ||||
|             "goauthentik.io/enterprise/licensed": LicenseKey.get_total().status().is_valid, | ||||
|             "goauthentik.io/rbac/models": rbac_models(), | ||||
|         } | ||||
|  | ||||
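In either variant of the constructor, the blueprint's own context is merged over the starting context and the caller-supplied context is merged last, so later sources win on conflicting keys while nested mappings are combined rather than replaced. A small sketch of that precedence using deepmerge's always_merger; the keys shown are illustrative only:

# Sketch of the merge order used by Importer.__init__ (illustrative keys).
from deepmerge import always_merger

defaults = {"goauthentik.io/enterprise/licensed": False, "color": "blue"}
blueprint_context = {"color": "red", "nested": {"a": 1}}
caller_context = {"nested": {"b": 2}}

ctx: dict = {}
always_merger.merge(ctx, defaults)           # start from the importer defaults
always_merger.merge(ctx, blueprint_context)  # blueprint context overrides defaults
always_merger.merge(ctx, caller_context)     # caller context overrides both

# ctx == {"goauthentik.io/enterprise/licensed": False, "color": "red",
#         "nested": {"a": 1, "b": 2}}
print(ctx)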
|     @staticmethod | ||||
|     def from_string(yaml_input: str, context: dict | None = None) -> "Importer": | ||||
|         """Parse YAML string and create blueprint importer from it""" | ||||
| @ -209,14 +136,14 @@ class Importer: | ||||
|  | ||||
|         def updater(value) -> Any: | ||||
|             if value in self.__pk_map: | ||||
|                 self.logger.debug("Updating reference in entry", value=value) | ||||
|                 self.logger.debug("updating reference in entry", value=value) | ||||
|                 return self.__pk_map[value] | ||||
|             return value | ||||
|  | ||||
|         for key, value in attrs.items(): | ||||
|             try: | ||||
|                 if isinstance(value, dict): | ||||
|                     for _, _inner_key in enumerate(value): | ||||
|                     for idx, _inner_key in enumerate(value): | ||||
|                         value[_inner_key] = updater(value[_inner_key]) | ||||
|                 elif isinstance(value, list): | ||||
|                     for idx, _inner_value in enumerate(value): | ||||
| @ -245,17 +172,15 @@ class Importer: | ||||
|  | ||||
|         return main_query | sub_query | ||||
|  | ||||
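The identifier lookup assembles one Q object for direct attribute matches and a second one for nested identifiers, then ORs them so an entry is found if either set matches. A condensed sketch of combining Django Q objects this way; the field names and the splitting rule are placeholders, not the importer's exact logic:

# Condensed sketch: OR-ing an exact-match Q with a nested-identifier Q.
# Field names and the Flow model below are placeholders.
from django.db.models import Q

identifiers = {"slug": "default-authentication-flow", "policy__name": "default"}

main_query = Q()
sub_query = Q()
for key, value in identifiers.items():
    if "__" in key:
        sub_query &= Q(**{key: value})   # nested lookups collected separately
    else:
        main_query &= Q(**{key: value})  # direct attribute matches

query = main_query | sub_query
# Flow.objects.filter(query) would then match on either set of identifiers.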
|     def _validate_single(self, entry: BlueprintEntry) -> BaseSerializer | None:  # noqa: PLR0915 | ||||
|     # pylint: disable-msg=too-many-locals | ||||
|     def _validate_single(self, entry: BlueprintEntry) -> Optional[BaseSerializer]: | ||||
|         """Validate a single entry""" | ||||
|         if not entry.check_all_conditions_match(self._import): | ||||
|             self.logger.debug("One or more conditions of this entry are not fulfilled, skipping") | ||||
|             return None | ||||
|  | ||||
|         model_app_label, model_name = entry.get_model(self._import).split(".") | ||||
|         try: | ||||
|             model: type[SerializerModel] = registry.get_model(model_app_label, model_name) | ||||
|         except LookupError as exc: | ||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc | ||||
|         model: type[SerializerModel] = registry.get_model(model_app_label, model_name) | ||||
|         # Don't use isinstance since we don't want to check for inheritance | ||||
|         if not is_model_allowed(model): | ||||
|             raise EntryInvalidError.from_entry(f"Model {model} not allowed", entry) | ||||
| @ -299,13 +224,9 @@ class Importer: | ||||
|  | ||||
|         serializer_kwargs = {} | ||||
|         model_instance = existing_models.first() | ||||
|         if ( | ||||
|             not isinstance(model(), BaseMetaModel) | ||||
|             and model_instance | ||||
|             and entry.state != BlueprintEntryDesiredState.MUST_CREATED | ||||
|         ): | ||||
|         if not isinstance(model(), BaseMetaModel) and model_instance: | ||||
|             self.logger.debug( | ||||
|                 "Initialise serializer with instance", | ||||
|                 "initialise serializer with instance", | ||||
|                 model=model, | ||||
|                 instance=model_instance, | ||||
|                 pk=model_instance.pk, | ||||
| @ -313,17 +234,16 @@ class Importer: | ||||
|             serializer_kwargs["instance"] = model_instance | ||||
|             serializer_kwargs["partial"] = True | ||||
|         elif model_instance and entry.state == BlueprintEntryDesiredState.MUST_CREATED: | ||||
|             msg = ( | ||||
|                 f"State is set to {BlueprintEntryDesiredState.MUST_CREATED.value} " | ||||
|                 "and object exists already" | ||||
|             ) | ||||
|             raise EntryInvalidError.from_entry( | ||||
|                 ValidationError({k: msg for k in entry.identifiers.keys()}, "unique"), | ||||
|                 ( | ||||
|                     f"state is set to {BlueprintEntryDesiredState.MUST_CREATED} " | ||||
|                     "and object exists already", | ||||
|                 ), | ||||
|                 entry, | ||||
|             ) | ||||
|         else: | ||||
|             self.logger.debug( | ||||
|                 "Initialised new serializer instance", | ||||
|                 "initialised new serializer instance", | ||||
|                 model=model, | ||||
|                 **cleanse_dict(updated_identifiers), | ||||
|             ) | ||||
| @ -335,7 +255,10 @@ class Importer: | ||||
|         try: | ||||
|             full_data = self.__update_pks_for_attrs(entry.get_attrs(self._import)) | ||||
|         except ValueError as exc: | ||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc | ||||
|             raise EntryInvalidError.from_entry( | ||||
|                 exc, | ||||
|                 entry, | ||||
|             ) from exc | ||||
|         always_merger.merge(full_data, updated_identifiers) | ||||
|         serializer_kwargs["data"] = full_data | ||||
|  | ||||
| @ -356,15 +279,6 @@ class Importer: | ||||
|             ) from exc | ||||
|         return serializer | ||||
|  | ||||
|     def _apply_permissions(self, instance: Model, entry: BlueprintEntry): | ||||
|         """Apply object-level permissions for an entry""" | ||||
|         for perm in entry.get_permissions(self._import): | ||||
|             if perm.user is not None: | ||||
|                 assign_perm(perm.permission, User.objects.get(pk=perm.user), instance) | ||||
|             if perm.role is not None: | ||||
|                 role = Role.objects.get(pk=perm.role) | ||||
|                 role.assign_permission(perm.permission, obj=instance) | ||||
|  | ||||
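Object-level permissions declared on an entry are applied in two ways: user bindings go through django-guardian's assign_perm, while role bindings use the role's own assign_permission helper. A hedged sketch of both paths; the permission codename, the lookups, and the User import path are assumptions for illustration:

# Illustrative sketch; the codename and object lookups are made-up examples.
from guardian.shortcuts import assign_perm

from authentik.core.models import User
from authentik.rbac.models import Role


def grant_example_permissions(instance):
    # User binding: object-level permission via django-guardian
    user = User.objects.get(username="akadmin")
    assign_perm("authentik_flows.view_flow", user, instance)

    # Role binding: authentik's RBAC helper assigns the permission to a role
    role = Role.objects.first()
    role.assign_permission("authentik_flows.view_flow", obj=instance)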
|     def apply(self) -> bool: | ||||
|         """Apply (create/update) models yaml, in database transaction""" | ||||
|         try: | ||||
| @ -386,7 +300,7 @@ class Importer: | ||||
|                 model: type[SerializerModel] = registry.get_model(model_app_label, model_name) | ||||
|             except LookupError: | ||||
|                 self.logger.warning( | ||||
|                     "App or Model does not exist", app=model_app_label, model=model_name | ||||
|                     "app or model does not exist", app=model_app_label, model=model_name | ||||
|                 ) | ||||
|                 return False | ||||
|             # Validate each single entry | ||||
| @ -398,7 +312,7 @@ class Importer: | ||||
|                 if entry.get_state(self._import) == BlueprintEntryDesiredState.ABSENT: | ||||
|                     serializer = exc.serializer | ||||
|                 else: | ||||
|                     self.logger.warning(f"Entry invalid: {exc}", entry=entry, error=exc) | ||||
|                     self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc) | ||||
|                     if raise_errors: | ||||
|                         raise exc | ||||
|                     return False | ||||
| @ -418,42 +332,43 @@ class Importer: | ||||
|                     and state == BlueprintEntryDesiredState.CREATED | ||||
|                 ): | ||||
|                     self.logger.debug( | ||||
|                         "Instance exists, skipping", | ||||
|                         "instance exists, skipping", | ||||
|                         model=model, | ||||
|                         instance=instance, | ||||
|                         pk=instance.pk, | ||||
|                     ) | ||||
|                 else: | ||||
|                     instance = serializer.save() | ||||
|                     self.logger.debug("Updated model", model=instance) | ||||
|                     self.logger.debug("updated model", model=instance) | ||||
|                 if "pk" in entry.identifiers: | ||||
|                     self.__pk_map[entry.identifiers["pk"]] = instance.pk | ||||
|                 entry._state = BlueprintEntryState(instance) | ||||
|                 self._apply_permissions(instance, entry) | ||||
|             elif state == BlueprintEntryDesiredState.ABSENT: | ||||
|                 instance: Model | None = serializer.instance | ||||
|                 instance: Optional[Model] = serializer.instance | ||||
|                 if instance.pk: | ||||
|                     instance.delete() | ||||
|                     self.logger.debug("Deleted model", model=instance) | ||||
|                     self.logger.debug("deleted model", model=instance) | ||||
|                     continue | ||||
|                 self.logger.debug("Entry to delete with no instance, skipping") | ||||
|                 self.logger.debug("entry to delete with no instance, skipping") | ||||
|         return True | ||||
|  | ||||
|     def validate(self, raise_validation_errors=False) -> tuple[bool, list[LogEvent]]: | ||||
|     def validate(self, raise_validation_errors=False) -> tuple[bool, list[EventDict]]: | ||||
|         """Validate loaded blueprint export, ensure all models are allowed | ||||
|         and serializers have no errors""" | ||||
|         self.logger.debug("Starting blueprint import validation") | ||||
|         orig_import = deepcopy(self._import) | ||||
|         if self._import.version != 1: | ||||
|             self.logger.warning("Invalid blueprint version") | ||||
|             return False, [LogEvent("Invalid blueprint version", log_level="warning", logger=None)] | ||||
|             return False, [{"event": "Invalid blueprint version"}] | ||||
|         with ( | ||||
|             transaction_rollback(), | ||||
|             capture_logs() as logs, | ||||
|         ): | ||||
|             successful = self._apply_models(raise_errors=raise_validation_errors) | ||||
|             if not successful: | ||||
|                 self.logger.warning("Blueprint validation failed") | ||||
|                 self.logger.debug("Blueprint validation failed") | ||||
|         for log in logs: | ||||
|             getattr(self.logger, log.get("log_level"))(**log) | ||||
|         self.logger.debug("Finished blueprint import validation") | ||||
|         self._import = orig_import | ||||
|         return successful, logs | ||||
|  | ||||
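End to end, a caller typically parses YAML into an Importer, dry-runs it with validate() (which runs inside a rolled-back transaction), and only calls apply() once validation succeeds. A minimal usage sketch; the module path and the blueprint body are illustrative assumptions, not a shipped blueprint:

# Minimal usage sketch; assumes the importer lives at authentik.blueprints.v1.importer.
from authentik.blueprints.v1.importer import Importer

BLUEPRINT_YAML = """
version: 1
metadata:
  name: example
entries:
  - model: authentik_core.group
    identifiers:
      name: example-group
    attrs:
      name: example-group
"""

importer = Importer.from_string(BLUEPRINT_YAML, context={"instance_name": "demo"})
valid, logs = importer.validate()   # dry run inside a rolled-back transaction
if valid:
    importer.apply()                # actually create/update the objects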
Some files were not shown because too many files have changed in this diff.