Compare commits

3 Commits

version/20...20240219-m

| Author | SHA1 | Date |
|---|---|---|
| | f8714aab11 | |
| | 6e707964b4 | |
| | ed30ae434d | |
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2025.2.2
+current_version = 2023.10.7
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?

@@ -17,18 +17,12 @@ optional_value = final
 
 [bumpversion:file:pyproject.toml]
 
-[bumpversion:file:package.json]
-
 [bumpversion:file:docker-compose.yml]
 
 [bumpversion:file:schema.yml]
 
-[bumpversion:file:blueprints/schema.json]
-
 [bumpversion:file:authentik/__init__.py]
 
 [bumpversion:file:internal/constants/constants.go]
 
 [bumpversion:file:web/src/common/constants.ts]
-
-[bumpversion:file:lifecycle/aws/template.yaml]
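For reference, a minimal sketch (not part of the compare view; it assumes Python's `re` module and the single-backslash form of the pattern) of how the `parse` expression above decomposes a version string into its named groups:

```python
# Sketch only: mirrors the bumpversion `parse` pattern shown above,
# with the doubled backslash (\\d) reduced to \d for use in Python source.
import re

PARSE = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?"
)

for version in ("2023.10.7", "2025.2.2-rc1"):
    groups = PARSE.fullmatch(version).groupdict()
    print(version, groups)
# 2023.10.7    {'major': '2023', 'minor': '10', 'patch': '7', 'rc_t': None, 'rc_n': None}
# 2025.2.2-rc1 {'major': '2025', 'minor': '2', 'patch': '2', 'rc_t': 'rc', 'rc_n': '1'}
```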
@@ -6,7 +6,6 @@ build/**
 build_docs/**
 *Dockerfile
 blueprints/local
-.git
 !gen-ts-api/node_modules
 !gen-ts-api/dist/**
 !gen-go-api/

**.github/FUNDING.yml** (2 changed lines)
@@ -1 +1 @@
-custom: https://goauthentik.io/pricing/
+github: [BeryJu]

**.github/ISSUE_TEMPLATE/question.md** (2 changed lines)
@@ -9,7 +9,7 @@ assignees: ""
 **Describe your question/**
 A clear and concise description of what you're trying to do.
 
-**Relevant info**
+**Relevant infos**
 i.e. Version of other software you're using, specifics of your setup
 
 **Screenshots**
@@ -35,6 +35,14 @@ runs:
             AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
             ```
 
+            For arm64, use these values:
+
+            ```shell
+            AUTHENTIK_IMAGE=ghcr.io/goauthentik/dev-server
+            AUTHENTIK_TAG=${{ inputs.tag }}-arm64
+            AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
+            ```
+
             Afterwards, run the upgrade commands from the latest release notes.
           </details>
           <details>

@@ -46,10 +54,20 @@ runs:
             authentik:
                 outposts:
                     container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
-            global:
-                image:
-                    repository: ghcr.io/goauthentik/dev-server
-                    tag: ${{ inputs.tag }}
+            image:
+                repository: ghcr.io/goauthentik/dev-server
+                tag: ${{ inputs.tag }}
+            ```
+
+            For arm64, use these values:
+
+            ```yaml
+            authentik:
+                outposts:
+                    container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
+            image:
+                repository: ghcr.io/goauthentik/dev-server
+                tag: ${{ inputs.tag }}-arm64
             ```
 
             Afterwards, run the upgrade commands from the latest release notes.

**.github/actions/docker-push-variables/action.yml** (89 changed lines)
@@ -9,15 +9,8 @@ inputs:
   image-arch:
     required: false
     description: "Docker image arch"
-  release:
-    required: true
-    description: "True if this is a release build, false if this is a dev/PR build"
 
 outputs:
-  shouldPush:
-    description: "Whether to push the image or not"
-    value: ${{ steps.ev.outputs.shouldPush }}
-
   sha:
     description: "sha"
     value: ${{ steps.ev.outputs.sha }}

@@ -32,36 +25,72 @@ outputs:
   imageTags:
     description: "Docker image tags"
     value: ${{ steps.ev.outputs.imageTags }}
-  imageTagsJSON:
-    description: "Docker image tags, as a JSON array"
-    value: ${{ steps.ev.outputs.imageTagsJSON }}
-  attestImageNames:
-    description: "Docker image names used for attestation"
-    value: ${{ steps.ev.outputs.attestImageNames }}
-  cacheTo:
-    description: "cache-to value for the docker build step"
-    value: ${{ steps.ev.outputs.cacheTo }}
   imageMainTag:
     description: "Docker image main tag"
     value: ${{ steps.ev.outputs.imageMainTag }}
-  imageMainName:
-    description: "Docker image main name"
-    value: ${{ steps.ev.outputs.imageMainName }}
-  imageBuildArgs:
-    description: "Docker image build args"
-    value: ${{ steps.ev.outputs.imageBuildArgs }}
 
 runs:
   using: "composite"
   steps:
     - name: Generate config
       id: ev
-      shell: bash
-      env:
-        IMAGE_NAME: ${{ inputs.image-name }}
-        IMAGE_ARCH: ${{ inputs.image-arch }}
-        RELEASE: ${{ inputs.release }}
-        PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
-        REF: ${{ github.ref }}
+      shell: python
       run: |
-        python3 ${{ github.action_path }}/push_vars.py
+        """Helper script to get the actual branch name, docker safe"""
+        import configparser
+        import os
+        from time import time
+
+        parser = configparser.ConfigParser()
+        parser.read(".bumpversion.cfg")
+
+        branch_name = os.environ["GITHUB_REF"]
+        if os.environ.get("GITHUB_HEAD_REF", "") != "":
+            branch_name = os.environ["GITHUB_HEAD_REF"]
+        safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-")
+
+        image_names = "${{ inputs.image-name }}".split(",")
+        image_arch = "${{ inputs.image-arch }}" or None
+
+        is_pull_request = bool("${{ github.event.pull_request.head.sha }}")
+        is_release = "dev" not in image_names[0]
+
+        sha = os.environ["GITHUB_SHA"] if not is_pull_request else "${{ github.event.pull_request.head.sha }}"
+
+        # 2042.1.0 or 2042.1.0-rc1
+        version = parser.get("bumpversion", "current_version")
+        # 2042.1
+        version_family = ".".join(version.split("-", 1)[0].split(".")[:-1])
+        prerelease = "-" in version
+
+        image_tags = []
+        if is_release:
+            for name in image_names:
+                image_tags += [
+                    f"{name}:{version}",
+                ]
+            if not prerelease:
+                image_tags += [
+                    f"{name}:latest",
+                    f"{name}:{version_family}",
+                ]
+        else:
+            suffix = ""
+            if image_arch and image_arch != "amd64":
+                suffix = f"-{image_arch}"
+            for name in image_names:
+                image_tags += [
+                    f"{name}:gh-{sha}{suffix}",  # Used for ArgoCD and PR comments
+                    f"{name}:gh-{safe_branch_name}{suffix}",  # For convenience
+                    f"{name}:gh-{safe_branch_name}-{int(time())}-{sha[:7]}{suffix}",  # Use by FluxCD
+                ]
+
+        image_main_tag = image_tags[0]
+        image_tags_rendered = ",".join(image_tags)
+
+        with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
+            print("sha=%s" % sha, file=_output)
+            print("version=%s" % version, file=_output)
+            print("prerelease=%s" % prerelease, file=_output)
+            print("imageTags=%s" % image_tags_rendered, file=_output)
+            print("imageMainTag=%s" % image_main_tag, file=_output)

**.github/actions/docker-push-variables/push_vars.py** (101 changed lines)
@@ -1,101 +0,0 @@
-"""Helper script to get the actual branch name, docker safe"""
-
-import configparser
-import os
-from json import dumps
-from time import time
-
-parser = configparser.ConfigParser()
-parser.read(".bumpversion.cfg")
-
-# Decide if we should push the image or not
-should_push = True
-if len(os.environ.get("DOCKER_USERNAME", "")) < 1:
-    # Don't push if we don't have DOCKER_USERNAME, i.e. no secrets are available
-    should_push = False
-if os.environ.get("GITHUB_REPOSITORY").lower() == "goauthentik/authentik-internal":
-    # Don't push on the internal repo
-    should_push = False
-
-branch_name = os.environ["GITHUB_REF"]
-if os.environ.get("GITHUB_HEAD_REF", "") != "":
-    branch_name = os.environ["GITHUB_HEAD_REF"]
-safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-").replace("'", "-")
-
-image_names = os.getenv("IMAGE_NAME").split(",")
-image_arch = os.getenv("IMAGE_ARCH") or None
-
-is_pull_request = bool(os.getenv("PR_HEAD_SHA"))
-is_release = "dev" not in image_names[0]
-
-sha = os.environ["GITHUB_SHA"] if not is_pull_request else os.getenv("PR_HEAD_SHA")
-
-# 2042.1.0 or 2042.1.0-rc1
-version = parser.get("bumpversion", "current_version")
-# 2042.1
-version_family = ".".join(version.split("-", 1)[0].split(".")[:-1])
-prerelease = "-" in version
-
-image_tags = []
-if is_release:
-    for name in image_names:
-        image_tags += [
-            f"{name}:{version}",
-        ]
-        if not prerelease:
-            image_tags += [
-                f"{name}:latest",
-                f"{name}:{version_family}",
-            ]
-else:
-    suffix = ""
-    if image_arch:
-        suffix = f"-{image_arch}"
-    for name in image_names:
-        image_tags += [
-            f"{name}:gh-{sha}{suffix}",  # Used for ArgoCD and PR comments
-            f"{name}:gh-{safe_branch_name}{suffix}",  # For convenience
-            f"{name}:gh-{safe_branch_name}-{int(time())}-{sha[:7]}{suffix}",  # Use by FluxCD
-        ]
-
-image_main_tag = image_tags[0].split(":")[-1]
-
-
-def get_attest_image_names(image_with_tags: list[str]):
-    """Attestation only for GHCR"""
-    image_tags = []
-    for image_name in set(name.split(":")[0] for name in image_with_tags):
-        if not image_name.startswith("ghcr.io"):
-            continue
-        image_tags.append(image_name)
-    return ",".join(set(image_tags))
-
-
-# Generate `cache-to` param
-cache_to = ""
-if should_push:
-    _cache_tag = "buildcache"
-    if image_arch:
-        _cache_tag += f"-{image_arch}"
-    cache_to = f"type=registry,ref={get_attest_image_names(image_tags)}:{_cache_tag},mode=max"
-
-
-image_build_args = []
-if os.getenv("RELEASE", "false").lower() == "true":
-    image_build_args = [f"VERSION={os.getenv('REF')}"]
-else:
-    image_build_args = [f"GIT_BUILD_HASH={sha}"]
-image_build_args = "\n".join(image_build_args)
-
-with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
-    print(f"shouldPush={str(should_push).lower()}", file=_output)
-    print(f"sha={sha}", file=_output)
-    print(f"version={version}", file=_output)
-    print(f"prerelease={prerelease}", file=_output)
-    print(f"imageTags={','.join(image_tags)}", file=_output)
-    print(f"imageTagsJSON={dumps(image_tags)}", file=_output)
-    print(f"attestImageNames={get_attest_image_names(image_tags)}", file=_output)
-    print(f"imageMainTag={image_main_tag}", file=_output)
-    print(f"imageMainName={image_tags[0]}", file=_output)
-    print(f"cacheTo={cache_to}", file=_output)
-    print(f"imageBuildArgs={image_build_args}", file=_output)
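To make the tag naming easier to follow, here is a small standalone sketch (an illustration only, using an image name taken from the test script below and a hypothetical version) of the `version_family` and release-tag derivation that both scripts above perform:

```python
# Illustration only: reproduces the version_family / release-tag logic from the
# scripts above for a sample image name; not part of the workflow itself.
def release_tags(name: str, version: str) -> list[str]:
    # "2042.1.0-rc1" -> family "2042.1", prerelease True
    version_family = ".".join(version.split("-", 1)[0].split(".")[:-1])
    prerelease = "-" in version
    tags = [f"{name}:{version}"]
    if not prerelease:
        tags += [f"{name}:latest", f"{name}:{version_family}"]
    return tags

print(release_tags("ghcr.io/goauthentik/server", "2042.1.0"))
# ['ghcr.io/goauthentik/server:2042.1.0',
#  'ghcr.io/goauthentik/server:latest',
#  'ghcr.io/goauthentik/server:2042.1']
print(release_tags("ghcr.io/goauthentik/server", "2042.1.0-rc1"))
# ['ghcr.io/goauthentik/server:2042.1.0-rc1']
```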
							
								
								
									
**.github/actions/docker-push-variables/test.sh** (18 changed lines)
@@ -1,18 +0,0 @@
-#!/bin/bash -x
-SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
-# Non-pushing PR
-GITHUB_OUTPUT=/dev/stdout \
-    GITHUB_REF=ref \
-    GITHUB_SHA=sha \
-    IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
-    GITHUB_REPOSITORY=goauthentik/authentik \
-    python $SCRIPT_DIR/push_vars.py
-
-# Pushing PR/main
-GITHUB_OUTPUT=/dev/stdout \
-    GITHUB_REF=ref \
-    GITHUB_SHA=sha \
-    IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
-    GITHUB_REPOSITORY=goauthentik/authentik \
-    DOCKER_USERNAME=foo \
-    python $SCRIPT_DIR/push_vars.py

**.github/actions/setup/action.yml** (12 changed lines)
@@ -14,28 +14,28 @@ runs:
       run: |
         pipx install poetry || true
         sudo apt-get update
-        sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext libkrb5-dev krb5-kdc krb5-user krb5-admin-server
+        sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
     - name: Setup python and restore poetry
-      uses: actions/setup-python@v5
+      uses: actions/setup-python@v4
       with:
         python-version-file: "pyproject.toml"
        cache: "poetry"
     - name: Setup node
-      uses: actions/setup-node@v4
+      uses: actions/setup-node@v3
      with:
        node-version-file: web/package.json
        cache: "npm"
        cache-dependency-path: web/package-lock.json
     - name: Setup go
-      uses: actions/setup-go@v5
+      uses: actions/setup-go@v4
      with:
        go-version-file: "go.mod"
     - name: Setup dependencies
       shell: bash
       run: |
         export PSQL_TAG=${{ inputs.postgresql_version }}
-        docker compose -f .github/actions/setup/docker-compose.yml up -d
-        poetry install --sync
+        docker-compose -f .github/actions/setup/docker-compose.yml up -d
+        poetry install
         cd web && npm ci
     - name: Generate config
       shell: poetry run python {0}

**.github/actions/setup/docker-compose.yml** (2 changed lines)
@@ -1,3 +1,5 @@
+version: "3.7"
+
 services:
   postgresql:
     image: docker.io/library/postgres:${PSQL_TAG:-16}

**.github/codespell-words.txt** (1 changed line)
@@ -4,4 +4,3 @@ hass
 warmup
 ontext
 singed
-assertIn

**.github/dependabot.yml** (57 changed lines)
@@ -21,9 +21,7 @@ updates:
     labels:
       - dependencies
   - package-ecosystem: npm
-    directories:
-      - "/web"
-      - "/web/sfe"
+    directory: "/web"
     schedule:
       interval: daily
       time: "04:00"

@@ -32,6 +30,7 @@ updates:
     open-pull-requests-limit: 10
     commit-message:
       prefix: "web:"
+    # TODO: deduplicate these groups
     groups:
       sentry:
         patterns:

@@ -43,11 +42,9 @@ updates:
           - "babel-*"
       eslint:
         patterns:
-          - "@eslint/*"
           - "@typescript-eslint/*"
-          - "eslint-*"
           - "eslint"
-          - "typescript-eslint"
+          - "eslint-*"
       storybook:
         patterns:
           - "@storybook/*"

@@ -55,16 +52,38 @@ updates:
       esbuild:
         patterns:
           - "@esbuild/*"
-          - "esbuild*"
-      rollup:
+  - package-ecosystem: npm
+    directory: "/tests/wdio"
+    schedule:
+      interval: daily
+      time: "04:00"
+    labels:
+      - dependencies
+    open-pull-requests-limit: 10
+    commit-message:
+      prefix: "web:"
+    # TODO: deduplicate these groups
+    groups:
+      sentry:
         patterns:
-          - "@rollup/*"
-          - "rollup-*"
-          - "rollup*"
-      swc:
+          - "@sentry/*"
+          - "@spotlightjs/*"
+      babel:
         patterns:
-          - "@swc/*"
-          - "swc-*"
+          - "@babel/*"
+          - "babel-*"
+      eslint:
+        patterns:
+          - "@typescript-eslint/*"
+          - "eslint"
+          - "eslint-*"
+      storybook:
+        patterns:
+          - "@storybook/*"
+          - "*storybook*"
+      esbuild:
+        patterns:
+          - "@esbuild/*"
       wdio:
         patterns:
           - "@wdio/*"

@@ -82,16 +101,6 @@ updates:
       docusaurus:
         patterns:
           - "@docusaurus/*"
-  - package-ecosystem: npm
-    directory: "/lifecycle/aws"
-    schedule:
-      interval: daily
-      time: "04:00"
-    open-pull-requests-limit: 10
-    commit-message:
-      prefix: "lifecycle/aws:"
-    labels:
-      - dependencies
   - package-ecosystem: pip
     directory: "/"
     schedule:

**.github/pull_request_template.md** (2 changed lines)
@@ -1,7 +1,7 @@
 <!--
 👋 Hi there! Welcome.
 
-Please check the Contributing guidelines: https://docs.goauthentik.io/docs/developer-docs/#how-can-i-contribute
+Please check the Contributing guidelines: https://goauthentik.io/developer-docs/#how-can-i-contribute
 -->
 
 ## Details
@@ -1,96 +0,0 @@
-# Re-usable workflow for a single-architecture build
-name: Single-arch Container build
-
-on:
-  workflow_call:
-    inputs:
-      image_name:
-        required: true
-        type: string
-      image_arch:
-        required: true
-        type: string
-      runs-on:
-        required: true
-        type: string
-      registry_dockerhub:
-        default: false
-        type: boolean
-      registry_ghcr:
-        default: false
-        type: boolean
-      release:
-        default: false
-        type: boolean
-    outputs:
-      image-digest:
-        value: ${{ jobs.build.outputs.image-digest }}
-
-jobs:
-  build:
-    name: Build ${{ inputs.image_arch }}
-    runs-on: ${{ inputs.runs-on }}
-    outputs:
-      image-digest: ${{ steps.push.outputs.digest }}
-    permissions:
-      # Needed to upload container images to ghcr.io
-      packages: write
-      # Needed for attestation
-      id-token: write
-      attestations: write
-    steps:
-      - uses: actions/checkout@v4
-      - uses: docker/setup-qemu-action@v3.4.0
-      - uses: docker/setup-buildx-action@v3
-      - name: prepare variables
-        uses: ./.github/actions/docker-push-variables
-        id: ev
-        env:
-          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
-        with:
-          image-name: ${{ inputs.image_name }}
-          image-arch: ${{ inputs.image_arch }}
-          release: ${{ inputs.release }}
-      - name: Login to Docker Hub
-        if: ${{ inputs.registry_dockerhub }}
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
-      - name: Login to GitHub Container Registry
-        if: ${{ inputs.registry_ghcr }}
-        uses: docker/login-action@v3
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: make empty clients
-        if: ${{ inputs.release }}
-        run: |
-          mkdir -p ./gen-ts-api
-          mkdir -p ./gen-go-api
-      - name: generate ts client
-        if: ${{ !inputs.release }}
-        run: make gen-client-ts
-      - name: Build Docker Image
-        uses: docker/build-push-action@v6
-        id: push
-        with:
-          context: .
-          push: ${{ steps.ev.outputs.shouldPush == 'true' }}
-          secrets: |
-            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
-            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
-          build-args: |
-            ${{ steps.ev.outputs.imageBuildArgs }}
-          tags: ${{ steps.ev.outputs.imageTags }}
-          platforms: linux/${{ inputs.image_arch }}
-          cache-from: type=registry,ref=${{ steps.ev.outputs.attestImageNames }}:buildcache-${{ inputs.image_arch }}
-          cache-to: ${{ steps.ev.outputs.cacheTo }}
-      - uses: actions/attest-build-provenance@v2
-        id: attest
-        if: ${{ steps.ev.outputs.shouldPush == 'true' }}
-        with:
-          subject-name: ${{ steps.ev.outputs.attestImageNames }}
-          subject-digest: ${{ steps.push.outputs.digest }}
-          push-to-registry: true

**.github/workflows/_reusable-docker-build.yaml** (104 changed lines)
@@ -1,104 +0,0 @@
-# Re-usable workflow for a multi-architecture build
-name: Multi-arch container build
-
-on:
-  workflow_call:
-    inputs:
-      image_name:
-        required: true
-        type: string
-      registry_dockerhub:
-        default: false
-        type: boolean
-      registry_ghcr:
-        default: true
-        type: boolean
-      release:
-        default: false
-        type: boolean
-    outputs: {}
-
-jobs:
-  build-server-amd64:
-    uses: ./.github/workflows/_reusable-docker-build-single.yaml
-    secrets: inherit
-    with:
-      image_name: ${{ inputs.image_name }}
-      image_arch: amd64
-      runs-on: ubuntu-latest
-      registry_dockerhub: ${{ inputs.registry_dockerhub }}
-      registry_ghcr: ${{ inputs.registry_ghcr }}
-      release: ${{ inputs.release }}
-  build-server-arm64:
-    uses: ./.github/workflows/_reusable-docker-build-single.yaml
-    secrets: inherit
-    with:
-      image_name: ${{ inputs.image_name }}
-      image_arch: arm64
-      runs-on: ubuntu-22.04-arm
-      registry_dockerhub: ${{ inputs.registry_dockerhub }}
-      registry_ghcr: ${{ inputs.registry_ghcr }}
-      release: ${{ inputs.release }}
-  get-tags:
-    runs-on: ubuntu-latest
-    needs:
-      - build-server-amd64
-      - build-server-arm64
-    outputs:
-      tags: ${{ steps.ev.outputs.imageTagsJSON }}
-      shouldPush: ${{ steps.ev.outputs.shouldPush }}
-    steps:
-      - uses: actions/checkout@v4
-      - name: prepare variables
-        uses: ./.github/actions/docker-push-variables
-        id: ev
-        env:
-          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
-        with:
-          image-name: ${{ inputs.image_name }}
-  merge-server:
-    runs-on: ubuntu-latest
-    if: ${{ needs.get-tags.outputs.shouldPush == 'true' }}
-    needs:
-      - get-tags
-      - build-server-amd64
-      - build-server-arm64
-    strategy:
-      fail-fast: false
-      matrix:
-        tag: ${{ fromJson(needs.get-tags.outputs.tags) }}
-    steps:
-      - uses: actions/checkout@v4
-      - name: prepare variables
-        uses: ./.github/actions/docker-push-variables
-        id: ev
-        env:
-          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
-        with:
-          image-name: ${{ inputs.image_name }}
-      - name: Login to Docker Hub
-        if: ${{ inputs.registry_dockerhub }}
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
-      - name: Login to GitHub Container Registry
-        if: ${{ inputs.registry_ghcr }}
-        uses: docker/login-action@v3
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-      - uses: int128/docker-manifest-create-action@v2
-        id: build
-        with:
-          tags: ${{ matrix.tag }}
-          sources: |
-            ${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-amd64.outputs.image-digest }}
-            ${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-arm64.outputs.image-digest }}
-      - uses: actions/attest-build-provenance@v2
-        id: attest
-        with:
-          subject-name: ${{ steps.ev.outputs.attestImageNames }}
-          subject-digest: ${{ steps.build.outputs.digest }}
-          push-to-registry: true

**.github/workflows/api-py-publish.yml** (66 changed lines)
@@ -1,66 +0,0 @@
-name: authentik-api-py-publish
-on:
-  push:
-    branches: [main]
-    paths:
-      - "schema.yml"
-  workflow_dispatch:
-jobs:
-  build:
-    if: ${{ github.repository != 'goauthentik/authentik-internal' }}
-    runs-on: ubuntu-latest
-    permissions:
-      id-token: write
-    steps:
-      - id: generate_token
-        uses: tibdex/github-app-token@v2
-        with:
-          app_id: ${{ secrets.GH_APP_ID }}
-          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
-      - uses: actions/checkout@v4
-        with:
-          token: ${{ steps.generate_token.outputs.token }}
-      - name: Install poetry & deps
-        shell: bash
-        run: |
-          pipx install poetry || true
-          sudo apt-get update
-          sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
-      - name: Setup python and restore poetry
-        uses: actions/setup-python@v5
-        with:
-          python-version-file: "pyproject.toml"
-          cache: "poetry"
-      - name: Generate API Client
-        run: make gen-client-py
-      - name: Publish package
-        working-directory: gen-py-api/
-        run: |
-          poetry build
-      - name: Publish package to PyPI
-        uses: pypa/gh-action-pypi-publish@release/v1
-        with:
-          packages-dir: gen-py-api/dist/
-      # We can't easily upgrade the API client being used due to poetry being poetry
-      # so we'll have to rely on dependabot
-      # - name: Upgrade /
-      #   run: |
-      #     export VERSION=$(cd gen-py-api && poetry version -s)
-      #     poetry add "authentik_client=$VERSION" --allow-prereleases --lock
-      # - uses: peter-evans/create-pull-request@v6
-      #   id: cpr
-      #   with:
-      #     token: ${{ steps.generate_token.outputs.token }}
-      #     branch: update-root-api-client
-      #     commit-message: "root: bump API Client version"
-      #     title: "root: bump API Client version"
-      #     body: "root: bump API Client version"
-      #     delete-branch: true
-      #     signoff: true
-      #     # ID from https://api.github.com/users/authentik-automation[bot]
-      #     author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
-      # - uses: peter-evans/enable-pull-request-automerge@v3
-      #   with:
-      #     token: ${{ steps.generate_token.outputs.token }}
-      #     pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
-      #     merge-method: squash

**.github/workflows/ci-aws-cfn.yml** (46 changed lines)
@@ -1,46 +0,0 @@
-name: authentik-ci-aws-cfn
-
-on:
-  push:
-    branches:
-      - main
-      - next
-      - version-*
-  pull_request:
-    branches:
-      - main
-      - version-*
-
-env:
-  POSTGRES_DB: authentik
-  POSTGRES_USER: authentik
-  POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
-
-jobs:
-  check-changes-applied:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - name: Setup authentik env
-        uses: ./.github/actions/setup
-      - uses: actions/setup-node@v4
-        with:
-          node-version-file: lifecycle/aws/package.json
-          cache: "npm"
-          cache-dependency-path: lifecycle/aws/package-lock.json
-      - working-directory: lifecycle/aws/
-        run: |
-          npm ci
-      - name: Check changes have been applied
-        run: |
-          poetry run make aws-cfn
-          git diff --exit-code
-  ci-aws-cfn-mark:
-    if: always()
-    needs:
-      - check-changes-applied
-    runs-on: ubuntu-latest
-    steps:
-      - uses: re-actors/alls-green@release/v1
-        with:
-          jobs: ${{ toJSON(needs) }}

**.github/workflows/ci-main-daily.yml** (28 changed lines)
@@ -1,28 +0,0 @@
----
-name: authentik-ci-main-daily
-
-on:
-  workflow_dispatch:
-  schedule:
-    # Every night at 3am
-    - cron: "0 3 * * *"
-
-jobs:
-  test-container:
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        version:
-          - docs
-          - version-2024-12
-          - version-2024-10
-    steps:
-      - uses: actions/checkout@v4
-      - run: |
-          current="$(pwd)"
-          dir="/tmp/authentik/${{ matrix.version }}"
-          mkdir -p $dir
-          cd $dir
-          wget https://${{ matrix.version }}.goauthentik.io/docker-compose.yml
-          ${current}/scripts/test_docker.sh

**.github/workflows/ci-main.yml** (137 changed lines)
@@ -7,6 +7,8 @@ on:
       - main
       - next
       - version-*
+    paths-ignore:
+      - website/**
   pull_request:
     branches:
       - main

@@ -26,7 +28,10 @@ jobs:
           - bandit
           - black
           - codespell
+          - isort
           - pending-migrations
+          # - pylint
+          - pyright
           - ruff
     runs-on: ubuntu-latest
     steps:

@@ -43,26 +48,16 @@ jobs:
         uses: ./.github/actions/setup
       - name: run migrations
         run: poetry run python -m lifecycle.migrate
-  test-make-seed:
-    runs-on: ubuntu-latest
-    steps:
-      - id: seed
-        run: |
-          echo "seed=$(printf "%d\n" "0x$(openssl rand -hex 4)")" >> "$GITHUB_OUTPUT"
-    outputs:
-      seed: ${{ steps.seed.outputs.seed }}
   test-migrations-from-stable:
-    name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
+    name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }}
     runs-on: ubuntu-latest
-    timeout-minutes: 20
-    needs: test-make-seed
     strategy:
       fail-fast: false
       matrix:
         psql:
+          - 12-alpine
           - 15-alpine
           - 16-alpine
-        run_id: [1, 2, 3, 4, 5]
     steps:
       - uses: actions/checkout@v4
         with:

@@ -104,23 +99,19 @@ jobs:
         env:
           # Test in the main database that we just migrated from the previous stable version
           AUTHENTIK_POSTGRESQL__TEST__NAME: authentik
-          CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
-          CI_RUN_ID: ${{ matrix.run_id }}
-          CI_TOTAL_RUNS: "5"
         run: |
-          poetry run make ci-test
+          poetry run make test
   test-unittest:
-    name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
+    name: test-unittest - PostgreSQL ${{ matrix.psql }}
     runs-on: ubuntu-latest
-    timeout-minutes: 20
-    needs: test-make-seed
+    timeout-minutes: 30
     strategy:
       fail-fast: false
       matrix:
         psql:
+          - 12-alpine
           - 15-alpine
           - 16-alpine
-        run_id: [1, 2, 3, 4, 5]
     steps:
       - uses: actions/checkout@v4
       - name: Setup authentik env

@@ -128,23 +119,14 @@ jobs:
         with:
           postgresql_version: ${{ matrix.psql }}
       - name: run unittest
-        env:
-          CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
-          CI_RUN_ID: ${{ matrix.run_id }}
-          CI_TOTAL_RUNS: "5"
         run: |
-          poetry run make ci-test
+          poetry run make test
+          poetry run coverage xml
       - if: ${{ always() }}
-        uses: codecov/codecov-action@v5
+        uses: codecov/codecov-action@v4
         with:
           flags: unit
           token: ${{ secrets.CODECOV_TOKEN }}
-      - if: ${{ !cancelled() }}
-        uses: codecov/test-results-action@v1
-        with:
-          flags: unit
-          file: unittest.xml
-          token: ${{ secrets.CODECOV_TOKEN }}
   test-integration:
     runs-on: ubuntu-latest
     timeout-minutes: 30

@@ -153,22 +135,16 @@ jobs:
       - name: Setup authentik env
         uses: ./.github/actions/setup
       - name: Create k8s Kind Cluster
-        uses: helm/kind-action@v1.12.0
+        uses: helm/kind-action@v1.9.0
       - name: run integration
         run: |
           poetry run coverage run manage.py test tests/integration
           poetry run coverage xml
       - if: ${{ always() }}
-        uses: codecov/codecov-action@v5
+        uses: codecov/codecov-action@v4
         with:
           flags: integration
           token: ${{ secrets.CODECOV_TOKEN }}
-      - if: ${{ !cancelled() }}
-        uses: codecov/test-results-action@v1
-        with:
-          flags: integration
-          file: unittest.xml
-          token: ${{ secrets.CODECOV_TOKEN }}
   test-e2e:
     name: test-e2e (${{ matrix.job.name }})
     runs-on: ubuntu-latest

@@ -189,8 +165,6 @@ jobs:
             glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap*
           - name: radius
             glob: tests/e2e/test_provider_radius*
-          - name: scim
-            glob: tests/e2e/test_source_scim*
           - name: flows
             glob: tests/e2e/test_flows*
     steps:

@@ -199,7 +173,7 @@ jobs:
         uses: ./.github/actions/setup
       - name: Setup e2e env (chrome, etc)
         run: |
-          docker compose -f tests/e2e/docker-compose.yml up -d --quiet-pull
+          docker-compose -f tests/e2e/docker-compose.yml up -d
       - id: cache-web
         uses: actions/cache@v4
         with:

@@ -217,18 +191,11 @@ jobs:
           poetry run coverage run manage.py test ${{ matrix.job.glob }}
           poetry run coverage xml
       - if: ${{ always() }}
-        uses: codecov/codecov-action@v5
+        uses: codecov/codecov-action@v4
         with:
           flags: e2e
           token: ${{ secrets.CODECOV_TOKEN }}
-      - if: ${{ !cancelled() }}
-        uses: codecov/test-results-action@v1
-        with:
-          flags: e2e
-          file: unittest.xml
-          token: ${{ secrets.CODECOV_TOKEN }}
   ci-core-mark:
-    if: always()
     needs:
       - lint
       - test-migrations

@@ -238,22 +205,57 @@ jobs:
       - test-e2e
     runs-on: ubuntu-latest
     steps:
-      - uses: re-actors/alls-green@release/v1
-        with:
-          jobs: ${{ toJSON(needs) }}
+      - run: echo mark
   build:
-    permissions:
-      # Needed to upload container images to ghcr.io
-      packages: write
-      # Needed for attestation
-      id-token: write
-      attestations: write
+    strategy:
+      fail-fast: false
+      matrix:
+        arch:
+          - amd64
+          - arm64
     needs: ci-core-mark
-    uses: ./.github/workflows/_reusable-docker-build.yaml
-    secrets: inherit
-    with:
-      image_name: ghcr.io/goauthentik/dev-server
-      release: false
+    runs-on: ubuntu-latest
+    permissions:
+      # Needed to upload contianer images to ghcr.io
+      packages: write
+    timeout-minutes: 120
+    if: "github.repository == 'goauthentik/authentik'"
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          ref: ${{ github.event.pull_request.head.sha }}
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3.0.0
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+      - name: prepare variables
+        uses: ./.github/actions/docker-push-variables
+        id: ev
+        with:
+          image-name: ghcr.io/goauthentik/dev-server
+          image-arch: ${{ matrix.arch }}
+      - name: Login to Container Registry
+        uses: docker/login-action@v3
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: generate ts client
+        run: make gen-client-ts
+      - name: Build Docker Image
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          secrets: |
+            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
+            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
+          tags: ${{ steps.ev.outputs.imageTags }}
+          push: true
+          build-args: |
+            GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
+          platforms: linux/${{ matrix.arch }}
   pr-comment:
     needs:
       - build

@@ -270,12 +272,9 @@ jobs:
       - name: prepare variables
         uses: ./.github/actions/docker-push-variables
         id: ev
-        env:
-          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
         with:
           image-name: ghcr.io/goauthentik/dev-server
       - name: Comment on PR
-        if: ${{ steps.ev.outputs.shouldPush == 'true' }}
         uses: ./.github/actions/comment-pr-instructions
         with:
-          tag: ${{ steps.ev.outputs.imageMainTag }}
+          tag: gh-${{ steps.ev.outputs.imageMainTag }}
|  | |||||||
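
Aside: the ci-core-mark job in the left-hand column of the diff above gates the pipeline with re-actors/alls-green rather than a bare echo step. A minimal sketch of that pattern, with illustrative job names, might look like the following; if: always() keeps the gate running even when a needed job fails or is skipped, and the action then passes or fails based on the serialized needs context:

    jobs:
      lint:
        runs-on: ubuntu-latest
        steps:
          - run: echo "lint checks run here"
      test:
        runs-on: ubuntu-latest
        steps:
          - run: echo "tests run here"
      ci-mark:
        # single required status check for branch protection
        if: always()
        needs:
          - lint
          - test
        runs-on: ubuntu-latest
        steps:
          - uses: re-actors/alls-green@release/v1
            with:
              # fails this job if any job listed in `needs` failed or was skipped
              jobs: ${{ toJSON(needs) }}
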
.github/workflows/ci-outpost.yml (36 changed lines)
							| @ -29,9 +29,9 @@ jobs: | |||||||
|       - name: Generate API |       - name: Generate API | ||||||
|         run: make gen-client-go |         run: make gen-client-go | ||||||
|       - name: golangci-lint |       - name: golangci-lint | ||||||
|         uses: golangci/golangci-lint-action@v6 |         uses: golangci/golangci-lint-action@v4 | ||||||
|         with: |         with: | ||||||
|           version: latest |           version: v1.54.2 | ||||||
|           args: --timeout 5000s --verbose |           args: --timeout 5000s --verbose | ||||||
|           skip-cache: true |           skip-cache: true | ||||||
|   test-unittest: |   test-unittest: | ||||||
| @ -49,15 +49,12 @@ jobs: | |||||||
|         run: | |         run: | | ||||||
|           go test -timeout 0 -v -race -coverprofile=coverage.out -covermode=atomic -cover ./... |           go test -timeout 0 -v -race -coverprofile=coverage.out -covermode=atomic -cover ./... | ||||||
|   ci-outpost-mark: |   ci-outpost-mark: | ||||||
|     if: always() |  | ||||||
|     needs: |     needs: | ||||||
|       - lint-golint |       - lint-golint | ||||||
|       - test-unittest |       - test-unittest | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - uses: re-actors/alls-green@release/v1 |       - run: echo mark | ||||||
|         with: |  | ||||||
|           jobs: ${{ toJSON(needs) }} |  | ||||||
|   build-container: |   build-container: | ||||||
|     timeout-minutes: 120 |     timeout-minutes: 120 | ||||||
|     needs: |     needs: | ||||||
| @ -72,28 +69,23 @@ jobs: | |||||||
|           - rac |           - rac | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     permissions: |     permissions: | ||||||
|       # Needed to upload container images to ghcr.io |       # Needed to upload container images to ghcr.io | ||||||
|       packages: write |       packages: write | ||||||
|       # Needed for attestation |     if: "github.repository == 'goauthentik/authentik'" | ||||||
|       id-token: write |  | ||||||
|       attestations: write |  | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|         with: |         with: | ||||||
|           ref: ${{ github.event.pull_request.head.sha }} |           ref: ${{ github.event.pull_request.head.sha }} | ||||||
|       - name: Set up QEMU |       - name: Set up QEMU | ||||||
|         uses: docker/setup-qemu-action@v3.4.0 |         uses: docker/setup-qemu-action@v3.0.0 | ||||||
|       - name: Set up Docker Buildx |       - name: Set up Docker Buildx | ||||||
|         uses: docker/setup-buildx-action@v3 |         uses: docker/setup-buildx-action@v3 | ||||||
|       - name: prepare variables |       - name: prepare variables | ||||||
|         uses: ./.github/actions/docker-push-variables |         uses: ./.github/actions/docker-push-variables | ||||||
|         id: ev |         id: ev | ||||||
|         env: |  | ||||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} |  | ||||||
|         with: |         with: | ||||||
|           image-name: ghcr.io/goauthentik/dev-${{ matrix.type }} |           image-name: ghcr.io/goauthentik/dev-${{ matrix.type }} | ||||||
|       - name: Login to Container Registry |       - name: Login to Container Registry | ||||||
|         if: ${{ steps.ev.outputs.shouldPush == 'true' }} |  | ||||||
|         uses: docker/login-action@v3 |         uses: docker/login-action@v3 | ||||||
|         with: |         with: | ||||||
|           registry: ghcr.io |           registry: ghcr.io | ||||||
| @ -102,25 +94,17 @@ jobs: | |||||||
|       - name: Generate API |       - name: Generate API | ||||||
|         run: make gen-client-go |         run: make gen-client-go | ||||||
|       - name: Build Docker Image |       - name: Build Docker Image | ||||||
|         id: push |         uses: docker/build-push-action@v5 | ||||||
|         uses: docker/build-push-action@v6 |  | ||||||
|         with: |         with: | ||||||
|           tags: ${{ steps.ev.outputs.imageTags }} |           tags: ${{ steps.ev.outputs.imageTags }} | ||||||
|           file: ${{ matrix.type }}.Dockerfile |           file: ${{ matrix.type }}.Dockerfile | ||||||
|           push: ${{ steps.ev.outputs.shouldPush == 'true' }} |           push: true | ||||||
|           build-args: | |           build-args: | | ||||||
|             GIT_BUILD_HASH=${{ steps.ev.outputs.sha }} |             GIT_BUILD_HASH=${{ steps.ev.outputs.sha }} | ||||||
|           platforms: linux/amd64,linux/arm64 |           platforms: linux/amd64,linux/arm64 | ||||||
|           context: . |           context: . | ||||||
|           cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache |           cache-from: type=gha | ||||||
|           cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && format('type=registry,ref=ghcr.io/goauthentik/dev-{0}:buildcache,mode=max', matrix.type) || '' }} |           cache-to: type=gha,mode=max | ||||||
|       - uses: actions/attest-build-provenance@v2 |  | ||||||
|         id: attest |  | ||||||
|         if: ${{ steps.ev.outputs.shouldPush == 'true' }} |  | ||||||
|         with: |  | ||||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} |  | ||||||
|           subject-digest: ${{ steps.push.outputs.digest }} |  | ||||||
|           push-to-registry: true |  | ||||||
|   build-binary: |   build-binary: | ||||||
|     timeout-minutes: 120 |     timeout-minutes: 120 | ||||||
|     needs: |     needs: | ||||||
|  | |||||||
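
For context on the attestation steps in the left-hand column of the outpost container build above: docker/build-push-action exposes the digest of the pushed image as a step output, which actions/attest-build-provenance then signs and pushes back to the registry. A rough sketch, with placeholder image names, assuming the job has the permissions shown:

    permissions:
      packages: write
      # Needed for attestation
      id-token: write
      attestations: write
    steps:
      - name: Build Docker Image
        id: push
        uses: docker/build-push-action@v6
        with:
          context: .
          push: true
          tags: ghcr.io/example/image:dev   # placeholder tag
      - uses: actions/attest-build-provenance@v2
        with:
          subject-name: ghcr.io/example/image   # placeholder image name
          subject-digest: ${{ steps.push.outputs.digest }}
          push-to-registry: true
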
.github/workflows/ci-web.yml (118 changed lines)
							| @ -12,23 +12,14 @@ on: | |||||||
|       - version-* |       - version-* | ||||||
|  |  | ||||||
| jobs: | jobs: | ||||||
|   lint: |   lint-eslint: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     strategy: |     strategy: | ||||||
|       fail-fast: false |       fail-fast: false | ||||||
|       matrix: |       matrix: | ||||||
|         command: |  | ||||||
|           - lint |  | ||||||
|           - lint:lockfile |  | ||||||
|           - tsc |  | ||||||
|           - prettier-check |  | ||||||
|         project: |         project: | ||||||
|           - web |           - web | ||||||
|         include: |           - tests/wdio | ||||||
|           - command: tsc |  | ||||||
|             project: web |  | ||||||
|           - command: lit-analyse |  | ||||||
|             project: web |  | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|       - uses: actions/setup-node@v4 |       - uses: actions/setup-node@v4 | ||||||
| @ -37,14 +28,83 @@ jobs: | |||||||
|           cache: "npm" |           cache: "npm" | ||||||
|           cache-dependency-path: ${{ matrix.project }}/package-lock.json |           cache-dependency-path: ${{ matrix.project }}/package-lock.json | ||||||
|       - working-directory: ${{ matrix.project }}/ |       - working-directory: ${{ matrix.project }}/ | ||||||
|         run: | |         run: npm ci | ||||||
|           npm ci |  | ||||||
|       - name: Generate API |       - name: Generate API | ||||||
|         run: make gen-client-ts |         run: make gen-client-ts | ||||||
|       - name: Lint |       - name: Eslint | ||||||
|         working-directory: ${{ matrix.project }}/ |         working-directory: ${{ matrix.project }}/ | ||||||
|         run: npm run ${{ matrix.command }} |         run: npm run lint | ||||||
|  |   lint-build: | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |     steps: | ||||||
|  |       - uses: actions/checkout@v4 | ||||||
|  |       - uses: actions/setup-node@v4 | ||||||
|  |         with: | ||||||
|  |           node-version-file: web/package.json | ||||||
|  |           cache: "npm" | ||||||
|  |           cache-dependency-path: web/package-lock.json | ||||||
|  |       - working-directory: web/ | ||||||
|  |         run: npm ci | ||||||
|  |       - name: Generate API | ||||||
|  |         run: make gen-client-ts | ||||||
|  |       - name: TSC | ||||||
|  |         working-directory: web/ | ||||||
|  |         run: npm run tsc | ||||||
|  |   lint-prettier: | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |     strategy: | ||||||
|  |       fail-fast: false | ||||||
|  |       matrix: | ||||||
|  |         project: | ||||||
|  |           - web | ||||||
|  |           - tests/wdio | ||||||
|  |     steps: | ||||||
|  |       - uses: actions/checkout@v4 | ||||||
|  |       - uses: actions/setup-node@v4 | ||||||
|  |         with: | ||||||
|  |           node-version-file: ${{ matrix.project }}/package.json | ||||||
|  |           cache: "npm" | ||||||
|  |           cache-dependency-path: ${{ matrix.project }}/package-lock.json | ||||||
|  |       - working-directory: ${{ matrix.project }}/ | ||||||
|  |         run: npm ci | ||||||
|  |       - name: Generate API | ||||||
|  |         run: make gen-client-ts | ||||||
|  |       - name: prettier | ||||||
|  |         working-directory: ${{ matrix.project }}/ | ||||||
|  |         run: npm run prettier-check | ||||||
|  |   lint-lit-analyse: | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |     steps: | ||||||
|  |       - uses: actions/checkout@v4 | ||||||
|  |       - uses: actions/setup-node@v4 | ||||||
|  |         with: | ||||||
|  |           node-version-file: web/package.json | ||||||
|  |           cache: "npm" | ||||||
|  |           cache-dependency-path: web/package-lock.json | ||||||
|  |       - working-directory: web/ | ||||||
|  |         run: | | ||||||
|  |           npm ci | ||||||
|  |           # lit-analyse doesn't understand path rewrites, so make it | ||||||
|  |           # believe it's an actual module | ||||||
|  |           cd node_modules/@goauthentik | ||||||
|  |           ln -s ../../src/ web | ||||||
|  |       - name: Generate API | ||||||
|  |         run: make gen-client-ts | ||||||
|  |       - name: lit-analyse | ||||||
|  |         working-directory: web/ | ||||||
|  |         run: npm run lit-analyse | ||||||
|  |   ci-web-mark: | ||||||
|  |     needs: | ||||||
|  |       - lint-eslint | ||||||
|  |       - lint-prettier | ||||||
|  |       - lint-lit-analyse | ||||||
|  |       - lint-build | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |     steps: | ||||||
|  |       - run: echo mark | ||||||
|   build: |   build: | ||||||
|  |     needs: | ||||||
|  |       - ci-web-mark | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
| @ -60,31 +120,3 @@ jobs: | |||||||
|       - name: build |       - name: build | ||||||
|         working-directory: web/ |         working-directory: web/ | ||||||
|         run: npm run build |         run: npm run build | ||||||
|   ci-web-mark: |  | ||||||
|     if: always() |  | ||||||
|     needs: |  | ||||||
|       - build |  | ||||||
|       - lint |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|     steps: |  | ||||||
|       - uses: re-actors/alls-green@release/v1 |  | ||||||
|         with: |  | ||||||
|           jobs: ${{ toJSON(needs) }} |  | ||||||
|   test: |  | ||||||
|     needs: |  | ||||||
|       - ci-web-mark |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|     steps: |  | ||||||
|       - uses: actions/checkout@v4 |  | ||||||
|       - uses: actions/setup-node@v4 |  | ||||||
|         with: |  | ||||||
|           node-version-file: web/package.json |  | ||||||
|           cache: "npm" |  | ||||||
|           cache-dependency-path: web/package-lock.json |  | ||||||
|       - working-directory: web/ |  | ||||||
|         run: npm ci |  | ||||||
|       - name: Generate API |  | ||||||
|         run: make gen-client-ts |  | ||||||
|       - name: test |  | ||||||
|         working-directory: web/ |  | ||||||
|         run: npm run test || exit 0 |  | ||||||
|  | |||||||
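
The consolidated lint job in the left-hand column of ci-web.yml above drives every check through one matrix; the include entry for lit-analyse appears to add an extra combination on top of the plain command values, since it does not match any existing one. A trimmed sketch of that shape (command list and setup steps shortened for illustration):

    lint:
      runs-on: ubuntu-latest
      strategy:
        fail-fast: false
        matrix:
          command:
            - lint
            - tsc
            - prettier-check
          project:
            - web
          include:
            # does not match an existing command value, so it is added
            # as an extra (command=lit-analyse, project=web) combination
            - command: lit-analyse
              project: web
      steps:
        - uses: actions/checkout@v4
        - working-directory: ${{ matrix.project }}/
          run: npm ci
        - name: Lint
          working-directory: ${{ matrix.project }}/
          run: npm run ${{ matrix.command }}
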
.github/workflows/ci-website.yml (25 changed lines)
							| @ -12,21 +12,20 @@ on: | |||||||
|       - version-* |       - version-* | ||||||
|  |  | ||||||
| jobs: | jobs: | ||||||
|   lint: |   lint-prettier: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     strategy: |  | ||||||
|       fail-fast: false |  | ||||||
|       matrix: |  | ||||||
|         command: |  | ||||||
|           - lint:lockfile |  | ||||||
|           - prettier-check |  | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|  |       - uses: actions/setup-node@v4 | ||||||
|  |         with: | ||||||
|  |           node-version-file: website/package.json | ||||||
|  |           cache: "npm" | ||||||
|  |           cache-dependency-path: website/package-lock.json | ||||||
|       - working-directory: website/ |       - working-directory: website/ | ||||||
|         run: npm ci |         run: npm ci | ||||||
|       - name: Lint |       - name: prettier | ||||||
|         working-directory: website/ |         working-directory: website/ | ||||||
|         run: npm run ${{ matrix.command }} |         run: npm run prettier-check | ||||||
|   test: |   test: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
| @ -49,6 +48,7 @@ jobs: | |||||||
|       matrix: |       matrix: | ||||||
|         job: |         job: | ||||||
|           - build |           - build | ||||||
|  |           - build-docs-only | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|       - uses: actions/setup-node@v4 |       - uses: actions/setup-node@v4 | ||||||
| @ -62,13 +62,10 @@ jobs: | |||||||
|         working-directory: website/ |         working-directory: website/ | ||||||
|         run: npm run ${{ matrix.job }} |         run: npm run ${{ matrix.job }} | ||||||
|   ci-website-mark: |   ci-website-mark: | ||||||
|     if: always() |  | ||||||
|     needs: |     needs: | ||||||
|       - lint |       - lint-prettier | ||||||
|       - test |       - test | ||||||
|       - build |       - build | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - uses: re-actors/alls-green@release/v1 |       - run: echo mark | ||||||
|         with: |  | ||||||
|           jobs: ${{ toJSON(needs) }} |  | ||||||
|  | |||||||
.github/workflows/gen-update-webauthn-mds.yml (44 changed lines)
							| @ -1,44 +0,0 @@ | |||||||
| name: authentik-gen-update-webauthn-mds |  | ||||||
| on: |  | ||||||
|   workflow_dispatch: |  | ||||||
|   schedule: |  | ||||||
|     - cron: '30 1 1,15 * *' |  | ||||||
|  |  | ||||||
| env: |  | ||||||
|   POSTGRES_DB: authentik |  | ||||||
|   POSTGRES_USER: authentik |  | ||||||
|   POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77" |  | ||||||
|  |  | ||||||
| jobs: |  | ||||||
|   build: |  | ||||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|     steps: |  | ||||||
|       - id: generate_token |  | ||||||
|         uses: tibdex/github-app-token@v2 |  | ||||||
|         with: |  | ||||||
|           app_id: ${{ secrets.GH_APP_ID }} |  | ||||||
|           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }} |  | ||||||
|       - uses: actions/checkout@v4 |  | ||||||
|         with: |  | ||||||
|           token: ${{ steps.generate_token.outputs.token }} |  | ||||||
|       - name: Setup authentik env |  | ||||||
|         uses: ./.github/actions/setup |  | ||||||
|       - run: poetry run ak update_webauthn_mds |  | ||||||
|       - uses: peter-evans/create-pull-request@v7 |  | ||||||
|         id: cpr |  | ||||||
|         with: |  | ||||||
|           token: ${{ steps.generate_token.outputs.token }} |  | ||||||
|           branch: update-fido-mds-client |  | ||||||
|           commit-message: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs" |  | ||||||
|           title: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs" |  | ||||||
|           body: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs" |  | ||||||
|           delete-branch: true |  | ||||||
|           signoff: true |  | ||||||
|           # ID from https://api.github.com/users/authentik-automation[bot] |  | ||||||
|           author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> |  | ||||||
|       - uses: peter-evans/enable-pull-request-automerge@v3 |  | ||||||
|         with: |  | ||||||
|           token: ${{ steps.generate_token.outputs.token }} |  | ||||||
|           pull-request-number: ${{ steps.cpr.outputs.pull-request-number }} |  | ||||||
.github/workflows/ghcr-retention.yml (1 changed line)
							| @ -7,7 +7,6 @@ on: | |||||||
|  |  | ||||||
| jobs: | jobs: | ||||||
|   clean-ghcr: |   clean-ghcr: | ||||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} |  | ||||||
|     name: Delete old unused container images |     name: Delete old unused container images | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|  | |||||||
.github/workflows/image-compress.yml (2 changed lines)
							| @ -42,7 +42,7 @@ jobs: | |||||||
|         with: |         with: | ||||||
|           githubToken: ${{ steps.generate_token.outputs.token }} |           githubToken: ${{ steps.generate_token.outputs.token }} | ||||||
|           compressOnly: ${{ github.event_name != 'pull_request' }} |           compressOnly: ${{ github.event_name != 'pull_request' }} | ||||||
|       - uses: peter-evans/create-pull-request@v7 |       - uses: peter-evans/create-pull-request@v6 | ||||||
|         if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}" |         if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}" | ||||||
|         id: cpr |         id: cpr | ||||||
|         with: |         with: | ||||||
|  | |||||||
.github/workflows/publish-source-docs.yml (1 changed line)
							| @ -12,7 +12,6 @@ env: | |||||||
|  |  | ||||||
| jobs: | jobs: | ||||||
|   publish-source-docs: |   publish-source-docs: | ||||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} |  | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     timeout-minutes: 120 |     timeout-minutes: 120 | ||||||
|     steps: |     steps: | ||||||
|  | |||||||
.github/workflows/release-next-branch.yml (1 changed line)
							| @ -11,7 +11,6 @@ permissions: | |||||||
|  |  | ||||||
| jobs: | jobs: | ||||||
|   update-next: |   update-next: | ||||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} |  | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     environment: internal-production |     environment: internal-production | ||||||
|     steps: |     steps: | ||||||
|  | |||||||
.github/workflows/release-publish.yml (108 changed lines)
							| @ -7,27 +7,51 @@ on: | |||||||
|  |  | ||||||
| jobs: | jobs: | ||||||
|   build-server: |   build-server: | ||||||
|     uses: ./.github/workflows/_reusable-docker-build.yaml |     runs-on: ubuntu-latest | ||||||
|     secrets: inherit |  | ||||||
|     permissions: |     permissions: | ||||||
|       # Needed to upload container images to ghcr.io |       # Needed to upload container images to ghcr.io | ||||||
|       packages: write |       packages: write | ||||||
|       # Needed for attestation |     steps: | ||||||
|       id-token: write |       - uses: actions/checkout@v4 | ||||||
|       attestations: write |       - name: Set up QEMU | ||||||
|     with: |         uses: docker/setup-qemu-action@v3.0.0 | ||||||
|       image_name: ghcr.io/goauthentik/server,beryju/authentik |       - name: Set up Docker Buildx | ||||||
|       release: true |         uses: docker/setup-buildx-action@v3 | ||||||
|       registry_dockerhub: true |       - name: prepare variables | ||||||
|       registry_ghcr: true |         uses: ./.github/actions/docker-push-variables | ||||||
|  |         id: ev | ||||||
|  |         with: | ||||||
|  |           image-name: ghcr.io/goauthentik/server,beryju/authentik | ||||||
|  |       - name: Docker Login Registry | ||||||
|  |         uses: docker/login-action@v3 | ||||||
|  |         with: | ||||||
|  |           username: ${{ secrets.DOCKER_USERNAME }} | ||||||
|  |           password: ${{ secrets.DOCKER_PASSWORD }} | ||||||
|  |       - name: Login to GitHub Container Registry | ||||||
|  |         uses: docker/login-action@v3 | ||||||
|  |         with: | ||||||
|  |           registry: ghcr.io | ||||||
|  |           username: ${{ github.repository_owner }} | ||||||
|  |           password: ${{ secrets.GITHUB_TOKEN }} | ||||||
|  |       - name: make empty clients | ||||||
|  |         run: | | ||||||
|  |           mkdir -p ./gen-ts-api | ||||||
|  |           mkdir -p ./gen-go-api | ||||||
|  |       - name: Build Docker Image | ||||||
|  |         uses: docker/build-push-action@v5 | ||||||
|  |         with: | ||||||
|  |           context: . | ||||||
|  |           push: true | ||||||
|  |           secrets: | | ||||||
|  |             GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }} | ||||||
|  |             GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }} | ||||||
|  |           tags: ${{ steps.ev.outputs.imageTags }} | ||||||
|  |           platforms: linux/amd64,linux/arm64 | ||||||
|   build-outpost: |   build-outpost: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     permissions: |     permissions: | ||||||
|       # Needed to upload container images to ghcr.io |       # Needed to upload container images to ghcr.io | ||||||
|       packages: write |       packages: write | ||||||
|       # Needed for attestation |  | ||||||
|       id-token: write |  | ||||||
|       attestations: write |  | ||||||
|     strategy: |     strategy: | ||||||
|       fail-fast: false |       fail-fast: false | ||||||
|       matrix: |       matrix: | ||||||
| @ -42,14 +66,12 @@ jobs: | |||||||
|         with: |         with: | ||||||
|           go-version-file: "go.mod" |           go-version-file: "go.mod" | ||||||
|       - name: Set up QEMU |       - name: Set up QEMU | ||||||
|         uses: docker/setup-qemu-action@v3.4.0 |         uses: docker/setup-qemu-action@v3.0.0 | ||||||
|       - name: Set up Docker Buildx |       - name: Set up Docker Buildx | ||||||
|         uses: docker/setup-buildx-action@v3 |         uses: docker/setup-buildx-action@v3 | ||||||
|       - name: prepare variables |       - name: prepare variables | ||||||
|         uses: ./.github/actions/docker-push-variables |         uses: ./.github/actions/docker-push-variables | ||||||
|         id: ev |         id: ev | ||||||
|         env: |  | ||||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} |  | ||||||
|         with: |         with: | ||||||
|           image-name: ghcr.io/goauthentik/${{ matrix.type }},beryju/authentik-${{ matrix.type }} |           image-name: ghcr.io/goauthentik/${{ matrix.type }},beryju/authentik-${{ matrix.type }} | ||||||
|       - name: make empty clients |       - name: make empty clients | ||||||
| @ -68,22 +90,13 @@ jobs: | |||||||
|           username: ${{ github.repository_owner }} |           username: ${{ github.repository_owner }} | ||||||
|           password: ${{ secrets.GITHUB_TOKEN }} |           password: ${{ secrets.GITHUB_TOKEN }} | ||||||
|       - name: Build Docker Image |       - name: Build Docker Image | ||||||
|         uses: docker/build-push-action@v6 |         uses: docker/build-push-action@v5 | ||||||
|         id: push |  | ||||||
|         with: |         with: | ||||||
|           push: true |           push: true | ||||||
|           build-args: | |  | ||||||
|             VERSION=${{ github.ref }} |  | ||||||
|           tags: ${{ steps.ev.outputs.imageTags }} |           tags: ${{ steps.ev.outputs.imageTags }} | ||||||
|           file: ${{ matrix.type }}.Dockerfile |           file: ${{ matrix.type }}.Dockerfile | ||||||
|           platforms: linux/amd64,linux/arm64 |           platforms: linux/amd64,linux/arm64 | ||||||
|           context: . |           context: . | ||||||
|       - uses: actions/attest-build-provenance@v2 |  | ||||||
|         id: attest |  | ||||||
|         with: |  | ||||||
|           subject-name: ${{ steps.ev.outputs.attestImageNames }} |  | ||||||
|           subject-digest: ${{ steps.push.outputs.digest }} |  | ||||||
|           push-to-registry: true |  | ||||||
|   build-outpost-binary: |   build-outpost-binary: | ||||||
|     timeout-minutes: 120 |     timeout-minutes: 120 | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
| @ -128,27 +141,6 @@ jobs: | |||||||
|           file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} |           file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} | ||||||
|           asset_name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} |           asset_name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} | ||||||
|           tag: ${{ github.ref }} |           tag: ${{ github.ref }} | ||||||
|   upload-aws-cfn-template: |  | ||||||
|     permissions: |  | ||||||
|       # Needed for AWS login |  | ||||||
|       id-token: write |  | ||||||
|       contents: read |  | ||||||
|     needs: |  | ||||||
|       - build-server |  | ||||||
|       - build-outpost |  | ||||||
|     env: |  | ||||||
|       AWS_REGION: eu-central-1 |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|     steps: |  | ||||||
|       - uses: actions/checkout@v4 |  | ||||||
|       - uses: aws-actions/configure-aws-credentials@v4 |  | ||||||
|         with: |  | ||||||
|           role-to-assume: "arn:aws:iam::016170277896:role/github_goauthentik_authentik" |  | ||||||
|           aws-region: ${{ env.AWS_REGION }} |  | ||||||
|       - name: Upload template |  | ||||||
|         run: | |  | ||||||
|           aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.${{ github.ref }}.yaml |  | ||||||
|           aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.latest.yaml |  | ||||||
|   test-release: |   test-release: | ||||||
|     needs: |     needs: | ||||||
|       - build-server |       - build-server | ||||||
| @ -159,12 +151,12 @@ jobs: | |||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|       - name: Run test suite in final docker images |       - name: Run test suite in final docker images | ||||||
|         run: | |         run: | | ||||||
|           echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env |           echo "PG_PASS=$(openssl rand -base64 32)" >> .env | ||||||
|           echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env |           echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env | ||||||
|           docker compose pull -q |           docker-compose pull -q | ||||||
|           docker compose up --no-start |           docker-compose up --no-start | ||||||
|           docker compose start postgresql redis |           docker-compose start postgresql redis | ||||||
|           docker compose run -u root server test-all |           docker-compose run -u root server test-all | ||||||
|   sentry-release: |   sentry-release: | ||||||
|     needs: |     needs: | ||||||
|       - build-server |       - build-server | ||||||
| @ -176,14 +168,12 @@ jobs: | |||||||
|       - name: prepare variables |       - name: prepare variables | ||||||
|         uses: ./.github/actions/docker-push-variables |         uses: ./.github/actions/docker-push-variables | ||||||
|         id: ev |         id: ev | ||||||
|         env: |  | ||||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} |  | ||||||
|         with: |         with: | ||||||
|           image-name: ghcr.io/goauthentik/server |           image-name: ghcr.io/goauthentik/server | ||||||
|       - name: Get static files from docker image |       - name: Get static files from docker image | ||||||
|         run: | |         run: | | ||||||
|           docker pull ${{ steps.ev.outputs.imageMainName }} |           docker pull ${{ steps.ev.outputs.imageMainTag }} | ||||||
|           container=$(docker container create ${{ steps.ev.outputs.imageMainName }}) |           container=$(docker container create ${{ steps.ev.outputs.imageMainTag }}) | ||||||
|           docker cp ${container}:web/ . |           docker cp ${container}:web/ . | ||||||
|       - name: Create a Sentry.io release |       - name: Create a Sentry.io release | ||||||
|         uses: getsentry/action-release@v1 |         uses: getsentry/action-release@v1 | ||||||
|  | |||||||
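
The build-server job in the left-hand column of release-publish.yml above delegates the whole image build to a reusable workflow. A minimal caller sketch using only the inputs visible in this diff (the called file, _reusable-docker-build.yaml, is not part of this compare, so its input names are taken on faith from the caller side):

    jobs:
      build-server:
        uses: ./.github/workflows/_reusable-docker-build.yaml
        # forward repository/organization secrets to the called workflow
        secrets: inherit
        permissions:
          packages: write
          # Needed for attestation
          id-token: write
          attestations: write
        with:
          image_name: ghcr.io/goauthentik/server
          release: true
          registry_dockerhub: true
          registry_ghcr: true
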
.github/workflows/release-tag.yml (13 changed lines)
							| @ -14,7 +14,16 @@ jobs: | |||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|       - name: Pre-release test |       - name: Pre-release test | ||||||
|         run: | |         run: | | ||||||
|           make test-docker |           echo "PG_PASS=$(openssl rand -base64 32)" >> .env | ||||||
|  |           echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env | ||||||
|  |           docker buildx install | ||||||
|  |           mkdir -p ./gen-ts-api | ||||||
|  |           docker build -t testing:latest . | ||||||
|  |           echo "AUTHENTIK_IMAGE=testing" >> .env | ||||||
|  |           echo "AUTHENTIK_TAG=latest" >> .env | ||||||
|  |           docker-compose up --no-start | ||||||
|  |           docker-compose start postgresql redis | ||||||
|  |           docker-compose run -u root server test-all | ||||||
|       - id: generate_token |       - id: generate_token | ||||||
|         uses: tibdex/github-app-token@v2 |         uses: tibdex/github-app-token@v2 | ||||||
|         with: |         with: | ||||||
| @ -23,8 +32,6 @@ jobs: | |||||||
|       - name: prepare variables |       - name: prepare variables | ||||||
|         uses: ./.github/actions/docker-push-variables |         uses: ./.github/actions/docker-push-variables | ||||||
|         id: ev |         id: ev | ||||||
|         env: |  | ||||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} |  | ||||||
|         with: |         with: | ||||||
|           image-name: ghcr.io/goauthentik/server |           image-name: ghcr.io/goauthentik/server | ||||||
|       - name: Create Release |       - name: Create Release | ||||||
|  | |||||||
.github/workflows/repo-mirror.yml (21 changed lines)
							| @ -1,21 +0,0 @@ | |||||||
| name: "authentik-repo-mirror" |  | ||||||
|  |  | ||||||
| on: [push, delete] |  | ||||||
|  |  | ||||||
| jobs: |  | ||||||
|   to_internal: |  | ||||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|     steps: |  | ||||||
|       - uses: actions/checkout@v4 |  | ||||||
|         with: |  | ||||||
|           fetch-depth: 0 |  | ||||||
|       - if: ${{ env.MIRROR_KEY != '' }} |  | ||||||
|         uses: pixta-dev/repository-mirroring-action@v1 |  | ||||||
|         with: |  | ||||||
|           target_repo_url: |  | ||||||
|             git@github.com:goauthentik/authentik-internal.git |  | ||||||
|           ssh_private_key: |  | ||||||
|             ${{ secrets.GH_MIRROR_KEY }} |  | ||||||
|         env: |  | ||||||
|           MIRROR_KEY: ${{ secrets.GH_MIRROR_KEY }} |  | ||||||
.github/workflows/repo-stale.yml (9 changed lines)
							| @ -1,8 +1,8 @@ | |||||||
| name: "authentik-repo-stale" | name: 'authentik-repo-stale' | ||||||
|  |  | ||||||
| on: | on: | ||||||
|   schedule: |   schedule: | ||||||
|     - cron: "30 1 * * *" |     - cron: '30 1 * * *' | ||||||
|   workflow_dispatch: |   workflow_dispatch: | ||||||
|  |  | ||||||
| permissions: | permissions: | ||||||
| @ -11,7 +11,6 @@ permissions: | |||||||
|  |  | ||||||
| jobs: | jobs: | ||||||
|   stale: |   stale: | ||||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} |  | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - id: generate_token |       - id: generate_token | ||||||
| @ -24,8 +23,8 @@ jobs: | |||||||
|           repo-token: ${{ steps.generate_token.outputs.token }} |           repo-token: ${{ steps.generate_token.outputs.token }} | ||||||
|           days-before-stale: 60 |           days-before-stale: 60 | ||||||
|           days-before-close: 7 |           days-before-close: 7 | ||||||
|           exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing |           exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question | ||||||
|           stale-issue-label: status/stale |           stale-issue-label: wontfix | ||||||
|           stale-issue-message: > |           stale-issue-message: > | ||||||
|             This issue has been automatically marked as stale because it has not had |             This issue has been automatically marked as stale because it has not had | ||||||
|             recent activity. It will be closed if no further activity occurs. Thank you |             recent activity. It will be closed if no further activity occurs. Thank you | ||||||
|  | |||||||
| @ -32,7 +32,7 @@ jobs: | |||||||
|           poetry run ak compilemessages |           poetry run ak compilemessages | ||||||
|           make web-check-compile |           make web-check-compile | ||||||
|       - name: Create Pull Request |       - name: Create Pull Request | ||||||
|         uses: peter-evans/create-pull-request@v7 |         uses: peter-evans/create-pull-request@v6 | ||||||
|         with: |         with: | ||||||
|           token: ${{ steps.generate_token.outputs.token }} |           token: ${{ steps.generate_token.outputs.token }} | ||||||
|           branch: extract-compile-backend-translation |           branch: extract-compile-backend-translation | ||||||
|  | |||||||
| @ -1,4 +1,4 @@ | |||||||
| name: authentik-api-ts-publish | name: authentik-web-api-publish | ||||||
| on: | on: | ||||||
|   push: |   push: | ||||||
|     branches: [main] |     branches: [main] | ||||||
| @ -7,7 +7,6 @@ on: | |||||||
|   workflow_dispatch: |   workflow_dispatch: | ||||||
| jobs: | jobs: | ||||||
|   build: |   build: | ||||||
|     if: ${{ github.repository != 'goauthentik/authentik-internal' }} |  | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - id: generate_token |       - id: generate_token | ||||||
| @ -32,16 +31,11 @@ jobs: | |||||||
|         env: |         env: | ||||||
|           NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }} |           NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }} | ||||||
|       - name: Upgrade /web |       - name: Upgrade /web | ||||||
|         working-directory: web |         working-directory: web/ | ||||||
|         run: | |         run: | | ||||||
|           export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'` |           export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'` | ||||||
|           npm i @goauthentik/api@$VERSION |           npm i @goauthentik/api@$VERSION | ||||||
|       - name: Upgrade /web/packages/sfe |       - uses: peter-evans/create-pull-request@v6 | ||||||
|         working-directory: web/packages/sfe |  | ||||||
|         run: | |  | ||||||
|           export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'` |  | ||||||
|           npm i @goauthentik/api@$VERSION |  | ||||||
|       - uses: peter-evans/create-pull-request@v7 |  | ||||||
|         id: cpr |         id: cpr | ||||||
|         with: |         with: | ||||||
|           token: ${{ steps.generate_token.outputs.token }} |           token: ${{ steps.generate_token.outputs.token }} | ||||||
.gitignore (3 changed lines)
							| @ -209,6 +209,3 @@ source_docs/ | |||||||
|  |  | ||||||
| ### Golang ### | ### Golang ### | ||||||
| /vendor/ | /vendor/ | ||||||
|  |  | ||||||
| ### Docker ### |  | ||||||
| docker-compose.override.yml |  | ||||||
|  | |||||||
.vscode/extensions.json (6 changed lines)
							| @ -2,7 +2,6 @@ | |||||||
|     "recommendations": [ |     "recommendations": [ | ||||||
|         "bashmish.es6-string-css", |         "bashmish.es6-string-css", | ||||||
|         "bpruitt-goddard.mermaid-markdown-syntax-highlighting", |         "bpruitt-goddard.mermaid-markdown-syntax-highlighting", | ||||||
|         "charliermarsh.ruff", |  | ||||||
|         "dbaeumer.vscode-eslint", |         "dbaeumer.vscode-eslint", | ||||||
|         "EditorConfig.EditorConfig", |         "EditorConfig.EditorConfig", | ||||||
|         "esbenp.prettier-vscode", |         "esbenp.prettier-vscode", | ||||||
| @ -11,10 +10,11 @@ | |||||||
|         "Gruntfuggly.todo-tree", |         "Gruntfuggly.todo-tree", | ||||||
|         "mechatroner.rainbow-csv", |         "mechatroner.rainbow-csv", | ||||||
|         "ms-python.black-formatter", |         "ms-python.black-formatter", | ||||||
|         "ms-python.black-formatter", |         "ms-python.isort", | ||||||
|         "ms-python.debugpy", |         "ms-python.pylint", | ||||||
|         "ms-python.python", |         "ms-python.python", | ||||||
|         "ms-python.vscode-pylance", |         "ms-python.vscode-pylance", | ||||||
|  |         "ms-python.black-formatter", | ||||||
|         "redhat.vscode-yaml", |         "redhat.vscode-yaml", | ||||||
|         "Tobermory.es6-string-html", |         "Tobermory.es6-string-html", | ||||||
|         "unifiedjs.vscode-mdx", |         "unifiedjs.vscode-mdx", | ||||||
|  | |||||||
.vscode/launch.json (66 changed lines)
							| @ -2,76 +2,26 @@ | |||||||
|     "version": "0.2.0", |     "version": "0.2.0", | ||||||
|     "configurations": [ |     "configurations": [ | ||||||
|         { |         { | ||||||
|             "name": "Debug: Attach Server Core", |             "name": "Python: PDB attach Server", | ||||||
|             "type": "debugpy", |             "type": "python", | ||||||
|             "request": "attach", |             "request": "attach", | ||||||
|             "connect": { |             "connect": { | ||||||
|                 "host": "localhost", |                 "host": "localhost", | ||||||
|                 "port": 9901 |                 "port": 6800 | ||||||
|             }, |             }, | ||||||
|             "pathMappings": [ |             "justMyCode": true, | ||||||
|                 { |  | ||||||
|                     "localRoot": "${workspaceFolder}", |  | ||||||
|                     "remoteRoot": "." |  | ||||||
|                 } |  | ||||||
|             ], |  | ||||||
|             "django": true |             "django": true | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "name": "Debug: Attach Worker", |             "name": "Python: PDB attach Worker", | ||||||
|             "type": "debugpy", |             "type": "python", | ||||||
|             "request": "attach", |             "request": "attach", | ||||||
|             "connect": { |             "connect": { | ||||||
|                 "host": "localhost", |                 "host": "localhost", | ||||||
|                 "port": 9901 |                 "port": 6900 | ||||||
|             }, |             }, | ||||||
|             "pathMappings": [ |             "justMyCode": true, | ||||||
|                 { |  | ||||||
|                     "localRoot": "${workspaceFolder}", |  | ||||||
|                     "remoteRoot": "." |  | ||||||
|                 } |  | ||||||
|             ], |  | ||||||
|             "django": true |             "django": true | ||||||
|         }, |         }, | ||||||
|         { |  | ||||||
|             "name": "Debug: Start Server Router", |  | ||||||
|             "type": "go", |  | ||||||
|             "request": "launch", |  | ||||||
|             "mode": "auto", |  | ||||||
|             "program": "${workspaceFolder}/cmd/server", |  | ||||||
|             "cwd": "${workspaceFolder}" |  | ||||||
|         }, |  | ||||||
|         { |  | ||||||
|             "name": "Debug: Start LDAP Outpost", |  | ||||||
|             "type": "go", |  | ||||||
|             "request": "launch", |  | ||||||
|             "mode": "auto", |  | ||||||
|             "program": "${workspaceFolder}/cmd/ldap", |  | ||||||
|             "cwd": "${workspaceFolder}" |  | ||||||
|         }, |  | ||||||
|         { |  | ||||||
|             "name": "Debug: Start Proxy Outpost", |  | ||||||
|             "type": "go", |  | ||||||
|             "request": "launch", |  | ||||||
|             "mode": "auto", |  | ||||||
|             "program": "${workspaceFolder}/cmd/proxy", |  | ||||||
|             "cwd": "${workspaceFolder}" |  | ||||||
|         }, |  | ||||||
|         { |  | ||||||
|             "name": "Debug: Start RAC Outpost", |  | ||||||
|             "type": "go", |  | ||||||
|             "request": "launch", |  | ||||||
|             "mode": "auto", |  | ||||||
|             "program": "${workspaceFolder}/cmd/rac", |  | ||||||
|             "cwd": "${workspaceFolder}" |  | ||||||
|         }, |  | ||||||
|         { |  | ||||||
|             "name": "Debug: Start Radius Outpost", |  | ||||||
|             "type": "go", |  | ||||||
|             "request": "launch", |  | ||||||
|             "mode": "auto", |  | ||||||
|             "program": "${workspaceFolder}/cmd/radius", |  | ||||||
|             "cwd": "${workspaceFolder}" |  | ||||||
|         } |  | ||||||
|     ] |     ] | ||||||
| } | } | ||||||
|  | |||||||
.vscode/settings.json (34 changed lines)
							| @ -4,37 +4,33 @@ | |||||||
|         "asgi", |         "asgi", | ||||||
|         "authentik", |         "authentik", | ||||||
|         "authn", |         "authn", | ||||||
|         "entra", |  | ||||||
|         "goauthentik", |         "goauthentik", | ||||||
|         "jwe", |  | ||||||
|         "jwks", |         "jwks", | ||||||
|         "kubernetes", |  | ||||||
|         "oidc", |         "oidc", | ||||||
|         "openid", |         "openid", | ||||||
|         "passwordless", |  | ||||||
|         "plex", |         "plex", | ||||||
|         "saml", |         "saml", | ||||||
|         "scim", |  | ||||||
|         "slo", |  | ||||||
|         "sso", |  | ||||||
|         "totp", |         "totp", | ||||||
|  |         "webauthn", | ||||||
|         "traefik", |         "traefik", | ||||||
|         "webauthn" |         "passwordless", | ||||||
|  |         "kubernetes", | ||||||
|  |         "sso", | ||||||
|  |         "slo", | ||||||
|  |         "scim", | ||||||
|     ], |     ], | ||||||
|     "todo-tree.tree.showCountsInTree": true, |     "todo-tree.tree.showCountsInTree": true, | ||||||
|     "todo-tree.tree.showBadges": true, |     "todo-tree.tree.showBadges": true, | ||||||
|     "yaml.customTags": [ |     "yaml.customTags": [ | ||||||
|         "!Condition sequence", |  | ||||||
|         "!Context scalar", |  | ||||||
|         "!Enumerate sequence", |  | ||||||
|         "!Env scalar", |  | ||||||
|         "!Find sequence", |         "!Find sequence", | ||||||
|         "!Format sequence", |  | ||||||
|         "!If sequence", |  | ||||||
|         "!Index scalar", |  | ||||||
|         "!KeyOf scalar", |         "!KeyOf scalar", | ||||||
|         "!Value scalar", |         "!Context scalar", | ||||||
|         "!AtIndex scalar" |         "!Context sequence", | ||||||
|  |         "!Format sequence", | ||||||
|  |         "!Condition sequence", | ||||||
|  |         "!Env sequence", | ||||||
|  |         "!Env scalar", | ||||||
|  |         "!If sequence" | ||||||
|     ], |     ], | ||||||
|     "typescript.preferences.importModuleSpecifier": "non-relative", |     "typescript.preferences.importModuleSpecifier": "non-relative", | ||||||
|     "typescript.preferences.importModuleSpecifierEnding": "index", |     "typescript.preferences.importModuleSpecifierEnding": "index", | ||||||
| @ -51,7 +47,9 @@ | |||||||
|             "ignoreCase": false |             "ignoreCase": false | ||||||
|         } |         } | ||||||
|     ], |     ], | ||||||
|     "go.testFlags": ["-count=1"], |     "go.testFlags": [ | ||||||
|  |         "-count=1" | ||||||
|  |     ], | ||||||
|     "github-actions.workflows.pinned.workflows": [ |     "github-actions.workflows.pinned.workflows": [ | ||||||
|         ".github/workflows/ci-main.yml" |         ".github/workflows/ci-main.yml" | ||||||
|     ] |     ] | ||||||
|  | |||||||
.vscode/tasks.json (62 changed lines)
							| @ -2,67 +2,85 @@ | |||||||
|     "version": "2.0.0", |     "version": "2.0.0", | ||||||
|     "tasks": [ |     "tasks": [ | ||||||
|         { |         { | ||||||
|             "label": "authentik/core: make", |             "label": "authentik[core]: format & test", | ||||||
|             "command": "poetry", |             "command": "poetry", | ||||||
|             "args": ["run", "make", "lint-fix", "lint"], |             "args": [ | ||||||
|             "presentation": { |                 "run", | ||||||
|                 "panel": "new" |                 "make" | ||||||
|             }, |             ], | ||||||
|             "group": "test" |             "group": "build", | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/core: run", |             "label": "authentik[core]: run", | ||||||
|             "command": "poetry", |             "command": "poetry", | ||||||
|             "args": ["run", "ak", "server"], |             "args": [ | ||||||
|  |                 "run", | ||||||
|  |                 "make", | ||||||
|  |                 "run", | ||||||
|  |             ], | ||||||
|             "group": "build", |             "group": "build", | ||||||
|             "presentation": { |             "presentation": { | ||||||
|                 "panel": "dedicated", |                 "panel": "dedicated", | ||||||
|                 "group": "running" |                 "group": "running" | ||||||
|             } |             }, | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/web: make", |             "label": "authentik[web]: format", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": ["web"], |             "args": ["web"], | ||||||
|             "group": "build" |             "group": "build", | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/web: watch", |             "label": "authentik[web]: watch", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": ["web-watch"], |             "args": ["web-watch"], | ||||||
|             "group": "build", |             "group": "build", | ||||||
|             "presentation": { |             "presentation": { | ||||||
|                 "panel": "dedicated", |                 "panel": "dedicated", | ||||||
|                 "group": "running" |                 "group": "running" | ||||||
|             } |             }, | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik: install", |             "label": "authentik: install", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": ["install", "-j4"], |             "args": ["install"], | ||||||
|             "group": "build" |             "group": "build", | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/website: make", |             "label": "authentik: i18n-extract", | ||||||
|  |             "command": "poetry", | ||||||
|  |             "args": [ | ||||||
|  |                 "run", | ||||||
|  |                 "make", | ||||||
|  |                 "i18n-extract" | ||||||
|  |             ], | ||||||
|  |             "group": "build", | ||||||
|  |         }, | ||||||
|  |         { | ||||||
|  |             "label": "authentik[website]: format", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": ["website"], |             "args": ["website"], | ||||||
|             "group": "build" |             "group": "build", | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/website: watch", |             "label": "authentik[website]: watch", | ||||||
|             "command": "make", |             "command": "make", | ||||||
|             "args": ["website-watch"], |             "args": ["website-watch"], | ||||||
|             "group": "build", |             "group": "build", | ||||||
|             "presentation": { |             "presentation": { | ||||||
|                 "panel": "dedicated", |                 "panel": "dedicated", | ||||||
|                 "group": "running" |                 "group": "running" | ||||||
|             } |             }, | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "label": "authentik/api: generate", |             "label": "authentik[api]: generate", | ||||||
|             "command": "poetry", |             "command": "poetry", | ||||||
|             "args": ["run", "make", "gen"], |             "args": [ | ||||||
|  |                 "run", | ||||||
|  |                 "make", | ||||||
|  |                 "gen" | ||||||
|  |             ], | ||||||
|             "group": "build" |             "group": "build" | ||||||
|         } |         }, | ||||||
|     ] |     ] | ||||||
| } | } | ||||||
|  | |||||||
							
								
								
									
11  CODEOWNERS
							| @ -15,23 +15,14 @@ go.mod                          @goauthentik/backend | |||||||
| go.sum                          @goauthentik/backend | go.sum                          @goauthentik/backend | ||||||
| # Infrastructure | # Infrastructure | ||||||
| .github/                        @goauthentik/infrastructure | .github/                        @goauthentik/infrastructure | ||||||
| lifecycle/aws/                  @goauthentik/infrastructure |  | ||||||
| Dockerfile                      @goauthentik/infrastructure | Dockerfile                      @goauthentik/infrastructure | ||||||
| *Dockerfile                     @goauthentik/infrastructure | *Dockerfile                     @goauthentik/infrastructure | ||||||
| .dockerignore                   @goauthentik/infrastructure | .dockerignore                   @goauthentik/infrastructure | ||||||
| docker-compose.yml              @goauthentik/infrastructure | docker-compose.yml              @goauthentik/infrastructure | ||||||
| Makefile                        @goauthentik/infrastructure |  | ||||||
| .editorconfig                   @goauthentik/infrastructure |  | ||||||
| CODEOWNERS                      @goauthentik/infrastructure |  | ||||||
| # Web | # Web | ||||||
| web/                            @goauthentik/frontend | web/                            @goauthentik/frontend | ||||||
| tests/wdio/                     @goauthentik/frontend | tests/wdio/                     @goauthentik/frontend | ||||||
| # Locale |  | ||||||
| locale/                         @goauthentik/backend @goauthentik/frontend |  | ||||||
| web/xliff/                      @goauthentik/backend @goauthentik/frontend |  | ||||||
| # Docs & Website | # Docs & Website | ||||||
| website/                        @goauthentik/docs | website/                        @goauthentik/docs | ||||||
| CODE_OF_CONDUCT.md              @goauthentik/docs |  | ||||||
| # Security | # Security | ||||||
| SECURITY.md                     @goauthentik/security @goauthentik/docs | website/docs/security/          @goauthentik/security | ||||||
| website/docs/security/          @goauthentik/security @goauthentik/docs |  | ||||||
|  | |||||||
| @ -1 +1 @@ | |||||||
| website/docs/developer-docs/index.md | website/developer-docs/index.md | ||||||
							
								
								
									
84  Dockerfile
							| @ -1,7 +1,7 @@ | |||||||
| # syntax=docker/dockerfile:1 | # syntax=docker/dockerfile:1 | ||||||
|  |  | ||||||
| # Stage 1: Build website | # Stage 1: Build website | ||||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS website-builder | FROM --platform=${BUILDPLATFORM} docker.io/node:21 as website-builder | ||||||
|  |  | ||||||
| ENV NODE_ENV=production | ENV NODE_ENV=production | ||||||
|  |  | ||||||
| @ -14,28 +14,23 @@ RUN --mount=type=bind,target=/work/website/package.json,src=./website/package.js | |||||||
|  |  | ||||||
| COPY ./website /work/website/ | COPY ./website /work/website/ | ||||||
| COPY ./blueprints /work/blueprints/ | COPY ./blueprints /work/blueprints/ | ||||||
| COPY ./schema.yml /work/ |  | ||||||
| COPY ./SECURITY.md /work/ | COPY ./SECURITY.md /work/ | ||||||
|  |  | ||||||
| RUN npm run build-bundled | RUN npm run build-docs-only | ||||||
|  |  | ||||||
| # Stage 2: Build webui | # Stage 2: Build webui | ||||||
| FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS web-builder | FROM --platform=${BUILDPLATFORM} docker.io/node:21 as web-builder | ||||||
|  |  | ||||||
| ARG GIT_BUILD_HASH |  | ||||||
| ENV GIT_BUILD_HASH=$GIT_BUILD_HASH |  | ||||||
| ENV NODE_ENV=production | ENV NODE_ENV=production | ||||||
|  |  | ||||||
| WORKDIR /work/web | WORKDIR /work/web | ||||||
|  |  | ||||||
| RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \ | RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \ | ||||||
|     --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \ |     --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \ | ||||||
|     --mount=type=bind,target=/work/web/packages/sfe/package.json,src=./web/packages/sfe/package.json \ |     --mount=type=bind,target=/work/.git,src=./.git,readonly \ | ||||||
|     --mount=type=bind,target=/work/web/scripts,src=./web/scripts \ |  | ||||||
|     --mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \ |     --mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \ | ||||||
|     npm ci --include=dev |     npm ci --include=dev | ||||||
|  |  | ||||||
| COPY ./package.json /work |  | ||||||
| COPY ./web /work/web/ | COPY ./web /work/web/ | ||||||
| COPY ./website /work/website/ | COPY ./website /work/website/ | ||||||
| COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api | COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api | ||||||
| @ -43,7 +38,7 @@ COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api | |||||||
| RUN npm run build | RUN npm run build | ||||||
|  |  | ||||||
| # Stage 3: Build go proxy | # Stage 3: Build go proxy | ||||||
| FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/oss/go/microsoft/golang:1.23-fips-bookworm AS go-builder | FROM --platform=${BUILDPLATFORM} docker.io/golang:1.22.0-bookworm AS go-builder | ||||||
|  |  | ||||||
| ARG TARGETOS | ARG TARGETOS | ||||||
| ARG TARGETARCH | ARG TARGETARCH | ||||||
| @ -54,11 +49,6 @@ ARG GOARCH=$TARGETARCH | |||||||
|  |  | ||||||
| WORKDIR /go/src/goauthentik.io | WORKDIR /go/src/goauthentik.io | ||||||
|  |  | ||||||
| RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \ |  | ||||||
|     dpkg --add-architecture arm64 && \ |  | ||||||
|     apt-get update && \ |  | ||||||
|     apt-get install -y --no-install-recommends crossbuild-essential-arm64 gcc-aarch64-linux-gnu |  | ||||||
|  |  | ||||||
| RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \ | RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \ | ||||||
|     --mount=type=bind,target=/go/src/goauthentik.io/go.sum,src=./go.sum \ |     --mount=type=bind,target=/go/src/goauthentik.io/go.sum,src=./go.sum \ | ||||||
|     --mount=type=cache,target=/go/pkg/mod \ |     --mount=type=cache,target=/go/pkg/mod \ | ||||||
| @ -73,17 +63,17 @@ COPY ./internal /go/src/goauthentik.io/internal | |||||||
| COPY ./go.mod /go/src/goauthentik.io/go.mod | COPY ./go.mod /go/src/goauthentik.io/go.mod | ||||||
| COPY ./go.sum /go/src/goauthentik.io/go.sum | COPY ./go.sum /go/src/goauthentik.io/go.sum | ||||||
|  |  | ||||||
|  | ENV CGO_ENABLED=0 | ||||||
|  |  | ||||||
| RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \ | RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \ | ||||||
|     --mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \ |     --mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \ | ||||||
|     if [ "$TARGETARCH" = "arm64" ]; then export CC=aarch64-linux-gnu-gcc && export CC_FOR_TARGET=gcc-aarch64-linux-gnu; fi && \ |     GOARM="${TARGETVARIANT#v}" go build -o /go/authentik ./cmd/server | ||||||
|     CGO_ENABLED=1 GOEXPERIMENT="systemcrypto" GOFLAGS="-tags=requirefips" GOARM="${TARGETVARIANT#v}" \ |  | ||||||
|     go build -o /go/authentik ./cmd/server |  | ||||||
|  |  | ||||||
| # Stage 4: MaxMind GeoIP | # Stage 4: MaxMind GeoIP | ||||||
| FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip | FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v6.1 as geoip | ||||||
|  |  | ||||||
| ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN" | ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN" | ||||||
| ENV GEOIPUPDATE_VERBOSE="1" | ENV GEOIPUPDATE_VERBOSE="true" | ||||||
| ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID" | ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID" | ||||||
| ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY" | ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY" | ||||||
|  |  | ||||||
| @ -94,10 +84,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ | |||||||
|     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" |     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" | ||||||
|  |  | ||||||
| # Stage 5: Python dependencies | # Stage 5: Python dependencies | ||||||
| FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS python-deps | FROM docker.io/python:3.12.2-slim-bookworm AS python-deps | ||||||
|  |  | ||||||
| ARG TARGETARCH |  | ||||||
| ARG TARGETVARIANT |  | ||||||
|  |  | ||||||
| WORKDIR /ak-root/poetry | WORKDIR /ak-root/poetry | ||||||
|  |  | ||||||
| @ -110,57 +97,38 @@ RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloa | |||||||
| RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \ | RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \ | ||||||
|     apt-get update && \ |     apt-get update && \ | ||||||
|     # Required for installing pip packages |     # Required for installing pip packages | ||||||
|     apt-get install -y --no-install-recommends build-essential pkg-config libpq-dev libkrb5-dev |     apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev libpq-dev | ||||||
|  |  | ||||||
| RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \ | RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \ | ||||||
|     --mount=type=bind,target=./poetry.lock,src=./poetry.lock \ |     --mount=type=bind,target=./poetry.lock,src=./poetry.lock \ | ||||||
|     --mount=type=cache,target=/root/.cache/pip \ |     --mount=type=cache,target=/root/.cache/pip \ | ||||||
|     --mount=type=cache,target=/root/.cache/pypoetry \ |     --mount=type=cache,target=/root/.cache/pypoetry \ | ||||||
|     pip install --no-cache cffi && \ |  | ||||||
|     apt-get update && \ |  | ||||||
|     apt-get install -y --no-install-recommends \ |  | ||||||
|         build-essential libffi-dev \ |  | ||||||
|         # Required for cryptography |  | ||||||
|         curl pkg-config \ |  | ||||||
|         # Required for lxml |  | ||||||
|         libxslt-dev zlib1g-dev \ |  | ||||||
|         # Required for xmlsec |  | ||||||
|         libltdl-dev \ |  | ||||||
|         # Required for kadmin |  | ||||||
|         sccache clang && \ |  | ||||||
|     curl https://sh.rustup.rs -sSf | sh -s -- -y && \ |  | ||||||
|     . "$HOME/.cargo/env" && \ |  | ||||||
|     python -m venv /ak-root/venv/ && \ |     python -m venv /ak-root/venv/ && \ | ||||||
|     bash -c "source ${VENV_PATH}/bin/activate && \ |     pip3 install --upgrade pip && \ | ||||||
|     pip3 install --upgrade pip poetry && \ |     pip3 install poetry && \ | ||||||
|     poetry config --local installer.no-binary cryptography,xmlsec,lxml,python-kadmin-rs && \ |     poetry install --only=main --no-ansi --no-interaction | ||||||
|     poetry install --only=main --no-ansi --no-interaction --no-root && \ |  | ||||||
|     pip uninstall cryptography -y && \ |  | ||||||
|     poetry install --only=main --no-ansi --no-interaction --no-root" |  | ||||||
|  |  | ||||||
| # Stage 6: Run | # Stage 6: Run | ||||||
| FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS final-image | FROM docker.io/python:3.12.2-slim-bookworm AS final-image | ||||||
|  |  | ||||||
| ARG VERSION |  | ||||||
| ARG GIT_BUILD_HASH | ARG GIT_BUILD_HASH | ||||||
|  | ARG VERSION | ||||||
| ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ||||||
|  |  | ||||||
| LABEL org.opencontainers.image.url=https://goauthentik.io | LABEL org.opencontainers.image.url https://goauthentik.io | ||||||
| LABEL org.opencontainers.image.description="goauthentik.io Main server image, see https://goauthentik.io for more info." | LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info. | ||||||
| LABEL org.opencontainers.image.source=https://github.com/goauthentik/authentik | LABEL org.opencontainers.image.source https://github.com/goauthentik/authentik | ||||||
| LABEL org.opencontainers.image.version=${VERSION} | LABEL org.opencontainers.image.version ${VERSION} | ||||||
| LABEL org.opencontainers.image.revision=${GIT_BUILD_HASH} | LABEL org.opencontainers.image.revision ${GIT_BUILD_HASH} | ||||||
|  |  | ||||||
| WORKDIR / | WORKDIR / | ||||||
|  |  | ||||||
| # We cannot cache this layer otherwise we'll end up with a bigger image | # We cannot cache this layer otherwise we'll end up with a bigger image | ||||||
| RUN apt-get update && \ | RUN apt-get update && \ | ||||||
|     apt-get upgrade -y && \ |  | ||||||
|     # Required for runtime |     # Required for runtime | ||||||
|     apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates libkrb5-3 libkadm5clnt-mit12 libkdb5-10 libltdl7 libxslt1.1 && \ |     apt-get install -y --no-install-recommends libpq5 openssl libxmlsec1-openssl libmaxminddb0 ca-certificates && \ | ||||||
|     # Required for bootstrap & healthcheck |     # Required for bootstrap & healthcheck | ||||||
|     apt-get install -y --no-install-recommends runit && \ |     apt-get install -y --no-install-recommends runit && \ | ||||||
|     pip3 install --no-cache-dir --upgrade pip && \ |  | ||||||
|     apt-get clean && \ |     apt-get clean && \ | ||||||
|     rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \ |     rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \ | ||||||
|     adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \ |     adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \ | ||||||
| @ -178,12 +146,11 @@ COPY ./tests /tests | |||||||
| COPY ./manage.py / | COPY ./manage.py / | ||||||
| COPY ./blueprints /blueprints | COPY ./blueprints /blueprints | ||||||
| COPY ./lifecycle/ /lifecycle | COPY ./lifecycle/ /lifecycle | ||||||
| COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf |  | ||||||
| COPY --from=go-builder /go/authentik /bin/authentik | COPY --from=go-builder /go/authentik /bin/authentik | ||||||
| COPY --from=python-deps /ak-root/venv /ak-root/venv | COPY --from=python-deps /ak-root/venv /ak-root/venv | ||||||
| COPY --from=web-builder /work/web/dist/ /web/dist/ | COPY --from=web-builder /work/web/dist/ /web/dist/ | ||||||
| COPY --from=web-builder /work/web/authentik/ /web/authentik/ | COPY --from=web-builder /work/web/authentik/ /web/authentik/ | ||||||
| COPY --from=website-builder /work/website/build/ /website/help/ | COPY --from=website-builder /work/website/help/ /website/help/ | ||||||
| COPY --from=geoip /usr/share/GeoIP /geoip | COPY --from=geoip /usr/share/GeoIP /geoip | ||||||
|  |  | ||||||
| USER 1000 | USER 1000 | ||||||
| @ -193,8 +160,7 @@ ENV TMPDIR=/dev/shm/ \ | |||||||
|     PYTHONUNBUFFERED=1 \ |     PYTHONUNBUFFERED=1 \ | ||||||
|     PATH="/ak-root/venv/bin:/lifecycle:$PATH" \ |     PATH="/ak-root/venv/bin:/lifecycle:$PATH" \ | ||||||
|     VENV_PATH="/ak-root/venv" \ |     VENV_PATH="/ak-root/venv" \ | ||||||
|     POETRY_VIRTUALENVS_CREATE=false \ |     POETRY_VIRTUALENVS_CREATE=false | ||||||
|     GOFIPS=1 |  | ||||||
|  |  | ||||||
| HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ] | HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ] | ||||||
|  |  | ||||||
|  | |||||||
							
								
								
									
85  Makefile
							| @ -5,13 +5,10 @@ PWD = $(shell pwd) | |||||||
| UID = $(shell id -u) | UID = $(shell id -u) | ||||||
| GID = $(shell id -g) | GID = $(shell id -g) | ||||||
| NPM_VERSION = $(shell python -m scripts.npm_version) | NPM_VERSION = $(shell python -m scripts.npm_version) | ||||||
| PY_SOURCES = authentik tests scripts lifecycle .github | PY_SOURCES = authentik tests scripts lifecycle | ||||||
| GO_SOURCES = cmd internal |  | ||||||
| WEB_SOURCES = web/src web/packages |  | ||||||
| DOCKER_IMAGE ?= "authentik:test" | DOCKER_IMAGE ?= "authentik:test" | ||||||
|  |  | ||||||
| GEN_API_TS = "gen-ts-api" | GEN_API_TS = "gen-ts-api" | ||||||
| GEN_API_PY = "gen-py-api" |  | ||||||
| GEN_API_GO = "gen-go-api" | GEN_API_GO = "gen-go-api" | ||||||
|  |  | ||||||
| pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null) | pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null) | ||||||
| @ -21,14 +18,13 @@ pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null) | |||||||
| CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \ | CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \ | ||||||
| 		-I .github/codespell-words.txt \ | 		-I .github/codespell-words.txt \ | ||||||
| 		-S 'web/src/locales/**' \ | 		-S 'web/src/locales/**' \ | ||||||
| 		-S 'website/docs/developer-docs/api/reference/**' \ | 		authentik \ | ||||||
| 		-S '**/node_modules/**' \ | 		internal \ | ||||||
| 		-S '**/dist/**' \ | 		cmd \ | ||||||
| 		$(PY_SOURCES) \ | 		web/src \ | ||||||
| 		$(GO_SOURCES) \ |  | ||||||
| 		$(WEB_SOURCES) \ |  | ||||||
| 		website/src \ | 		website/src \ | ||||||
| 		website/blog \ | 		website/blog \ | ||||||
|  | 		website/developer-docs \ | ||||||
| 		website/docs \ | 		website/docs \ | ||||||
| 		website/integrations \ | 		website/integrations \ | ||||||
| 		website/src | 		website/src | ||||||
| @ -45,23 +41,33 @@ help:  ## Show this help | |||||||
| 		sort | 		sort | ||||||
| 	@echo "" | 	@echo "" | ||||||
|  |  | ||||||
| go-test: | test-go: | ||||||
| 	go test -timeout 0 -v -race -cover ./... | 	go test -timeout 0 -v -race -cover ./... | ||||||
|  |  | ||||||
|  | test-docker:  ## Run all tests in a docker-compose | ||||||
|  | 	echo "PG_PASS=$(openssl rand -base64 32)" >> .env | ||||||
|  | 	echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env | ||||||
|  | 	docker-compose pull -q | ||||||
|  | 	docker-compose up --no-start | ||||||
|  | 	docker-compose start postgresql redis | ||||||
|  | 	docker-compose run -u root server test-all | ||||||
|  | 	rm -f .env | ||||||
|  |  | ||||||
| test: ## Run the server tests and produce a coverage report (locally) | test: ## Run the server tests and produce a coverage report (locally) | ||||||
| 	coverage run manage.py test --keepdb authentik | 	coverage run manage.py test --keepdb authentik | ||||||
| 	coverage html | 	coverage html | ||||||
| 	coverage report | 	coverage report | ||||||
|  |  | ||||||
| lint-fix: lint-codespell  ## Lint and automatically fix errors in the python source code. Reports spelling errors. | lint-fix:  ## Lint and automatically fix errors in the python source code. Reports spelling errors. | ||||||
|  | 	isort $(PY_SOURCES) | ||||||
| 	black $(PY_SOURCES) | 	black $(PY_SOURCES) | ||||||
| 	ruff check --fix $(PY_SOURCES) | 	ruff --fix $(PY_SOURCES) | ||||||
|  |  | ||||||
| lint-codespell:  ## Reports spelling errors. |  | ||||||
| 	codespell -w $(CODESPELL_ARGS) | 	codespell -w $(CODESPELL_ARGS) | ||||||
|  |  | ||||||
| lint: ## Lint the python and golang sources | lint: ## Lint the python and golang sources | ||||||
| 	bandit -r $(PY_SOURCES) -x web/node_modules -x tests/wdio/node_modules -x website/node_modules | 	bandit -r $(PY_SOURCES) -x node_modules | ||||||
|  | 	./web/node_modules/.bin/pyright $(PY_SOURCES) | ||||||
|  | 	pylint $(PY_SOURCES) | ||||||
| 	golangci-lint run -v | 	golangci-lint run -v | ||||||
|  |  | ||||||
| core-install: | core-install: | ||||||
| @ -72,9 +78,6 @@ migrate: ## Run the Authentik Django server's migrations | |||||||
|  |  | ||||||
| i18n-extract: core-i18n-extract web-i18n-extract  ## Extract strings that require translation into files to send to a translation service | i18n-extract: core-i18n-extract web-i18n-extract  ## Extract strings that require translation into files to send to a translation service | ||||||
|  |  | ||||||
| aws-cfn: |  | ||||||
| 	cd lifecycle/aws && npm run aws-cfn |  | ||||||
|  |  | ||||||
| core-i18n-extract: | core-i18n-extract: | ||||||
| 	ak makemessages \ | 	ak makemessages \ | ||||||
| 		--add-location file \ | 		--add-location file \ | ||||||
| @ -137,16 +140,13 @@ gen-clean-ts:  ## Remove generated API client for Typescript | |||||||
| gen-clean-go:  ## Remove generated API client for Go | gen-clean-go:  ## Remove generated API client for Go | ||||||
| 	rm -rf ./${GEN_API_GO}/ | 	rm -rf ./${GEN_API_GO}/ | ||||||
|  |  | ||||||
| gen-clean-py:  ## Remove generated API client for Python | gen-clean: gen-clean-ts gen-clean-go  ## Remove generated API clients | ||||||
| 	rm -rf ./${GEN_API_PY}/ |  | ||||||
|  |  | ||||||
| gen-clean: gen-clean-ts gen-clean-go gen-clean-py  ## Remove generated API clients |  | ||||||
|  |  | ||||||
| gen-client-ts: gen-clean-ts  ## Build and install the authentik API for Typescript into the authentik UI Application | gen-client-ts: gen-clean-ts  ## Build and install the authentik API for Typescript into the authentik UI Application | ||||||
| 	docker run \ | 	docker run \ | ||||||
| 		--rm -v ${PWD}:/local \ | 		--rm -v ${PWD}:/local \ | ||||||
| 		--user ${UID}:${GID} \ | 		--user ${UID}:${GID} \ | ||||||
| 		docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \ | 		docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \ | ||||||
| 		-i /local/schema.yml \ | 		-i /local/schema.yml \ | ||||||
| 		-g typescript-fetch \ | 		-g typescript-fetch \ | ||||||
| 		-o /local/${GEN_API_TS} \ | 		-o /local/${GEN_API_TS} \ | ||||||
| @ -158,20 +158,6 @@ gen-client-ts: gen-clean-ts  ## Build and install the authentik API for Typescri | |||||||
| 	cd ./${GEN_API_TS} && npm i | 	cd ./${GEN_API_TS} && npm i | ||||||
| 	\cp -rf ./${GEN_API_TS}/* web/node_modules/@goauthentik/api | 	\cp -rf ./${GEN_API_TS}/* web/node_modules/@goauthentik/api | ||||||
|  |  | ||||||
| gen-client-py: gen-clean-py ## Build and install the authentik API for Python |  | ||||||
| 	docker run \ |  | ||||||
| 		--rm -v ${PWD}:/local \ |  | ||||||
| 		--user ${UID}:${GID} \ |  | ||||||
| 		docker.io/openapitools/openapi-generator-cli:v7.4.0 generate \ |  | ||||||
| 		-i /local/schema.yml \ |  | ||||||
| 		-g python \ |  | ||||||
| 		-o /local/${GEN_API_PY} \ |  | ||||||
| 		-c /local/scripts/api-py-config.yaml \ |  | ||||||
| 		--additional-properties=packageVersion=${NPM_VERSION} \ |  | ||||||
| 		--git-repo-id authentik \ |  | ||||||
| 		--git-user-id goauthentik |  | ||||||
| 	pip install ./${GEN_API_PY} |  | ||||||
|  |  | ||||||
| gen-client-go: gen-clean-go  ## Build and install the authentik API for Golang | gen-client-go: gen-clean-go  ## Build and install the authentik API for Golang | ||||||
| 	mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates | 	mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates | ||||||
| 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./${GEN_API_GO}/config.yaml | 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./${GEN_API_GO}/config.yaml | ||||||
| @ -206,9 +192,6 @@ web: web-lint-fix web-lint web-check-compile  ## Automatically fix formatting is | |||||||
| web-install:  ## Install the necessary libraries to build the Authentik UI | web-install:  ## Install the necessary libraries to build the Authentik UI | ||||||
| 	cd web && npm ci | 	cd web && npm ci | ||||||
|  |  | ||||||
| web-test: ## Run tests for the Authentik UI |  | ||||||
| 	cd web && npm run test |  | ||||||
|  |  | ||||||
| web-watch:  ## Build and watch the Authentik UI for changes, updating automatically | web-watch:  ## Build and watch the Authentik UI for changes, updating automatically | ||||||
| 	rm -rf web/dist/ | 	rm -rf web/dist/ | ||||||
| 	mkdir web/dist/ | 	mkdir web/dist/ | ||||||
| @ -240,7 +223,7 @@ website: website-lint-fix website-build  ## Automatically fix formatting issues | |||||||
| website-install: | website-install: | ||||||
| 	cd website && npm ci | 	cd website && npm ci | ||||||
|  |  | ||||||
| website-lint-fix: lint-codespell | website-lint-fix: | ||||||
| 	cd website && npm run prettier | 	cd website && npm run prettier | ||||||
|  |  | ||||||
| website-build: | website-build: | ||||||
| @ -254,12 +237,8 @@ website-watch:  ## Build and watch the documentation website, updating automatic | |||||||
| ######################### | ######################### | ||||||
|  |  | ||||||
| docker:  ## Build a docker image of the current source tree | docker:  ## Build a docker image of the current source tree | ||||||
| 	mkdir -p ${GEN_API_TS} |  | ||||||
| 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} | 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} | ||||||
|  |  | ||||||
| test-docker: |  | ||||||
| 	BUILD=true ./scripts/test_docker.sh |  | ||||||
|  |  | ||||||
| ######################### | ######################### | ||||||
| ## CI | ## CI | ||||||
| ######################### | ######################### | ||||||
| @ -270,6 +249,9 @@ ci--meta-debug: | |||||||
| 	python -V | 	python -V | ||||||
| 	node --version | 	node --version | ||||||
|  |  | ||||||
|  | ci-pylint: ci--meta-debug | ||||||
|  | 	pylint $(PY_SOURCES) | ||||||
|  |  | ||||||
| ci-black: ci--meta-debug | ci-black: ci--meta-debug | ||||||
| 	black --check $(PY_SOURCES) | 	black --check $(PY_SOURCES) | ||||||
|  |  | ||||||
| @ -279,13 +261,14 @@ ci-ruff: ci--meta-debug | |||||||
| ci-codespell: ci--meta-debug | ci-codespell: ci--meta-debug | ||||||
| 	codespell $(CODESPELL_ARGS) -s | 	codespell $(CODESPELL_ARGS) -s | ||||||
|  |  | ||||||
|  | ci-isort: ci--meta-debug | ||||||
|  | 	isort --check $(PY_SOURCES) | ||||||
|  |  | ||||||
| ci-bandit: ci--meta-debug | ci-bandit: ci--meta-debug | ||||||
| 	bandit -r $(PY_SOURCES) | 	bandit -r $(PY_SOURCES) | ||||||
|  |  | ||||||
|  | ci-pyright: ci--meta-debug | ||||||
|  | 	./web/node_modules/.bin/pyright $(PY_SOURCES) | ||||||
|  |  | ||||||
| ci-pending-migrations: ci--meta-debug | ci-pending-migrations: ci--meta-debug | ||||||
| 	ak makemigrations --check | 	ak makemigrations --check | ||||||
|  |  | ||||||
| ci-test: ci--meta-debug |  | ||||||
| 	coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik |  | ||||||
| 	coverage report |  | ||||||
| 	coverage xml |  | ||||||
|  | |||||||
							
								
								
									
14  README.md
							| @ -15,9 +15,7 @@ | |||||||
|  |  | ||||||
| ## What is authentik? | ## What is authentik? | ||||||
|  |  | ||||||
| authentik is an open-source Identity Provider that emphasizes flexibility and versatility, with support for a wide set of protocols. | authentik is an open-source Identity Provider that emphasizes flexibility and versatility. It can be seamlessly integrated into existing environments to support new protocols. authentik is also a great solution for implementing sign-up, recovery, and other similar features in your application, saving you the hassle of dealing with them. | ||||||
|  |  | ||||||
| Our [enterprise offer](https://goauthentik.io/pricing) can also be used as a self-hosted replacement for large-scale deployments of Okta/Auth0, Entra ID, Ping Identity, or other legacy IdPs for employees and B2B2C use. |  | ||||||
|  |  | ||||||
| ## Installation | ## Installation | ||||||
|  |  | ||||||
| @ -27,14 +25,14 @@ For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/h | |||||||
|  |  | ||||||
| ## Screenshots | ## Screenshots | ||||||
|  |  | ||||||
| | Light                                                       | Dark                                                       | | | Light                                                  | Dark                                                  | | ||||||
| | ----------------------------------------------------------- | ---------------------------------------------------------- | | | ------------------------------------------------------ | ----------------------------------------------------- | | ||||||
| |   |   | | |   |   | | ||||||
| |  |  | | |  |  | | ||||||
|  |  | ||||||
| ## Development | ## Development | ||||||
|  |  | ||||||
| See [Developer Documentation](https://docs.goauthentik.io/docs/developer-docs/?utm_source=github) | See [Developer Documentation](https://goauthentik.io/developer-docs/?utm_source=github) | ||||||
|  |  | ||||||
| ## Security | ## Security | ||||||
|  |  | ||||||
|  | |||||||
							
								
								
									
22  SECURITY.md
							| @ -2,7 +2,7 @@ authentik takes security very seriously. We follow the rules of [responsible di | |||||||
|  |  | ||||||
| ## Independent audits and pentests | ## Independent audits and pentests | ||||||
|  |  | ||||||
| We are committed to engaging in regular pentesting and security audits of authentik. Defining and adhering to a cadence of external testing ensures a stronger probability that our code base, our features, and our architecture are as secure and non-exploitable as possible. For more details about specific audits and pentests, refer to "Audits and Certificates" in our [Security documentation](https://docs.goauthentik.io/docs/security). | In May/June of 2023 [Cure53](https://cure53.de) conducted an audit and pentest. The [results](https://cure53.de/pentest-report_authentik.pdf) are published on the [Cure53 website](https://cure53.de/#publications-2023). For more details about authentik's response to the findings of the audit refer to [2023-06 Cure53 Code audit](https://goauthentik.io/docs/security/2023-06-cure53). | ||||||
|  |  | ||||||
| ## What authentik classifies as a CVE | ## What authentik classifies as a CVE | ||||||
|  |  | ||||||
| @ -18,10 +18,10 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni | |||||||
|  |  | ||||||
| (.x being the latest patch release for each version) | (.x being the latest patch release for each version) | ||||||
|  |  | ||||||
| | Version   | Supported | | | Version | Supported | | ||||||
| | --------- | --------- | | | --- | --- | | ||||||
| | 2024.12.x | ✅        | | | 2023.6.x | ✅ | | ||||||
| | 2025.2.x  | ✅        | | | 2023.8.x | ✅ | | ||||||
|  |  | ||||||
| ## Reporting a Vulnerability | ## Reporting a Vulnerability | ||||||
|  |  | ||||||
| @ -31,12 +31,12 @@ To report a vulnerability, send an email to [security@goauthentik.io](mailto:se | |||||||
|  |  | ||||||
| authentik reserves the right to reclassify CVSS as necessary. To determine severity, we will use the CVSS calculator from NVD (https://nvd.nist.gov/vuln-metrics/cvss/v3-calculator). The calculated CVSS score will then be translated into one of the following categories: | authentik reserves the right to reclassify CVSS as necessary. To determine severity, we will use the CVSS calculator from NVD (https://nvd.nist.gov/vuln-metrics/cvss/v3-calculator). The calculated CVSS score will then be translated into one of the following categories: | ||||||
|  |  | ||||||
| | Score      | Severity | | | Score | Severity | | ||||||
| | ---------- | -------- | | | --- | --- | | ||||||
| | 0.0        | None     | | | 0.0 | None | | ||||||
| | 0.1 – 3.9  | Low      | | | 0.1 – 3.9 | Low | | ||||||
| | 4.0 – 6.9  | Medium   | | | 4.0 – 6.9 | Medium | | ||||||
| | 7.0 – 8.9  | High     | | | 7.0 – 8.9 | High | | ||||||
| | 9.0 – 10.0 | Critical | | | 9.0 – 10.0 | Critical | | ||||||
|  |  | ||||||
| ## Disclosure process | ## Disclosure process | ||||||
|  | |||||||
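The severity table above is a straightforward threshold lookup on the CVSS v3 score. A minimal sketch of that mapping, assuming the cut-offs exactly as listed in the table (the function name is illustrative, not part of the authentik code base):

# Sketch of the CVSS-score-to-severity mapping from the table above.
def cvss_severity(score: float) -> str:
    if not 0.0 <= score <= 10.0:
        raise ValueError("CVSS v3 scores range from 0.0 to 10.0")
    if score == 0.0:
        return "None"
    if score <= 3.9:
        return "Low"
    if score <= 6.9:
        return "Medium"
    if score <= 8.9:
        return "High"
    return "Critical"

print(cvss_severity(7.5))  # High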
| @ -1,12 +1,13 @@ | |||||||
| """authentik root module""" | """authentik root module""" | ||||||
|  |  | ||||||
| from os import environ | from os import environ | ||||||
|  | from typing import Optional | ||||||
|  |  | ||||||
| __version__ = "2025.2.2" | __version__ = "2023.10.7" | ||||||
| ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_build_hash(fallback: str | None = None) -> str: | def get_build_hash(fallback: Optional[str] = None) -> str: | ||||||
|     """Get build hash""" |     """Get build hash""" | ||||||
|     build_hash = environ.get(ENV_GIT_HASH_KEY, fallback if fallback else "") |     build_hash = environ.get(ENV_GIT_HASH_KEY, fallback if fallback else "") | ||||||
|     return fallback if build_hash == "" and fallback else build_hash |     return fallback if build_hash == "" and fallback else build_hash | ||||||
| @ -16,5 +17,5 @@ def get_full_version() -> str: | |||||||
|     """Get full version, with build hash appended""" |     """Get full version, with build hash appended""" | ||||||
|     version = __version__ |     version = __version__ | ||||||
|     if (build_hash := get_build_hash()) != "": |     if (build_hash := get_build_hash()) != "": | ||||||
|         return f"{version}+{build_hash}" |         version += "." + build_hash | ||||||
|     return version |     return version | ||||||
|  | |||||||
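The functional change in this file is the separator used when a build hash is appended to the version string: the left column (2025.2 side) returns f"{version}+{build_hash}", the right column (2023.10 side) appends ".{build_hash}". A minimal sketch of the two behaviours; the hash value below is a made-up placeholder:

# Illustrative reductions of the two get_full_version() variants shown above.
def full_version_plus(version: str, build_hash: str) -> str:
    # left column: "+" separator, a valid PEP 440 local version identifier
    return f"{version}+{build_hash}" if build_hash else version

def full_version_dot(version: str, build_hash: str) -> str:
    # right column: plain dot separator
    return f"{version}.{build_hash}" if build_hash else version

print(full_version_plus("2025.2.2", "abc1234"))  # 2025.2.2+abc1234
print(full_version_dot("2023.10.7", "abc1234"))  # 2023.10.7.abc1234

The "+" form has the practical advantage that packaging.version.parse (used elsewhere in this diff) treats it as a local version of 2025.2.2, whereas the dotted form is not a valid PEP 440 version.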
| @ -2,23 +2,18 @@ | |||||||
|  |  | ||||||
| import platform | import platform | ||||||
| from datetime import datetime | from datetime import datetime | ||||||
| from ssl import OPENSSL_VERSION |  | ||||||
| from sys import version as python_version | from sys import version as python_version | ||||||
| from typing import TypedDict | from typing import TypedDict | ||||||
|  |  | ||||||
| from cryptography.hazmat.backends.openssl.backend import backend |  | ||||||
| from django.conf import settings |  | ||||||
| from django.utils.timezone import now | from django.utils.timezone import now | ||||||
| from django.views.debug import SafeExceptionReporterFilter |  | ||||||
| from drf_spectacular.utils import extend_schema | from drf_spectacular.utils import extend_schema | ||||||
|  | from gunicorn import version_info as gunicorn_version | ||||||
| from rest_framework.fields import SerializerMethodField | from rest_framework.fields import SerializerMethodField | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| from rest_framework.response import Response | from rest_framework.response import Response | ||||||
| from rest_framework.views import APIView | from rest_framework.views import APIView | ||||||
|  |  | ||||||
| from authentik import get_full_version |  | ||||||
| from authentik.core.api.utils import PassiveSerializer | from authentik.core.api.utils import PassiveSerializer | ||||||
| from authentik.enterprise.license import LicenseKey |  | ||||||
| from authentik.lib.config import CONFIG | from authentik.lib.config import CONFIG | ||||||
| from authentik.lib.utils.reflection import get_env | from authentik.lib.utils.reflection import get_env | ||||||
| from authentik.outposts.apps import MANAGED_OUTPOST | from authentik.outposts.apps import MANAGED_OUTPOST | ||||||
| @ -30,13 +25,11 @@ class RuntimeDict(TypedDict): | |||||||
|     """Runtime information""" |     """Runtime information""" | ||||||
|  |  | ||||||
|     python_version: str |     python_version: str | ||||||
|  |     gunicorn_version: str | ||||||
|     environment: str |     environment: str | ||||||
|     architecture: str |     architecture: str | ||||||
|     platform: str |     platform: str | ||||||
|     uname: str |     uname: str | ||||||
|     openssl_version: str |  | ||||||
|     openssl_fips_enabled: bool | None |  | ||||||
|     authentik_version: str |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class SystemInfoSerializer(PassiveSerializer): | class SystemInfoSerializer(PassiveSerializer): | ||||||
| @ -54,16 +47,10 @@ class SystemInfoSerializer(PassiveSerializer): | |||||||
|     def get_http_headers(self, request: Request) -> dict[str, str]: |     def get_http_headers(self, request: Request) -> dict[str, str]: | ||||||
|         """Get HTTP Request headers""" |         """Get HTTP Request headers""" | ||||||
|         headers = {} |         headers = {} | ||||||
|         raw_session = request._request.COOKIES.get(settings.SESSION_COOKIE_NAME) |  | ||||||
|         for key, value in request.META.items(): |         for key, value in request.META.items(): | ||||||
|             if not isinstance(value, str): |             if not isinstance(value, str): | ||||||
|                 continue |                 continue | ||||||
|             actual_value = value |             headers[key] = value | ||||||
|             if raw_session in actual_value: |  | ||||||
|                 actual_value = actual_value.replace( |  | ||||||
|                     raw_session, SafeExceptionReporterFilter.cleansed_substitute |  | ||||||
|                 ) |  | ||||||
|             headers[key] = actual_value |  | ||||||
|         return headers |         return headers | ||||||
|  |  | ||||||
|     def get_http_host(self, request: Request) -> str: |     def get_http_host(self, request: Request) -> str: | ||||||
| @ -77,15 +64,11 @@ class SystemInfoSerializer(PassiveSerializer): | |||||||
|     def get_runtime(self, request: Request) -> RuntimeDict: |     def get_runtime(self, request: Request) -> RuntimeDict: | ||||||
|         """Get versions""" |         """Get versions""" | ||||||
|         return { |         return { | ||||||
|             "architecture": platform.machine(), |  | ||||||
|             "authentik_version": get_full_version(), |  | ||||||
|             "environment": get_env(), |  | ||||||
|             "openssl_fips_enabled": ( |  | ||||||
|                 backend._fips_enabled if LicenseKey.get_total().status().is_valid else None |  | ||||||
|             ), |  | ||||||
|             "openssl_version": OPENSSL_VERSION, |  | ||||||
|             "platform": platform.platform(), |  | ||||||
|             "python_version": python_version, |             "python_version": python_version, | ||||||
|  |             "gunicorn_version": ".".join(str(x) for x in gunicorn_version), | ||||||
|  |             "environment": get_env(), | ||||||
|  |             "architecture": platform.machine(), | ||||||
|  |             "platform": platform.platform(), | ||||||
|             "uname": " ".join(platform.uname()), |             "uname": " ".join(platform.uname()), | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  | |||||||
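For orientation, the left column's get_runtime() produces a payload shaped roughly like the dict below, while the right column reports gunicorn_version instead of the authentik and OpenSSL fields. Every value here is invented for illustration:

# Illustrative shape only; all values are placeholders.
runtime_example = {
    "architecture": "x86_64",
    "authentik_version": "2025.2.2+abc1234",
    "environment": "kubernetes",
    "openssl_fips_enabled": None,  # stays None unless a valid enterprise license is present
    "openssl_version": "OpenSSL 3.0.15 3 Sep 2024",
    "platform": "Linux-6.1.0-x86_64-with-glibc2.36",
    "python_version": "3.12.8 (main, Dec  4 2024, 00:00:00) [GCC 12.2.0]",
    "uname": "Linux authentik 6.1.0 #1 SMP x86_64",
}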
| @ -10,9 +10,8 @@ from rest_framework.response import Response | |||||||
| from rest_framework.views import APIView | from rest_framework.views import APIView | ||||||
|  |  | ||||||
| from authentik import __version__, get_build_hash | from authentik import __version__, get_build_hash | ||||||
| from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version | from authentik.admin.tasks import VERSION_CACHE_KEY, update_latest_version | ||||||
| from authentik.core.api.utils import PassiveSerializer | from authentik.core.api.utils import PassiveSerializer | ||||||
| from authentik.outposts.models import Outpost |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class VersionSerializer(PassiveSerializer): | class VersionSerializer(PassiveSerializer): | ||||||
| @ -20,10 +19,8 @@ class VersionSerializer(PassiveSerializer): | |||||||
|  |  | ||||||
|     version_current = SerializerMethodField() |     version_current = SerializerMethodField() | ||||||
|     version_latest = SerializerMethodField() |     version_latest = SerializerMethodField() | ||||||
|     version_latest_valid = SerializerMethodField() |  | ||||||
|     build_hash = SerializerMethodField() |     build_hash = SerializerMethodField() | ||||||
|     outdated = SerializerMethodField() |     outdated = SerializerMethodField() | ||||||
|     outpost_outdated = SerializerMethodField() |  | ||||||
|  |  | ||||||
|     def get_build_hash(self, _) -> str: |     def get_build_hash(self, _) -> str: | ||||||
|         """Get build hash, if version is not latest or released""" |         """Get build hash, if version is not latest or released""" | ||||||
| @ -41,23 +38,10 @@ class VersionSerializer(PassiveSerializer): | |||||||
|             return __version__ |             return __version__ | ||||||
|         return version_in_cache |         return version_in_cache | ||||||
|  |  | ||||||
|     def get_version_latest_valid(self, _) -> bool: |  | ||||||
|         """Check if latest version is valid""" |  | ||||||
|         return cache.get(VERSION_CACHE_KEY) != VERSION_NULL |  | ||||||
|  |  | ||||||
|     def get_outdated(self, instance) -> bool: |     def get_outdated(self, instance) -> bool: | ||||||
|         """Check if we're running the latest version""" |         """Check if we're running the latest version""" | ||||||
|         return parse(self.get_version_current(instance)) < parse(self.get_version_latest(instance)) |         return parse(self.get_version_current(instance)) < parse(self.get_version_latest(instance)) | ||||||
|  |  | ||||||
|     def get_outpost_outdated(self, _) -> bool: |  | ||||||
|         """Check if any outpost is outdated/has a version mismatch""" |  | ||||||
|         any_outdated = False |  | ||||||
|         for outpost in Outpost.objects.all(): |  | ||||||
|             for state in outpost.state: |  | ||||||
|                 if state.version_outdated: |  | ||||||
|                     any_outdated = True |  | ||||||
|         return any_outdated |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class VersionView(APIView): | class VersionView(APIView): | ||||||
|     """Get running and latest version.""" |     """Get running and latest version.""" | ||||||
|  | |||||||
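The left column adds two fields to the version endpoint: version_latest_valid (whether the cached upstream version check actually succeeded, i.e. the cache does not hold VERSION_NULL) and outpost_outdated (whether any connected outpost reports a mismatched version). An illustrative response body with invented values:

# Illustrative response shape after this change; values are placeholders.
version_response_example = {
    "version_current": "2025.2.2",
    "version_latest": "2025.2.3",
    "version_latest_valid": True,
    "build_hash": "",
    "outdated": True,
    "outpost_outdated": False,
}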
| @ -1,33 +0,0 @@ | |||||||
| from rest_framework.permissions import IsAdminUser |  | ||||||
| from rest_framework.viewsets import ReadOnlyModelViewSet |  | ||||||
|  |  | ||||||
| from authentik.admin.models import VersionHistory |  | ||||||
| from authentik.core.api.utils import ModelSerializer |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class VersionHistorySerializer(ModelSerializer): |  | ||||||
|     """VersionHistory Serializer""" |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         model = VersionHistory |  | ||||||
|         fields = [ |  | ||||||
|             "id", |  | ||||||
|             "timestamp", |  | ||||||
|             "version", |  | ||||||
|             "build", |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class VersionHistoryViewSet(ReadOnlyModelViewSet): |  | ||||||
|     """VersionHistory Viewset""" |  | ||||||
|  |  | ||||||
|     queryset = VersionHistory.objects.all() |  | ||||||
|     serializer_class = VersionHistorySerializer |  | ||||||
|     permission_classes = [IsAdminUser] |  | ||||||
|     filterset_fields = [ |  | ||||||
|         "version", |  | ||||||
|         "build", |  | ||||||
|     ] |  | ||||||
|     search_fields = ["version", "build"] |  | ||||||
|     ordering = ["-timestamp"] |  | ||||||
|     pagination_class = None |  | ||||||
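This module exists only in the left column: a read-only, admin-only viewset over the unmanaged authentik_version_history table (the model appears further down in this diff), exposing id, timestamp, version and build with filtering and search but no pagination. A sketch of the rows it would return; timestamps and hashes are invented:

# Illustrative rows from the VersionHistory viewset; values are placeholders.
version_history_example = [
    {"id": 2, "timestamp": "2025-03-01T12:00:00Z", "version": "2025.2.2", "build": "abc1234"},
    {"id": 1, "timestamp": "2025-01-15T09:30:00Z", "version": "2024.12.3", "build": ""},
]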
| @ -1,16 +1,12 @@ | |||||||
| """authentik administration overview""" | """authentik administration overview""" | ||||||
|  |  | ||||||
| from socket import gethostname |  | ||||||
|  |  | ||||||
| from django.conf import settings | from django.conf import settings | ||||||
| from drf_spectacular.utils import extend_schema, inline_serializer | from drf_spectacular.utils import extend_schema, inline_serializer | ||||||
| from packaging.version import parse | from rest_framework.fields import IntegerField | ||||||
| from rest_framework.fields import BooleanField, CharField |  | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| from rest_framework.response import Response | from rest_framework.response import Response | ||||||
| from rest_framework.views import APIView | from rest_framework.views import APIView | ||||||
|  |  | ||||||
| from authentik import get_full_version |  | ||||||
| from authentik.rbac.permissions import HasPermission | from authentik.rbac.permissions import HasPermission | ||||||
| from authentik.root.celery import CELERY_APP | from authentik.root.celery import CELERY_APP | ||||||
|  |  | ||||||
| @ -20,38 +16,11 @@ class WorkerView(APIView): | |||||||
|  |  | ||||||
|     permission_classes = [HasPermission("authentik_rbac.view_system_info")] |     permission_classes = [HasPermission("authentik_rbac.view_system_info")] | ||||||
|  |  | ||||||
|     @extend_schema( |     @extend_schema(responses=inline_serializer("Workers", fields={"count": IntegerField()})) | ||||||
|         responses=inline_serializer( |  | ||||||
|             "Worker", |  | ||||||
|             fields={ |  | ||||||
|                 "worker_id": CharField(), |  | ||||||
|                 "version": CharField(), |  | ||||||
|                 "version_matching": BooleanField(), |  | ||||||
|             }, |  | ||||||
|             many=True, |  | ||||||
|         ) |  | ||||||
|     ) |  | ||||||
|     def get(self, request: Request) -> Response: |     def get(self, request: Request) -> Response: | ||||||
|         """Get currently connected worker count.""" |         """Get currently connected worker count.""" | ||||||
|         raw: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5) |         count = len(CELERY_APP.control.ping(timeout=0.5)) | ||||||
|         our_version = parse(get_full_version()) |  | ||||||
|         response = [] |  | ||||||
|         for worker in raw: |  | ||||||
|             key = list(worker.keys())[0] |  | ||||||
|             version = worker[key].get("version") |  | ||||||
|             version_matching = False |  | ||||||
|             if version: |  | ||||||
|                 version_matching = parse(version) == our_version |  | ||||||
|             response.append( |  | ||||||
|                 {"worker_id": key, "version": version, "version_matching": version_matching} |  | ||||||
|             ) |  | ||||||
|         # In debug we run with `task_always_eager`, so tasks are run on the main process |         # In debug we run with `task_always_eager`, so tasks are run on the main process | ||||||
|         if settings.DEBUG:  # pragma: no cover |         if settings.DEBUG:  # pragma: no cover | ||||||
|             response.append( |             count += 1 | ||||||
|                 { |         return Response({"count": count}) | ||||||
|                     "worker_id": f"authentik-debug@{gethostname()}", |  | ||||||
|                     "version": get_full_version(), |  | ||||||
|                     "version_matching": True, |  | ||||||
|                 } |  | ||||||
|             ) |  | ||||||
|         return Response(response) |  | ||||||
|  | |||||||
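The contract of the worker endpoint changes here: the right column returns a single {"count": n} object, while the left column returns one entry per Celery worker reported by CELERY_APP.control.ping(), including its version and whether it matches the server. Roughly, with invented worker names and versions:

# Right column's response shape
workers_old = {"count": 2}

# Left column's response shape: one entry per connected worker
workers_new = [
    {"worker_id": "celery@worker-1", "version": "2025.2.2", "version_matching": True},
    {"worker_id": "celery@worker-2", "version": "2025.2.1", "version_matching": False},
]

This is also why the admin API test further down switches from asserting body["count"] == 0 to asserting len(body) == 0.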
| @ -1,10 +1,11 @@ | |||||||
| """authentik admin app config""" | """authentik admin app config""" | ||||||
|  |  | ||||||
| from prometheus_client import Info | from prometheus_client import Gauge, Info | ||||||
|  |  | ||||||
| from authentik.blueprints.apps import ManagedAppConfig | from authentik.blueprints.apps import ManagedAppConfig | ||||||
|  |  | ||||||
| PROM_INFO = Info("authentik_version", "Currently running authentik version") | PROM_INFO = Info("authentik_version", "Currently running authentik version") | ||||||
|  | GAUGE_WORKERS = Gauge("authentik_admin_workers", "Currently connected workers") | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikAdminConfig(ManagedAppConfig): | class AuthentikAdminConfig(ManagedAppConfig): | ||||||
|  | |||||||
| @ -1,22 +0,0 @@ | |||||||
| """authentik admin models""" |  | ||||||
|  |  | ||||||
| from django.db import models |  | ||||||
| from django.utils.translation import gettext_lazy as _ |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class VersionHistory(models.Model): |  | ||||||
|     id = models.BigAutoField(primary_key=True) |  | ||||||
|     timestamp = models.DateTimeField() |  | ||||||
|     version = models.TextField() |  | ||||||
|     build = models.TextField() |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         managed = False |  | ||||||
|         db_table = "authentik_version_history" |  | ||||||
|         ordering = ("-timestamp",) |  | ||||||
|         verbose_name = _("Version history") |  | ||||||
|         verbose_name_plural = _("Version history") |  | ||||||
|         default_permissions = [] |  | ||||||
|  |  | ||||||
|     def __str__(self): |  | ||||||
|         return f"{self.version}.{self.build} ({self.timestamp})" |  | ||||||
| @ -1,35 +1,14 @@ | |||||||
| """admin signals""" | """admin signals""" | ||||||
|  |  | ||||||
| from django.dispatch import receiver | from django.dispatch import receiver | ||||||
| from packaging.version import parse |  | ||||||
| from prometheus_client import Gauge |  | ||||||
|  |  | ||||||
| from authentik import get_full_version | from authentik.admin.apps import GAUGE_WORKERS | ||||||
| from authentik.root.celery import CELERY_APP | from authentik.root.celery import CELERY_APP | ||||||
| from authentik.root.monitoring import monitoring_set | from authentik.root.monitoring import monitoring_set | ||||||
|  |  | ||||||
| GAUGE_WORKERS = Gauge( |  | ||||||
|     "authentik_admin_workers", |  | ||||||
|     "Currently connected workers, their versions and if they are the same version as authentik", |  | ||||||
|     ["version", "version_matched"], |  | ||||||
| ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| _version = parse(get_full_version()) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @receiver(monitoring_set) | @receiver(monitoring_set) | ||||||
| def monitoring_set_workers(sender, **kwargs): | def monitoring_set_workers(sender, **kwargs): | ||||||
|     """Set worker gauge""" |     """Set worker gauge""" | ||||||
|     raw: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5) |     count = len(CELERY_APP.control.ping(timeout=0.5)) | ||||||
|     worker_version_count = {} |     GAUGE_WORKERS.set(count) | ||||||
|     for worker in raw: |  | ||||||
|         key = list(worker.keys())[0] |  | ||||||
|         version = worker[key].get("version") |  | ||||||
|         version_matching = False |  | ||||||
|         if version: |  | ||||||
|             version_matching = parse(version) == _version |  | ||||||
|         worker_version_count.setdefault(version, {"count": 0, "matching": version_matching}) |  | ||||||
|         worker_version_count[version]["count"] += 1 |  | ||||||
|     for version, stats in worker_version_count.items(): |  | ||||||
|         GAUGE_WORKERS.labels(version, stats["matching"]).set(stats["count"]) |  | ||||||
|  | |||||||
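In the left column the worker gauge moves into the signal module and gains version and version_matched labels, so the exported metric carries one sample per distinct worker version rather than a single total. A small self-contained sketch of the difference in exposition output; the sample values are invented, and the unlabelled gauge is renamed here only to avoid a registry clash:

# Sketch of the gauge label change; sample values are invented.
from prometheus_client import CollectorRegistry, Gauge, generate_latest

registry = CollectorRegistry()

# Right column: a single unlabelled gauge (defined in apps.py there)
workers_total = Gauge(
    "authentik_admin_workers_total_sketch", "Currently connected workers", registry=registry
)
workers_total.set(3)

# Left column: labelled by worker version and whether it matches the server
workers_by_version = Gauge(
    "authentik_admin_workers",
    "Currently connected workers, their versions and if they are the same version as authentik",
    ["version", "version_matched"],
    registry=registry,
)
workers_by_version.labels("2025.2.2", True).set(2)
workers_by_version.labels("2025.2.1", False).set(1)

print(generate_latest(registry).decode())
# ... authentik_admin_workers{version="2025.2.2",version_matched="True"} 2.0
# ... authentik_admin_workers{version="2025.2.1",version_matched="False"} 1.0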
| @ -1,8 +1,10 @@ | |||||||
| """authentik admin tasks""" | """authentik admin tasks""" | ||||||
|  |  | ||||||
|  | import re | ||||||
|  |  | ||||||
| from django.core.cache import cache | from django.core.cache import cache | ||||||
|  | from django.core.validators import URLValidator | ||||||
| from django.db import DatabaseError, InternalError, ProgrammingError | from django.db import DatabaseError, InternalError, ProgrammingError | ||||||
| from django.utils.translation import gettext_lazy as _ |  | ||||||
| from packaging.version import parse | from packaging.version import parse | ||||||
| from requests import RequestException | from requests import RequestException | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
| @ -16,9 +18,10 @@ from authentik.lib.utils.http import get_http_session | |||||||
| from authentik.root.celery import CELERY_APP | from authentik.root.celery import CELERY_APP | ||||||
|  |  | ||||||
| LOGGER = get_logger() | LOGGER = get_logger() | ||||||
| VERSION_NULL = "0.0.0" |  | ||||||
| VERSION_CACHE_KEY = "authentik_latest_version" | VERSION_CACHE_KEY = "authentik_latest_version" | ||||||
| VERSION_CACHE_TIMEOUT = 8 * 60 * 60  # 8 hours | VERSION_CACHE_TIMEOUT = 8 * 60 * 60  # 8 hours | ||||||
|  | # Chop off the first ^ because we want to search the entire string | ||||||
|  | URL_FINDER = URLValidator.regex.pattern[1:] | ||||||
| LOCAL_VERSION = parse(__version__) | LOCAL_VERSION = parse(__version__) | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -52,7 +55,7 @@ def clear_update_notifications(): | |||||||
| def update_latest_version(self: SystemTask): | def update_latest_version(self: SystemTask): | ||||||
|     """Update latest version info""" |     """Update latest version info""" | ||||||
|     if CONFIG.get_bool("disable_update_check"): |     if CONFIG.get_bool("disable_update_check"): | ||||||
|         cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT) |         cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT) | ||||||
|         self.set_status(TaskStatus.WARNING, "Version check disabled.") |         self.set_status(TaskStatus.WARNING, "Version check disabled.") | ||||||
|         return |         return | ||||||
|     try: |     try: | ||||||
| @ -74,18 +77,12 @@ def update_latest_version(self: SystemTask): | |||||||
|                 context__new_version=upstream_version, |                 context__new_version=upstream_version, | ||||||
|             ).exists(): |             ).exists(): | ||||||
|                 return |                 return | ||||||
|             Event.new( |             event_dict = {"new_version": upstream_version} | ||||||
|                 EventAction.UPDATE_AVAILABLE, |             if match := re.search(URL_FINDER, data.get("stable", {}).get("changelog", "")): | ||||||
|                 message=_( |                 event_dict["message"] = f"Changelog: {match.group()}" | ||||||
|                     "New version {version} available!".format( |             Event.new(EventAction.UPDATE_AVAILABLE, **event_dict).save() | ||||||
|                         version=upstream_version, |  | ||||||
|                     ) |  | ||||||
|                 ), |  | ||||||
|                 new_version=upstream_version, |  | ||||||
|                 changelog=data.get("stable", {}).get("changelog_url"), |  | ||||||
|             ).save() |  | ||||||
|     except (RequestException, IndexError) as exc: |     except (RequestException, IndexError) as exc: | ||||||
|         cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT) |         cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT) | ||||||
|         self.set_error(exc) |         self.set_error(exc) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
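The changelog handling on the right-hand side of this hunk builds the event message by regex-searching the upstream payload with Django's URL pattern. Below is a minimal standalone sketch of that technique, using the same payload shape as the RESPONSE_VALID test fixture further down in this diff; only Django needs to be importable, no project settings are assumed:

```python
import re

from django.core.validators import URLValidator

# Chop off the leading "^" so the validator's pattern can match a URL anywhere
# in the string instead of only at the start (same trick as URL_FINDER above).
URL_FINDER = URLValidator.regex.pattern[1:]

# Payload shape mirrors the RESPONSE_VALID fixture used by the tests in this diff.
data = {"stable": {"version": "99999999.9999999", "changelog": "See https://goauthentik.io/test"}}

event_dict = {"new_version": data["stable"]["version"]}
if match := re.search(URL_FINDER, data.get("stable", {}).get("changelog", "")):
    event_dict["message"] = f"Changelog: {match.group()}"

print(event_dict)
# {'new_version': '99999999.9999999', 'message': 'Changelog: https://goauthentik.io/test'}
```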
| @ -34,7 +34,7 @@ class TestAdminAPI(TestCase): | |||||||
|         response = self.client.get(reverse("authentik_api:admin_workers")) |         response = self.client.get(reverse("authentik_api:admin_workers")) | ||||||
|         self.assertEqual(response.status_code, 200) |         self.assertEqual(response.status_code, 200) | ||||||
|         body = loads(response.content) |         body = loads(response.content) | ||||||
|         self.assertEqual(len(body), 0) |         self.assertEqual(body["count"], 0) | ||||||
|  |  | ||||||
|     def test_metrics(self): |     def test_metrics(self): | ||||||
|         """Test metrics API""" |         """Test metrics API""" | ||||||
|  | |||||||
| @ -17,7 +17,6 @@ RESPONSE_VALID = { | |||||||
|     "stable": { |     "stable": { | ||||||
|         "version": "99999999.9999999", |         "version": "99999999.9999999", | ||||||
|         "changelog": "See https://goauthentik.io/test", |         "changelog": "See https://goauthentik.io/test", | ||||||
|         "changelog_url": "https://goauthentik.io/test", |  | ||||||
|         "reason": "bugfix", |         "reason": "bugfix", | ||||||
|     }, |     }, | ||||||
| } | } | ||||||
| @ -36,7 +35,7 @@ class TestAdminTasks(TestCase): | |||||||
|                 Event.objects.filter( |                 Event.objects.filter( | ||||||
|                     action=EventAction.UPDATE_AVAILABLE, |                     action=EventAction.UPDATE_AVAILABLE, | ||||||
|                     context__new_version="99999999.9999999", |                     context__new_version="99999999.9999999", | ||||||
|                     context__message="New version 99999999.9999999 available!", |                     context__message="Changelog: https://goauthentik.io/test", | ||||||
|                 ).exists() |                 ).exists() | ||||||
|             ) |             ) | ||||||
|             # test that a consecutive check doesn't create a duplicate event |             # test that a consecutive check doesn't create a duplicate event | ||||||
| @ -46,7 +45,7 @@ class TestAdminTasks(TestCase): | |||||||
|                     Event.objects.filter( |                     Event.objects.filter( | ||||||
|                         action=EventAction.UPDATE_AVAILABLE, |                         action=EventAction.UPDATE_AVAILABLE, | ||||||
|                         context__new_version="99999999.9999999", |                         context__new_version="99999999.9999999", | ||||||
|                         context__message="New version 99999999.9999999 available!", |                         context__message="Changelog: https://goauthentik.io/test", | ||||||
|                     ) |                     ) | ||||||
|                 ), |                 ), | ||||||
|                 1, |                 1, | ||||||
|  | |||||||
| @ -6,7 +6,6 @@ from authentik.admin.api.meta import AppsViewSet, ModelViewSet | |||||||
| from authentik.admin.api.metrics import AdministrationMetricsViewSet | from authentik.admin.api.metrics import AdministrationMetricsViewSet | ||||||
| from authentik.admin.api.system import SystemView | from authentik.admin.api.system import SystemView | ||||||
| from authentik.admin.api.version import VersionView | from authentik.admin.api.version import VersionView | ||||||
| from authentik.admin.api.version_history import VersionHistoryViewSet |  | ||||||
| from authentik.admin.api.workers import WorkerView | from authentik.admin.api.workers import WorkerView | ||||||
|  |  | ||||||
| api_urlpatterns = [ | api_urlpatterns = [ | ||||||
| @ -18,7 +17,6 @@ api_urlpatterns = [ | |||||||
|         name="admin_metrics", |         name="admin_metrics", | ||||||
|     ), |     ), | ||||||
|     path("admin/version/", VersionView.as_view(), name="admin_version"), |     path("admin/version/", VersionView.as_view(), name="admin_version"), | ||||||
|     ("admin/version/history", VersionHistoryViewSet, "version_history"), |  | ||||||
|     path("admin/workers/", WorkerView.as_view(), name="admin_workers"), |     path("admin/workers/", WorkerView.as_view(), name="admin_workers"), | ||||||
|     path("admin/system/", SystemView.as_view(), name="admin_system"), |     path("admin/system/", SystemView.as_view(), name="admin_system"), | ||||||
| ] | ] | ||||||
|  | |||||||
| @ -10,3 +10,26 @@ class AuthentikAPIConfig(AppConfig): | |||||||
|     label = "authentik_api" |     label = "authentik_api" | ||||||
|     mountpoint = "api/" |     mountpoint = "api/" | ||||||
|     verbose_name = "authentik API" |     verbose_name = "authentik API" | ||||||
|  |  | ||||||
|  |     def ready(self) -> None: | ||||||
|  |         from drf_spectacular.extensions import OpenApiAuthenticationExtension | ||||||
|  |  | ||||||
|  |         from authentik.api.authentication import TokenAuthentication | ||||||
|  |  | ||||||
|  |         # Class is defined here as it needs to be created early enough that drf-spectacular will | ||||||
|  |         # find it, but also won't cause any import issues | ||||||
|  |         # pylint: disable=unused-variable | ||||||
|  |         class TokenSchema(OpenApiAuthenticationExtension): | ||||||
|  |             """Auth schema""" | ||||||
|  |  | ||||||
|  |             target_class = TokenAuthentication | ||||||
|  |             name = "authentik" | ||||||
|  |  | ||||||
|  |             def get_security_definition(self, auto_schema): | ||||||
|  |                 """Auth schema""" | ||||||
|  |                 return { | ||||||
|  |                     "type": "apiKey", | ||||||
|  |                     "in": "header", | ||||||
|  |                     "name": "Authorization", | ||||||
|  |                     "scheme": "bearer", | ||||||
|  |                 } | ||||||
|  | |||||||
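The TokenSchema extension is the same on both sides of this diff apart from where it is defined and which security definition it returns. Written out as plain dicts, the two variants look as follows; the surrounding `components` structure is standard OpenAPI layout and not copied from authentik:

```python
# One side of the diff declares HTTP bearer auth: clients (and Swagger UI)
# supply just the token, and the "Bearer " prefix is implied by the scheme.
http_bearer = {"type": "http", "scheme": "bearer"}

# The other side declares a raw header API key: clients supply the complete
# Authorization header value themselves.
api_key_header = {
    "type": "apiKey",
    "in": "header",
    "name": "Authorization",
    "scheme": "bearer",
}

# drf-spectacular places whichever dict get_security_definition() returns under
# the extension's name ("authentik") in the generated schema:
components = {"securitySchemes": {"authentik": api_key_header}}
```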
| @ -1,10 +1,9 @@ | |||||||
| """API Authentication""" | """API Authentication""" | ||||||
|  |  | ||||||
| from hmac import compare_digest | from hmac import compare_digest | ||||||
| from typing import Any | from typing import Any, Optional | ||||||
|  |  | ||||||
| from django.conf import settings | from django.conf import settings | ||||||
| from drf_spectacular.extensions import OpenApiAuthenticationExtension |  | ||||||
| from rest_framework.authentication import BaseAuthentication, get_authorization_header | from rest_framework.authentication import BaseAuthentication, get_authorization_header | ||||||
| from rest_framework.exceptions import AuthenticationFailed | from rest_framework.exceptions import AuthenticationFailed | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| @ -18,7 +17,7 @@ from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API | |||||||
| LOGGER = get_logger() | LOGGER = get_logger() | ||||||
|  |  | ||||||
|  |  | ||||||
| def validate_auth(header: bytes) -> str | None: | def validate_auth(header: bytes) -> Optional[str]: | ||||||
|     """Validate that the header is in a correct format, |     """Validate that the header is in a correct format, | ||||||
|     returns type and credentials""" |     returns type and credentials""" | ||||||
|     auth_credentials = header.decode().strip() |     auth_credentials = header.decode().strip() | ||||||
| @ -33,7 +32,7 @@ def validate_auth(header: bytes) -> str | None: | |||||||
|     return auth_credentials |     return auth_credentials | ||||||
|  |  | ||||||
|  |  | ||||||
| def bearer_auth(raw_header: bytes) -> User | None: | def bearer_auth(raw_header: bytes) -> Optional[User]: | ||||||
|     """raw_header in the Format of `Bearer ....`""" |     """raw_header in the Format of `Bearer ....`""" | ||||||
|     user = auth_user_lookup(raw_header) |     user = auth_user_lookup(raw_header) | ||||||
|     if not user: |     if not user: | ||||||
| @ -43,7 +42,7 @@ def bearer_auth(raw_header: bytes) -> User | None: | |||||||
|     return user |     return user | ||||||
|  |  | ||||||
|  |  | ||||||
| def auth_user_lookup(raw_header: bytes) -> User | None: | def auth_user_lookup(raw_header: bytes) -> Optional[User]: | ||||||
|     """raw_header in the Format of `Bearer ....`""" |     """raw_header in the Format of `Bearer ....`""" | ||||||
|     from authentik.providers.oauth2.models import AccessToken |     from authentik.providers.oauth2.models import AccessToken | ||||||
|  |  | ||||||
| @ -76,7 +75,7 @@ def auth_user_lookup(raw_header: bytes) -> User | None: | |||||||
|     raise AuthenticationFailed("Token invalid/expired") |     raise AuthenticationFailed("Token invalid/expired") | ||||||
|  |  | ||||||
|  |  | ||||||
| def token_secret_key(value: str) -> User | None: | def token_secret_key(value: str) -> Optional[User]: | ||||||
|     """Check if the token is the secret key |     """Check if the token is the secret key | ||||||
|     and return the service account for the managed outpost""" |     and return the service account for the managed outpost""" | ||||||
|     from authentik.outposts.apps import MANAGED_OUTPOST |     from authentik.outposts.apps import MANAGED_OUTPOST | ||||||
| @ -103,14 +102,3 @@ class TokenAuthentication(BaseAuthentication): | |||||||
|             return None |             return None | ||||||
|  |  | ||||||
|         return (user, None)  # pragma: no cover |         return (user, None)  # pragma: no cover | ||||||
|  |  | ||||||
|  |  | ||||||
| class TokenSchema(OpenApiAuthenticationExtension): |  | ||||||
|     """Auth schema""" |  | ||||||
|  |  | ||||||
|     target_class = TokenAuthentication |  | ||||||
|     name = "authentik" |  | ||||||
|  |  | ||||||
|     def get_security_definition(self, auto_schema): |  | ||||||
|         """Auth schema""" |  | ||||||
|         return {"type": "http", "scheme": "bearer"} |  | ||||||
|  | |||||||
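For context, a hedged sketch of how TokenAuthentication from this module is typically attached to a DRF view inside the project; the view itself is illustrative and not part of the diff:

```python
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView

from authentik.api.authentication import TokenAuthentication, bearer_auth


class WhoAmIView(APIView):
    """Illustrative view: resolves the user from `Authorization: Bearer <token>`."""

    authentication_classes = [TokenAuthentication]
    permission_classes = [IsAuthenticated]

    def get(self, request) -> Response:
        return Response({"user": request.user.username})


# bearer_auth() can also be called directly (e.g. from websocket consumers);
# it returns a User or None and raises AuthenticationFailed on bad tokens:
# user = bearer_auth(b"Bearer abc123")
```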
							
								
								
									
67 authentik/api/authorization.py Normal file
| @ -0,0 +1,67 @@ | |||||||
|  | """API Authorization""" | ||||||
|  |  | ||||||
|  | from django.conf import settings | ||||||
|  | from django.db.models import Model | ||||||
|  | from django.db.models.query import QuerySet | ||||||
|  | from django_filters.rest_framework import DjangoFilterBackend | ||||||
|  | from rest_framework.authentication import get_authorization_header | ||||||
|  | from rest_framework.filters import BaseFilterBackend | ||||||
|  | from rest_framework.permissions import BasePermission | ||||||
|  | from rest_framework.request import Request | ||||||
|  |  | ||||||
|  | from authentik.api.authentication import validate_auth | ||||||
|  | from authentik.rbac.filters import ObjectFilter | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class OwnerFilter(BaseFilterBackend): | ||||||
|  |     """Filter objects by their owner""" | ||||||
|  |  | ||||||
|  |     owner_key = "user" | ||||||
|  |  | ||||||
|  |     def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet: | ||||||
|  |         if request.user.is_superuser: | ||||||
|  |             return queryset | ||||||
|  |         return queryset.filter(**{self.owner_key: request.user}) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class SecretKeyFilter(DjangoFilterBackend): | ||||||
|  |     """Allow access to all objects when authenticated with secret key as token. | ||||||
|  |  | ||||||
|  |     Replaces both DjangoFilterBackend and ObjectFilter""" | ||||||
|  |  | ||||||
|  |     def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet: | ||||||
|  |         auth_header = get_authorization_header(request) | ||||||
|  |         token = validate_auth(auth_header) | ||||||
|  |         if token and token == settings.SECRET_KEY: | ||||||
|  |             return queryset | ||||||
|  |         queryset = ObjectFilter().filter_queryset(request, queryset, view) | ||||||
|  |         return super().filter_queryset(request, queryset, view) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class OwnerPermissions(BasePermission): | ||||||
|  |     """Authorize requests by an object's owner matching the requesting user""" | ||||||
|  |  | ||||||
|  |     owner_key = "user" | ||||||
|  |  | ||||||
|  |     def has_permission(self, request: Request, view) -> bool: | ||||||
|  |         """If the user is authenticated, we allow all requests here. For listing, the | ||||||
|  |         object-level permissions are done by the filter backend""" | ||||||
|  |         return request.user.is_authenticated | ||||||
|  |  | ||||||
|  |     def has_object_permission(self, request: Request, view, obj: Model) -> bool: | ||||||
|  |         """Check if the object's owner matches the currently logged in user""" | ||||||
|  |         if not hasattr(obj, self.owner_key): | ||||||
|  |             return False | ||||||
|  |         owner = getattr(obj, self.owner_key) | ||||||
|  |         if owner != request.user: | ||||||
|  |             return False | ||||||
|  |         return True | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class OwnerSuperuserPermissions(OwnerPermissions): | ||||||
|  |     """Similar to OwnerPermissions, except always allow access for superusers""" | ||||||
|  |  | ||||||
|  |     def has_object_permission(self, request: Request, view, obj: Model) -> bool: | ||||||
|  |         if request.user.is_superuser: | ||||||
|  |             return True | ||||||
|  |         return super().has_object_permission(request, view, obj) | ||||||
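A hedged sketch of how the classes in this new module are meant to be combined on a viewset: OwnerFilter narrows list results to the requesting user's objects, while OwnerSuperuserPermissions gates object-level access. The serializer and the Token field names below are assumptions for illustration only:

```python
from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import ModelViewSet

from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions
from authentik.core.models import Token


class TokenSerializer(ModelSerializer):
    """Illustrative serializer; field names are assumptions"""

    class Meta:
        model = Token
        fields = ["identifier", "expires"]


class TokenViewSet(ModelViewSet):
    """Users only see and edit their own tokens; superusers see everything"""

    queryset = Token.objects.all()
    serializer_class = TokenSerializer
    permission_classes = [OwnerSuperuserPermissions]
    filter_backends = [OwnerFilter]
    # Both classes default to the "user" attribute; subclass and override
    # owner_key if the owning relation has a different name.
```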
| @ -1,7 +1,7 @@ | |||||||
| """API Decorators""" | """API Decorators""" | ||||||
|  |  | ||||||
| from collections.abc import Callable |  | ||||||
| from functools import wraps | from functools import wraps | ||||||
|  | from typing import Callable, Optional | ||||||
|  |  | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| from rest_framework.response import Response | from rest_framework.response import Response | ||||||
| @ -11,26 +11,21 @@ from structlog.stdlib import get_logger | |||||||
| LOGGER = get_logger() | LOGGER = get_logger() | ||||||
|  |  | ||||||
|  |  | ||||||
| def permission_required(obj_perm: str | None = None, global_perms: list[str] | None = None): | def permission_required(obj_perm: Optional[str] = None, global_perms: Optional[list[str]] = None): | ||||||
|     """Check permissions for a single custom action""" |     """Check permissions for a single custom action""" | ||||||
|  |  | ||||||
|     def _check_obj_perm(self: ModelViewSet, request: Request): |     def wrapper_outter(func: Callable): | ||||||
|         # Check obj_perm both globally and on the specific object |  | ||||||
|         # Having the global permission has higher priority |  | ||||||
|         if request.user.has_perm(obj_perm): |  | ||||||
|             return |  | ||||||
|         obj = self.get_object() |  | ||||||
|         if not request.user.has_perm(obj_perm, obj): |  | ||||||
|             LOGGER.debug("denying access for object", user=request.user, perm=obj_perm, obj=obj) |  | ||||||
|             self.permission_denied(request) |  | ||||||
|  |  | ||||||
|     def wrapper_outer(func: Callable): |  | ||||||
|         """Check permissions for a single custom action""" |         """Check permissions for a single custom action""" | ||||||
|  |  | ||||||
|         @wraps(func) |         @wraps(func) | ||||||
|         def wrapper(self: ModelViewSet, request: Request, *args, **kwargs) -> Response: |         def wrapper(self: ModelViewSet, request: Request, *args, **kwargs) -> Response: | ||||||
|             if obj_perm: |             if obj_perm: | ||||||
|                 _check_obj_perm(self, request) |                 obj = self.get_object() | ||||||
|  |                 if not request.user.has_perm(obj_perm, obj): | ||||||
|  |                     LOGGER.debug( | ||||||
|  |                         "denying access for object", user=request.user, perm=obj_perm, obj=obj | ||||||
|  |                     ) | ||||||
|  |                     return self.permission_denied(request) | ||||||
|             if global_perms: |             if global_perms: | ||||||
|                 for other_perm in global_perms: |                 for other_perm in global_perms: | ||||||
|                     if not request.user.has_perm(other_perm): |                     if not request.user.has_perm(other_perm): | ||||||
| @ -40,4 +35,4 @@ def permission_required(obj_perm: str | None = None, global_perms: list[str] | N | |||||||
|  |  | ||||||
|         return wrapper |         return wrapper | ||||||
|  |  | ||||||
|     return wrapper_outer |     return wrapper_outter | ||||||
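A usage sketch for permission_required guarding a custom viewset action; the viewset is illustrative, and the permission strings match the ones exercised by the new test_decorators.py later in this diff. The import path is the one used on the right-hand side of this hunk (the other side moves the decorator to authentik.rbac.decorators):

```python
from rest_framework.decorators import action
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet

from authentik.api.decorators import permission_required


class ApplicationMetricsViewSet(ModelViewSet):
    """Illustrative viewset; queryset/serializer_class omitted"""

    @permission_required(
        "authentik_core.view_application",  # checked against self.get_object()
        ["authentik_events.view_event"],  # plain global permission checks
    )
    @action(detail=True, methods=["GET"])
    def metrics(self, request: Request, pk=None) -> Response:
        """Only reached when both permission checks pass; otherwise 403"""
        return Response({"ok": True})
```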
| @ -12,7 +12,6 @@ from drf_spectacular.settings import spectacular_settings | |||||||
| from drf_spectacular.types import OpenApiTypes | from drf_spectacular.types import OpenApiTypes | ||||||
| from rest_framework.settings import api_settings | from rest_framework.settings import api_settings | ||||||
|  |  | ||||||
| from authentik.api.apps import AuthentikAPIConfig |  | ||||||
| from authentik.api.pagination import PAGINATION_COMPONENT_NAME, PAGINATION_SCHEMA | from authentik.api.pagination import PAGINATION_COMPONENT_NAME, PAGINATION_SCHEMA | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -102,12 +101,3 @@ def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs): | |||||||
|             comp = result["components"]["schemas"][component] |             comp = result["components"]["schemas"][component] | ||||||
|             comp["additionalProperties"] = {} |             comp["additionalProperties"] = {} | ||||||
|     return result |     return result | ||||||
|  |  | ||||||
|  |  | ||||||
| def preprocess_schema_exclude_non_api(endpoints, **kwargs): |  | ||||||
|     """Filter out all API Views which are not mounted under /api""" |  | ||||||
|     return [ |  | ||||||
|         (path, path_regex, method, callback) |  | ||||||
|         for path, path_regex, method, callback in endpoints |  | ||||||
|         if path.startswith("/" + AuthentikAPIConfig.mountpoint) |  | ||||||
|     ] |  | ||||||
|  | |||||||
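Both the postprocessing hook kept here and the preprocessing hook present on one side of this diff are plain functions that drf-spectacular discovers through its settings. The block below is an assumption about typical wiring (the setting keys are real drf-spectacular settings, the values are illustrative), not a copy of authentik's settings.py:

```python
SPECTACULAR_SETTINGS = {
    "TITLE": "authentik",
    # Runs over the endpoint list before generation; the hook removed on one
    # side of this diff filtered out everything not mounted under /api/.
    "PREPROCESSING_HOOKS": ["authentik.api.schema.preprocess_schema_exclude_non_api"],
    # Runs over the finished schema dict, e.g. to inject the pagination component.
    "POSTPROCESSING_HOOKS": ["authentik.api.schema.postprocess_schema_responses"],
}
```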
| @ -1,13 +1,13 @@ | |||||||
| {% extends "base/skeleton.html" %} | {% extends "base/skeleton.html" %} | ||||||
|  |  | ||||||
| {% load authentik_core %} | {% load static %} | ||||||
|  |  | ||||||
| {% block title %} | {% block title %} | ||||||
| API Browser - {{ brand.branding_title }} | API Browser - {{ brand.branding_title }} | ||||||
| {% endblock %} | {% endblock %} | ||||||
|  |  | ||||||
| {% block head %} | {% block head %} | ||||||
| <script src="{% versioned_script 'dist/standalone/api-browser/index-%v.js' %}" type="module"></script> | <script src="{% static 'dist/standalone/api-browser/index.js' %}?version={{ version }}" type="module"></script> | ||||||
| <meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)"> | <meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)"> | ||||||
| <meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)"> | <meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)"> | ||||||
| {% endblock %} | {% endblock %} | ||||||
|  | |||||||
| @ -25,17 +25,17 @@ class TestAPIAuth(TestCase): | |||||||
|     def test_invalid_type(self): |     def test_invalid_type(self): | ||||||
|         """Test invalid type""" |         """Test invalid type""" | ||||||
|         with self.assertRaises(AuthenticationFailed): |         with self.assertRaises(AuthenticationFailed): | ||||||
|             bearer_auth(b"foo bar") |             bearer_auth("foo bar".encode()) | ||||||
|  |  | ||||||
|     def test_invalid_empty(self): |     def test_invalid_empty(self): | ||||||
|         """Test invalid type""" |         """Test invalid type""" | ||||||
|         self.assertIsNone(bearer_auth(b"Bearer ")) |         self.assertIsNone(bearer_auth("Bearer ".encode())) | ||||||
|         self.assertIsNone(bearer_auth(b"")) |         self.assertIsNone(bearer_auth("".encode())) | ||||||
|  |  | ||||||
|     def test_invalid_no_token(self): |     def test_invalid_no_token(self): | ||||||
|         """Test invalid with no token""" |         """Test invalid with no token""" | ||||||
|         with self.assertRaises(AuthenticationFailed): |         with self.assertRaises(AuthenticationFailed): | ||||||
|             auth = b64encode(b":abc").decode() |             auth = b64encode(":abc".encode()).decode() | ||||||
|             self.assertIsNone(bearer_auth(f"Basic :{auth}".encode())) |             self.assertIsNone(bearer_auth(f"Basic :{auth}".encode())) | ||||||
|  |  | ||||||
|     def test_bearer_valid(self): |     def test_bearer_valid(self): | ||||||
|  | |||||||
35 authentik/api/tests/test_decorators.py Normal file
| @ -0,0 +1,35 @@ | |||||||
|  | """test decorators api""" | ||||||
|  |  | ||||||
|  | from django.urls import reverse | ||||||
|  | from guardian.shortcuts import assign_perm | ||||||
|  | from rest_framework.test import APITestCase | ||||||
|  |  | ||||||
|  | from authentik.core.models import Application, User | ||||||
|  | from authentik.lib.generators import generate_id | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class TestAPIDecorators(APITestCase): | ||||||
|  |     """test decorators api""" | ||||||
|  |  | ||||||
|  |     def setUp(self) -> None: | ||||||
|  |         super().setUp() | ||||||
|  |         self.user = User.objects.create(username="test-user") | ||||||
|  |  | ||||||
|  |     def test_obj_perm_denied(self): | ||||||
|  |         """Test object perm denied""" | ||||||
|  |         self.client.force_login(self.user) | ||||||
|  |         app = Application.objects.create(name=generate_id(), slug=generate_id()) | ||||||
|  |         response = self.client.get( | ||||||
|  |             reverse("authentik_api:application-metrics", kwargs={"slug": app.slug}) | ||||||
|  |         ) | ||||||
|  |         self.assertEqual(response.status_code, 403) | ||||||
|  |  | ||||||
|  |     def test_other_perm_denied(self): | ||||||
|  |         """Test other perm denied""" | ||||||
|  |         self.client.force_login(self.user) | ||||||
|  |         app = Application.objects.create(name=generate_id(), slug=generate_id()) | ||||||
|  |         assign_perm("authentik_core.view_application", self.user, app) | ||||||
|  |         response = self.client.get( | ||||||
|  |             reverse("authentik_api:application-metrics", kwargs={"slug": app.slug}) | ||||||
|  |         ) | ||||||
|  |         self.assertEqual(response.status_code, 403) | ||||||
| @ -1,6 +1,6 @@ | |||||||
| """authentik API Modelviewset tests""" | """authentik API Modelviewset tests""" | ||||||
|  |  | ||||||
| from collections.abc import Callable | from typing import Callable | ||||||
|  |  | ||||||
| from django.test import TestCase | from django.test import TestCase | ||||||
| from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet | from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet | ||||||
| @ -26,6 +26,6 @@ def viewset_tester_factory(test_viewset: type[ModelViewSet]) -> Callable: | |||||||
|  |  | ||||||
|  |  | ||||||
| for _, viewset, _ in router.registry: | for _, viewset, _ in router.registry: | ||||||
|     if not issubclass(viewset, ModelViewSet | ReadOnlyModelViewSet): |     if not issubclass(viewset, (ModelViewSet, ReadOnlyModelViewSet)): | ||||||
|         continue |         continue | ||||||
|     setattr(TestModelViewSets, f"test_viewset_{viewset.__name__}", viewset_tester_factory(viewset)) |     setattr(TestModelViewSets, f"test_viewset_{viewset.__name__}", viewset_tester_factory(viewset)) | ||||||
|  | |||||||
| @ -68,11 +68,7 @@ class ConfigView(APIView): | |||||||
|         """Get all capabilities this server instance supports""" |         """Get all capabilities this server instance supports""" | ||||||
|         caps = [] |         caps = [] | ||||||
|         deb_test = settings.DEBUG or settings.TEST |         deb_test = settings.DEBUG or settings.TEST | ||||||
|         if ( |         if Path(settings.MEDIA_ROOT).is_mount() or deb_test: | ||||||
|             CONFIG.get("storage.media.backend", "file") == "s3" |  | ||||||
|             or Path(settings.STORAGES["default"]["OPTIONS"]["location"]).is_mount() |  | ||||||
|             or deb_test |  | ||||||
|         ): |  | ||||||
|             caps.append(Capabilities.CAN_SAVE_MEDIA) |             caps.append(Capabilities.CAN_SAVE_MEDIA) | ||||||
|         for processor in get_context_processors(): |         for processor in get_context_processors(): | ||||||
|             if cap := processor.capability(): |             if cap := processor.capability(): | ||||||
|  | |||||||
| @ -33,7 +33,7 @@ for _authentik_app in get_apps(): | |||||||
|             app_name=_authentik_app.name, |             app_name=_authentik_app.name, | ||||||
|         ) |         ) | ||||||
|         continue |         continue | ||||||
|     urls: list = api_urls.api_urlpatterns |     urls: list = getattr(api_urls, "api_urlpatterns") | ||||||
|     for url in urls: |     for url in urls: | ||||||
|         if isinstance(url, URLPattern): |         if isinstance(url, URLPattern): | ||||||
|             _other_urls.append(url) |             _other_urls.append(url) | ||||||
|  | |||||||
| @ -10,13 +10,13 @@ from rest_framework.response import Response | |||||||
| from rest_framework.serializers import ListSerializer, ModelSerializer | from rest_framework.serializers import ListSerializer, ModelSerializer | ||||||
| from rest_framework.viewsets import ModelViewSet | from rest_framework.viewsets import ModelViewSet | ||||||
|  |  | ||||||
|  | from authentik.api.decorators import permission_required | ||||||
| from authentik.blueprints.models import BlueprintInstance | from authentik.blueprints.models import BlueprintInstance | ||||||
| from authentik.blueprints.v1.importer import Importer | from authentik.blueprints.v1.importer import Importer | ||||||
| from authentik.blueprints.v1.oci import OCI_PREFIX | from authentik.blueprints.v1.oci import OCI_PREFIX | ||||||
| from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict | from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| from authentik.core.api.utils import JSONDictField, PassiveSerializer | from authentik.core.api.utils import JSONDictField, PassiveSerializer | ||||||
| from authentik.rbac.decorators import permission_required |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ManagedSerializer: | class ManagedSerializer: | ||||||
| @ -51,12 +51,8 @@ class BlueprintInstanceSerializer(ModelSerializer): | |||||||
|         context = self.instance.context if self.instance else {} |         context = self.instance.context if self.instance else {} | ||||||
|         valid, logs = Importer.from_string(content, context).validate() |         valid, logs = Importer.from_string(content, context).validate() | ||||||
|         if not valid: |         if not valid: | ||||||
|             raise ValidationError( |             text_logs = "\n".join([x["event"] for x in logs]) | ||||||
|                 [ |             raise ValidationError(_("Failed to validate blueprint: %(logs)s" % {"logs": text_logs})) | ||||||
|                     _("Failed to validate blueprint"), |  | ||||||
|                     *[f"- {x.event}" for x in logs], |  | ||||||
|                 ] |  | ||||||
|             ) |  | ||||||
|         return content |         return content | ||||||
|  |  | ||||||
|     def validate(self, attrs: dict) -> dict: |     def validate(self, attrs: dict) -> dict: | ||||||
|  | |||||||
| @ -1,6 +1,5 @@ | |||||||
| """authentik Blueprints app""" | """authentik Blueprints app""" | ||||||
|  |  | ||||||
| from collections.abc import Callable |  | ||||||
| from importlib import import_module | from importlib import import_module | ||||||
| from inspect import ismethod | from inspect import ismethod | ||||||
|  |  | ||||||
| @ -8,16 +7,14 @@ from django.apps import AppConfig | |||||||
| from django.db import DatabaseError, InternalError, ProgrammingError | from django.db import DatabaseError, InternalError, ProgrammingError | ||||||
| from structlog.stdlib import BoundLogger, get_logger | from structlog.stdlib import BoundLogger, get_logger | ||||||
|  |  | ||||||
| from authentik.root.signals import startup |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ManagedAppConfig(AppConfig): | class ManagedAppConfig(AppConfig): | ||||||
|     """Basic reconciliation logic for apps""" |     """Basic reconciliation logic for apps""" | ||||||
|  |  | ||||||
|     logger: BoundLogger |     logger: BoundLogger | ||||||
|  |  | ||||||
|     RECONCILE_GLOBAL_CATEGORY: str = "global" |     RECONCILE_GLOBAL_PREFIX: str = "reconcile_global_" | ||||||
|     RECONCILE_TENANT_CATEGORY: str = "tenant" |     RECONCILE_TENANT_PREFIX: str = "reconcile_tenant_" | ||||||
|  |  | ||||||
|     def __init__(self, app_name: str, *args, **kwargs) -> None: |     def __init__(self, app_name: str, *args, **kwargs) -> None: | ||||||
|         super().__init__(app_name, *args, **kwargs) |         super().__init__(app_name, *args, **kwargs) | ||||||
| @ -25,13 +22,10 @@ class ManagedAppConfig(AppConfig): | |||||||
|  |  | ||||||
|     def ready(self) -> None: |     def ready(self) -> None: | ||||||
|         self.import_related() |         self.import_related() | ||||||
|         startup.connect(self._on_startup_callback, dispatch_uid=self.label) |         self.reconcile_global() | ||||||
|  |         self.reconcile_tenant() | ||||||
|         return super().ready() |         return super().ready() | ||||||
|  |  | ||||||
|     def _on_startup_callback(self, sender, **_): |  | ||||||
|         self._reconcile_global() |  | ||||||
|         self._reconcile_tenant() |  | ||||||
|  |  | ||||||
|     def import_related(self): |     def import_related(self): | ||||||
|         """Automatically import related modules which rely on just being imported |         """Automatically import related modules which rely on just being imported | ||||||
|         to register themselves (mainly django signals and celery tasks)""" |         to register themselves (mainly django signals and celery tasks)""" | ||||||
| @ -57,8 +51,7 @@ class ManagedAppConfig(AppConfig): | |||||||
|             meth = getattr(self, meth_name) |             meth = getattr(self, meth_name) | ||||||
|             if not ismethod(meth): |             if not ismethod(meth): | ||||||
|                 continue |                 continue | ||||||
|             category = getattr(meth, "_authentik_managed_reconcile", None) |             if not meth_name.startswith(prefix): | ||||||
|             if category != prefix: |  | ||||||
|                 continue |                 continue | ||||||
|             name = meth_name.replace(prefix, "") |             name = meth_name.replace(prefix, "") | ||||||
|             try: |             try: | ||||||
| @ -68,19 +61,7 @@ class ManagedAppConfig(AppConfig): | |||||||
|             except (DatabaseError, ProgrammingError, InternalError) as exc: |             except (DatabaseError, ProgrammingError, InternalError) as exc: | ||||||
|                 self.logger.warning("Failed to run reconcile", name=name, exc=exc) |                 self.logger.warning("Failed to run reconcile", name=name, exc=exc) | ||||||
|  |  | ||||||
|     @staticmethod |     def reconcile_tenant(self) -> None: | ||||||
|     def reconcile_tenant(func: Callable): |  | ||||||
|         """Mark a function to be called on startup (for each tenant)""" |  | ||||||
|         func._authentik_managed_reconcile = ManagedAppConfig.RECONCILE_TENANT_CATEGORY |  | ||||||
|         return func |  | ||||||
|  |  | ||||||
|     @staticmethod |  | ||||||
|     def reconcile_global(func: Callable): |  | ||||||
|         """Mark a function to be called on startup (globally)""" |  | ||||||
|         func._authentik_managed_reconcile = ManagedAppConfig.RECONCILE_GLOBAL_CATEGORY |  | ||||||
|         return func |  | ||||||
|  |  | ||||||
|     def _reconcile_tenant(self) -> None: |  | ||||||
|         """reconcile ourselves for tenanted methods""" |         """reconcile ourselves for tenanted methods""" | ||||||
|         from authentik.tenants.models import Tenant |         from authentik.tenants.models import Tenant | ||||||
|  |  | ||||||
| @ -91,9 +72,9 @@ class ManagedAppConfig(AppConfig): | |||||||
|             return |             return | ||||||
|         for tenant in tenants: |         for tenant in tenants: | ||||||
|             with tenant: |             with tenant: | ||||||
|                 self._reconcile(self.RECONCILE_TENANT_CATEGORY) |                 self._reconcile(self.RECONCILE_TENANT_PREFIX) | ||||||
|  |  | ||||||
|     def _reconcile_global(self) -> None: |     def reconcile_global(self) -> None: | ||||||
|         """ |         """ | ||||||
|         reconcile ourselves for global methods. |         reconcile ourselves for global methods. | ||||||
|         Used for signals, tasks, etc. Database queries should not be made in here. |         Used for signals, tasks, etc. Database queries should not be made in here. | ||||||
| @ -101,7 +82,7 @@ class ManagedAppConfig(AppConfig): | |||||||
|         from django_tenants.utils import get_public_schema_name, schema_context |         from django_tenants.utils import get_public_schema_name, schema_context | ||||||
|  |  | ||||||
|         with schema_context(get_public_schema_name()): |         with schema_context(get_public_schema_name()): | ||||||
|             self._reconcile(self.RECONCILE_GLOBAL_CATEGORY) |             self._reconcile(self.RECONCILE_GLOBAL_PREFIX) | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthentikBlueprintsConfig(ManagedAppConfig): | class AuthentikBlueprintsConfig(ManagedAppConfig): | ||||||
| @ -112,13 +93,11 @@ class AuthentikBlueprintsConfig(ManagedAppConfig): | |||||||
|     verbose_name = "authentik Blueprints" |     verbose_name = "authentik Blueprints" | ||||||
|     default = True |     default = True | ||||||
|  |  | ||||||
|     @ManagedAppConfig.reconcile_global |     def reconcile_global_load_blueprints_v1_tasks(self): | ||||||
|     def load_blueprints_v1_tasks(self): |  | ||||||
|         """Load v1 tasks""" |         """Load v1 tasks""" | ||||||
|         self.import_module("authentik.blueprints.v1.tasks") |         self.import_module("authentik.blueprints.v1.tasks") | ||||||
|  |  | ||||||
|     @ManagedAppConfig.reconcile_tenant |     def reconcile_tenant_blueprints_discovery(self): | ||||||
|     def blueprints_discovery(self): |  | ||||||
|         """Run blueprint discovery""" |         """Run blueprint discovery""" | ||||||
|         from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints |         from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints | ||||||
|  |  | ||||||
|  | |||||||
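The two sides of this hunk register reconcile hooks differently: one discovers them by the reconcile_global_/reconcile_tenant_ name prefix, the other marks arbitrarily named methods with decorators that set an _authentik_managed_reconcile attribute. A hedged sketch of the decorator style for a hypothetical app config (the prefix style would simply rename the methods, e.g. reconcile_global_load_signals):

```python
from authentik.blueprints.apps import ManagedAppConfig


class AuthentikExampleConfig(ManagedAppConfig):
    """Hypothetical app config used only for illustration"""

    name = "authentik.example"
    label = "authentik_example"
    verbose_name = "authentik Example"
    default = True

    @ManagedAppConfig.reconcile_global
    def load_signals(self):
        """Runs once in the public schema; no tenant-scoped database queries here"""
        self.import_module("authentik.example.signals")  # hypothetical module

    @ManagedAppConfig.reconcile_tenant
    def ensure_defaults(self):
        """Runs once per tenant schema; safe place for tenant-scoped database work"""
```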
| @ -23,11 +23,9 @@ class Command(BaseCommand): | |||||||
|                 for blueprint_path in options.get("blueprints", []): |                 for blueprint_path in options.get("blueprints", []): | ||||||
|                     content = BlueprintInstance(path=blueprint_path).retrieve() |                     content = BlueprintInstance(path=blueprint_path).retrieve() | ||||||
|                     importer = Importer.from_string(content) |                     importer = Importer.from_string(content) | ||||||
|                     valid, logs = importer.validate() |                     valid, _ = importer.validate() | ||||||
|                     if not valid: |                     if not valid: | ||||||
|                         self.stderr.write("Blueprint invalid") |                         self.stderr.write("blueprint invalid") | ||||||
|                         for log in logs: |  | ||||||
|                             self.stderr.write(f"\t{log.logger}: {log.event}: {log.attributes}") |  | ||||||
|                         sys_exit(1) |                         sys_exit(1) | ||||||
|                     importer.apply() |                     importer.apply() | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,68 +0,0 @@ | |||||||
| """Test and debug Blueprints""" |  | ||||||
|  |  | ||||||
| import atexit |  | ||||||
| import readline |  | ||||||
| from pathlib import Path |  | ||||||
| from pprint import pformat |  | ||||||
| from sys import exit as sysexit |  | ||||||
| from textwrap import indent |  | ||||||
|  |  | ||||||
| from django.core.management.base import BaseCommand, no_translations |  | ||||||
| from structlog.stdlib import get_logger |  | ||||||
| from yaml import load |  | ||||||
|  |  | ||||||
| from authentik.blueprints.v1.common import BlueprintLoader, EntryInvalidError |  | ||||||
| from authentik.core.management.commands.shell import get_banner_text |  | ||||||
| from authentik.lib.utils.errors import exception_to_string |  | ||||||
|  |  | ||||||
| LOGGER = get_logger() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Command(BaseCommand): |  | ||||||
|     """Test and debug Blueprints""" |  | ||||||
|  |  | ||||||
|     lines = [] |  | ||||||
|  |  | ||||||
|     def __init__(self, *args, **kwargs) -> None: |  | ||||||
|         super().__init__(*args, **kwargs) |  | ||||||
|         histfolder = Path("~").expanduser() / Path(".local/share/authentik") |  | ||||||
|         histfolder.mkdir(parents=True, exist_ok=True) |  | ||||||
|         histfile = histfolder / Path("blueprint_shell_history") |  | ||||||
|         readline.parse_and_bind("tab: complete") |  | ||||||
|         readline.parse_and_bind("set editing-mode vi") |  | ||||||
|  |  | ||||||
|         try: |  | ||||||
|             readline.read_history_file(str(histfile)) |  | ||||||
|         except FileNotFoundError: |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         atexit.register(readline.write_history_file, str(histfile)) |  | ||||||
|  |  | ||||||
|     @no_translations |  | ||||||
|     def handle(self, *args, **options): |  | ||||||
|         """Interactively debug blueprint files""" |  | ||||||
|         self.stdout.write(get_banner_text("Blueprint shell")) |  | ||||||
|         self.stdout.write("Type '.eval' to evaluate previously entered statement(s).") |  | ||||||
|  |  | ||||||
|         def do_eval(): |  | ||||||
|             yaml_input = "\n".join([line for line in self.lines if line]) |  | ||||||
|             data = load(yaml_input, BlueprintLoader) |  | ||||||
|             self.stdout.write(pformat(data)) |  | ||||||
|             self.lines = [] |  | ||||||
|  |  | ||||||
|         while True: |  | ||||||
|             try: |  | ||||||
|                 line = input("> ") |  | ||||||
|                 if line == ".eval": |  | ||||||
|                     do_eval() |  | ||||||
|                 else: |  | ||||||
|                     self.lines.append(line) |  | ||||||
|             except EntryInvalidError as exc: |  | ||||||
|                 self.stdout.write("Failed to evaluate expression:") |  | ||||||
|                 self.stdout.write(indent(exception_to_string(exc), prefix="  ")) |  | ||||||
|             except EOFError: |  | ||||||
|                 break |  | ||||||
|             except KeyboardInterrupt: |  | ||||||
|                 self.stdout.write() |  | ||||||
|                 sysexit(0) |  | ||||||
|         self.stdout.write() |  | ||||||
| @ -4,14 +4,12 @@ from json import dumps | |||||||
| from typing import Any | from typing import Any | ||||||
|  |  | ||||||
| from django.core.management.base import BaseCommand, no_translations | from django.core.management.base import BaseCommand, no_translations | ||||||
| from django.db.models import Model, fields | from django.db.models import Model | ||||||
| from drf_jsonschema_serializer.convert import converter, field_to_converter | from drf_jsonschema_serializer.convert import field_to_converter | ||||||
| from rest_framework.fields import Field, JSONField, UUIDField | from rest_framework.fields import Field, JSONField, UUIDField | ||||||
| from rest_framework.relations import PrimaryKeyRelatedField |  | ||||||
| from rest_framework.serializers import Serializer | from rest_framework.serializers import Serializer | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| from authentik import __version__ |  | ||||||
| from authentik.blueprints.v1.common import BlueprintEntryDesiredState | from authentik.blueprints.v1.common import BlueprintEntryDesiredState | ||||||
| from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT, is_model_allowed | from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT, is_model_allowed | ||||||
| from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry | from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry | ||||||
| @ -20,23 +18,6 @@ from authentik.lib.models import SerializerModel | |||||||
| LOGGER = get_logger() | LOGGER = get_logger() | ||||||
|  |  | ||||||
|  |  | ||||||
| @converter |  | ||||||
| class PrimaryKeyRelatedFieldConverter: |  | ||||||
|     """Custom primary key field converter which is aware of non-integer based PKs |  | ||||||
|  |  | ||||||
|     This is not an exhaustive fix for other non-int PKs, however in authentik we either |  | ||||||
|     use UUIDs or ints""" |  | ||||||
|  |  | ||||||
|     field_class = PrimaryKeyRelatedField |  | ||||||
|  |  | ||||||
|     def convert(self, field: PrimaryKeyRelatedField): |  | ||||||
|         model: Model = field.queryset.model |  | ||||||
|         pk_field = model._meta.pk |  | ||||||
|         if isinstance(pk_field, fields.UUIDField): |  | ||||||
|             return {"type": "string", "format": "uuid"} |  | ||||||
|         return {"type": "integer"} |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Command(BaseCommand): | class Command(BaseCommand): | ||||||
|     """Generate JSON Schema for blueprints""" |     """Generate JSON Schema for blueprints""" | ||||||
|  |  | ||||||
| @ -48,7 +29,7 @@ class Command(BaseCommand): | |||||||
|             "$schema": "http://json-schema.org/draft-07/schema", |             "$schema": "http://json-schema.org/draft-07/schema", | ||||||
|             "$id": "https://goauthentik.io/blueprints/schema.json", |             "$id": "https://goauthentik.io/blueprints/schema.json", | ||||||
|             "type": "object", |             "type": "object", | ||||||
|             "title": f"authentik {__version__} Blueprint schema", |             "title": "authentik Blueprint schema", | ||||||
|             "required": ["version", "entries"], |             "required": ["version", "entries"], | ||||||
|             "properties": { |             "properties": { | ||||||
|                 "version": { |                 "version": { | ||||||
| @ -113,20 +94,17 @@ class Command(BaseCommand): | |||||||
|             ) |             ) | ||||||
|             model_path = f"{model._meta.app_label}.{model._meta.model_name}" |             model_path = f"{model._meta.app_label}.{model._meta.model_name}" | ||||||
|             self.schema["properties"]["entries"]["items"]["oneOf"].append( |             self.schema["properties"]["entries"]["items"]["oneOf"].append( | ||||||
|                 self.template_entry(model_path, model, serializer) |                 self.template_entry(model_path, serializer) | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|     def template_entry(self, model_path: str, model: type[Model], serializer: Serializer) -> dict: |     def template_entry(self, model_path: str, serializer: Serializer) -> dict: | ||||||
|         """Template entry for a single model""" |         """Template entry for a single model""" | ||||||
|         model_schema = self.to_jsonschema(serializer) |         model_schema = self.to_jsonschema(serializer) | ||||||
|         model_schema["required"] = [] |         model_schema["required"] = [] | ||||||
|         def_name = f"model_{model_path}" |         def_name = f"model_{model_path}" | ||||||
|         def_path = f"#/$defs/{def_name}" |         def_path = f"#/$defs/{def_name}" | ||||||
|         self.schema["$defs"][def_name] = model_schema |         self.schema["$defs"][def_name] = model_schema | ||||||
|         def_name_perm = f"model_{model_path}_permissions" |         return { | ||||||
|         def_path_perm = f"#/$defs/{def_name_perm}" |  | ||||||
|         self.schema["$defs"][def_name_perm] = self.model_permissions(model) |  | ||||||
|         template = { |  | ||||||
|             "type": "object", |             "type": "object", | ||||||
|             "required": ["model", "identifiers"], |             "required": ["model", "identifiers"], | ||||||
|             "properties": { |             "properties": { | ||||||
| @ -138,16 +116,10 @@ class Command(BaseCommand): | |||||||
|                     "default": "present", |                     "default": "present", | ||||||
|                 }, |                 }, | ||||||
|                 "conditions": {"type": "array", "items": {"type": "boolean"}}, |                 "conditions": {"type": "array", "items": {"type": "boolean"}}, | ||||||
|                 "permissions": {"$ref": def_path_perm}, |  | ||||||
|                 "attrs": {"$ref": def_path}, |                 "attrs": {"$ref": def_path}, | ||||||
|                 "identifiers": {"$ref": def_path}, |                 "identifiers": {"$ref": def_path}, | ||||||
|             }, |             }, | ||||||
|         } |         } | ||||||
|         # Meta models don't require identifiers, as there's no matching database model to find |  | ||||||
|         if issubclass(model, BaseMetaModel): |  | ||||||
|             del template["properties"]["identifiers"] |  | ||||||
|             template["required"].remove("identifiers") |  | ||||||
|         return template |  | ||||||
|  |  | ||||||
|     def field_to_jsonschema(self, field: Field) -> dict: |     def field_to_jsonschema(self, field: Field) -> dict: | ||||||
|         """Convert a single field to json schema""" |         """Convert a single field to json schema""" | ||||||
| @ -194,20 +166,3 @@ class Command(BaseCommand): | |||||||
|         if required: |         if required: | ||||||
|             result["required"] = required |             result["required"] = required | ||||||
|         return result |         return result | ||||||
|  |  | ||||||
|     def model_permissions(self, model: type[Model]) -> dict: |  | ||||||
|         perms = [x[0] for x in model._meta.permissions] |  | ||||||
|         for action in model._meta.default_permissions: |  | ||||||
|             perms.append(f"{action}_{model._meta.model_name}") |  | ||||||
|         return { |  | ||||||
|             "type": "array", |  | ||||||
|             "items": { |  | ||||||
|                 "type": "object", |  | ||||||
|                 "required": ["permission"], |  | ||||||
|                 "properties": { |  | ||||||
|                     "permission": {"type": "string", "enum": perms}, |  | ||||||
|                     "user": {"type": "integer"}, |  | ||||||
|                     "role": {"type": "string"}, |  | ||||||
|                 }, |  | ||||||
|             }, |  | ||||||
|         } |  | ||||||
|  | |||||||
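For reference, the permissions sub-schema built by model_permissions() on one side of this diff expands to a fragment like the one below, shown here for authentik_flows.flow with only Django's default add/change/delete/view permissions listed; real models would also include any custom Meta.permissions in the enum:

```python
permissions_fragment = {
    "type": "array",
    "items": {
        "type": "object",
        "required": ["permission"],
        "properties": {
            "permission": {
                "type": "string",
                "enum": [
                    "add_flow",
                    "change_flow",
                    "delete_flow",
                    "view_flow",
                ],
            },
            "user": {"type": "integer"},
            "role": {"type": "string"},
        },
    },
}
# In a blueprint this surfaces as the `permissions:` key of an entry, as in the
# removed rbac fixture YAML further down in this diff.
```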
| @ -29,7 +29,9 @@ def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path): | |||||||
|         if version != 1: |         if version != 1: | ||||||
|             return |             return | ||||||
|         blueprint_file.seek(0) |         blueprint_file.seek(0) | ||||||
|     instance = BlueprintInstance.objects.using(db_alias).filter(path=path).first() |     instance: BlueprintInstance = ( | ||||||
|  |         BlueprintInstance.objects.using(db_alias).filter(path=path).first() | ||||||
|  |     ) | ||||||
|     rel_path = path.relative_to(Path(CONFIG.get("blueprints_dir"))) |     rel_path = path.relative_to(Path(CONFIG.get("blueprints_dir"))) | ||||||
|     meta = None |     meta = None | ||||||
|     if metadata: |     if metadata: | ||||||
|  | |||||||
| @ -71,19 +71,6 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel): | |||||||
|     enabled = models.BooleanField(default=True) |     enabled = models.BooleanField(default=True) | ||||||
|     managed_models = ArrayField(models.TextField(), default=list) |     managed_models = ArrayField(models.TextField(), default=list) | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         verbose_name = _("Blueprint Instance") |  | ||||||
|         verbose_name_plural = _("Blueprint Instances") |  | ||||||
|         unique_together = ( |  | ||||||
|             ( |  | ||||||
|                 "name", |  | ||||||
|                 "path", |  | ||||||
|             ), |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def __str__(self) -> str: |  | ||||||
|         return f"Blueprint Instance {self.name}" |  | ||||||
|  |  | ||||||
|     def retrieve_oci(self) -> str: |     def retrieve_oci(self) -> str: | ||||||
|         """Get blueprint from an OCI registry""" |         """Get blueprint from an OCI registry""" | ||||||
|         client = BlueprintOCIClient(self.path.replace(OCI_PREFIX, "https://")) |         client = BlueprintOCIClient(self.path.replace(OCI_PREFIX, "https://")) | ||||||
| @ -102,7 +89,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel): | |||||||
|                 raise BlueprintRetrievalFailed("Invalid blueprint path") |                 raise BlueprintRetrievalFailed("Invalid blueprint path") | ||||||
|             with full_path.open("r", encoding="utf-8") as _file: |             with full_path.open("r", encoding="utf-8") as _file: | ||||||
|                 return _file.read() |                 return _file.read() | ||||||
|         except OSError as exc: |         except (IOError, OSError) as exc: | ||||||
|             raise BlueprintRetrievalFailed(exc) from exc |             raise BlueprintRetrievalFailed(exc) from exc | ||||||
|  |  | ||||||
|     def retrieve(self) -> str: |     def retrieve(self) -> str: | ||||||
| @ -118,3 +105,16 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel): | |||||||
|         from authentik.blueprints.api import BlueprintInstanceSerializer |         from authentik.blueprints.api import BlueprintInstanceSerializer | ||||||
|  |  | ||||||
|         return BlueprintInstanceSerializer |         return BlueprintInstanceSerializer | ||||||
|  |  | ||||||
|  |     def __str__(self) -> str: | ||||||
|  |         return f"Blueprint Instance {self.name}" | ||||||
|  |  | ||||||
|  |     class Meta: | ||||||
|  |         verbose_name = _("Blueprint Instance") | ||||||
|  |         verbose_name_plural = _("Blueprint Instances") | ||||||
|  |         unique_together = ( | ||||||
|  |             ( | ||||||
|  |                 "name", | ||||||
|  |                 "path", | ||||||
|  |             ), | ||||||
|  |         ) | ||||||
|  | |||||||
| @ -1,7 +1,7 @@ | |||||||
| """Blueprint helpers""" | """Blueprint helpers""" | ||||||
|  |  | ||||||
| from collections.abc import Callable |  | ||||||
| from functools import wraps | from functools import wraps | ||||||
|  | from typing import Callable | ||||||
|  |  | ||||||
| from django.apps import apps | from django.apps import apps | ||||||
|  |  | ||||||
| @ -39,7 +39,7 @@ def reconcile_app(app_name: str): | |||||||
|         def wrapper(*args, **kwargs): |         def wrapper(*args, **kwargs): | ||||||
|             config = apps.get_app_config(app_name) |             config = apps.get_app_config(app_name) | ||||||
|             if isinstance(config, ManagedAppConfig): |             if isinstance(config, ManagedAppConfig): | ||||||
|                 config._on_startup_callback(None) |                 config.ready() | ||||||
|             return func(*args, **kwargs) |             return func(*args, **kwargs) | ||||||
|  |  | ||||||
|         return wrapper |         return wrapper | ||||||
|  | |||||||
| @ -1,24 +0,0 @@ | |||||||
| version: 1 |  | ||||||
| entries: |  | ||||||
|   - model: authentik_core.user |  | ||||||
|     id: user |  | ||||||
|     identifiers: |  | ||||||
|       username: "%(id)s" |  | ||||||
|     attrs: |  | ||||||
|       name: "%(id)s" |  | ||||||
|   - model: authentik_rbac.role |  | ||||||
|     id: role |  | ||||||
|     identifiers: |  | ||||||
|       name: "%(id)s" |  | ||||||
|   - model: authentik_flows.flow |  | ||||||
|     identifiers: |  | ||||||
|       slug: "%(id)s" |  | ||||||
|     attrs: |  | ||||||
|       designation: authentication |  | ||||||
|       name: foo |  | ||||||
|       title: foo |  | ||||||
|     permissions: |  | ||||||
|       - permission: view_flow |  | ||||||
|         user: !KeyOf user |  | ||||||
|       - permission: view_flow |  | ||||||
|         role: !KeyOf role |  | ||||||
| @ -1,8 +0,0 @@ | |||||||
| version: 1 |  | ||||||
| entries: |  | ||||||
|   - model: authentik_rbac.role |  | ||||||
|     identifiers: |  | ||||||
|       name: "%(id)s" |  | ||||||
|     attrs: |  | ||||||
|       permissions: |  | ||||||
|         - authentik_blueprints.view_blueprintinstance |  | ||||||
| @ -1,9 +0,0 @@ | |||||||
| version: 1 |  | ||||||
| entries: |  | ||||||
|   - model: authentik_core.user |  | ||||||
|     identifiers: |  | ||||||
|       username: "%(id)s" |  | ||||||
|     attrs: |  | ||||||
|       name: "%(id)s" |  | ||||||
|       permissions: |  | ||||||
|         - authentik_blueprints.view_blueprintinstance |  | ||||||
| @ -146,10 +146,6 @@ entries: | |||||||
|                   ] |                   ] | ||||||
|               ] |               ] | ||||||
|               nested_context: !Context context2 |               nested_context: !Context context2 | ||||||
|               at_index_sequence: !AtIndex [!Context sequence, 0] |  | ||||||
|               at_index_sequence_default: !AtIndex [!Context sequence, 100, "non existent"] |  | ||||||
|               at_index_mapping: !AtIndex [!Context mapping, "key2"] |  | ||||||
|               at_index_mapping_default: !AtIndex [!Context mapping, "invalid", "non existent"] |  | ||||||
|       identifiers: |       identifiers: | ||||||
|           name: test |           name: test | ||||||
|       conditions: |       conditions: | ||||||
|  | |||||||
| @ -1,7 +1,7 @@ | |||||||
| """test packaged blueprints""" | """test packaged blueprints""" | ||||||
|  |  | ||||||
| from collections.abc import Callable |  | ||||||
| from pathlib import Path | from pathlib import Path | ||||||
|  | from typing import Callable | ||||||
|  |  | ||||||
| from django.test import TransactionTestCase | from django.test import TransactionTestCase | ||||||
|  |  | ||||||
| @ -27,8 +27,7 @@ def blueprint_tester(file_name: Path) -> Callable: | |||||||
|         base = Path("blueprints/") |         base = Path("blueprints/") | ||||||
|         rel_path = Path(file_name).relative_to(base) |         rel_path = Path(file_name).relative_to(base) | ||||||
|         importer = Importer.from_string(BlueprintInstance(path=str(rel_path)).retrieve()) |         importer = Importer.from_string(BlueprintInstance(path=str(rel_path)).retrieve()) | ||||||
|         validation, logs = importer.validate() |         self.assertTrue(importer.validate()[0]) | ||||||
|         self.assertTrue(validation, logs) |  | ||||||
|         self.assertTrue(importer.apply()) |         self.assertTrue(importer.apply()) | ||||||
|  |  | ||||||
|     return tester |     return tester | ||||||
|  | |||||||
| @ -1,6 +1,6 @@ | |||||||
| """authentik managed models tests""" | """authentik managed models tests""" | ||||||
|  |  | ||||||
| from collections.abc import Callable | from typing import Callable, Type | ||||||
|  |  | ||||||
| from django.apps import apps | from django.apps import apps | ||||||
| from django.test import TestCase | from django.test import TestCase | ||||||
| @ -14,7 +14,7 @@ class TestModels(TestCase): | |||||||
|     """Test Models""" |     """Test Models""" | ||||||
|  |  | ||||||
|  |  | ||||||
| def serializer_tester_factory(test_model: type[SerializerModel]) -> Callable: | def serializer_tester_factory(test_model: Type[SerializerModel]) -> Callable: | ||||||
|     """Test serializer""" |     """Test serializer""" | ||||||
|  |  | ||||||
|     def tester(self: TestModels): |     def tester(self: TestModels): | ||||||
|  | |||||||
| @ -215,10 +215,6 @@ class TestBlueprintsV1(TransactionTestCase): | |||||||
|                     }, |                     }, | ||||||
|                     "nested_context": "context-nested-value", |                     "nested_context": "context-nested-value", | ||||||
|                     "env_null": None, |                     "env_null": None, | ||||||
|                     "at_index_sequence": "foo", |  | ||||||
|                     "at_index_sequence_default": "non existent", |  | ||||||
|                     "at_index_mapping": 2, |  | ||||||
|                     "at_index_mapping_default": "non existent", |  | ||||||
|                 } |                 } | ||||||
|             ).exists() |             ).exists() | ||||||
|         ) |         ) | ||||||
|  | |||||||
| @ -78,5 +78,5 @@ class TestBlueprintsV1API(APITestCase): | |||||||
|         self.assertEqual(res.status_code, 400) |         self.assertEqual(res.status_code, 400) | ||||||
|         self.assertJSONEqual( |         self.assertJSONEqual( | ||||||
|             res.content.decode(), |             res.content.decode(), | ||||||
|             {"content": ["Failed to validate blueprint", "- Invalid blueprint version"]}, |             {"content": ["Failed to validate blueprint: Invalid blueprint version"]}, | ||||||
|         ) |         ) | ||||||
|  | |||||||
| @ -1,57 +0,0 @@ | |||||||
| """Test blueprints v1""" |  | ||||||
|  |  | ||||||
| from django.test import TransactionTestCase |  | ||||||
| from guardian.shortcuts import get_perms |  | ||||||
|  |  | ||||||
| from authentik.blueprints.v1.importer import Importer |  | ||||||
| from authentik.core.models import User |  | ||||||
| from authentik.flows.models import Flow |  | ||||||
| from authentik.lib.generators import generate_id |  | ||||||
| from authentik.lib.tests.utils import load_fixture |  | ||||||
| from authentik.rbac.models import Role |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestBlueprintsV1RBAC(TransactionTestCase): |  | ||||||
|     """Test Blueprints rbac attribute""" |  | ||||||
|  |  | ||||||
|     def test_user_permission(self): |  | ||||||
|         """Test permissions""" |  | ||||||
|         uid = generate_id() |  | ||||||
|         import_yaml = load_fixture("fixtures/rbac_user.yaml", id=uid) |  | ||||||
|  |  | ||||||
|         importer = Importer.from_string(import_yaml) |  | ||||||
|         self.assertTrue(importer.validate()[0]) |  | ||||||
|         self.assertTrue(importer.apply()) |  | ||||||
|         user = User.objects.filter(username=uid).first() |  | ||||||
|         self.assertIsNotNone(user) |  | ||||||
|         self.assertTrue(user.has_perms(["authentik_blueprints.view_blueprintinstance"])) |  | ||||||
|  |  | ||||||
|     def test_role_permission(self): |  | ||||||
|         """Test permissions""" |  | ||||||
|         uid = generate_id() |  | ||||||
|         import_yaml = load_fixture("fixtures/rbac_role.yaml", id=uid) |  | ||||||
|  |  | ||||||
|         importer = Importer.from_string(import_yaml) |  | ||||||
|         self.assertTrue(importer.validate()[0]) |  | ||||||
|         self.assertTrue(importer.apply()) |  | ||||||
|         role = Role.objects.filter(name=uid).first() |  | ||||||
|         self.assertIsNotNone(role) |  | ||||||
|         self.assertEqual( |  | ||||||
|             list(role.group.permissions.all().values_list("codename", flat=True)), |  | ||||||
|             ["view_blueprintinstance"], |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_object_permission(self): |  | ||||||
|         """Test permissions""" |  | ||||||
|         uid = generate_id() |  | ||||||
|         import_yaml = load_fixture("fixtures/rbac_object.yaml", id=uid) |  | ||||||
|  |  | ||||||
|         importer = Importer.from_string(import_yaml) |  | ||||||
|         self.assertTrue(importer.validate()[0]) |  | ||||||
|         self.assertTrue(importer.apply()) |  | ||||||
|         flow = Flow.objects.filter(slug=uid).first() |  | ||||||
|         user = User.objects.filter(username=uid).first() |  | ||||||
|         role = Role.objects.filter(name=uid).first() |  | ||||||
|         self.assertIsNotNone(flow) |  | ||||||
|         self.assertEqual(get_perms(user, flow), ["view_flow"]) |  | ||||||
|         self.assertEqual(get_perms(role.group, flow), ["view_flow"]) |  | ||||||
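For context on the RBAC test file removed above: the permissions attribute on a blueprint entry (see the flow fixture near the top of this section, and BlueprintEntryPermission plus _apply_permissions in the common.py and importer.py hunks further down) ends up as django-guardian object permissions. A rough sketch of the equivalent manual calls, assuming a flow, user and role named "example" already exist (the names are illustrative):

    from guardian.shortcuts import assign_perm, get_perms

    from authentik.core.models import User
    from authentik.flows.models import Flow
    from authentik.rbac.models import Role

    flow = Flow.objects.get(slug="example")
    user = User.objects.get(username="example")
    role = Role.objects.get(name="example")

    # a `user:` entry under `permissions:` becomes a per-object permission
    assign_perm("view_flow", user, flow)
    assert get_perms(user, flow) == ["view_flow"]

    # a `role:` entry is applied through the role's backing group
    role.assign_permission("view_flow", obj=flow)
    assert get_perms(role.group, flow) == ["view_flow"]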
| @ -54,7 +54,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase): | |||||||
|             file.seek(0) |             file.seek(0) | ||||||
|             file_hash = sha512(file.read().encode()).hexdigest() |             file_hash = sha512(file.read().encode()).hexdigest() | ||||||
|             file.flush() |             file.flush() | ||||||
|             blueprints_discovery() |             blueprints_discovery()  # pylint: disable=no-value-for-parameter | ||||||
|             instance = BlueprintInstance.objects.filter(name=blueprint_id).first() |             instance = BlueprintInstance.objects.filter(name=blueprint_id).first() | ||||||
|             self.assertEqual(instance.last_applied_hash, file_hash) |             self.assertEqual(instance.last_applied_hash, file_hash) | ||||||
|             self.assertEqual( |             self.assertEqual( | ||||||
| @ -82,7 +82,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase): | |||||||
|                 ) |                 ) | ||||||
|             ) |             ) | ||||||
|             file.flush() |             file.flush() | ||||||
|             blueprints_discovery() |             blueprints_discovery()  # pylint: disable=no-value-for-parameter | ||||||
|             blueprint = BlueprintInstance.objects.filter(name="foo").first() |             blueprint = BlueprintInstance.objects.filter(name="foo").first() | ||||||
|             self.assertEqual( |             self.assertEqual( | ||||||
|                 blueprint.last_applied_hash, |                 blueprint.last_applied_hash, | ||||||
| @ -107,7 +107,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase): | |||||||
|                 ) |                 ) | ||||||
|             ) |             ) | ||||||
|             file.flush() |             file.flush() | ||||||
|             blueprints_discovery() |             blueprints_discovery()  # pylint: disable=no-value-for-parameter | ||||||
|             blueprint.refresh_from_db() |             blueprint.refresh_from_db() | ||||||
|             self.assertEqual( |             self.assertEqual( | ||||||
|                 blueprint.last_applied_hash, |                 blueprint.last_applied_hash, | ||||||
| @ -149,7 +149,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase): | |||||||
|                 instance.status, |                 instance.status, | ||||||
|                 BlueprintInstanceStatus.UNKNOWN, |                 BlueprintInstanceStatus.UNKNOWN, | ||||||
|             ) |             ) | ||||||
|             apply_blueprint(instance.pk) |             apply_blueprint(instance.pk)  # pylint: disable=no-value-for-parameter | ||||||
|             instance.refresh_from_db() |             instance.refresh_from_db() | ||||||
|             self.assertEqual(instance.last_applied_hash, "") |             self.assertEqual(instance.last_applied_hash, "") | ||||||
|             self.assertEqual( |             self.assertEqual( | ||||||
|  | |||||||
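The tasks exercised in the hunk above can also be invoked directly (they are Celery tasks, hence the no-value-for-parameter pragmas on one side). A sketch of the discover-then-apply flow; the module paths and the "example" instance name are assumptions for illustration:

    from authentik.blueprints.models import BlueprintInstance
    from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_discovery

    blueprints_discovery()                    # scan the blueprint directories
    instance = BlueprintInstance.objects.get(name="example")
    apply_blueprint(str(instance.pk))         # apply a single discovered blueprint
    instance.refresh_from_db()
    print(instance.last_applied_hash, instance.status)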
| @ -1,14 +1,13 @@ | |||||||
| """transfer common classes""" | """transfer common classes""" | ||||||
|  |  | ||||||
| from collections import OrderedDict | from collections import OrderedDict | ||||||
| from collections.abc import Generator, Iterable, Mapping |  | ||||||
| from copy import copy | from copy import copy | ||||||
| from dataclasses import asdict, dataclass, field, is_dataclass | from dataclasses import asdict, dataclass, field, is_dataclass | ||||||
| from enum import Enum | from enum import Enum | ||||||
| from functools import reduce | from functools import reduce | ||||||
| from operator import ixor | from operator import ixor | ||||||
| from os import getenv | from os import getenv | ||||||
| from typing import Any, Literal, Union | from typing import Any, Iterable, Literal, Mapping, Optional, Union | ||||||
| from uuid import UUID | from uuid import UUID | ||||||
|  |  | ||||||
| from deepmerge import always_merger | from deepmerge import always_merger | ||||||
| @ -24,10 +23,6 @@ from authentik.lib.sentry import SentryIgnoredException | |||||||
| from authentik.policies.models import PolicyBindingModel | from authentik.policies.models import PolicyBindingModel | ||||||
|  |  | ||||||
|  |  | ||||||
| class UNSET: |  | ||||||
|     """Used to test whether a key has not been set.""" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_attrs(obj: SerializerModel) -> dict[str, Any]: | def get_attrs(obj: SerializerModel) -> dict[str, Any]: | ||||||
|     """Get object's attributes via their serializer, and convert it to a normal dict""" |     """Get object's attributes via their serializer, and convert it to a normal dict""" | ||||||
|     serializer: Serializer = obj.serializer(obj) |     serializer: Serializer = obj.serializer(obj) | ||||||
| @ -50,7 +45,7 @@ def get_attrs(obj: SerializerModel) -> dict[str, Any]: | |||||||
| class BlueprintEntryState: | class BlueprintEntryState: | ||||||
|     """State of a single instance""" |     """State of a single instance""" | ||||||
|  |  | ||||||
|     instance: Model | None = None |     instance: Optional[Model] = None | ||||||
|  |  | ||||||
|  |  | ||||||
| class BlueprintEntryDesiredState(Enum): | class BlueprintEntryDesiredState(Enum): | ||||||
| @ -62,15 +57,6 @@ class BlueprintEntryDesiredState(Enum): | |||||||
|     MUST_CREATED = "must_created" |     MUST_CREATED = "must_created" | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass |  | ||||||
| class BlueprintEntryPermission: |  | ||||||
|     """Describe object-level permissions""" |  | ||||||
|  |  | ||||||
|     permission: Union[str, "YAMLTag"] |  | ||||||
|     user: Union[int, "YAMLTag", None] = field(default=None) |  | ||||||
|     role: Union[str, "YAMLTag", None] = field(default=None) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass | @dataclass | ||||||
| class BlueprintEntry: | class BlueprintEntry: | ||||||
|     """Single entry of a blueprint""" |     """Single entry of a blueprint""" | ||||||
| @ -81,15 +67,14 @@ class BlueprintEntry: | |||||||
|     ) |     ) | ||||||
|     conditions: list[Any] = field(default_factory=list) |     conditions: list[Any] = field(default_factory=list) | ||||||
|     identifiers: dict[str, Any] = field(default_factory=dict) |     identifiers: dict[str, Any] = field(default_factory=dict) | ||||||
|     attrs: dict[str, Any] | None = field(default_factory=dict) |     attrs: Optional[dict[str, Any]] = field(default_factory=dict) | ||||||
|     permissions: list[BlueprintEntryPermission] = field(default_factory=list) |  | ||||||
|  |  | ||||||
|     id: str | None = None |     id: Optional[str] = None | ||||||
|  |  | ||||||
|     _state: BlueprintEntryState = field(default_factory=BlueprintEntryState) |     _state: BlueprintEntryState = field(default_factory=BlueprintEntryState) | ||||||
|  |  | ||||||
|     def __post_init__(self, *args, **kwargs) -> None: |     def __post_init__(self, *args, **kwargs) -> None: | ||||||
|         self.__tag_contexts: list[YAMLTagContext] = [] |         self.__tag_contexts: list["YAMLTagContext"] = [] | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     def from_model(model: SerializerModel, *extra_identifier_names: str) -> "BlueprintEntry": |     def from_model(model: SerializerModel, *extra_identifier_names: str) -> "BlueprintEntry": | ||||||
| @ -107,10 +92,10 @@ class BlueprintEntry: | |||||||
|             attrs=all_attrs, |             attrs=all_attrs, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     def get_tag_context( |     def _get_tag_context( | ||||||
|         self, |         self, | ||||||
|         depth: int = 0, |         depth: int = 0, | ||||||
|         context_tag_type: type["YAMLTagContext"] | tuple["YAMLTagContext", ...] | None = None, |         context_tag_type: Optional[type["YAMLTagContext"] | tuple["YAMLTagContext", ...]] = None, | ||||||
|     ) -> "YAMLTagContext": |     ) -> "YAMLTagContext": | ||||||
|         """Get a YAMLTagContext object located at a certain depth in the tag tree""" |         """Get a YAMLTagContext object located at a certain depth in the tag tree""" | ||||||
|         if depth < 0: |         if depth < 0: | ||||||
| @ -123,8 +108,8 @@ class BlueprintEntry: | |||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             return contexts[-(depth + 1)] |             return contexts[-(depth + 1)] | ||||||
|         except IndexError as exc: |         except IndexError: | ||||||
|             raise ValueError(f"invalid depth: {depth}. Max depth: {len(contexts) - 1}") from exc |             raise ValueError(f"invalid depth: {depth}. Max depth: {len(contexts) - 1}") | ||||||
|  |  | ||||||
|     def tag_resolver(self, value: Any, blueprint: "Blueprint") -> Any: |     def tag_resolver(self, value: Any, blueprint: "Blueprint") -> Any: | ||||||
|         """Check if we have any special tags that need handling""" |         """Check if we have any special tags that need handling""" | ||||||
| @ -164,17 +149,6 @@ class BlueprintEntry: | |||||||
|         """Get the blueprint model, with yaml tags resolved if present""" |         """Get the blueprint model, with yaml tags resolved if present""" | ||||||
|         return str(self.tag_resolver(self.model, blueprint)) |         return str(self.tag_resolver(self.model, blueprint)) | ||||||
|  |  | ||||||
|     def get_permissions( |  | ||||||
|         self, blueprint: "Blueprint" |  | ||||||
|     ) -> Generator[BlueprintEntryPermission, None, None]: |  | ||||||
|         """Get permissions of this entry, with all yaml tags resolved""" |  | ||||||
|         for perm in self.permissions: |  | ||||||
|             yield BlueprintEntryPermission( |  | ||||||
|                 permission=self.tag_resolver(perm.permission, blueprint), |  | ||||||
|                 user=self.tag_resolver(perm.user, blueprint), |  | ||||||
|                 role=self.tag_resolver(perm.role, blueprint), |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|     def check_all_conditions_match(self, blueprint: "Blueprint") -> bool: |     def check_all_conditions_match(self, blueprint: "Blueprint") -> bool: | ||||||
|         """Check all conditions of this entry match (evaluate to True)""" |         """Check all conditions of this entry match (evaluate to True)""" | ||||||
|         return all(self.tag_resolver(self.conditions, blueprint)) |         return all(self.tag_resolver(self.conditions, blueprint)) | ||||||
| @ -196,15 +170,12 @@ class Blueprint: | |||||||
|     entries: list[BlueprintEntry] = field(default_factory=list) |     entries: list[BlueprintEntry] = field(default_factory=list) | ||||||
|     context: dict = field(default_factory=dict) |     context: dict = field(default_factory=dict) | ||||||
|  |  | ||||||
|     metadata: BlueprintMetadata | None = field(default=None) |     metadata: Optional[BlueprintMetadata] = field(default=None) | ||||||
|  |  | ||||||
|  |  | ||||||
| class YAMLTag: | class YAMLTag: | ||||||
|     """Base class for all YAML Tags""" |     """Base class for all YAML Tags""" | ||||||
|  |  | ||||||
|     def __repr__(self) -> str: |  | ||||||
|         return str(self.resolve(BlueprintEntry(""), Blueprint())) |  | ||||||
|  |  | ||||||
|     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: |     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: | ||||||
|         """Implement yaml tag logic""" |         """Implement yaml tag logic""" | ||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
| @ -247,7 +218,7 @@ class Env(YAMLTag): | |||||||
|     """Lookup environment variable with optional default""" |     """Lookup environment variable with optional default""" | ||||||
|  |  | ||||||
|     key: str |     key: str | ||||||
|     default: Any | None |     default: Optional[Any] | ||||||
|  |  | ||||||
|     def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None: |     def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None: | ||||||
|         super().__init__() |         super().__init__() | ||||||
| @ -266,7 +237,7 @@ class Context(YAMLTag): | |||||||
|     """Lookup key from instance context""" |     """Lookup key from instance context""" | ||||||
|  |  | ||||||
|     key: str |     key: str | ||||||
|     default: Any | None |     default: Optional[Any] | ||||||
|  |  | ||||||
|     def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None: |     def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None: | ||||||
|         super().__init__() |         super().__init__() | ||||||
| @ -310,7 +281,7 @@ class Format(YAMLTag): | |||||||
|         try: |         try: | ||||||
|             return self.format_string % tuple(args) |             return self.format_string % tuple(args) | ||||||
|         except TypeError as exc: |         except TypeError as exc: | ||||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc |             raise EntryInvalidError.from_entry(exc, entry) | ||||||
|  |  | ||||||
|  |  | ||||||
| class Find(YAMLTag): | class Find(YAMLTag): | ||||||
| @ -335,10 +306,7 @@ class Find(YAMLTag): | |||||||
|         else: |         else: | ||||||
|             model_name = self.model_name |             model_name = self.model_name | ||||||
|  |  | ||||||
|         try: |         model_class = apps.get_model(*model_name.split(".")) | ||||||
|             model_class = apps.get_model(*model_name.split(".")) |  | ||||||
|         except LookupError as exc: |  | ||||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc |  | ||||||
|  |  | ||||||
|         query = Q() |         query = Q() | ||||||
|         for cond in self.conditions: |         for cond in self.conditions: | ||||||
| @ -398,7 +366,7 @@ class Condition(YAMLTag): | |||||||
|             comparator = self._COMPARATORS[self.mode.upper()] |             comparator = self._COMPARATORS[self.mode.upper()] | ||||||
|             return comparator(tuple(bool(x) for x in args)) |             return comparator(tuple(bool(x) for x in args)) | ||||||
|         except (TypeError, KeyError) as exc: |         except (TypeError, KeyError) as exc: | ||||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc |             raise EntryInvalidError.from_entry(exc, entry) | ||||||
|  |  | ||||||
|  |  | ||||||
| class If(YAMLTag): | class If(YAMLTag): | ||||||
| @ -430,7 +398,7 @@ class If(YAMLTag): | |||||||
|                 blueprint, |                 blueprint, | ||||||
|             ) |             ) | ||||||
|         except TypeError as exc: |         except TypeError as exc: | ||||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc |             raise EntryInvalidError.from_entry(exc, entry) | ||||||
|  |  | ||||||
|  |  | ||||||
| class Enumerate(YAMLTag, YAMLTagContext): | class Enumerate(YAMLTag, YAMLTagContext): | ||||||
| @ -444,7 +412,9 @@ class Enumerate(YAMLTag, YAMLTagContext): | |||||||
|         "SEQ": (list, lambda a, b: [*a, b]), |         "SEQ": (list, lambda a, b: [*a, b]), | ||||||
|         "MAP": ( |         "MAP": ( | ||||||
|             dict, |             dict, | ||||||
|             lambda a, b: always_merger.merge(a, {b[0]: b[1]} if isinstance(b, tuple | list) else b), |             lambda a, b: always_merger.merge( | ||||||
|  |                 a, {b[0]: b[1]} if isinstance(b, (tuple, list)) else b | ||||||
|  |             ), | ||||||
|         ), |         ), | ||||||
|     } |     } | ||||||
|  |  | ||||||
| @ -486,7 +456,7 @@ class Enumerate(YAMLTag, YAMLTagContext): | |||||||
|         try: |         try: | ||||||
|             output_class, add_fn = self._OUTPUT_BODIES[self.output_body.upper()] |             output_class, add_fn = self._OUTPUT_BODIES[self.output_body.upper()] | ||||||
|         except KeyError as exc: |         except KeyError as exc: | ||||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc |             raise EntryInvalidError.from_entry(exc, entry) | ||||||
|  |  | ||||||
|         result = output_class() |         result = output_class() | ||||||
|  |  | ||||||
| @ -514,13 +484,13 @@ class EnumeratedItem(YAMLTag): | |||||||
|  |  | ||||||
|     _SUPPORTED_CONTEXT_TAGS = (Enumerate,) |     _SUPPORTED_CONTEXT_TAGS = (Enumerate,) | ||||||
|  |  | ||||||
|     def __init__(self, _loader: "BlueprintLoader", node: ScalarNode) -> None: |     def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None: | ||||||
|         super().__init__() |         super().__init__() | ||||||
|         self.depth = int(node.value) |         self.depth = int(node.value) | ||||||
|  |  | ||||||
|     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: |     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: | ||||||
|         try: |         try: | ||||||
|             context_tag: Enumerate = entry.get_tag_context( |             context_tag: Enumerate = entry._get_tag_context( | ||||||
|                 depth=self.depth, |                 depth=self.depth, | ||||||
|                 context_tag_type=EnumeratedItem._SUPPORTED_CONTEXT_TAGS, |                 context_tag_type=EnumeratedItem._SUPPORTED_CONTEXT_TAGS, | ||||||
|             ) |             ) | ||||||
| @ -530,11 +500,9 @@ class EnumeratedItem(YAMLTag): | |||||||
|                     f"{self.__class__.__name__} tags are only usable " |                     f"{self.__class__.__name__} tags are only usable " | ||||||
|                     f"inside an {Enumerate.__name__} tag", |                     f"inside an {Enumerate.__name__} tag", | ||||||
|                     entry, |                     entry, | ||||||
|                 ) from exc |                 ) | ||||||
|  |  | ||||||
|             raise EntryInvalidError.from_entry( |             raise EntryInvalidError.from_entry(f"{self.__class__.__name__} tag: {exc}", entry) | ||||||
|                 f"{self.__class__.__name__} tag: {exc}", entry |  | ||||||
|             ) from exc |  | ||||||
|  |  | ||||||
|         return context_tag.get_context(entry, blueprint) |         return context_tag.get_context(entry, blueprint) | ||||||
|  |  | ||||||
| @ -547,8 +515,8 @@ class Index(EnumeratedItem): | |||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             return context[0] |             return context[0] | ||||||
|         except IndexError as exc:  # pragma: no cover |         except IndexError:  # pragma: no cover | ||||||
|             raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry) from exc |             raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry) | ||||||
|  |  | ||||||
|  |  | ||||||
| class Value(EnumeratedItem): | class Value(EnumeratedItem): | ||||||
| @ -559,55 +527,8 @@ class Value(EnumeratedItem): | |||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             return context[1] |             return context[1] | ||||||
|         except IndexError as exc:  # pragma: no cover |         except IndexError:  # pragma: no cover | ||||||
|             raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry) from exc |             raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry) | ||||||
|  |  | ||||||
|  |  | ||||||
| class AtIndex(YAMLTag): |  | ||||||
|     """Get value at index of a sequence or mapping""" |  | ||||||
|  |  | ||||||
|     obj: YAMLTag | dict | list | tuple |  | ||||||
|     attribute: int | str | YAMLTag |  | ||||||
|     default: Any | UNSET |  | ||||||
|  |  | ||||||
|     def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None: |  | ||||||
|         super().__init__() |  | ||||||
|         self.obj = loader.construct_object(node.value[0]) |  | ||||||
|         self.attribute = loader.construct_object(node.value[1]) |  | ||||||
|         if len(node.value) == 2:  # noqa: PLR2004 |  | ||||||
|             self.default = UNSET |  | ||||||
|         else: |  | ||||||
|             self.default = loader.construct_object(node.value[2]) |  | ||||||
|  |  | ||||||
|     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: |  | ||||||
|         if isinstance(self.obj, YAMLTag): |  | ||||||
|             obj = self.obj.resolve(entry, blueprint) |  | ||||||
|         else: |  | ||||||
|             obj = self.obj |  | ||||||
|         if isinstance(self.attribute, YAMLTag): |  | ||||||
|             attribute = self.attribute.resolve(entry, blueprint) |  | ||||||
|         else: |  | ||||||
|             attribute = self.attribute |  | ||||||
|  |  | ||||||
|         if isinstance(obj, list | tuple): |  | ||||||
|             try: |  | ||||||
|                 return obj[attribute] |  | ||||||
|             except TypeError as exc: |  | ||||||
|                 raise EntryInvalidError.from_entry( |  | ||||||
|                     f"Invalid index for list: {attribute}", entry |  | ||||||
|                 ) from exc |  | ||||||
|             except IndexError as exc: |  | ||||||
|                 if self.default is UNSET: |  | ||||||
|                     raise EntryInvalidError.from_entry( |  | ||||||
|                         f"Index out of range: {attribute}", entry |  | ||||||
|                     ) from exc |  | ||||||
|                 return self.default |  | ||||||
|         if attribute in obj: |  | ||||||
|             return obj[attribute] |  | ||||||
|         else: |  | ||||||
|             if self.default is UNSET: |  | ||||||
|                 raise EntryInvalidError.from_entry(f"Key does not exist: {attribute}", entry) |  | ||||||
|             return self.default |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class BlueprintDumper(SafeDumper): | class BlueprintDumper(SafeDumper): | ||||||
| @ -634,11 +555,7 @@ class BlueprintDumper(SafeDumper): | |||||||
|  |  | ||||||
|             def factory(items): |             def factory(items): | ||||||
|                 final_dict = dict(items) |                 final_dict = dict(items) | ||||||
|                 # Remove internal state variables |  | ||||||
|                 final_dict.pop("_state", None) |                 final_dict.pop("_state", None) | ||||||
|                 # Future-proof to only remove the ID if we don't set a value |  | ||||||
|                 if "id" in final_dict and final_dict.get("id") is None: |  | ||||||
|                     final_dict.pop("id") |  | ||||||
|                 return final_dict |                 return final_dict | ||||||
|  |  | ||||||
|             data = asdict(data, dict_factory=factory) |             data = asdict(data, dict_factory=factory) | ||||||
| @ -660,19 +577,18 @@ class BlueprintLoader(SafeLoader): | |||||||
|         self.add_constructor("!Enumerate", Enumerate) |         self.add_constructor("!Enumerate", Enumerate) | ||||||
|         self.add_constructor("!Value", Value) |         self.add_constructor("!Value", Value) | ||||||
|         self.add_constructor("!Index", Index) |         self.add_constructor("!Index", Index) | ||||||
|         self.add_constructor("!AtIndex", AtIndex) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class EntryInvalidError(SentryIgnoredException): | class EntryInvalidError(SentryIgnoredException): | ||||||
|     """Error raised when an entry is invalid""" |     """Error raised when an entry is invalid""" | ||||||
|  |  | ||||||
|     entry_model: str | None |     entry_model: Optional[str] | ||||||
|     entry_id: str | None |     entry_id: Optional[str] | ||||||
|     validation_error: ValidationError | None |     validation_error: Optional[ValidationError] | ||||||
|     serializer: Serializer | None = None |     serializer: Optional[Serializer] = None | ||||||
|  |  | ||||||
|     def __init__( |     def __init__( | ||||||
|         self, *args: object, validation_error: ValidationError | None = None, **kwargs |         self, *args: object, validation_error: Optional[ValidationError] = None, **kwargs | ||||||
|     ) -> None: |     ) -> None: | ||||||
|         super().__init__(*args) |         super().__init__(*args) | ||||||
|         self.entry_model = None |         self.entry_model = None | ||||||
|  | |||||||
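The common.py hunk above shows the YAMLTag base class and the constructors registered in BlueprintLoader.__init__ (for example !Enumerate, !Value, !Index, and on one side !AtIndex). A hedged sketch of what a custom tag would look like under the same pattern; the !Upper tag and its behaviour are invented for illustration and are not part of authentik:

    from authentik.blueprints.v1.common import (
        Blueprint,
        BlueprintEntry,
        BlueprintLoader,
        YAMLTag,
    )

    class Upper(YAMLTag):
        """Hypothetical tag that upper-cases a scalar value (illustration only)."""

        def __init__(self, loader, node):
            super().__init__()
            self.value = node.value

        def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> str:
            return str(self.value).upper()

    # PyYAML constructors are registered on the loader class; authentik's loader
    # registers its built-in tags the same way inside BlueprintLoader.__init__.
    BlueprintLoader.add_constructor("!Upper", Upper)

The tag object is kept as-is at load time and only evaluated when BlueprintEntry.tag_resolver() calls resolve(), which is why the class takes both the loader node and, later, the entry and blueprint.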
| @ -1,6 +1,6 @@ | |||||||
| """Blueprint exporter""" | """Blueprint exporter""" | ||||||
|  |  | ||||||
| from collections.abc import Iterable | from typing import Iterable | ||||||
| from uuid import UUID | from uuid import UUID | ||||||
|  |  | ||||||
| from django.apps import apps | from django.apps import apps | ||||||
| @ -59,7 +59,7 @@ class Exporter: | |||||||
|         blueprint = Blueprint() |         blueprint = Blueprint() | ||||||
|         self._pre_export(blueprint) |         self._pre_export(blueprint) | ||||||
|         blueprint.metadata = BlueprintMetadata( |         blueprint.metadata = BlueprintMetadata( | ||||||
|             name=_("authentik Export - {date}".format_map({"date": str(now())})), |             name=_("authentik Export - %(date)s" % {"date": str(now())}), | ||||||
|             labels={ |             labels={ | ||||||
|                 LABEL_AUTHENTIK_GENERATED: "true", |                 LABEL_AUTHENTIK_GENERATED: "true", | ||||||
|             }, |             }, | ||||||
| @ -74,7 +74,7 @@ class Exporter: | |||||||
|  |  | ||||||
|  |  | ||||||
| class FlowExporter(Exporter): | class FlowExporter(Exporter): | ||||||
|     """Exporter customized to only return objects related to `flow`""" |     """Exporter customised to only return objects related to `flow`""" | ||||||
|  |  | ||||||
|     flow: Flow |     flow: Flow | ||||||
|     with_policies: bool |     with_policies: bool | ||||||
|  | |||||||
| @ -2,7 +2,7 @@ | |||||||
|  |  | ||||||
| from contextlib import contextmanager | from contextlib import contextmanager | ||||||
| from copy import deepcopy | from copy import deepcopy | ||||||
| from typing import Any | from typing import Any, Optional | ||||||
|  |  | ||||||
| from dacite.config import Config | from dacite.config import Config | ||||||
| from dacite.core import from_dict | from dacite.core import from_dict | ||||||
| @ -16,10 +16,11 @@ from django.db.models.query_utils import Q | |||||||
| from django.db.transaction import atomic | from django.db.transaction import atomic | ||||||
| from django.db.utils import IntegrityError | from django.db.utils import IntegrityError | ||||||
| from guardian.models import UserObjectPermission | from guardian.models import UserObjectPermission | ||||||
| from guardian.shortcuts import assign_perm |  | ||||||
| from rest_framework.exceptions import ValidationError | from rest_framework.exceptions import ValidationError | ||||||
| from rest_framework.serializers import BaseSerializer, Serializer | from rest_framework.serializers import BaseSerializer, Serializer | ||||||
| from structlog.stdlib import BoundLogger, get_logger | from structlog.stdlib import BoundLogger, get_logger | ||||||
|  | from structlog.testing import capture_logs | ||||||
|  | from structlog.types import EventDict | ||||||
| from yaml import load | from yaml import load | ||||||
|  |  | ||||||
| from authentik.blueprints.v1.common import ( | from authentik.blueprints.v1.common import ( | ||||||
| @ -33,60 +34,35 @@ from authentik.blueprints.v1.common import ( | |||||||
| from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry | from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry | ||||||
| from authentik.core.models import ( | from authentik.core.models import ( | ||||||
|     AuthenticatedSession, |     AuthenticatedSession, | ||||||
|     GroupSourceConnection, |  | ||||||
|     PropertyMapping, |     PropertyMapping, | ||||||
|     Provider, |     Provider, | ||||||
|     Source, |     Source, | ||||||
|     User, |  | ||||||
|     UserSourceConnection, |     UserSourceConnection, | ||||||
| ) | ) | ||||||
| from authentik.enterprise.license import LicenseKey | from authentik.enterprise.license import LicenseKey | ||||||
| from authentik.enterprise.models import LicenseUsage | from authentik.enterprise.models import LicenseUsage | ||||||
| from authentik.enterprise.providers.google_workspace.models import ( | from authentik.enterprise.providers.rac.models import ConnectionToken | ||||||
|     GoogleWorkspaceProviderGroup, |  | ||||||
|     GoogleWorkspaceProviderUser, |  | ||||||
| ) |  | ||||||
| from authentik.enterprise.providers.microsoft_entra.models import ( |  | ||||||
|     MicrosoftEntraProviderGroup, |  | ||||||
|     MicrosoftEntraProviderUser, |  | ||||||
| ) |  | ||||||
| from authentik.enterprise.providers.ssf.models import StreamEvent |  | ||||||
| from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import ( |  | ||||||
|     EndpointDevice, |  | ||||||
|     EndpointDeviceConnection, |  | ||||||
| ) |  | ||||||
| from authentik.events.logs import LogEvent, capture_logs |  | ||||||
| from authentik.events.models import SystemTask | from authentik.events.models import SystemTask | ||||||
| from authentik.events.utils import cleanse_dict | from authentik.events.utils import cleanse_dict | ||||||
| from authentik.flows.models import FlowToken, Stage | from authentik.flows.models import FlowToken, Stage | ||||||
| from authentik.lib.models import SerializerModel | from authentik.lib.models import SerializerModel | ||||||
| from authentik.lib.sentry import SentryIgnoredException | from authentik.lib.sentry import SentryIgnoredException | ||||||
| from authentik.lib.utils.reflection import get_apps |  | ||||||
| from authentik.outposts.models import OutpostServiceConnection | from authentik.outposts.models import OutpostServiceConnection | ||||||
| from authentik.policies.models import Policy, PolicyBindingModel | from authentik.policies.models import Policy, PolicyBindingModel | ||||||
| from authentik.policies.reputation.models import Reputation | from authentik.policies.reputation.models import Reputation | ||||||
| from authentik.providers.oauth2.models import ( | from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken | ||||||
|     AccessToken, | from authentik.providers.scim.models import SCIMGroup, SCIMUser | ||||||
|     AuthorizationCode, |  | ||||||
|     DeviceToken, |  | ||||||
|     RefreshToken, |  | ||||||
| ) |  | ||||||
| from authentik.providers.rac.models import ConnectionToken |  | ||||||
| from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser |  | ||||||
| from authentik.rbac.models import Role |  | ||||||
| from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser |  | ||||||
| from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType |  | ||||||
| from authentik.tenants.models import Tenant | from authentik.tenants.models import Tenant | ||||||
|  |  | ||||||
| # Context set when the serializer is created in a blueprint context | # Context set when the serializer is created in a blueprint context | ||||||
| # Update website/docs/customize/blueprints/v1/models.md when used | # Update website/developer-docs/blueprints/v1/models.md when used | ||||||
| SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry" | SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry" | ||||||
|  |  | ||||||
|  |  | ||||||
| def excluded_models() -> list[type[Model]]: | def excluded_models() -> list[type[Model]]: | ||||||
|     """Return a list of all excluded models that shouldn't be exposed via API |     """Return a list of all excluded models that shouldn't be exposed via API | ||||||
|     or other means (internal only, base classes, non-used objects, etc)""" |     or other means (internal only, base classes, non-used objects, etc)""" | ||||||
|  |     # pylint: disable=imported-auth-user | ||||||
|     from django.contrib.auth.models import Group as DjangoGroup |     from django.contrib.auth.models import Group as DjangoGroup | ||||||
|     from django.contrib.auth.models import User as DjangoUser |     from django.contrib.auth.models import User as DjangoUser | ||||||
|  |  | ||||||
| @ -102,7 +78,6 @@ def excluded_models() -> list[type[Model]]: | |||||||
|         Source, |         Source, | ||||||
|         PropertyMapping, |         PropertyMapping, | ||||||
|         UserSourceConnection, |         UserSourceConnection, | ||||||
|         GroupSourceConnection, |  | ||||||
|         Stage, |         Stage, | ||||||
|         OutpostServiceConnection, |         OutpostServiceConnection, | ||||||
|         Policy, |         Policy, | ||||||
| @ -110,11 +85,10 @@ def excluded_models() -> list[type[Model]]: | |||||||
|         # Classes that have other dependencies |         # Classes that have other dependencies | ||||||
|         AuthenticatedSession, |         AuthenticatedSession, | ||||||
|         # Classes which are only internally managed |         # Classes which are only internally managed | ||||||
|         # FIXME: these shouldn't need to be explicitly listed, but rather based off of a mixin |  | ||||||
|         FlowToken, |         FlowToken, | ||||||
|         LicenseUsage, |         LicenseUsage, | ||||||
|         SCIMProviderGroup, |         SCIMGroup, | ||||||
|         SCIMProviderUser, |         SCIMUser, | ||||||
|         Tenant, |         Tenant, | ||||||
|         SystemTask, |         SystemTask, | ||||||
|         ConnectionToken, |         ConnectionToken, | ||||||
| @ -122,23 +96,12 @@ def excluded_models() -> list[type[Model]]: | |||||||
|         AccessToken, |         AccessToken, | ||||||
|         RefreshToken, |         RefreshToken, | ||||||
|         Reputation, |         Reputation, | ||||||
|         WebAuthnDeviceType, |  | ||||||
|         SCIMSourceUser, |  | ||||||
|         SCIMSourceGroup, |  | ||||||
|         GoogleWorkspaceProviderUser, |  | ||||||
|         GoogleWorkspaceProviderGroup, |  | ||||||
|         MicrosoftEntraProviderUser, |  | ||||||
|         MicrosoftEntraProviderGroup, |  | ||||||
|         EndpointDevice, |  | ||||||
|         EndpointDeviceConnection, |  | ||||||
|         DeviceToken, |  | ||||||
|         StreamEvent, |  | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
| def is_model_allowed(model: type[Model]) -> bool: | def is_model_allowed(model: type[Model]) -> bool: | ||||||
|     """Check if model is allowed""" |     """Check if model is allowed""" | ||||||
|     return model not in excluded_models() and issubclass(model, SerializerModel | BaseMetaModel) |     return model not in excluded_models() and issubclass(model, (SerializerModel, BaseMetaModel)) | ||||||
|  |  | ||||||
|  |  | ||||||
| class DoRollback(SentryIgnoredException): | class DoRollback(SentryIgnoredException): | ||||||
| @ -156,23 +119,13 @@ def transaction_rollback(): | |||||||
|         pass |         pass | ||||||
|  |  | ||||||
|  |  | ||||||
| def rbac_models() -> dict: |  | ||||||
|     models = {} |  | ||||||
|     for app in get_apps(): |  | ||||||
|         for model in app.get_models(): |  | ||||||
|             if not is_model_allowed(model): |  | ||||||
|                 continue |  | ||||||
|             models[model._meta.model_name] = app.label |  | ||||||
|     return models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Importer: | class Importer: | ||||||
|     """Import Blueprint from raw dict or YAML/JSON""" |     """Import Blueprint from raw dict or YAML/JSON""" | ||||||
|  |  | ||||||
|     logger: BoundLogger |     logger: BoundLogger | ||||||
|     _import: Blueprint |     _import: Blueprint | ||||||
|  |  | ||||||
|     def __init__(self, blueprint: Blueprint, context: dict | None = None): |     def __init__(self, blueprint: Blueprint, context: Optional[dict] = None): | ||||||
|         self.__pk_map: dict[Any, Model] = {} |         self.__pk_map: dict[Any, Model] = {} | ||||||
|         self._import = blueprint |         self._import = blueprint | ||||||
|         self.logger = get_logger() |         self.logger = get_logger() | ||||||
| @ -184,10 +137,7 @@ class Importer: | |||||||
|  |  | ||||||
|     def default_context(self): |     def default_context(self): | ||||||
|         """Default context""" |         """Default context""" | ||||||
|         return { |         return {"goauthentik.io/enterprise/licensed": LicenseKey.get_total().is_valid()} | ||||||
|             "goauthentik.io/enterprise/licensed": LicenseKey.get_total().status().is_valid, |  | ||||||
|             "goauthentik.io/rbac/models": rbac_models(), |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     def from_string(yaml_input: str, context: dict | None = None) -> "Importer": |     def from_string(yaml_input: str, context: dict | None = None) -> "Importer": | ||||||
| @ -211,14 +161,14 @@ class Importer: | |||||||
|  |  | ||||||
|         def updater(value) -> Any: |         def updater(value) -> Any: | ||||||
|             if value in self.__pk_map: |             if value in self.__pk_map: | ||||||
|                 self.logger.debug("Updating reference in entry", value=value) |                 self.logger.debug("updating reference in entry", value=value) | ||||||
|                 return self.__pk_map[value] |                 return self.__pk_map[value] | ||||||
|             return value |             return value | ||||||
|  |  | ||||||
|         for key, value in attrs.items(): |         for key, value in attrs.items(): | ||||||
|             try: |             try: | ||||||
|                 if isinstance(value, dict): |                 if isinstance(value, dict): | ||||||
|                     for _, _inner_key in enumerate(value): |                     for idx, _inner_key in enumerate(value): | ||||||
|                         value[_inner_key] = updater(value[_inner_key]) |                         value[_inner_key] = updater(value[_inner_key]) | ||||||
|                 elif isinstance(value, list): |                 elif isinstance(value, list): | ||||||
|                     for idx, _inner_value in enumerate(value): |                     for idx, _inner_value in enumerate(value): | ||||||
| @ -247,17 +197,15 @@ class Importer: | |||||||
|  |  | ||||||
|         return main_query | sub_query |         return main_query | sub_query | ||||||
|  |  | ||||||
|     def _validate_single(self, entry: BlueprintEntry) -> BaseSerializer | None:  # noqa: PLR0915 |     # pylint: disable-msg=too-many-locals | ||||||
|  |     def _validate_single(self, entry: BlueprintEntry) -> Optional[BaseSerializer]: | ||||||
|         """Validate a single entry""" |         """Validate a single entry""" | ||||||
|         if not entry.check_all_conditions_match(self._import): |         if not entry.check_all_conditions_match(self._import): | ||||||
|             self.logger.debug("One or more conditions of this entry are not fulfilled, skipping") |             self.logger.debug("One or more conditions of this entry are not fulfilled, skipping") | ||||||
|             return None |             return None | ||||||
|  |  | ||||||
|         model_app_label, model_name = entry.get_model(self._import).split(".") |         model_app_label, model_name = entry.get_model(self._import).split(".") | ||||||
|         try: |         model: type[SerializerModel] = registry.get_model(model_app_label, model_name) | ||||||
|             model: type[SerializerModel] = registry.get_model(model_app_label, model_name) |  | ||||||
|         except LookupError as exc: |  | ||||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc |  | ||||||
|         # Don't use isinstance since we don't want to check for inheritance |         # Don't use isinstance since we don't want to check for inheritance | ||||||
|         if not is_model_allowed(model): |         if not is_model_allowed(model): | ||||||
|             raise EntryInvalidError.from_entry(f"Model {model} not allowed", entry) |             raise EntryInvalidError.from_entry(f"Model {model} not allowed", entry) | ||||||
| @ -301,13 +249,9 @@ class Importer: | |||||||
|  |  | ||||||
|         serializer_kwargs = {} |         serializer_kwargs = {} | ||||||
|         model_instance = existing_models.first() |         model_instance = existing_models.first() | ||||||
|         if ( |         if not isinstance(model(), BaseMetaModel) and model_instance: | ||||||
|             not isinstance(model(), BaseMetaModel) |  | ||||||
|             and model_instance |  | ||||||
|             and entry.state != BlueprintEntryDesiredState.MUST_CREATED |  | ||||||
|         ): |  | ||||||
|             self.logger.debug( |             self.logger.debug( | ||||||
|                 "Initialise serializer with instance", |                 "initialise serializer with instance", | ||||||
|                 model=model, |                 model=model, | ||||||
|                 instance=model_instance, |                 instance=model_instance, | ||||||
|                 pk=model_instance.pk, |                 pk=model_instance.pk, | ||||||
| @ -315,17 +259,16 @@ class Importer: | |||||||
|             serializer_kwargs["instance"] = model_instance |             serializer_kwargs["instance"] = model_instance | ||||||
|             serializer_kwargs["partial"] = True |             serializer_kwargs["partial"] = True | ||||||
|         elif model_instance and entry.state == BlueprintEntryDesiredState.MUST_CREATED: |         elif model_instance and entry.state == BlueprintEntryDesiredState.MUST_CREATED: | ||||||
|             msg = ( |  | ||||||
|                 f"State is set to {BlueprintEntryDesiredState.MUST_CREATED.value} " |  | ||||||
|                 "and object exists already", |  | ||||||
|             ) |  | ||||||
|             raise EntryInvalidError.from_entry( |             raise EntryInvalidError.from_entry( | ||||||
|                 ValidationError({k: msg for k in entry.identifiers.keys()}, "unique"), |                 ( | ||||||
|  |                     f"state is set to {BlueprintEntryDesiredState.MUST_CREATED} " | ||||||
|  |                     "and object exists already", | ||||||
|  |                 ), | ||||||
|                 entry, |                 entry, | ||||||
|             ) |             ) | ||||||
|         else: |         else: | ||||||
|             self.logger.debug( |             self.logger.debug( | ||||||
|                 "Initialised new serializer instance", |                 "initialised new serializer instance", | ||||||
|                 model=model, |                 model=model, | ||||||
|                 **cleanse_dict(updated_identifiers), |                 **cleanse_dict(updated_identifiers), | ||||||
|             ) |             ) | ||||||
| @ -337,7 +280,10 @@ class Importer: | |||||||
|         try: |         try: | ||||||
|             full_data = self.__update_pks_for_attrs(entry.get_attrs(self._import)) |             full_data = self.__update_pks_for_attrs(entry.get_attrs(self._import)) | ||||||
|         except ValueError as exc: |         except ValueError as exc: | ||||||
|             raise EntryInvalidError.from_entry(exc, entry) from exc |             raise EntryInvalidError.from_entry( | ||||||
|  |                 exc, | ||||||
|  |                 entry, | ||||||
|  |             ) from exc | ||||||
|         always_merger.merge(full_data, updated_identifiers) |         always_merger.merge(full_data, updated_identifiers) | ||||||
|         serializer_kwargs["data"] = full_data |         serializer_kwargs["data"] = full_data | ||||||
|  |  | ||||||
| @ -358,15 +304,6 @@ class Importer: | |||||||
|             ) from exc |             ) from exc | ||||||
|         return serializer |         return serializer | ||||||
|  |  | ||||||
|     def _apply_permissions(self, instance: Model, entry: BlueprintEntry): |  | ||||||
|         """Apply object-level permissions for an entry""" |  | ||||||
|         for perm in entry.get_permissions(self._import): |  | ||||||
|             if perm.user is not None: |  | ||||||
|                 assign_perm(perm.permission, User.objects.get(pk=perm.user), instance) |  | ||||||
|             if perm.role is not None: |  | ||||||
|                 role = Role.objects.get(pk=perm.role) |  | ||||||
|                 role.assign_permission(perm.permission, obj=instance) |  | ||||||
|  |  | ||||||
|     def apply(self) -> bool: |     def apply(self) -> bool: | ||||||
|         """Apply (create/update) models yaml, in database transaction""" |         """Apply (create/update) models yaml, in database transaction""" | ||||||
|         try: |         try: | ||||||
| @ -388,7 +325,7 @@ class Importer: | |||||||
|                 model: type[SerializerModel] = registry.get_model(model_app_label, model_name) |                 model: type[SerializerModel] = registry.get_model(model_app_label, model_name) | ||||||
|             except LookupError: |             except LookupError: | ||||||
|                 self.logger.warning( |                 self.logger.warning( | ||||||
|                     "App or Model does not exist", app=model_app_label, model=model_name |                     "app or model does not exist", app=model_app_label, model=model_name | ||||||
|                 ) |                 ) | ||||||
|                 return False |                 return False | ||||||
|             # Validate each single entry |             # Validate each single entry | ||||||
| @ -400,7 +337,7 @@ class Importer: | |||||||
|                 if entry.get_state(self._import) == BlueprintEntryDesiredState.ABSENT: |                 if entry.get_state(self._import) == BlueprintEntryDesiredState.ABSENT: | ||||||
|                     serializer = exc.serializer |                     serializer = exc.serializer | ||||||
|                 else: |                 else: | ||||||
|                     self.logger.warning(f"Entry invalid: {exc}", entry=entry, error=exc) |                     self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc) | ||||||
|                     if raise_errors: |                     if raise_errors: | ||||||
|                         raise exc |                         raise exc | ||||||
|                     return False |                     return False | ||||||
| @ -420,42 +357,43 @@ class Importer: | |||||||
|                     and state == BlueprintEntryDesiredState.CREATED |                     and state == BlueprintEntryDesiredState.CREATED | ||||||
|                 ): |                 ): | ||||||
|                     self.logger.debug( |                     self.logger.debug( | ||||||
|                         "Instance exists, skipping", |                         "instance exists, skipping", | ||||||
|                         model=model, |                         model=model, | ||||||
|                         instance=instance, |                         instance=instance, | ||||||
|                         pk=instance.pk, |                         pk=instance.pk, | ||||||
|                     ) |                     ) | ||||||
|                 else: |                 else: | ||||||
|                     instance = serializer.save() |                     instance = serializer.save() | ||||||
|                     self.logger.debug("Updated model", model=instance) |                     self.logger.debug("updated model", model=instance) | ||||||
|                 if "pk" in entry.identifiers: |                 if "pk" in entry.identifiers: | ||||||
|                     self.__pk_map[entry.identifiers["pk"]] = instance.pk |                     self.__pk_map[entry.identifiers["pk"]] = instance.pk | ||||||
|                 entry._state = BlueprintEntryState(instance) |                 entry._state = BlueprintEntryState(instance) | ||||||
|                 self._apply_permissions(instance, entry) |  | ||||||
|             elif state == BlueprintEntryDesiredState.ABSENT: |             elif state == BlueprintEntryDesiredState.ABSENT: | ||||||
|                 instance: Model | None = serializer.instance |                 instance: Optional[Model] = serializer.instance | ||||||
|                 if instance.pk: |                 if instance.pk: | ||||||
|                     instance.delete() |                     instance.delete() | ||||||
|                     self.logger.debug("Deleted model", mode=instance) |                     self.logger.debug("deleted model", mode=instance) | ||||||
|                     continue |                     continue | ||||||
|                 self.logger.debug("Entry to delete with no instance, skipping") |                 self.logger.debug("entry to delete with no instance, skipping") | ||||||
|         return True |         return True | ||||||
|  |  | ||||||
|     def validate(self, raise_validation_errors=False) -> tuple[bool, list[LogEvent]]: |     def validate(self, raise_validation_errors=False) -> tuple[bool, list[EventDict]]: | ||||||
|         """Validate loaded blueprint export, ensure all models are allowed |         """Validate loaded blueprint export, ensure all models are allowed | ||||||
|         and serializers have no errors""" |         and serializers have no errors""" | ||||||
|         self.logger.debug("Starting blueprint import validation") |         self.logger.debug("Starting blueprint import validation") | ||||||
|         orig_import = deepcopy(self._import) |         orig_import = deepcopy(self._import) | ||||||
|         if self._import.version != 1: |         if self._import.version != 1: | ||||||
|             self.logger.warning("Invalid blueprint version") |             self.logger.warning("Invalid blueprint version") | ||||||
|             return False, [LogEvent("Invalid blueprint version", log_level="warning", logger=None)] |             return False, [{"event": "Invalid blueprint version"}] | ||||||
|         with ( |         with ( | ||||||
|             transaction_rollback(), |             transaction_rollback(), | ||||||
|             capture_logs() as logs, |             capture_logs() as logs, | ||||||
|         ): |         ): | ||||||
|             successful = self._apply_models(raise_errors=raise_validation_errors) |             successful = self._apply_models(raise_errors=raise_validation_errors) | ||||||
|             if not successful: |             if not successful: | ||||||
|                 self.logger.warning("Blueprint validation failed") |                 self.logger.debug("Blueprint validation failed") | ||||||
|  |         for log in logs: | ||||||
|  |             getattr(self.logger, log.get("log_level"))(**log) | ||||||
|         self.logger.debug("Finished blueprint import validation") |         self.logger.debug("Finished blueprint import validation") | ||||||
|         self._import = orig_import |         self._import = orig_import | ||||||
|         return successful, logs |         return successful, logs | ||||||
|  | |||||||
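As a quick illustration of the gate the importer applies above: only models that pass is_model_allowed(), i.e. SerializerModel or BaseMetaModel subclasses not listed in excluded_models(), may appear in a blueprint. A small sketch; the model choices are just examples:

    from django.apps import apps

    from authentik.blueprints.v1.importer import is_model_allowed

    flow_model = apps.get_model("authentik_flows", "flow")
    print(is_model_allowed(flow_model))      # expected True: flows are blueprint-manageable

    session_model = apps.get_model("authentik_core", "authenticatedsession")
    print(is_model_allowed(session_model))   # expected False: listed in excluded_models()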
| @ -43,7 +43,7 @@ class ApplyBlueprintMetaSerializer(PassiveSerializer): | |||||||
|             LOGGER.info("Blueprint does not exist, but not required") |             LOGGER.info("Blueprint does not exist, but not required") | ||||||
|             return MetaResult() |             return MetaResult() | ||||||
|         LOGGER.debug("Applying blueprint from meta model", blueprint=self.blueprint_instance) |         LOGGER.debug("Applying blueprint from meta model", blueprint=self.blueprint_instance) | ||||||
|  |         # pylint: disable=no-value-for-parameter | ||||||
|         apply_blueprint(str(self.blueprint_instance.pk)) |         apply_blueprint(str(self.blueprint_instance.pk)) | ||||||
|         return MetaResult() |         return MetaResult() | ||||||
|  |  | ||||||
|  | |||||||
| @ -8,15 +8,15 @@ from rest_framework.serializers import Serializer | |||||||
| class BaseMetaModel(Model): | class BaseMetaModel(Model): | ||||||
|     """Base models""" |     """Base models""" | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         abstract = True |  | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     def serializer() -> Serializer: |     def serializer() -> Serializer: | ||||||
|         """Serializer similar to SerializerModel, but as a static method since |         """Serializer similar to SerializerModel, but as a static method since | ||||||
|         this is an abstract model""" |         this is an abstract model""" | ||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
|  |  | ||||||
|  |     class Meta: | ||||||
|  |         abstract = True | ||||||
|  |  | ||||||
|  |  | ||||||
| class MetaResult: | class MetaResult: | ||||||
|     """Result returned by Meta Models' serializers. Empty class but we can't return none as |     """Result returned by Meta Models' serializers. Empty class but we can't return none as | ||||||
|  | |||||||
| @ -4,6 +4,7 @@ from dataclasses import asdict, dataclass, field | |||||||
| from hashlib import sha512 | from hashlib import sha512 | ||||||
| from pathlib import Path | from pathlib import Path | ||||||
| from sys import platform | from sys import platform | ||||||
|  | from typing import Optional | ||||||
|  |  | ||||||
| from dacite.core import from_dict | from dacite.core import from_dict | ||||||
| from django.db import DatabaseError, InternalError, ProgrammingError | from django.db import DatabaseError, InternalError, ProgrammingError | ||||||
| @ -30,7 +31,6 @@ from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata, E | |||||||
| from authentik.blueprints.v1.importer import Importer | from authentik.blueprints.v1.importer import Importer | ||||||
| from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE | from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE | ||||||
| from authentik.blueprints.v1.oci import OCI_PREFIX | from authentik.blueprints.v1.oci import OCI_PREFIX | ||||||
| from authentik.events.logs import capture_logs |  | ||||||
| from authentik.events.models import TaskStatus | from authentik.events.models import TaskStatus | ||||||
| from authentik.events.system_tasks import SystemTask, prefill_task | from authentik.events.system_tasks import SystemTask, prefill_task | ||||||
| from authentik.events.utils import sanitize_dict | from authentik.events.utils import sanitize_dict | ||||||
| @ -50,14 +50,14 @@ class BlueprintFile: | |||||||
|     version: int |     version: int | ||||||
|     hash: str |     hash: str | ||||||
|     last_m: int |     last_m: int | ||||||
|     meta: BlueprintMetadata | None = field(default=None) |     meta: Optional[BlueprintMetadata] = field(default=None) | ||||||
|  |  | ||||||
|  |  | ||||||
| def start_blueprint_watcher(): | def start_blueprint_watcher(): | ||||||
|     """Start blueprint watcher, if it's not running already.""" |     """Start blueprint watcher, if it's not running already.""" | ||||||
|     # This function might be called twice since it's called on celery startup |     # This function might be called twice since it's called on celery startup | ||||||
|  |     # pylint: disable=global-statement | ||||||
|     global _file_watcher_started  # noqa: PLW0603 |     global _file_watcher_started | ||||||
|     if _file_watcher_started: |     if _file_watcher_started: | ||||||
|         return |         return | ||||||
|     observer = Observer() |     observer = Observer() | ||||||
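The start_blueprint_watcher() hunk only changes how the module-level guard is annotated for linters (pylint disable vs. noqa); the run-once pattern around watchdog's Observer is the same on both sides. A hedged sketch of that pattern using watchdog's public API (the event handler and function names below are stand-ins, not authentik's actual watcher code):

    from watchdog.events import FileSystemEventHandler
    from watchdog.observers import Observer

    _watcher_started = False


    class PrintChanges(FileSystemEventHandler):
        """Stand-in handler that just reports modified paths."""

        def on_modified(self, event):
            print("changed:", event.src_path)


    def start_watcher(path: str):
        """Start the directory watcher once; later calls are no-ops."""
        global _watcher_started  # noqa: PLW0603
        if _watcher_started:
            return
        observer = Observer()
        observer.schedule(PrintChanges(), path, recursive=True)
        observer.daemon = True
        observer.start()
        _watcher_started = True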
| @ -126,7 +126,7 @@ def blueprints_find() -> list[BlueprintFile]: | |||||||
|         # Check if any part in the path starts with a dot and assume a hidden file |         # Check if any part in the path starts with a dot and assume a hidden file | ||||||
|         if any(part for part in path.parts if part.startswith(".")): |         if any(part for part in path.parts if part.startswith(".")): | ||||||
|             continue |             continue | ||||||
|         with open(path, encoding="utf-8") as blueprint_file: |         with open(path, "r", encoding="utf-8") as blueprint_file: | ||||||
|             try: |             try: | ||||||
|                 raw_blueprint = load(blueprint_file.read(), BlueprintLoader) |                 raw_blueprint = load(blueprint_file.read(), BlueprintLoader) | ||||||
|             except YAMLError as exc: |             except YAMLError as exc: | ||||||
| @ -150,7 +150,7 @@ def blueprints_find() -> list[BlueprintFile]: | |||||||
|     throws=(DatabaseError, ProgrammingError, InternalError), base=SystemTask, bind=True |     throws=(DatabaseError, ProgrammingError, InternalError), base=SystemTask, bind=True | ||||||
| ) | ) | ||||||
| @prefill_task | @prefill_task | ||||||
| def blueprints_discovery(self: SystemTask, path: str | None = None): | def blueprints_discovery(self: SystemTask, path: Optional[str] = None): | ||||||
|     """Find blueprints and check if they need to be created in the database""" |     """Find blueprints and check if they need to be created in the database""" | ||||||
|     count = 0 |     count = 0 | ||||||
|     for blueprint in blueprints_find(): |     for blueprint in blueprints_find(): | ||||||
| @ -159,7 +159,7 @@ def blueprints_discovery(self: SystemTask, path: str | None = None): | |||||||
|         check_blueprint_v1_file(blueprint) |         check_blueprint_v1_file(blueprint) | ||||||
|         count += 1 |         count += 1 | ||||||
|     self.set_status( |     self.set_status( | ||||||
|         TaskStatus.SUCCESSFUL, _("Successfully imported {count} files.".format(count=count)) |         TaskStatus.SUCCESSFUL, _("Successfully imported %(count)d files." % {"count": count}) | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
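Several hunks in this file (and the BlueprintFile dataclass above) only swap typing.Optional[X] for the PEP 604 spelling X | None and drop the now-unused Optional import. The two annotation spellings are interchangeable; the function names here are illustrative:

    from typing import Optional


    def load_blueprint_old(path: Optional[str] = None) -> Optional[str]:
        return path


    def load_blueprint_new(path: str | None = None) -> str | None:
        # The | union syntax needs Python 3.10+ at runtime
        # (or `from __future__ import annotations` for annotation-only use on 3.7-3.9).
        return path


    # PEP 604 unions compare equal to the typing.Union / Optional form.
    assert (str | None) == Optional[str]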
| @ -197,7 +197,7 @@ def check_blueprint_v1_file(blueprint: BlueprintFile): | |||||||
| def apply_blueprint(self: SystemTask, instance_pk: str): | def apply_blueprint(self: SystemTask, instance_pk: str): | ||||||
|     """Apply single blueprint""" |     """Apply single blueprint""" | ||||||
|     self.save_on_success = False |     self.save_on_success = False | ||||||
|     instance: BlueprintInstance | None = None |     instance: Optional[BlueprintInstance] = None | ||||||
|     try: |     try: | ||||||
|         instance: BlueprintInstance = BlueprintInstance.objects.filter(pk=instance_pk).first() |         instance: BlueprintInstance = BlueprintInstance.objects.filter(pk=instance_pk).first() | ||||||
|         if not instance or not instance.enabled: |         if not instance or not instance.enabled: | ||||||
| @ -212,24 +212,23 @@ def apply_blueprint(self: SystemTask, instance_pk: str): | |||||||
|         if not valid: |         if not valid: | ||||||
|             instance.status = BlueprintInstanceStatus.ERROR |             instance.status = BlueprintInstanceStatus.ERROR | ||||||
|             instance.save() |             instance.save() | ||||||
|             self.set_status(TaskStatus.ERROR, *logs) |             self.set_status(TaskStatus.ERROR, *[x["event"] for x in logs]) | ||||||
|  |             return | ||||||
|  |         applied = importer.apply() | ||||||
|  |         if not applied: | ||||||
|  |             instance.status = BlueprintInstanceStatus.ERROR | ||||||
|  |             instance.save() | ||||||
|  |             self.set_status(TaskStatus.ERROR, "Failed to apply") | ||||||
|             return |             return | ||||||
|         with capture_logs() as logs: |  | ||||||
|             applied = importer.apply() |  | ||||||
|             if not applied: |  | ||||||
|                 instance.status = BlueprintInstanceStatus.ERROR |  | ||||||
|                 instance.save() |  | ||||||
|                 self.set_status(TaskStatus.ERROR, *logs) |  | ||||||
|                 return |  | ||||||
|         instance.status = BlueprintInstanceStatus.SUCCESSFUL |         instance.status = BlueprintInstanceStatus.SUCCESSFUL | ||||||
|         instance.last_applied_hash = file_hash |         instance.last_applied_hash = file_hash | ||||||
|         instance.last_applied = now() |         instance.last_applied = now() | ||||||
|         self.set_status(TaskStatus.SUCCESSFUL) |         self.set_status(TaskStatus.SUCCESSFUL) | ||||||
|     except ( |     except ( | ||||||
|         OSError, |  | ||||||
|         DatabaseError, |         DatabaseError, | ||||||
|         ProgrammingError, |         ProgrammingError, | ||||||
|         InternalError, |         InternalError, | ||||||
|  |         IOError, | ||||||
|         BlueprintRetrievalFailed, |         BlueprintRetrievalFailed, | ||||||
|         EntryInvalidError, |         EntryInvalidError, | ||||||
|     ) as exc: |     ) as exc: | ||||||
|  | |||||||
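In the exception tuple above, one side catches OSError and the other IOError; since Python 3.3 IOError is only an alias of OSError, so both tuples handle the same errors. For example:

    # IOError has been an alias of OSError since Python 3.3.
    assert IOError is OSError

    try:
        open("/nonexistent/blueprint.yaml")
    except OSError as exc:  # also catches everything older code spelled as IOError
        print(type(exc).__name__, exc)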
| @ -11,20 +11,21 @@ from rest_framework.filters import OrderingFilter, SearchFilter | |||||||
| from rest_framework.permissions import AllowAny | from rest_framework.permissions import AllowAny | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| from rest_framework.response import Response | from rest_framework.response import Response | ||||||
|  | from rest_framework.serializers import ModelSerializer | ||||||
| from rest_framework.validators import UniqueValidator | from rest_framework.validators import UniqueValidator | ||||||
| from rest_framework.viewsets import ModelViewSet | from rest_framework.viewsets import ModelViewSet | ||||||
|  |  | ||||||
|  | from authentik.api.authorization import SecretKeyFilter | ||||||
| from authentik.brands.models import Brand | from authentik.brands.models import Brand | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| from authentik.core.api.utils import ModelSerializer, PassiveSerializer | from authentik.core.api.utils import PassiveSerializer | ||||||
| from authentik.rbac.filters import SecretKeyFilter |  | ||||||
| from authentik.tenants.utils import get_current_tenant | from authentik.tenants.utils import get_current_tenant | ||||||
|  |  | ||||||
|  |  | ||||||
| class FooterLinkSerializer(PassiveSerializer): | class FooterLinkSerializer(PassiveSerializer): | ||||||
|     """Links returned in Config API""" |     """Links returned in Config API""" | ||||||
|  |  | ||||||
|     href = CharField(read_only=True, allow_null=True) |     href = CharField(read_only=True) | ||||||
|     name = CharField(read_only=True) |     name = CharField(read_only=True) | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -55,7 +56,6 @@ class BrandSerializer(ModelSerializer): | |||||||
|             "flow_unenrollment", |             "flow_unenrollment", | ||||||
|             "flow_user_settings", |             "flow_user_settings", | ||||||
|             "flow_device_code", |             "flow_device_code", | ||||||
|             "default_application", |  | ||||||
|             "web_certificate", |             "web_certificate", | ||||||
|             "attributes", |             "attributes", | ||||||
|         ] |         ] | ||||||
| @ -84,8 +84,8 @@ class CurrentBrandSerializer(PassiveSerializer): | |||||||
|  |  | ||||||
|     matched_domain = CharField(source="domain") |     matched_domain = CharField(source="domain") | ||||||
|     branding_title = CharField() |     branding_title = CharField() | ||||||
|     branding_logo = CharField(source="branding_logo_url") |     branding_logo = CharField() | ||||||
|     branding_favicon = CharField(source="branding_favicon_url") |     branding_favicon = CharField() | ||||||
|     ui_footer_links = ListField( |     ui_footer_links = ListField( | ||||||
|         child=FooterLinkSerializer(), |         child=FooterLinkSerializer(), | ||||||
|         read_only=True, |         read_only=True, | ||||||
|  | |||||||
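The CurrentBrandSerializer hunk repoints branding_logo and branding_favicon at model methods via source=. In Django REST Framework, a read-only field's source may name an attribute, a property, or a zero-argument method, which DRF calls during serialization. A minimal sketch under that assumption (the class, prefix value, and logic below are illustrative, not authentik's actual Brand model):

    from rest_framework.serializers import CharField, Serializer


    class FakeBrand:
        """Illustrative object exposing a method for the serializer to read."""

        branding_logo = "/static/dist/assets/icons/icon_left_brand.svg"

        def branding_logo_url(self) -> str:
            # Prepend an (assumed) deployment path prefix for bundled static assets.
            if self.branding_logo.startswith("/static"):
                return "/authentik" + self.branding_logo
            return self.branding_logo


    class FakeBrandSerializer(Serializer):
        # source= may point at a zero-argument method on the instance.
        branding_logo = CharField(source="branding_logo_url", read_only=True)


    # Inside a configured Django/DRF project:
    # FakeBrandSerializer(FakeBrand()).data
    # -> {'branding_logo': '/authentik/static/dist/assets/icons/icon_left_brand.svg'}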
| @ -9,6 +9,3 @@ class AuthentikBrandsConfig(AppConfig): | |||||||
|     name = "authentik.brands" |     name = "authentik.brands" | ||||||
|     label = "authentik_brands" |     label = "authentik_brands" | ||||||
|     verbose_name = "authentik Brands" |     verbose_name = "authentik Brands" | ||||||
|     mountpoints = { |  | ||||||
|         "authentik.brands.urls_root": "", |  | ||||||
|     } |  | ||||||
|  | |||||||
| @ -1,10 +1,10 @@ | |||||||
| """Inject brand into current request""" | """Inject brand into current request""" | ||||||
|  |  | ||||||
| from collections.abc import Callable | from typing import Callable | ||||||
|  |  | ||||||
| from django.http.request import HttpRequest | from django.http.request import HttpRequest | ||||||
| from django.http.response import HttpResponse | from django.http.response import HttpResponse | ||||||
| from django.utils.translation import override | from django.utils.translation import activate | ||||||
|  |  | ||||||
| from authentik.brands.utils import get_brand_for_request | from authentik.brands.utils import get_brand_for_request | ||||||
|  |  | ||||||
| @ -18,14 +18,10 @@ class BrandMiddleware: | |||||||
|         self.get_response = get_response |         self.get_response = get_response | ||||||
|  |  | ||||||
|     def __call__(self, request: HttpRequest) -> HttpResponse: |     def __call__(self, request: HttpRequest) -> HttpResponse: | ||||||
|         locale_to_set = None |  | ||||||
|         if not hasattr(request, "brand"): |         if not hasattr(request, "brand"): | ||||||
|             brand = get_brand_for_request(request) |             brand = get_brand_for_request(request) | ||||||
|             request.brand = brand |             setattr(request, "brand", brand) | ||||||
|             locale = brand.default_locale |             locale = brand.default_locale | ||||||
|             if locale != "": |             if locale != "": | ||||||
|                 locale_to_set = locale |                 activate(locale) | ||||||
|         if locale_to_set: |  | ||||||
|             with override(locale_to_set): |  | ||||||
|                 return self.get_response(request) |  | ||||||
|         return self.get_response(request) |         return self.get_response(request) | ||||||
|  | |||||||
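The middleware hunk replaces translation.activate() with the override() context manager, so the brand's locale applies only while the request is being handled and the previous locale is restored afterwards instead of leaking into the worker thread. A minimal sketch of that pattern (get_locale_for() is a hypothetical stand-in for the brand lookup):

    from django.http import HttpRequest, HttpResponse
    from django.utils.translation import override


    def get_locale_for(request: HttpRequest) -> str:
        """Hypothetical stand-in for resolving the request's brand locale."""
        return getattr(request, "forced_locale", "")


    class LocaleOverrideMiddleware:
        """Apply a per-request locale only for the duration of the request."""

        def __init__(self, get_response):
            self.get_response = get_response

        def __call__(self, request: HttpRequest) -> HttpResponse:
            locale = get_locale_for(request)
            if locale:
                # override() activates the locale and restores the previous one on
                # exit; activate() would leave it set on the handling thread.
                with override(locale):
                    return self.get_response(request)
            return self.get_response(request)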
| @ -1,21 +0,0 @@ | |||||||
| # Generated by Django 5.0.4 on 2024-04-18 18:56 |  | ||||||
|  |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_brands", "0005_tenantuuid_to_branduuid"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddIndex( |  | ||||||
|             model_name="brand", |  | ||||||
|             index=models.Index(fields=["domain"], name="authentik_b_domain_b9b24a_idx"), |  | ||||||
|         ), |  | ||||||
|         migrations.AddIndex( |  | ||||||
|             model_name="brand", |  | ||||||
|             index=models.Index(fields=["default"], name="authentik_b_default_3ccf12_idx"), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -1,26 +0,0 @@ | |||||||
| # Generated by Django 5.0.6 on 2024-07-04 20:32 |  | ||||||
|  |  | ||||||
| import django.db.models.deletion |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|  |  | ||||||
|     dependencies = [ |  | ||||||
|         ("authentik_brands", "0006_brand_authentik_b_domain_b9b24a_idx_and_more"), |  | ||||||
|         ("authentik_core", "0035_alter_group_options_and_more"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AddField( |  | ||||||
|             model_name="brand", |  | ||||||
|             name="default_application", |  | ||||||
|             field=models.ForeignKey( |  | ||||||
|                 default=None, |  | ||||||
|                 help_text="When set, external users will be redirected to this application after authenticating.", |  | ||||||
|                 null=True, |  | ||||||
|                 on_delete=django.db.models.deletion.SET_DEFAULT, |  | ||||||
|                 to="authentik_core.application", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @ -3,14 +3,12 @@ | |||||||
| from uuid import uuid4 | from uuid import uuid4 | ||||||
|  |  | ||||||
| from django.db import models | from django.db import models | ||||||
| from django.http import HttpRequest |  | ||||||
| from django.utils.translation import gettext_lazy as _ | from django.utils.translation import gettext_lazy as _ | ||||||
| from rest_framework.serializers import Serializer | from rest_framework.serializers import Serializer | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| from authentik.crypto.models import CertificateKeyPair | from authentik.crypto.models import CertificateKeyPair | ||||||
| from authentik.flows.models import Flow | from authentik.flows.models import Flow | ||||||
| from authentik.lib.config import CONFIG |  | ||||||
| from authentik.lib.models import SerializerModel | from authentik.lib.models import SerializerModel | ||||||
|  |  | ||||||
| LOGGER = get_logger() | LOGGER = get_logger() | ||||||
| @ -53,16 +51,6 @@ class Brand(SerializerModel): | |||||||
|         Flow, null=True, on_delete=models.SET_NULL, related_name="brand_device_code" |         Flow, null=True, on_delete=models.SET_NULL, related_name="brand_device_code" | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|     default_application = models.ForeignKey( |  | ||||||
|         "authentik_core.Application", |  | ||||||
|         null=True, |  | ||||||
|         default=None, |  | ||||||
|         on_delete=models.SET_DEFAULT, |  | ||||||
|         help_text=_( |  | ||||||
|             "When set, external users will be redirected to this application after authenticating." |  | ||||||
|         ), |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     web_certificate = models.ForeignKey( |     web_certificate = models.ForeignKey( | ||||||
|         CertificateKeyPair, |         CertificateKeyPair, | ||||||
|         null=True, |         null=True, | ||||||
| @ -72,18 +60,6 @@ class Brand(SerializerModel): | |||||||
|     ) |     ) | ||||||
|     attributes = models.JSONField(default=dict, blank=True) |     attributes = models.JSONField(default=dict, blank=True) | ||||||
|  |  | ||||||
|     def branding_logo_url(self) -> str: |  | ||||||
|         """Get branding_logo with the correct prefix""" |  | ||||||
|         if self.branding_logo.startswith("/static"): |  | ||||||
|             return CONFIG.get("web.path", "/")[:-1] + self.branding_logo |  | ||||||
|         return self.branding_logo |  | ||||||
|  |  | ||||||
|     def branding_favicon_url(self) -> str: |  | ||||||
|         """Get branding_favicon with the correct prefix""" |  | ||||||
|         if self.branding_favicon.startswith("/static"): |  | ||||||
|             return CONFIG.get("web.path", "/")[:-1] + self.branding_favicon |  | ||||||
|         return self.branding_favicon |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def serializer(self) -> Serializer: |     def serializer(self) -> Serializer: | ||||||
|         from authentik.brands.api import BrandSerializer |         from authentik.brands.api import BrandSerializer | ||||||
| @ -95,7 +71,7 @@ class Brand(SerializerModel): | |||||||
|         """Get default locale""" |         """Get default locale""" | ||||||
|         try: |         try: | ||||||
|             return self.attributes.get("settings", {}).get("locale", "") |             return self.attributes.get("settings", {}).get("locale", "") | ||||||
|  |         # pylint: disable=broad-except | ||||||
|         except Exception as exc: |         except Exception as exc: | ||||||
|             LOGGER.warning("Failed to get default locale", exc=exc) |             LOGGER.warning("Failed to get default locale", exc=exc) | ||||||
|             return "" |             return "" | ||||||
| @ -108,17 +84,3 @@ class Brand(SerializerModel): | |||||||
|     class Meta: |     class Meta: | ||||||
|         verbose_name = _("Brand") |         verbose_name = _("Brand") | ||||||
|         verbose_name_plural = _("Brands") |         verbose_name_plural = _("Brands") | ||||||
|         indexes = [ |  | ||||||
|             models.Index(fields=["domain"]), |  | ||||||
|             models.Index(fields=["default"]), |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class WebfingerProvider(models.Model): |  | ||||||
|     """Provider which supports webfinger discovery""" |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         abstract = True |  | ||||||
|  |  | ||||||
|     def webfinger(self, resource: str, request: HttpRequest) -> dict: |  | ||||||
|         raise NotImplementedError() |  | ||||||
|  | |||||||
| @ -5,11 +5,7 @@ from rest_framework.test import APITestCase | |||||||
|  |  | ||||||
| from authentik.brands.api import Themes | from authentik.brands.api import Themes | ||||||
| from authentik.brands.models import Brand | from authentik.brands.models import Brand | ||||||
| from authentik.core.models import Application |  | ||||||
| from authentik.core.tests.utils import create_test_admin_user, create_test_brand | from authentik.core.tests.utils import create_test_admin_user, create_test_brand | ||||||
| from authentik.lib.generators import generate_id |  | ||||||
| from authentik.providers.oauth2.models import OAuth2Provider |  | ||||||
| from authentik.providers.saml.models import SAMLProvider |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestBrands(APITestCase): | class TestBrands(APITestCase): | ||||||
| @ -79,45 +75,3 @@ class TestBrands(APITestCase): | |||||||
|             reverse("authentik_api:brand-list"), data={"domain": "bar", "default": True} |             reverse("authentik_api:brand-list"), data={"domain": "bar", "default": True} | ||||||
|         ) |         ) | ||||||
|         self.assertEqual(response.status_code, 400) |         self.assertEqual(response.status_code, 400) | ||||||
|  |  | ||||||
|     def test_webfinger_no_app(self): |  | ||||||
|         """Test Webfinger""" |  | ||||||
|         create_test_brand() |  | ||||||
|         self.assertJSONEqual( |  | ||||||
|             self.client.get(reverse("authentik_brands:webfinger")).content.decode(), {} |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_webfinger_not_supported(self): |  | ||||||
|         """Test Webfinger""" |  | ||||||
|         brand = create_test_brand() |  | ||||||
|         provider = SAMLProvider.objects.create( |  | ||||||
|             name=generate_id(), |  | ||||||
|         ) |  | ||||||
|         app = Application.objects.create(name=generate_id(), slug=generate_id(), provider=provider) |  | ||||||
|         brand.default_application = app |  | ||||||
|         brand.save() |  | ||||||
|         self.assertJSONEqual( |  | ||||||
|             self.client.get(reverse("authentik_brands:webfinger")).content.decode(), {} |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_webfinger_oidc(self): |  | ||||||
|         """Test Webfinger""" |  | ||||||
|         brand = create_test_brand() |  | ||||||
|         provider = OAuth2Provider.objects.create( |  | ||||||
|             name=generate_id(), |  | ||||||
|         ) |  | ||||||
|         app = Application.objects.create(name=generate_id(), slug=generate_id(), provider=provider) |  | ||||||
|         brand.default_application = app |  | ||||||
|         brand.save() |  | ||||||
|         self.assertJSONEqual( |  | ||||||
|             self.client.get(reverse("authentik_brands:webfinger")).content.decode(), |  | ||||||
|             { |  | ||||||
|                 "links": [ |  | ||||||
|                     { |  | ||||||
|                         "href": f"http://testserver/application/o/{app.slug}/", |  | ||||||
|                         "rel": "http://openid.net/specs/connect/1.0/issuer", |  | ||||||
|                     } |  | ||||||
|                 ], |  | ||||||
|                 "subject": None, |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  | |||||||
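The removed tests above pin the JSON shape of the /.well-known/webfinger response: an empty document when no OIDC default application is configured, otherwise a links entry with the OpenID issuer rel and the requested resource as subject. A hedged sketch of a view producing that shape (not authentik's actual WebFingerView; the issuer lookup is a hypothetical stand-in):

    from django.http import HttpRequest, JsonResponse
    from django.views import View

    OIDC_ISSUER_REL = "http://openid.net/specs/connect/1.0/issuer"


    class WebFingerSketchView(View):
        """Return the webfinger document shape the deleted tests expect."""

        def get(self, request: HttpRequest) -> JsonResponse:
            issuer = self.get_issuer(request)
            if not issuer:
                # No default application, or its provider is not OIDC: empty document.
                return JsonResponse({})
            return JsonResponse(
                {
                    "links": [{"href": issuer, "rel": OIDC_ISSUER_REL}],
                    # `resource` is an optional query parameter, so subject may be null.
                    "subject": request.GET.get("resource"),
                }
            )

        def get_issuer(self, request: HttpRequest) -> str | None:
            """Hypothetical hook: resolve the default application's OIDC issuer URL."""
            return getattr(request, "oidc_issuer", None)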
| @ -1,9 +0,0 @@ | |||||||
| """authentik brand root URLs""" |  | ||||||
|  |  | ||||||
| from django.urls import path |  | ||||||
|  |  | ||||||
| from authentik.brands.views.webfinger import WebFingerView |  | ||||||
|  |  | ||||||
| urlpatterns = [ |  | ||||||
|     path(".well-known/webfinger", WebFingerView.as_view(), name="webfinger"), |  | ||||||
| ] |  | ||||||
| @ -5,11 +5,10 @@ from typing import Any | |||||||
| from django.db.models import F, Q | from django.db.models import F, Q | ||||||
| from django.db.models import Value as V | from django.db.models import Value as V | ||||||
| from django.http.request import HttpRequest | from django.http.request import HttpRequest | ||||||
| from sentry_sdk import get_current_span | from sentry_sdk.hub import Hub | ||||||
|  |  | ||||||
| from authentik import get_full_version | from authentik import get_full_version | ||||||
| from authentik.brands.models import Brand | from authentik.brands.models import Brand | ||||||
| from authentik.tenants.models import Tenant |  | ||||||
|  |  | ||||||
| _q_default = Q(default=True) | _q_default = Q(default=True) | ||||||
| DEFAULT_BRAND = Brand(domain="fallback") | DEFAULT_BRAND = Brand(domain="fallback") | ||||||
| @ -31,14 +30,13 @@ def get_brand_for_request(request: HttpRequest) -> Brand: | |||||||
| def context_processor(request: HttpRequest) -> dict[str, Any]: | def context_processor(request: HttpRequest) -> dict[str, Any]: | ||||||
|     """Context Processor that injects brand object into every template""" |     """Context Processor that injects brand object into every template""" | ||||||
|     brand = getattr(request, "brand", DEFAULT_BRAND) |     brand = getattr(request, "brand", DEFAULT_BRAND) | ||||||
|     tenant = getattr(request, "tenant", Tenant()) |  | ||||||
|     trace = "" |     trace = "" | ||||||
|     span = get_current_span() |     span = Hub.current.scope.span | ||||||
|     if span: |     if span: | ||||||
|         trace = span.to_traceparent() |         trace = span.to_traceparent() | ||||||
|     return { |     return { | ||||||
|         "brand": brand, |         "brand": brand, | ||||||
|         "footer_links": tenant.footer_links, |         "footer_links": request.tenant.footer_links, | ||||||
|         "sentry_trace": trace, |         "sentry_trace": trace, | ||||||
|         "version": get_full_version(), |         "version": get_full_version(), | ||||||
|     } |     } | ||||||
|  | |||||||
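The utils.py hunk switches from the legacy Hub API (Hub.current.scope.span) to sentry_sdk.get_current_span() when building the sentry-trace value that templates embed; to_traceparent() is the public sentry-sdk Span API used on both sides. A sketch of just the tracing part of such a context processor (the returned key name comes from the diff; everything else is illustrative):

    from typing import Any

    from django.http import HttpRequest
    from sentry_sdk import get_current_span


    def tracing_context(request: HttpRequest) -> dict[str, Any]:
        """Expose the current Sentry traceparent to templates, if a span is active."""
        trace = ""
        span = get_current_span()
        if span:
            # to_traceparent() renders "<trace_id>-<span_id>-<sampled>", the value
            # used for the sentry-trace <meta> tag / header.
            trace = span.to_traceparent()
        return {"sentry_trace": trace}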
Some files were not shown because too many files have changed in this diff.