Compare commits


10 Commits

3029 changed files with 117113 additions and 376843 deletions

View File

@@ -1,36 +1,28 @@
[bumpversion] [bumpversion]
current_version = 2025.6.3 current_version = 2023.10.7
tag = True tag = True
commit = True commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
serialize = serialize =
{major}.{minor}.{patch}-{rc_t}{rc_n} {major}.{minor}.{patch}-{rc_t}{rc_n}
{major}.{minor}.{patch} {major}.{minor}.{patch}
message = release: {new_version} message = release: {new_version}
tag_name = version/{new_version} tag_name = version/{new_version}
[bumpversion:part:rc_t] [bumpversion:part:rc_t]
values = values =
rc rc
final final
optional_value = final optional_value = final
[bumpversion:file:pyproject.toml] [bumpversion:file:pyproject.toml]
[bumpversion:file:uv.lock]
[bumpversion:file:package.json]
[bumpversion:file:package-lock.json]
[bumpversion:file:docker-compose.yml] [bumpversion:file:docker-compose.yml]
[bumpversion:file:schema.yml] [bumpversion:file:schema.yml]
[bumpversion:file:blueprints/schema.json]
[bumpversion:file:authentik/__init__.py] [bumpversion:file:authentik/__init__.py]
[bumpversion:file:internal/constants/constants.go] [bumpversion:file:internal/constants/constants.go]
[bumpversion:file:lifecycle/aws/template.yaml] [bumpversion:file:web/src/common/constants.ts]
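As a quick illustration of the version grammar in the `.bumpversion.cfg` hunk above (a sketch for orientation only, assuming single backslashes in the `parse` pattern), the regex splits a version such as `2025.6.3-rc1` into its named groups, and the two `serialize` formats reassemble it with or without the release-candidate suffix:

```python
import re

# Same pattern as the `parse =` line above (single backslashes assumed here).
PARSE = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?"
)

def serialize(parts: dict) -> str:
    """Mirror the two serialize formats: the rc suffix is used only when present."""
    if parts.get("rc_t"):
        return "{major}.{minor}.{patch}-{rc_t}{rc_n}".format(**parts)
    return "{major}.{minor}.{patch}".format(**parts)

for version in ("2025.6.3", "2025.6.3-rc1"):
    parts = PARSE.match(version).groupdict()
    print(parts)             # named groups: major/minor/patch and optional rc_t/rc_n
    print(serialize(parts))  # round-trips back to the original version string
```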

View File

@@ -5,10 +5,8 @@ dist/**
build/** build/**
build_docs/** build_docs/**
*Dockerfile *Dockerfile
**/*Dockerfile
blueprints/local blueprints/local
.git .git
!gen-ts-api/node_modules !gen-ts-api/node_modules
!gen-ts-api/dist/** !gen-ts-api/dist/**
!gen-go-api/ !gen-go-api/
.venv

View File

@@ -7,9 +7,6 @@ charset = utf-8
trim_trailing_whitespace = true trim_trailing_whitespace = true
insert_final_newline = true insert_final_newline = true
[*.toml]
indent_size = 2
[*.html] [*.html]
indent_size = 2 indent_size = 2

2
.github/FUNDING.yml vendored
View File

@@ -1 +1 @@
custom: https://goauthentik.io/pricing/ github: [BeryJu]

View File

@@ -28,11 +28,7 @@ Output of docker-compose logs or kubectl logs respectively
**Version and Deployment (please complete the following information):** **Version and Deployment (please complete the following information):**
<!-- - authentik version: [e.g. 2021.8.5]
Notice: authentik supports installation via Docker, Kubernetes, and AWS CloudFormation only. Support is not available for other methods. For detailed installation and configuration instructions, please refer to the official documentation at https://docs.goauthentik.io/docs/install-config/.
-->
- authentik version: [e.g. 2025.2.0]
- Deployment: [e.g. docker-compose, helm] - Deployment: [e.g. docker-compose, helm]
**Additional context** **Additional context**

View File

@@ -1,22 +0,0 @@
---
name: Documentation issue
about: Suggest an improvement or report a problem
title: ""
labels: documentation
assignees: ""
---
**Do you see an area that can be clarified or expanded, a technical inaccuracy, or a broken link? Please describe.**
A clear and concise description of what the problem is, or where the document can be improved. Ex. I believe we need more details about [...]
**Provide the URL or link to the exact page in the documentation to which you are referring.**
If there are multiple pages, list them all, and be sure to state the header or section where the content is.
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Additional context**
Add any other context or screenshots about the documentation issue here.
**Consider opening a PR!**
If the issue is one that you can fix, or even make a good pass at, we'd appreciate a PR. For more information about making a contribution to the docs, and using our Style Guide and our templates, refer to ["Writing documentation"](https://docs.goauthentik.io/docs/developer-docs/docs/writing-documentation).

View File

@@ -9,7 +9,7 @@ assignees: ""
**Describe your question/** **Describe your question/**
A clear and concise description of what you're trying to do. A clear and concise description of what you're trying to do.
**Relevant info** **Relevant infos**
i.e. Version of other software you're using, specifics of your setup i.e. Version of other software you're using, specifics of your setup
**Screenshots** **Screenshots**
@@ -20,12 +20,7 @@ Output of docker-compose logs or kubectl logs respectively
**Version and Deployment (please complete the following information):** **Version and Deployment (please complete the following information):**
<!-- - authentik version: [e.g. 2021.8.5]
Notice: authentik supports installation via Docker, Kubernetes, and AWS CloudFormation only. Support is not available for other methods. For detailed installation and configuration instructions, please refer to the official documentation at https://docs.goauthentik.io/docs/install-config/.
-->
- authentik version: [e.g. 2025.2.0]
- Deployment: [e.g. docker-compose, helm] - Deployment: [e.g. docker-compose, helm]
**Additional context** **Additional context**

View File

@@ -35,6 +35,14 @@ runs:
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
``` ```
For arm64, use these values:
```shell
AUTHENTIK_IMAGE=ghcr.io/goauthentik/dev-server
AUTHENTIK_TAG=${{ inputs.tag }}-arm64
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```
Afterwards, run the upgrade commands from the latest release notes. Afterwards, run the upgrade commands from the latest release notes.
</details> </details>
<details> <details>
@@ -46,10 +54,20 @@ runs:
authentik: authentik:
outposts: outposts:
container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
global: image:
image: repository: ghcr.io/goauthentik/dev-server
repository: ghcr.io/goauthentik/dev-server tag: ${{ inputs.tag }}
tag: ${{ inputs.tag }} ```
For arm64, use these values:
```yaml
authentik:
outposts:
container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
image:
repository: ghcr.io/goauthentik/dev-server
tag: ${{ inputs.tag }}-arm64
``` ```
Afterwards, run the upgrade commands from the latest release notes. Afterwards, run the upgrade commands from the latest release notes.

View File

@@ -9,15 +9,8 @@ inputs:
image-arch: image-arch:
required: false required: false
description: "Docker image arch" description: "Docker image arch"
release:
required: true
description: "True if this is a release build, false if this is a dev/PR build"
outputs: outputs:
shouldPush:
description: "Whether to push the image or not"
value: ${{ steps.ev.outputs.shouldPush }}
sha: sha:
description: "sha" description: "sha"
value: ${{ steps.ev.outputs.sha }} value: ${{ steps.ev.outputs.sha }}
@@ -32,36 +25,69 @@ outputs:
imageTags: imageTags:
description: "Docker image tags" description: "Docker image tags"
value: ${{ steps.ev.outputs.imageTags }} value: ${{ steps.ev.outputs.imageTags }}
imageTagsJSON:
description: "Docker image tags, as a JSON array"
value: ${{ steps.ev.outputs.imageTagsJSON }}
attestImageNames:
description: "Docker image names used for attestation"
value: ${{ steps.ev.outputs.attestImageNames }}
cacheTo:
description: "cache-to value for the docker build step"
value: ${{ steps.ev.outputs.cacheTo }}
imageMainTag: imageMainTag:
description: "Docker image main tag" description: "Docker image main tag"
value: ${{ steps.ev.outputs.imageMainTag }} value: ${{ steps.ev.outputs.imageMainTag }}
imageMainName:
description: "Docker image main name"
value: ${{ steps.ev.outputs.imageMainName }}
imageBuildArgs:
description: "Docker image build args"
value: ${{ steps.ev.outputs.imageBuildArgs }}
runs: runs:
using: "composite" using: "composite"
steps: steps:
- name: Generate config - name: Generate config
id: ev id: ev
shell: bash shell: python
env:
IMAGE_NAME: ${{ inputs.image-name }}
IMAGE_ARCH: ${{ inputs.image-arch }}
RELEASE: ${{ inputs.release }}
PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
REF: ${{ github.ref }}
run: | run: |
python3 ${{ github.action_path }}/push_vars.py """Helper script to get the actual branch name, docker safe"""
import configparser
import os
from time import time
parser = configparser.ConfigParser()
parser.read(".bumpversion.cfg")
branch_name = os.environ["GITHUB_REF"]
if os.environ.get("GITHUB_HEAD_REF", "") != "":
branch_name = os.environ["GITHUB_HEAD_REF"]
safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-")
image_names = "${{ inputs.image-name }}".split(",")
image_arch = "${{ inputs.image-arch }}" or None
is_pull_request = bool("${{ github.event.pull_request.head.sha }}")
is_release = "dev" not in image_names[0]
sha = os.environ["GITHUB_SHA"] if not is_pull_request else "${{ github.event.pull_request.head.sha }}"
# 2042.1.0 or 2042.1.0-rc1
version = parser.get("bumpversion", "current_version")
# 2042.1
version_family = ".".join(version.split("-", 1)[0].split(".")[:-1])
prerelease = "-" in version
image_tags = []
if is_release:
for name in image_names:
image_tags += [
f"{name}:{version}",
f"{name}:{version_family}",
]
if not prerelease:
image_tags += [f"{name}:latest"]
else:
suffix = ""
if image_arch and image_arch != "amd64":
suffix = f"-{image_arch}"
for name in image_names:
image_tags += [
f"{name}:gh-{sha}{suffix}",
f"{name}:gh-{safe_branch_name}{suffix}",
]
image_main_tag = image_tags[0]
image_tags_rendered = ",".join(image_tags)
with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
print("sha=%s" % sha, file=_output)
print("version=%s" % version, file=_output)
print("prerelease=%s" % prerelease, file=_output)
print("imageTags=%s" % image_tags_rendered, file=_output)
print("imageMainTag=%s" % image_main_tag, file=_output)

View File

@@ -1,100 +0,0 @@
"""Helper script to get the actual branch name, docker safe"""
import configparser
import os
from json import dumps
from time import time
parser = configparser.ConfigParser()
parser.read(".bumpversion.cfg")
# Decide if we should push the image or not
should_push = True
if len(os.environ.get("DOCKER_USERNAME", "")) < 1:
# Don't push if we don't have DOCKER_USERNAME, i.e. no secrets are available
should_push = False
if os.environ.get("GITHUB_REPOSITORY").lower() == "goauthentik/authentik-internal":
# Don't push on the internal repo
should_push = False
branch_name = os.environ["GITHUB_REF"]
if os.environ.get("GITHUB_HEAD_REF", "") != "":
branch_name = os.environ["GITHUB_HEAD_REF"]
safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-").replace("'", "-")
image_names = os.getenv("IMAGE_NAME").split(",")
image_arch = os.getenv("IMAGE_ARCH") or None
is_pull_request = bool(os.getenv("PR_HEAD_SHA"))
is_release = "dev" not in image_names[0]
sha = os.environ["GITHUB_SHA"] if not is_pull_request else os.getenv("PR_HEAD_SHA")
# 2042.1.0 or 2042.1.0-rc1
version = parser.get("bumpversion", "current_version")
# 2042.1
version_family = ".".join(version.split("-", 1)[0].split(".")[:-1])
prerelease = "-" in version
image_tags = []
if is_release:
for name in image_names:
image_tags += [
f"{name}:{version}",
]
if not prerelease:
image_tags += [
f"{name}:{version_family}",
]
else:
suffix = ""
if image_arch:
suffix = f"-{image_arch}"
for name in image_names:
image_tags += [
f"{name}:gh-{sha}{suffix}", # Used for ArgoCD and PR comments
f"{name}:gh-{safe_branch_name}{suffix}", # For convenience
f"{name}:gh-{safe_branch_name}-{int(time())}-{sha[:7]}{suffix}", # Use by FluxCD
]
image_main_tag = image_tags[0].split(":")[-1]
def get_attest_image_names(image_with_tags: list[str]):
"""Attestation only for GHCR"""
image_tags = []
for image_name in set(name.split(":")[0] for name in image_with_tags):
if not image_name.startswith("ghcr.io"):
continue
image_tags.append(image_name)
return ",".join(set(image_tags))
# Generate `cache-to` param
cache_to = ""
if should_push:
_cache_tag = "buildcache"
if image_arch:
_cache_tag += f"-{image_arch}"
cache_to = f"type=registry,ref={get_attest_image_names(image_tags)}:{_cache_tag},mode=max"
image_build_args = []
if os.getenv("RELEASE", "false").lower() == "true":
image_build_args = [f"VERSION={os.getenv('REF')}"]
else:
image_build_args = [f"GIT_BUILD_HASH={sha}"]
image_build_args = "\n".join(image_build_args)
with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
print(f"shouldPush={str(should_push).lower()}", file=_output)
print(f"sha={sha}", file=_output)
print(f"version={version}", file=_output)
print(f"prerelease={prerelease}", file=_output)
print(f"imageTags={','.join(image_tags)}", file=_output)
print(f"imageTagsJSON={dumps(image_tags)}", file=_output)
print(f"attestImageNames={get_attest_image_names(image_tags)}", file=_output)
print(f"imageMainTag={image_main_tag}", file=_output)
print(f"imageMainName={image_tags[0]}", file=_output)
print(f"cacheTo={cache_to}", file=_output)
print(f"imageBuildArgs={image_build_args}", file=_output)

View File

@@ -1,18 +0,0 @@
#!/bin/bash -x
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
# Non-pushing PR
GITHUB_OUTPUT=/dev/stdout \
GITHUB_REF=ref \
GITHUB_SHA=sha \
IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
GITHUB_REPOSITORY=goauthentik/authentik \
python $SCRIPT_DIR/push_vars.py
# Pushing PR/main
GITHUB_OUTPUT=/dev/stdout \
GITHUB_REF=ref \
GITHUB_SHA=sha \
IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
GITHUB_REPOSITORY=goauthentik/authentik \
DOCKER_USERNAME=foo \
python $SCRIPT_DIR/push_vars.py

View File

@@ -9,44 +9,36 @@ inputs:
runs: runs:
using: "composite" using: "composite"
steps: steps:
- name: Install apt deps - name: Install poetry & deps
shell: bash shell: bash
run: | run: |
pipx install poetry || true
sudo apt-get update sudo apt-get update
sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext libkrb5-dev krb5-kdc krb5-user krb5-admin-server sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
- name: Install uv - name: Setup python and restore poetry
uses: astral-sh/setup-uv@v5 uses: actions/setup-python@v4
with:
enable-cache: true
- name: Setup python
uses: actions/setup-python@v5
with: with:
python-version-file: "pyproject.toml" python-version-file: "pyproject.toml"
- name: Install Python deps cache: "poetry"
shell: bash
run: uv sync --all-extras --dev --frozen
- name: Setup node - name: Setup node
uses: actions/setup-node@v4 uses: actions/setup-node@v3
with: with:
node-version-file: web/package.json node-version-file: web/package.json
cache: "npm" cache: "npm"
cache-dependency-path: web/package-lock.json cache-dependency-path: web/package-lock.json
- name: Setup go - name: Setup go
uses: actions/setup-go@v5 uses: actions/setup-go@v4
with: with:
go-version-file: "go.mod" go-version-file: "go.mod"
- name: Setup docker cache
uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7
with:
key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }}
- name: Setup dependencies - name: Setup dependencies
shell: bash shell: bash
run: | run: |
export PSQL_TAG=${{ inputs.postgresql_version }} export PSQL_TAG=${{ inputs.postgresql_version }}
docker compose -f .github/actions/setup/docker-compose.yml up -d docker-compose -f .github/actions/setup/docker-compose.yml up -d
poetry install
cd web && npm ci cd web && npm ci
- name: Generate config - name: Generate config
shell: uv run python {0} shell: poetry run python {0}
run: | run: |
from authentik.lib.generators import generate_id from authentik.lib.generators import generate_id
from yaml import safe_dump from yaml import safe_dump

View File

@@ -1,3 +1,5 @@
version: "3.7"
services: services:
postgresql: postgresql:
image: docker.io/library/postgres:${PSQL_TAG:-16} image: docker.io/library/postgres:${PSQL_TAG:-16}
@@ -11,7 +13,7 @@ services:
- 5432:5432 - 5432:5432
restart: always restart: always
redis: redis:
image: docker.io/library/redis:7 image: docker.io/library/redis
ports: ports:
- 6379:6379 - 6379:6379
restart: always restart: always

View File

@@ -1,32 +1,6 @@
akadmin
asgi
assertIn
authentik
authn
crate
docstrings
entra
goauthentik
gunicorn
hass
jwe
jwks
keypair keypair
keypairs keypairs
kubernetes hass
oidc
ontext
openid
passwordless
plex
saml
scim
singed
slo
sso
totp
traefik
# https://github.com/codespell-project/codespell/issues/1224
upToDate
warmup warmup
webauthn ontext
singed

View File

@@ -21,15 +21,7 @@ updates:
labels: labels:
- dependencies - dependencies
- package-ecosystem: npm - package-ecosystem: npm
directories: directory: "/web"
- "/web"
- "/web/packages/sfe"
- "/web/packages/core"
- "/web/packages/esbuild-plugin-live-reload"
- "/packages/prettier-config"
- "/packages/tsconfig"
- "/packages/docusaurus-config"
- "/packages/eslint-config"
schedule: schedule:
interval: daily interval: daily
time: "04:00" time: "04:00"
@@ -38,6 +30,7 @@ updates:
open-pull-requests-limit: 10 open-pull-requests-limit: 10
commit-message: commit-message:
prefix: "web:" prefix: "web:"
# TODO: deduplicate these groups
groups: groups:
sentry: sentry:
patterns: patterns:
@@ -49,11 +42,9 @@ updates:
- "babel-*" - "babel-*"
eslint: eslint:
patterns: patterns:
- "@eslint/*"
- "@typescript-eslint/*" - "@typescript-eslint/*"
- "eslint-*"
- "eslint" - "eslint"
- "typescript-eslint" - "eslint-*"
storybook: storybook:
patterns: patterns:
- "@storybook/*" - "@storybook/*"
@@ -61,22 +52,41 @@ updates:
esbuild: esbuild:
patterns: patterns:
- "@esbuild/*" - "@esbuild/*"
- "esbuild*" - package-ecosystem: npm
rollup: directory: "/tests/wdio"
schedule:
interval: daily
time: "04:00"
labels:
- dependencies
open-pull-requests-limit: 10
commit-message:
prefix: "web:"
# TODO: deduplicate these groups
groups:
sentry:
patterns: patterns:
- "@rollup/*" - "@sentry/*"
- "rollup-*" - "@spotlightjs/*"
- "rollup*" babel:
swc:
patterns: patterns:
- "@swc/*" - "@babel/*"
- "swc-*" - "babel-*"
eslint:
patterns:
- "@typescript-eslint/*"
- "eslint"
- "eslint-*"
storybook:
patterns:
- "@storybook/*"
- "*storybook*"
esbuild:
patterns:
- "@esbuild/*"
wdio: wdio:
patterns: patterns:
- "@wdio/*" - "@wdio/*"
goauthentik:
patterns:
- "@goauthentik/*"
- package-ecosystem: npm - package-ecosystem: npm
directory: "/website" directory: "/website"
schedule: schedule:
@@ -91,33 +101,7 @@ updates:
docusaurus: docusaurus:
patterns: patterns:
- "@docusaurus/*" - "@docusaurus/*"
build: - package-ecosystem: pip
patterns:
- "@swc/*"
- "swc-*"
- "lightningcss*"
- "@rspack/binding*"
goauthentik:
patterns:
- "@goauthentik/*"
eslint:
patterns:
- "@eslint/*"
- "@typescript-eslint/*"
- "eslint-*"
- "eslint"
- "typescript-eslint"
- package-ecosystem: npm
directory: "/lifecycle/aws"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "lifecycle/aws:"
labels:
- dependencies
- package-ecosystem: uv
directory: "/" directory: "/"
schedule: schedule:
interval: daily interval: daily
@@ -137,15 +121,3 @@ updates:
prefix: "core:" prefix: "core:"
labels: labels:
- dependencies - dependencies
- package-ecosystem: docker-compose
directories:
# - /scripts # Maybe
- /tests/e2e
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "core:"
labels:
- dependencies

View File

@@ -1,7 +1,7 @@
<!-- <!--
👋 Hi there! Welcome. 👋 Hi there! Welcome.
Please check the Contributing guidelines: https://docs.goauthentik.io/docs/developer-docs/#how-can-i-contribute Please check the Contributing guidelines: https://goauthentik.io/developer-docs/#how-can-i-contribute
--> -->
## Details ## Details

View File

@@ -1,98 +0,0 @@
# Re-usable workflow for a single-architecture build
name: Single-arch Container build
on:
workflow_call:
inputs:
image_name:
required: true
type: string
image_arch:
required: true
type: string
runs-on:
required: true
type: string
registry_dockerhub:
default: false
type: boolean
registry_ghcr:
default: false
type: boolean
release:
default: false
type: boolean
outputs:
image-digest:
value: ${{ jobs.build.outputs.image-digest }}
jobs:
build:
name: Build ${{ inputs.image_arch }}
runs-on: ${{ inputs.runs-on }}
outputs:
image-digest: ${{ steps.push.outputs.digest }}
permissions:
# Needed to upload container images to ghcr.io
packages: write
# Needed for attestation
id-token: write
attestations: write
# Needed for checkout
contents: read
steps:
- uses: actions/checkout@v4
- uses: docker/setup-qemu-action@v3.6.0
- uses: docker/setup-buildx-action@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ${{ inputs.image_name }}
image-arch: ${{ inputs.image_arch }}
release: ${{ inputs.release }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: make empty clients
if: ${{ inputs.release }}
run: |
mkdir -p ./gen-ts-api
mkdir -p ./gen-go-api
- name: generate ts client
if: ${{ !inputs.release }}
run: make gen-client-ts
- name: Build Docker Image
uses: docker/build-push-action@v6
id: push
with:
context: .
push: ${{ steps.ev.outputs.shouldPush == 'true' }}
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
build-args: |
${{ steps.ev.outputs.imageBuildArgs }}
tags: ${{ steps.ev.outputs.imageTags }}
platforms: linux/${{ inputs.image_arch }}
cache-from: type=registry,ref=${{ steps.ev.outputs.attestImageNames }}:buildcache-${{ inputs.image_arch }}
cache-to: ${{ steps.ev.outputs.cacheTo }}
- uses: actions/attest-build-provenance@v2
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true

View File

@@ -1,104 +0,0 @@
# Re-usable workflow for a multi-architecture build
name: Multi-arch container build
on:
workflow_call:
inputs:
image_name:
required: true
type: string
registry_dockerhub:
default: false
type: boolean
registry_ghcr:
default: true
type: boolean
release:
default: false
type: boolean
outputs: {}
jobs:
build-server-amd64:
uses: ./.github/workflows/_reusable-docker-build-single.yaml
secrets: inherit
with:
image_name: ${{ inputs.image_name }}
image_arch: amd64
runs-on: ubuntu-latest
registry_dockerhub: ${{ inputs.registry_dockerhub }}
registry_ghcr: ${{ inputs.registry_ghcr }}
release: ${{ inputs.release }}
build-server-arm64:
uses: ./.github/workflows/_reusable-docker-build-single.yaml
secrets: inherit
with:
image_name: ${{ inputs.image_name }}
image_arch: arm64
runs-on: ubuntu-22.04-arm
registry_dockerhub: ${{ inputs.registry_dockerhub }}
registry_ghcr: ${{ inputs.registry_ghcr }}
release: ${{ inputs.release }}
get-tags:
runs-on: ubuntu-latest
needs:
- build-server-amd64
- build-server-arm64
outputs:
tags: ${{ steps.ev.outputs.imageTagsJSON }}
shouldPush: ${{ steps.ev.outputs.shouldPush }}
steps:
- uses: actions/checkout@v4
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ${{ inputs.image_name }}
merge-server:
runs-on: ubuntu-latest
if: ${{ needs.get-tags.outputs.shouldPush == 'true' }}
needs:
- get-tags
- build-server-amd64
- build-server-arm64
strategy:
fail-fast: false
matrix:
tag: ${{ fromJson(needs.get-tags.outputs.tags) }}
steps:
- uses: actions/checkout@v4
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ${{ inputs.image_name }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: int128/docker-manifest-create-action@v2
id: build
with:
tags: ${{ matrix.tag }}
sources: |
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-amd64.outputs.image-digest }}
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-arm64.outputs.image-digest }}
- uses: actions/attest-build-provenance@v2
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.build.outputs.digest }}
push-to-registry: true

View File

@@ -1,65 +0,0 @@
name: authentik-api-py-publish
on:
push:
branches: [main]
paths:
- "schema.yml"
workflow_dispatch:
jobs:
build:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
permissions:
id-token: write
steps:
- id: generate_token
uses: tibdex/github-app-token@v2
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v4
with:
token: ${{ steps.generate_token.outputs.token }}
- name: Install poetry & deps
shell: bash
run: |
pipx install poetry || true
sudo apt-get update
sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
- name: Setup python and restore poetry
uses: actions/setup-python@v5
with:
python-version-file: "pyproject.toml"
- name: Generate API Client
run: make gen-client-py
- name: Publish package
working-directory: gen-py-api/
run: |
poetry build
- name: Publish package to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
packages-dir: gen-py-api/dist/
# We can't easily upgrade the API client being used due to poetry being poetry
# so we'll have to rely on dependabot
# - name: Upgrade /
# run: |
# export VERSION=$(cd gen-py-api && poetry version -s)
# poetry add "authentik_client=$VERSION" --allow-prereleases --lock
# - uses: peter-evans/create-pull-request@v6
# id: cpr
# with:
# token: ${{ steps.generate_token.outputs.token }}
# branch: update-root-api-client
# commit-message: "root: bump API Client version"
# title: "root: bump API Client version"
# body: "root: bump API Client version"
# delete-branch: true
# signoff: true
# # ID from https://api.github.com/users/authentik-automation[bot]
# author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
# - uses: peter-evans/enable-pull-request-automerge@v3
# with:
# token: ${{ steps.generate_token.outputs.token }}
# pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
# merge-method: squash

View File

@@ -1,46 +0,0 @@
name: authentik-ci-aws-cfn
on:
push:
branches:
- main
- next
- version-*
pull_request:
branches:
- main
- version-*
env:
POSTGRES_DB: authentik
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
jobs:
check-changes-applied:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup authentik env
uses: ./.github/actions/setup
- uses: actions/setup-node@v4
with:
node-version-file: lifecycle/aws/package.json
cache: "npm"
cache-dependency-path: lifecycle/aws/package-lock.json
- working-directory: lifecycle/aws/
run: |
npm ci
- name: Check changes have been applied
run: |
uv run make aws-cfn
git diff --exit-code
ci-aws-cfn-mark:
if: always()
needs:
- check-changes-applied
runs-on: ubuntu-latest
steps:
- uses: re-actors/alls-green@release/v1
with:
jobs: ${{ toJSON(needs) }}

View File

@@ -1,29 +0,0 @@
---
name: authentik-ci-main-daily
on:
workflow_dispatch:
schedule:
# Every night at 3am
- cron: "0 3 * * *"
jobs:
test-container:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
version:
- docs
- version-2025-4
- version-2025-2
steps:
- uses: actions/checkout@v4
- run: |
current="$(pwd)"
dir="/tmp/authentik/${{ matrix.version }}"
mkdir -p $dir
cd $dir
wget https://${{ matrix.version }}.goauthentik.io/docker-compose.yml
${current}/scripts/test_docker.sh

View File

@@ -7,6 +7,8 @@ on:
- main - main
- next - next
- version-* - version-*
paths-ignore:
- website/**
pull_request: pull_request:
branches: branches:
- main - main
@@ -26,7 +28,10 @@ jobs:
- bandit - bandit
- black - black
- codespell - codespell
- isort
- pending-migrations - pending-migrations
# - pylint
- pyright
- ruff - ruff
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
@@ -34,7 +39,7 @@ jobs:
- name: Setup authentik env - name: Setup authentik env
uses: ./.github/actions/setup uses: ./.github/actions/setup
- name: run job - name: run job
run: uv run make ci-${{ matrix.job }} run: poetry run make ci-${{ matrix.job }}
test-migrations: test-migrations:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
@@ -42,39 +47,30 @@ jobs:
- name: Setup authentik env - name: Setup authentik env
uses: ./.github/actions/setup uses: ./.github/actions/setup
- name: run migrations - name: run migrations
run: uv run python -m lifecycle.migrate run: poetry run python -m lifecycle.migrate
test-make-seed:
runs-on: ubuntu-latest
steps:
- id: seed
run: |
echo "seed=$(printf "%d\n" "0x$(openssl rand -hex 4)")" >> "$GITHUB_OUTPUT"
outputs:
seed: ${{ steps.seed.outputs.seed }}
test-migrations-from-stable: test-migrations-from-stable:
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5 name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 20
needs: test-make-seed
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
psql: psql:
- 12-alpine
- 15-alpine - 15-alpine
- 16-alpine - 16-alpine
- 17-alpine
run_id: [1, 2, 3, 4, 5]
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
fetch-depth: 0 fetch-depth: 0
- name: checkout stable - name: checkout stable
run: | run: |
# Delete all poetry envs
rm -rf /home/runner/.cache/pypoetry
# Copy current, latest config to local # Copy current, latest config to local
cp authentik/lib/default.yml local.env.yml cp authentik/lib/default.yml local.env.yml
cp -R .github .. cp -R .github ..
cp -R scripts .. cp -R scripts ..
git checkout $(git tag --sort=version:refname | grep '^version/' | grep -vE -- '-rc[0-9]+$' | tail -n1) git checkout version/$(python -c "from authentik import __version__; print(__version__)")
rm -rf .github/ scripts/ rm -rf .github/ scripts/
mv ../.github ../scripts . mv ../.github ../scripts .
- name: Setup authentik env (stable) - name: Setup authentik env (stable)
@@ -82,7 +78,7 @@ jobs:
with: with:
postgresql_version: ${{ matrix.psql }} postgresql_version: ${{ matrix.psql }}
- name: run migrations to stable - name: run migrations to stable
run: uv run python -m lifecycle.migrate run: poetry run python -m lifecycle.migrate
- name: checkout current code - name: checkout current code
run: | run: |
set -x set -x
@@ -90,35 +86,32 @@ jobs:
git reset --hard HEAD git reset --hard HEAD
git clean -d -fx . git clean -d -fx .
git checkout $GITHUB_SHA git checkout $GITHUB_SHA
# Delete previous poetry env
rm -rf /home/runner/.cache/pypoetry/virtualenvs/*
- name: Setup authentik env (ensure latest deps are installed) - name: Setup authentik env (ensure latest deps are installed)
uses: ./.github/actions/setup uses: ./.github/actions/setup
with: with:
postgresql_version: ${{ matrix.psql }} postgresql_version: ${{ matrix.psql }}
- name: migrate to latest - name: migrate to latest
run: | run: |
uv run python -m lifecycle.migrate poetry run python -m lifecycle.migrate
- name: run tests - name: run tests
env: env:
# Test in the main database that we just migrated from the previous stable version # Test in the main database that we just migrated from the previous stable version
AUTHENTIK_POSTGRESQL__TEST__NAME: authentik AUTHENTIK_POSTGRESQL__TEST__NAME: authentik
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
CI_RUN_ID: ${{ matrix.run_id }}
CI_TOTAL_RUNS: "5"
run: | run: |
uv run make ci-test poetry run make test
test-unittest: test-unittest:
name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5 name: test-unittest - PostgreSQL ${{ matrix.psql }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 20 timeout-minutes: 30
needs: test-make-seed
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
psql: psql:
- 12-alpine
- 15-alpine - 15-alpine
- 16-alpine - 16-alpine
- 17-alpine
run_id: [1, 2, 3, 4, 5]
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Setup authentik env - name: Setup authentik env
@@ -126,23 +119,14 @@ jobs:
with: with:
postgresql_version: ${{ matrix.psql }} postgresql_version: ${{ matrix.psql }}
- name: run unittest - name: run unittest
env:
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
CI_RUN_ID: ${{ matrix.run_id }}
CI_TOTAL_RUNS: "5"
run: | run: |
uv run make ci-test poetry run make test
poetry run coverage xml
- if: ${{ always() }} - if: ${{ always() }}
uses: codecov/codecov-action@v5 uses: codecov/codecov-action@v4
with: with:
flags: unit flags: unit
token: ${{ secrets.CODECOV_TOKEN }} token: ${{ secrets.CODECOV_TOKEN }}
- if: ${{ !cancelled() }}
uses: codecov/test-results-action@v1
with:
flags: unit
file: unittest.xml
token: ${{ secrets.CODECOV_TOKEN }}
test-integration: test-integration:
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 30 timeout-minutes: 30
@@ -151,22 +135,16 @@ jobs:
- name: Setup authentik env - name: Setup authentik env
uses: ./.github/actions/setup uses: ./.github/actions/setup
- name: Create k8s Kind Cluster - name: Create k8s Kind Cluster
uses: helm/kind-action@v1.12.0 uses: helm/kind-action@v1.9.0
- name: run integration - name: run integration
run: | run: |
uv run coverage run manage.py test tests/integration poetry run coverage run manage.py test tests/integration
uv run coverage xml poetry run coverage xml
- if: ${{ always() }} - if: ${{ always() }}
uses: codecov/codecov-action@v5 uses: codecov/codecov-action@v4
with: with:
flags: integration flags: integration
token: ${{ secrets.CODECOV_TOKEN }} token: ${{ secrets.CODECOV_TOKEN }}
- if: ${{ !cancelled() }}
uses: codecov/test-results-action@v1
with:
flags: integration
file: unittest.xml
token: ${{ secrets.CODECOV_TOKEN }}
test-e2e: test-e2e:
name: test-e2e (${{ matrix.job.name }}) name: test-e2e (${{ matrix.job.name }})
runs-on: ubuntu-latest runs-on: ubuntu-latest
@@ -187,8 +165,6 @@ jobs:
glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap* glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap*
- name: radius - name: radius
glob: tests/e2e/test_provider_radius* glob: tests/e2e/test_provider_radius*
- name: scim
glob: tests/e2e/test_source_scim*
- name: flows - name: flows
glob: tests/e2e/test_flows* glob: tests/e2e/test_flows*
steps: steps:
@@ -197,12 +173,12 @@ jobs:
uses: ./.github/actions/setup uses: ./.github/actions/setup
- name: Setup e2e env (chrome, etc) - name: Setup e2e env (chrome, etc)
run: | run: |
docker compose -f tests/e2e/docker-compose.yml up -d --quiet-pull docker-compose -f tests/e2e/docker-compose.yml up -d
- id: cache-web - id: cache-web
uses: actions/cache@v4 uses: actions/cache@v4
with: with:
path: web/dist path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }}
- name: prepare web ui - name: prepare web ui
if: steps.cache-web.outputs.cache-hit != 'true' if: steps.cache-web.outputs.cache-hit != 'true'
working-directory: web working-directory: web
@@ -210,24 +186,16 @@ jobs:
npm ci npm ci
make -C .. gen-client-ts make -C .. gen-client-ts
npm run build npm run build
npm run build:sfe
- name: run e2e - name: run e2e
run: | run: |
uv run coverage run manage.py test ${{ matrix.job.glob }} poetry run coverage run manage.py test ${{ matrix.job.glob }}
uv run coverage xml poetry run coverage xml
- if: ${{ always() }} - if: ${{ always() }}
uses: codecov/codecov-action@v5 uses: codecov/codecov-action@v4
with: with:
flags: e2e flags: e2e
token: ${{ secrets.CODECOV_TOKEN }} token: ${{ secrets.CODECOV_TOKEN }}
- if: ${{ !cancelled() }}
uses: codecov/test-results-action@v1
with:
flags: e2e
file: unittest.xml
token: ${{ secrets.CODECOV_TOKEN }}
ci-core-mark: ci-core-mark:
if: always()
needs: needs:
- lint - lint
- test-migrations - test-migrations
@@ -237,24 +205,57 @@ jobs:
- test-e2e - test-e2e
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: re-actors/alls-green@release/v1 - run: echo mark
with:
jobs: ${{ toJSON(needs) }}
build: build:
permissions: strategy:
# Needed to upload container images to ghcr.io fail-fast: false
packages: write matrix:
# Needed for attestation arch:
id-token: write - amd64
attestations: write - arm64
# Needed for checkout
contents: read
needs: ci-core-mark needs: ci-core-mark
uses: ./.github/workflows/_reusable-docker-build.yaml runs-on: ubuntu-latest
secrets: inherit permissions:
with: # Needed to upload contianer images to ghcr.io
image_name: ${{ github.repository == 'goauthentik/authentik-internal' && 'ghcr.io/goauthentik/internal-server' || 'ghcr.io/goauthentik/dev-server' }} packages: write
release: false timeout-minutes: 120
if: "github.repository == 'goauthentik/authentik'"
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.0.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
with:
image-name: ghcr.io/goauthentik/dev-server
image-arch: ${{ matrix.arch }}
- name: Login to Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: generate ts client
run: make gen-client-ts
- name: Build Docker Image
uses: docker/build-push-action@v5
with:
context: .
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
tags: ${{ steps.ev.outputs.imageTags }}
push: true
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
cache-from: type=gha
cache-to: type=gha,mode=max
platforms: linux/${{ matrix.arch }}
pr-comment: pr-comment:
needs: needs:
- build - build
@@ -271,12 +272,9 @@ jobs:
- name: prepare variables - name: prepare variables
uses: ./.github/actions/docker-push-variables uses: ./.github/actions/docker-push-variables
id: ev id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with: with:
image-name: ghcr.io/goauthentik/dev-server image-name: ghcr.io/goauthentik/dev-server
- name: Comment on PR - name: Comment on PR
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
uses: ./.github/actions/comment-pr-instructions uses: ./.github/actions/comment-pr-instructions
with: with:
tag: ${{ steps.ev.outputs.imageMainTag }} tag: gh-${{ steps.ev.outputs.imageMainTag }}
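One side of the workflow diff above defines a `test-make-seed` job whose output seed is passed to the unit and migration test jobs as `CI_TEST_SEED`, together with `CI_RUN_ID` and `CI_TOTAL_RUNS`. As a generic illustration only (not authentik's actual test runner), a shared seed plus a run index is enough for parallel jobs to shuffle the same test list identically and each take a disjoint slice:

```python
import random

def shard(tests: list[str], seed: int, run_id: int, total_runs: int) -> list[str]:
    """Deterministically shuffle with the shared seed, then take every Nth test."""
    ordered = sorted(tests)
    random.Random(seed).shuffle(ordered)
    return ordered[run_id - 1 :: total_runs]  # run_id is 1-based (1..total_runs)

tests = [f"test_case_{i}" for i in range(10)]
print(shard(tests, seed=1234, run_id=1, total_runs=5))
print(shard(tests, seed=1234, run_id=2, total_runs=5))
```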

View File

@@ -29,9 +29,9 @@ jobs:
- name: Generate API - name: Generate API
run: make gen-client-go run: make gen-client-go
- name: golangci-lint - name: golangci-lint
uses: golangci/golangci-lint-action@v8 uses: golangci/golangci-lint-action@v4
with: with:
version: latest version: v1.54.2
args: --timeout 5000s --verbose args: --timeout 5000s --verbose
skip-cache: true skip-cache: true
test-unittest: test-unittest:
@@ -49,17 +49,13 @@ jobs:
run: | run: |
go test -timeout 0 -v -race -coverprofile=coverage.out -covermode=atomic -cover ./... go test -timeout 0 -v -race -coverprofile=coverage.out -covermode=atomic -cover ./...
ci-outpost-mark: ci-outpost-mark:
if: always()
needs: needs:
- lint-golint - lint-golint
- test-unittest - test-unittest
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: re-actors/alls-green@release/v1 - run: echo mark
with:
jobs: ${{ toJSON(needs) }}
build-container: build-container:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
timeout-minutes: 120 timeout-minutes: 120
needs: needs:
- ci-outpost-mark - ci-outpost-mark
@@ -73,28 +69,23 @@ jobs:
- rac - rac
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions: permissions:
# Needed to upload container images to ghcr.io # Needed to upload contianer images to ghcr.io
packages: write packages: write
# Needed for attestation if: "github.repository == 'goauthentik/authentik'"
id-token: write
attestations: write
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
ref: ${{ github.event.pull_request.head.sha }} ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU - name: Set up QEMU
uses: docker/setup-qemu-action@v3.6.0 uses: docker/setup-qemu-action@v3.0.0
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3 uses: docker/setup-buildx-action@v3
- name: prepare variables - name: prepare variables
uses: ./.github/actions/docker-push-variables uses: ./.github/actions/docker-push-variables
id: ev id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with: with:
image-name: ghcr.io/goauthentik/dev-${{ matrix.type }} image-name: ghcr.io/goauthentik/dev-${{ matrix.type }}
- name: Login to Container Registry - name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
uses: docker/login-action@v3 uses: docker/login-action@v3
with: with:
registry: ghcr.io registry: ghcr.io
@@ -103,25 +94,17 @@ jobs:
- name: Generate API - name: Generate API
run: make gen-client-go run: make gen-client-go
- name: Build Docker Image - name: Build Docker Image
id: push uses: docker/build-push-action@v5
uses: docker/build-push-action@v6
with: with:
tags: ${{ steps.ev.outputs.imageTags }} tags: ${{ steps.ev.outputs.imageTags }}
file: ${{ matrix.type }}.Dockerfile file: ${{ matrix.type }}.Dockerfile
push: ${{ steps.ev.outputs.shouldPush == 'true' }} push: true
build-args: | build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }} GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
platforms: linux/amd64,linux/arm64 platforms: linux/amd64,linux/arm64
context: . context: .
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache cache-from: type=gha
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && format('type=registry,ref=ghcr.io/goauthentik/dev-{0}:buildcache,mode=max', matrix.type) || '' }} cache-to: type=gha,mode=max
- uses: actions/attest-build-provenance@v2
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
build-binary: build-binary:
timeout-minutes: 120 timeout-minutes: 120
needs: needs:

View File

@@ -12,23 +12,14 @@ on:
- version-* - version-*
jobs: jobs:
lint: lint-eslint:
runs-on: ubuntu-latest runs-on: ubuntu-latest
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
command:
- lint
- lint:lockfile
- tsc
- prettier-check
project: project:
- web - web
include: - tests/wdio
- command: tsc
project: web
- command: lit-analyse
project: web
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: actions/setup-node@v4 - uses: actions/setup-node@v4
@@ -37,14 +28,83 @@ jobs:
cache: "npm" cache: "npm"
cache-dependency-path: ${{ matrix.project }}/package-lock.json cache-dependency-path: ${{ matrix.project }}/package-lock.json
- working-directory: ${{ matrix.project }}/ - working-directory: ${{ matrix.project }}/
run: | run: npm ci
npm ci
- name: Generate API - name: Generate API
run: make gen-client-ts run: make gen-client-ts
- name: Lint - name: Eslint
working-directory: ${{ matrix.project }}/ working-directory: ${{ matrix.project }}/
run: npm run ${{ matrix.command }} run: npm run lint
lint-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
- name: Generate API
run: make gen-client-ts
- name: TSC
working-directory: web/
run: npm run tsc
lint-prettier:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
project:
- web
- tests/wdio
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: ${{ matrix.project }}/package.json
cache: "npm"
cache-dependency-path: ${{ matrix.project }}/package-lock.json
- working-directory: ${{ matrix.project }}/
run: npm ci
- name: Generate API
run: make gen-client-ts
- name: prettier
working-directory: ${{ matrix.project }}/
run: npm run prettier-check
lint-lit-analyse:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: |
npm ci
# lit-analyse doesn't understand path rewrites, so make it
# belive it's an actual module
cd node_modules/@goauthentik
ln -s ../../src/ web
- name: Generate API
run: make gen-client-ts
- name: lit-analyse
working-directory: web/
run: npm run lit-analyse
ci-web-mark:
needs:
- lint-eslint
- lint-prettier
- lint-lit-analyse
- lint-build
runs-on: ubuntu-latest
steps:
- run: echo mark
build: build:
needs:
- ci-web-mark
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
@@ -60,31 +120,3 @@ jobs:
- name: build - name: build
working-directory: web/ working-directory: web/
run: npm run build run: npm run build
ci-web-mark:
if: always()
needs:
- build
- lint
runs-on: ubuntu-latest
steps:
- uses: re-actors/alls-green@release/v1
with:
jobs: ${{ toJSON(needs) }}
test:
needs:
- ci-web-mark
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
- name: Generate API
run: make gen-client-ts
- name: test
working-directory: web/
run: npm run test || exit 0

View File

@@ -12,21 +12,20 @@ on:
- version-* - version-*
jobs: jobs:
lint: lint-prettier:
runs-on: ubuntu-latest runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
command:
- lint:lockfile
- prettier-check
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: website/package.json
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/ - working-directory: website/
run: npm ci run: npm ci
- name: Lint - name: prettier
working-directory: website/ working-directory: website/
run: npm run ${{ matrix.command }} run: npm run prettier-check
test: test:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
@@ -49,7 +48,7 @@ jobs:
matrix: matrix:
job: job:
- build - build
- build:integrations - build-docs-only
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: actions/setup-node@v4 - uses: actions/setup-node@v4
@@ -62,65 +61,11 @@ jobs:
- name: build - name: build
working-directory: website/ working-directory: website/
run: npm run ${{ matrix.job }} run: npm run ${{ matrix.job }}
build-container:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
# Needed for attestation
id-token: write
attestations: write
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.6.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ghcr.io/goauthentik/dev-docs
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
id: push
uses: docker/build-push-action@v6
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: website/Dockerfile
push: ${{ steps.ev.outputs.shouldPush == 'true' }}
platforms: linux/amd64,linux/arm64
context: .
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache,mode=max' || '' }}
- uses: actions/attest-build-provenance@v2
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
ci-website-mark: ci-website-mark:
if: always()
needs: needs:
- lint - lint-prettier
- test - test
- build - build
- build-container
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: re-actors/alls-green@release/v1 - run: echo mark
with:
jobs: ${{ toJSON(needs) }}
allowed-skips: ${{ github.repository == 'goauthentik/authentik-internal' && 'build-container' || '[]' }}

View File

@@ -2,7 +2,7 @@ name: "CodeQL"
on: on:
push: push:
branches: [main, next, version*] branches: [main, "*", next, version*]
pull_request: pull_request:
branches: [main] branches: [main]
schedule: schedule:

View File

@@ -1,45 +0,0 @@
name: authentik-gen-update-webauthn-mds
on:
workflow_dispatch:
schedule:
- cron: "30 1 1,15 * *"
env:
POSTGRES_DB: authentik
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
jobs:
build:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: tibdex/github-app-token@v2
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v4
with:
token: ${{ steps.generate_token.outputs.token }}
- name: Setup authentik env
uses: ./.github/actions/setup
- run: uv run ak update_webauthn_mds
- uses: peter-evans/create-pull-request@v7
id: cpr
with:
token: ${{ steps.generate_token.outputs.token }}
branch: update-fido-mds-client
commit-message: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs"
title: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs"
body: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs"
delete-branch: true
signoff: true
# ID from https://api.github.com/users/authentik-automation[bot]
author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
labels: dependencies
- uses: peter-evans/enable-pull-request-automerge@v3
with:
token: ${{ steps.generate_token.outputs.token }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
merge-method: squash

View File

@@ -7,7 +7,6 @@ on:
jobs: jobs:
clean-ghcr: clean-ghcr:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
name: Delete old unused container images name: Delete old unused container images
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:

View File

@@ -42,7 +42,7 @@ jobs:
with: with:
githubToken: ${{ steps.generate_token.outputs.token }} githubToken: ${{ steps.generate_token.outputs.token }}
compressOnly: ${{ github.event_name != 'pull_request' }} compressOnly: ${{ github.event_name != 'pull_request' }}
- uses: peter-evans/create-pull-request@v7 - uses: peter-evans/create-pull-request@v6
if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}" if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
id: cpr id: cpr
with: with:
@@ -53,7 +53,6 @@ jobs:
body: ${{ steps.compress.outputs.markdown }} body: ${{ steps.compress.outputs.markdown }}
delete-branch: true delete-branch: true
signoff: true signoff: true
labels: dependencies
- uses: peter-evans/enable-pull-request-automerge@v3 - uses: peter-evans/enable-pull-request-automerge@v3
if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}" if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
with: with:

View File

@@ -1,47 +0,0 @@
name: authentik-packages-npm-publish
on:
push:
branches: [main]
paths:
- packages/docusaurus-config/**
- packages/eslint-config/**
- packages/prettier-config/**
- packages/tsconfig/**
- web/packages/esbuild-plugin-live-reload/**
workflow_dispatch:
jobs:
publish:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
package:
- packages/docusaurus-config
- packages/eslint-config
- packages/prettier-config
- packages/tsconfig
- web/packages/esbuild-plugin-live-reload
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 2
- uses: actions/setup-node@v4
with:
node-version-file: ${{ matrix.package }}/package.json
registry-url: "https://registry.npmjs.org"
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c
with:
files: |
${{ matrix.package }}/package.json
- name: Publish package
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ${{ matrix.package }}
run: |
npm ci
npm run build
npm publish
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }}

View File

@@ -12,7 +12,6 @@ env:
jobs: jobs:
publish-source-docs: publish-source-docs:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 120 timeout-minutes: 120
steps: steps:
@@ -21,8 +20,8 @@ jobs:
uses: ./.github/actions/setup uses: ./.github/actions/setup
- name: generate docs - name: generate docs
run: | run: |
uv run make migrate poetry run make migrate
uv run ak build_source_docs poetry run ak build_source_docs
- name: Publish - name: Publish
uses: netlify/actions/cli@master uses: netlify/actions/cli@master
with: with:

View File

@@ -11,7 +11,6 @@ permissions:
jobs: jobs:
update-next: update-next:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
environment: internal-production environment: internal-production
steps: steps:

View File

@@ -7,70 +7,51 @@ on:
jobs: jobs:
build-server: build-server:
uses: ./.github/workflows/_reusable-docker-build.yaml
secrets: inherit
permissions:
# Needed to upload container images to ghcr.io
packages: write
# Needed for attestation
id-token: write
attestations: write
with:
image_name: ghcr.io/goauthentik/server,beryju/authentik
release: true
registry_dockerhub: true
registry_ghcr: true
build-docs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions: permissions:
# Needed to upload container images to ghcr.io # Needed to upload contianer images to ghcr.io
packages: write packages: write
# Needed for attestation
id-token: write
attestations: write
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Set up QEMU - name: Set up QEMU
uses: docker/setup-qemu-action@v3.6.0 uses: docker/setup-qemu-action@v3.0.0
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3 uses: docker/setup-buildx-action@v3
- name: prepare variables - name: prepare variables
uses: ./.github/actions/docker-push-variables uses: ./.github/actions/docker-push-variables
id: ev id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with: with:
image-name: ghcr.io/goauthentik/docs image-name: ghcr.io/goauthentik/server,beryju/authentik
- name: Docker Login Registry
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry - name: Login to GitHub Container Registry
uses: docker/login-action@v3 uses: docker/login-action@v3
with: with:
registry: ghcr.io registry: ghcr.io
username: ${{ github.repository_owner }} username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }} password: ${{ secrets.GITHUB_TOKEN }}
- name: make empty clients
run: |
mkdir -p ./gen-ts-api
mkdir -p ./gen-go-api
- name: Build Docker Image - name: Build Docker Image
id: push uses: docker/build-push-action@v5
uses: docker/build-push-action@v6
with: with:
tags: ${{ steps.ev.outputs.imageTags }}
file: website/Dockerfile
push: true
platforms: linux/amd64,linux/arm64
context: . context: .
- uses: actions/attest-build-provenance@v2 push: true
id: attest secrets: |
if: true GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
with: GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
subject-name: ${{ steps.ev.outputs.attestImageNames }} tags: ${{ steps.ev.outputs.imageTags }}
subject-digest: ${{ steps.push.outputs.digest }} platforms: linux/amd64,linux/arm64
push-to-registry: true
build-outpost: build-outpost:
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions: permissions:
# Needed to upload container images to ghcr.io # Needed to upload contianer images to ghcr.io
packages: write packages: write
# Needed for attestation
id-token: write
attestations: write
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
@ -85,14 +66,12 @@ jobs:
with: with:
go-version-file: "go.mod" go-version-file: "go.mod"
- name: Set up QEMU - name: Set up QEMU
uses: docker/setup-qemu-action@v3.6.0 uses: docker/setup-qemu-action@v3.0.0
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3 uses: docker/setup-buildx-action@v3
- name: prepare variables - name: prepare variables
uses: ./.github/actions/docker-push-variables uses: ./.github/actions/docker-push-variables
id: ev id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with: with:
image-name: ghcr.io/goauthentik/${{ matrix.type }},beryju/authentik-${{ matrix.type }} image-name: ghcr.io/goauthentik/${{ matrix.type }},beryju/authentik-${{ matrix.type }}
- name: make empty clients - name: make empty clients
@ -111,22 +90,13 @@ jobs:
username: ${{ github.repository_owner }} username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }} password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image - name: Build Docker Image
uses: docker/build-push-action@v6 uses: docker/build-push-action@v5
id: push
with: with:
push: true push: true
build-args: |
VERSION=${{ github.ref }}
tags: ${{ steps.ev.outputs.imageTags }} tags: ${{ steps.ev.outputs.imageTags }}
file: ${{ matrix.type }}.Dockerfile file: ${{ matrix.type }}.Dockerfile
platforms: linux/amd64,linux/arm64 platforms: linux/amd64,linux/arm64
context: . context: .
- uses: actions/attest-build-provenance@v2
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
build-outpost-binary: build-outpost-binary:
timeout-minutes: 120 timeout-minutes: 120
runs-on: ubuntu-latest runs-on: ubuntu-latest
@ -171,27 +141,6 @@ jobs:
file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
asset_name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} asset_name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
tag: ${{ github.ref }} tag: ${{ github.ref }}
upload-aws-cfn-template:
permissions:
# Needed for AWS login
id-token: write
contents: read
needs:
- build-server
- build-outpost
env:
AWS_REGION: eu-central-1
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: aws-actions/configure-aws-credentials@v4
with:
role-to-assume: "arn:aws:iam::016170277896:role/github_goauthentik_authentik"
aws-region: ${{ env.AWS_REGION }}
- name: Upload template
run: |
aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.${{ github.ref }}.yaml
aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.latest.yaml
test-release: test-release:
needs: needs:
- build-server - build-server
@ -202,12 +151,12 @@ jobs:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Run test suite in final docker images - name: Run test suite in final docker images
run: | run: |
echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env echo "PG_PASS=$(openssl rand -base64 32)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
docker compose pull -q docker-compose pull -q
docker compose up --no-start docker-compose up --no-start
docker compose start postgresql redis docker-compose start postgresql redis
docker compose run -u root server test-all docker-compose run -u root server test-all
sentry-release: sentry-release:
needs: needs:
- build-server - build-server
@ -219,23 +168,21 @@ jobs:
- name: prepare variables - name: prepare variables
uses: ./.github/actions/docker-push-variables uses: ./.github/actions/docker-push-variables
id: ev id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with: with:
image-name: ghcr.io/goauthentik/server image-name: ghcr.io/goauthentik/server
- name: Get static files from docker image - name: Get static files from docker image
run: | run: |
docker pull ${{ steps.ev.outputs.imageMainName }} docker pull ghcr.io/goauthentik/server:${{ steps.ev.outputs.imageMainTag }}
container=$(docker container create ${{ steps.ev.outputs.imageMainName }}) container=$(docker container create ghcr.io/goauthentik/server:${{ steps.ev.outputs.imageMainTag }})
docker cp ${container}:web/ . docker cp ${container}:web/ .
- name: Create a Sentry.io release - name: Create a Sentry.io release
uses: getsentry/action-release@v3 uses: getsentry/action-release@v1
continue-on-error: true continue-on-error: true
env: env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: authentik-security-inc SENTRY_ORG: authentik-security-inc
SENTRY_PROJECT: authentik SENTRY_PROJECT: authentik
with: with:
release: authentik@${{ steps.ev.outputs.version }} version: authentik@${{ steps.ev.outputs.version }}
sourcemaps: "./web/dist" sourcemaps: "./web/dist"
url_prefix: "~/static/dist" url_prefix: "~/static/dist"

View File

@ -14,7 +14,16 @@ jobs:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Pre-release test - name: Pre-release test
run: | run: |
make test-docker echo "PG_PASS=$(openssl rand -base64 32)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
docker buildx install
mkdir -p ./gen-ts-api
docker build -t testing:latest .
echo "AUTHENTIK_IMAGE=testing" >> .env
echo "AUTHENTIK_TAG=latest" >> .env
docker-compose up --no-start
docker-compose start postgresql redis
docker-compose run -u root server test-all
- id: generate_token - id: generate_token
uses: tibdex/github-app-token@v2 uses: tibdex/github-app-token@v2
with: with:
@ -23,8 +32,6 @@ jobs:
- name: prepare variables - name: prepare variables
uses: ./.github/actions/docker-push-variables uses: ./.github/actions/docker-push-variables
id: ev id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with: with:
image-name: ghcr.io/goauthentik/server image-name: ghcr.io/goauthentik/server
- name: Create Release - name: Create Release

View File

@ -1,21 +0,0 @@
name: "authentik-repo-mirror-cleanup"
on:
workflow_dispatch:
jobs:
to_internal:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- if: ${{ env.MIRROR_KEY != '' }}
uses: BeryJu/repository-mirroring-action@5cf300935bc2e068f73ea69bcc411a8a997208eb
with:
target_repo_url: git@github.com:goauthentik/authentik-internal.git
ssh_private_key: ${{ secrets.GH_MIRROR_KEY }}
args: --tags --force --prune
env:
MIRROR_KEY: ${{ secrets.GH_MIRROR_KEY }}

View File

@ -1,20 +0,0 @@
name: "authentik-repo-mirror"
on: [push, delete]
jobs:
to_internal:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- if: ${{ env.MIRROR_KEY != '' }}
uses: BeryJu/repository-mirroring-action@5cf300935bc2e068f73ea69bcc411a8a997208eb
with:
target_repo_url: git@github.com:goauthentik/authentik-internal.git
ssh_private_key: ${{ secrets.GH_MIRROR_KEY }}
args: --tags --force
env:
MIRROR_KEY: ${{ secrets.GH_MIRROR_KEY }}

View File

@ -1,8 +1,8 @@
name: "authentik-repo-stale" name: 'authentik-repo-stale'
on: on:
schedule: schedule:
- cron: "30 1 * * *" - cron: '30 1 * * *'
workflow_dispatch: workflow_dispatch:
permissions: permissions:
@ -11,7 +11,6 @@ permissions:
jobs: jobs:
stale: stale:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- id: generate_token - id: generate_token
@ -24,8 +23,8 @@ jobs:
repo-token: ${{ steps.generate_token.outputs.token }} repo-token: ${{ steps.generate_token.outputs.token }}
days-before-stale: 60 days-before-stale: 60
days-before-close: 7 days-before-close: 7
exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question
stale-issue-label: status/stale stale-issue-label: wontfix
stale-issue-message: > stale-issue-message: >
This issue has been automatically marked as stale because it has not had This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you recent activity. It will be closed if no further activity occurs. Thank you

View File

@ -1,27 +0,0 @@
name: authentik-semgrep
on:
  workflow_dispatch: {}
  pull_request: {}
  push:
    branches:
      - main
      - master
    paths:
      - .github/workflows/semgrep.yml
  schedule:
    # random HH:MM to avoid a load spike on GitHub Actions at 00:00
    - cron: '12 15 * * *'
jobs:
  semgrep:
    name: semgrep/ci
    runs-on: ubuntu-latest
    permissions:
      contents: read
    env:
      SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }}
    container:
      image: semgrep/semgrep
    if: (github.actor != 'dependabot[bot]')
    steps:
      - uses: actions/checkout@v4
      - run: semgrep ci

View File

@ -1,13 +1,9 @@
--- ---
name: authentik-translate-extract-compile name: authentik-backend-translate-extract-compile
on: on:
schedule: schedule:
- cron: "0 0 * * *" # every day at midnight - cron: "0 0 * * *" # every day at midnight
workflow_dispatch: workflow_dispatch:
pull_request:
branches:
- main
- version-*
env: env:
POSTGRES_DB: authentik POSTGRES_DB: authentik
@ -16,35 +12,27 @@ env:
jobs: jobs:
compile: compile:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- id: generate_token - id: generate_token
if: ${{ github.event_name != 'pull_request' }}
uses: tibdex/github-app-token@v2 uses: tibdex/github-app-token@v2
with: with:
app_id: ${{ secrets.GH_APP_ID }} app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }} private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v4 - uses: actions/checkout@v4
if: ${{ github.event_name != 'pull_request' }}
with: with:
token: ${{ steps.generate_token.outputs.token }} token: ${{ steps.generate_token.outputs.token }}
- uses: actions/checkout@v4
if: ${{ github.event_name == 'pull_request' }}
- name: Setup authentik env - name: Setup authentik env
uses: ./.github/actions/setup uses: ./.github/actions/setup
- name: Generate API
run: make gen-client-ts
- name: run extract - name: run extract
run: | run: |
uv run make i18n-extract poetry run make i18n-extract
- name: run compile - name: run compile
run: | run: |
uv run ak compilemessages poetry run ak compilemessages
make web-check-compile make web-check-compile
- name: Create Pull Request - name: Create Pull Request
if: ${{ github.event_name != 'pull_request' }} uses: peter-evans/create-pull-request@v6
uses: peter-evans/create-pull-request@v7
with: with:
token: ${{ steps.generate_token.outputs.token }} token: ${{ steps.generate_token.outputs.token }}
branch: extract-compile-backend-translation branch: extract-compile-backend-translation
@ -53,6 +41,3 @@ jobs:
body: "core, web: update translations" body: "core, web: update translations"
delete-branch: true delete-branch: true
signoff: true signoff: true
labels: dependencies
# ID from https://api.github.com/users/authentik-automation[bot]
author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>

View File

@ -15,7 +15,6 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}} if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}}
steps: steps:
- uses: actions/checkout@v4
- id: generate_token - id: generate_token
uses: tibdex/github-app-token@v2 uses: tibdex/github-app-token@v2
with: with:
@ -26,13 +25,23 @@ jobs:
env: env:
GH_TOKEN: ${{ steps.generate_token.outputs.token }} GH_TOKEN: ${{ steps.generate_token.outputs.token }}
run: | run: |
title=$(gh pr view ${{ github.event.pull_request.number }} --json "title" -q ".title") title=$(curl -q -L \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${GH_TOKEN}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} | jq -r .title)
echo "title=${title}" >> "$GITHUB_OUTPUT" echo "title=${title}" >> "$GITHUB_OUTPUT"
- name: Rename - name: Rename
env: env:
GH_TOKEN: ${{ steps.generate_token.outputs.token }} GH_TOKEN: ${{ steps.generate_token.outputs.token }}
run: | run: |
gh pr edit ${{ github.event.pull_request.number }} -t "translate: ${{ steps.title.outputs.title }}" --add-label dependencies curl -L \
-X PATCH \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${GH_TOKEN}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} \
-d "{\"title\":\"translate: ${{ steps.title.outputs.title }}\"}"
- uses: peter-evans/enable-pull-request-automerge@v3 - uses: peter-evans/enable-pull-request-automerge@v3
with: with:
token: ${{ steps.generate_token.outputs.token }} token: ${{ steps.generate_token.outputs.token }}
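For readers unfamiliar with the two GitHub REST calls that the `gh pr view`/`gh pr edit` commands above replace, here is a rough Python equivalent. It is illustrative only and not part of the workflow; it assumes the `requests` library, a `GH_TOKEN` environment variable allowed to edit the pull request, and placeholder repository/PR values.

```python
import os

import requests

# Placeholders for illustration; the workflow takes these from the GitHub
# Actions context and a generated app token instead.
REPO = "goauthentik/authentik"
PR_NUMBER = 1234

headers = {
    "Accept": "application/vnd.github+json",
    "Authorization": f"Bearer {os.environ['GH_TOKEN']}",
    "X-GitHub-Api-Version": "2022-11-28",
}
pr_url = f"https://api.github.com/repos/{REPO}/pulls/{PR_NUMBER}"

# Equivalent of the GET call that reads the current title.
title = requests.get(pr_url, headers=headers, timeout=30).json()["title"]

# Equivalent of the PATCH call that prefixes the title.
resp = requests.patch(
    pr_url, headers=headers, json={"title": f"translate: {title}"}, timeout=30
)
resp.raise_for_status()
```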

View File

@ -1,4 +1,4 @@
name: authentik-api-ts-publish name: authentik-web-api-publish
on: on:
push: push:
branches: [main] branches: [main]
@ -7,7 +7,6 @@ on:
workflow_dispatch: workflow_dispatch:
jobs: jobs:
build: build:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- id: generate_token - id: generate_token
@ -32,16 +31,11 @@ jobs:
env: env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }} NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }}
- name: Upgrade /web - name: Upgrade /web
working-directory: web working-directory: web/
run: | run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'` export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION npm i @goauthentik/api@$VERSION
- name: Upgrade /web/packages/sfe - uses: peter-evans/create-pull-request@v6
working-directory: web/packages/sfe
run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION
- uses: peter-evans/create-pull-request@v7
id: cpr id: cpr
with: with:
token: ${{ steps.generate_token.outputs.token }} token: ${{ steps.generate_token.outputs.token }}
@ -53,7 +47,6 @@ jobs:
signoff: true signoff: true
# ID from https://api.github.com/users/authentik-automation[bot] # ID from https://api.github.com/users/authentik-automation[bot]
author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
labels: dependencies
- uses: peter-evans/enable-pull-request-automerge@v3 - uses: peter-evans/enable-pull-request-automerge@v3
with: with:
token: ${{ steps.generate_token.outputs.token }} token: ${{ steps.generate_token.outputs.token }}

13
.gitignore vendored
View File

@ -11,10 +11,6 @@ local_settings.py
db.sqlite3 db.sqlite3
media media
# Node
node_modules
# If your build process includes running collectstatic, then you probably don't need or want to include staticfiles/ # If your build process includes running collectstatic, then you probably don't need or want to include staticfiles/
# in your Git repository. Update and uncomment the following line accordingly. # in your Git repository. Update and uncomment the following line accordingly.
# <django-project-name>/staticfiles/ # <django-project-name>/staticfiles/
@ -37,7 +33,6 @@ eggs/
lib64/ lib64/
parts/ parts/
dist/ dist/
out/
sdist/ sdist/
var/ var/
wheels/ wheels/
@ -100,6 +95,9 @@ ipython_config.py
# pyenv # pyenv
.python-version .python-version
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files # SageMath parsed files
*.sage.py *.sage.py
@ -163,6 +161,8 @@ dmypy.json
# pyenv # pyenv
# celery beat schedule file
# SageMath parsed files # SageMath parsed files
# Environments # Environments
@ -209,6 +209,3 @@ source_docs/
### Golang ### ### Golang ###
/vendor/ /vendor/
### Docker ###
docker-compose.override.yml

View File

@ -1,47 +0,0 @@
# Prettier Ignorefile
## Static Files
**/LICENSE
authentik/stages/**/*
## Build asset directories
coverage
dist
out
.docusaurus
website/docs/developer-docs/api/**/*
## Environment
*.env
## Secrets
*.secrets
## Yarn
.yarn/**/*
## Node
node_modules
coverage
## Configs
*.log
*.yaml
*.yml
# Templates
# TODO: Rename affected files to *.template.* or similar.
*.html
*.mdx
*.md
## Import order matters
poly.ts
src/locale-codes.ts
src/locales/
# Storybook
storybook-static/
.storybook/css-import-maps*

View File

@ -2,7 +2,6 @@
"recommendations": [ "recommendations": [
"bashmish.es6-string-css", "bashmish.es6-string-css",
"bpruitt-goddard.mermaid-markdown-syntax-highlighting", "bpruitt-goddard.mermaid-markdown-syntax-highlighting",
"charliermarsh.ruff",
"dbaeumer.vscode-eslint", "dbaeumer.vscode-eslint",
"EditorConfig.EditorConfig", "EditorConfig.EditorConfig",
"esbenp.prettier-vscode", "esbenp.prettier-vscode",
@ -11,10 +10,11 @@
"Gruntfuggly.todo-tree", "Gruntfuggly.todo-tree",
"mechatroner.rainbow-csv", "mechatroner.rainbow-csv",
"ms-python.black-formatter", "ms-python.black-formatter",
"ms-python.black-formatter", "ms-python.isort",
"ms-python.debugpy", "ms-python.pylint",
"ms-python.python", "ms-python.python",
"ms-python.vscode-pylance", "ms-python.vscode-pylance",
"ms-python.black-formatter",
"redhat.vscode-yaml", "redhat.vscode-yaml",
"Tobermory.es6-string-html", "Tobermory.es6-string-html",
"unifiedjs.vscode-mdx", "unifiedjs.vscode-mdx",

66
.vscode/launch.json vendored
View File

@ -2,76 +2,26 @@
"version": "0.2.0", "version": "0.2.0",
"configurations": [ "configurations": [
{ {
"name": "Debug: Attach Server Core", "name": "Python: PDB attach Server",
"type": "debugpy", "type": "python",
"request": "attach", "request": "attach",
"connect": { "connect": {
"host": "localhost", "host": "localhost",
"port": 9901 "port": 6800
}, },
"pathMappings": [ "justMyCode": true,
{
"localRoot": "${workspaceFolder}",
"remoteRoot": "."
}
],
"django": true "django": true
}, },
{ {
"name": "Debug: Attach Worker", "name": "Python: PDB attach Worker",
"type": "debugpy", "type": "python",
"request": "attach", "request": "attach",
"connect": { "connect": {
"host": "localhost", "host": "localhost",
"port": 9901 "port": 6900
}, },
"pathMappings": [ "justMyCode": true,
{
"localRoot": "${workspaceFolder}",
"remoteRoot": "."
}
],
"django": true "django": true
}, },
{
"name": "Debug: Start Server Router",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/server",
"cwd": "${workspaceFolder}"
},
{
"name": "Debug: Start LDAP Outpost",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/ldap",
"cwd": "${workspaceFolder}"
},
{
"name": "Debug: Start Proxy Outpost",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/proxy",
"cwd": "${workspaceFolder}"
},
{
"name": "Debug: Start RAC Outpost",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/rac",
"cwd": "${workspaceFolder}"
},
{
"name": "Debug: Start Radius Outpost",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/radius",
"cwd": "${workspaceFolder}"
}
] ]
} }

48
.vscode/settings.json vendored
View File

@ -1,24 +1,40 @@
{ {
"cSpell.words": [
"akadmin",
"asgi",
"authentik",
"authn",
"goauthentik",
"jwks",
"oidc",
"openid",
"plex",
"saml",
"totp",
"webauthn",
"traefik",
"passwordless",
"kubernetes",
"sso",
"slo",
"scim",
],
"todo-tree.tree.showCountsInTree": true, "todo-tree.tree.showCountsInTree": true,
"todo-tree.tree.showBadges": true, "todo-tree.tree.showBadges": true,
"yaml.customTags": [ "yaml.customTags": [
"!Condition sequence",
"!Context scalar",
"!Enumerate sequence",
"!Env scalar",
"!Env sequence",
"!Find sequence", "!Find sequence",
"!Format sequence",
"!If sequence",
"!Index scalar",
"!KeyOf scalar", "!KeyOf scalar",
"!Value scalar", "!Context scalar",
"!AtIndex scalar", "!Context sequence",
"!ParseJSON scalar" "!Format sequence",
"!Condition sequence",
"!Env sequence",
"!Env scalar",
"!If sequence"
], ],
"typescript.preferences.importModuleSpecifier": "non-relative", "typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.importModuleSpecifierEnding": "index", "typescript.preferences.importModuleSpecifierEnding": "index",
"typescript.tsdk": "./node_modules/typescript/lib", "typescript.tsdk": "./web/node_modules/typescript/lib",
"typescript.enablePromptUseWorkspaceTsdk": true, "typescript.enablePromptUseWorkspaceTsdk": true,
"yaml.schemas": { "yaml.schemas": {
"./blueprints/schema.json": "blueprints/**/*.yaml" "./blueprints/schema.json": "blueprints/**/*.yaml"
@ -31,6 +47,10 @@
"ignoreCase": false "ignoreCase": false
} }
], ],
"go.testFlags": ["-count=1"], "go.testFlags": [
"github-actions.workflows.pinned.workflows": [".github/workflows/ci-main.yml"] "-count=1"
],
"github-actions.workflows.pinned.workflows": [
".github/workflows/ci-main.yml"
]
} }

82
.vscode/tasks.json vendored
View File

@ -2,91 +2,85 @@
"version": "2.0.0", "version": "2.0.0",
"tasks": [ "tasks": [
{ {
"label": "authentik/core: make", "label": "authentik[core]: format & test",
"command": "uv", "command": "poetry",
"args": [
"run",
"make"
],
"group": "build",
},
{
"label": "authentik[core]: run",
"command": "poetry",
"args": [ "args": [
"run", "run",
"make", "make",
"lint-fix",
"lint"
],
"presentation": {
"panel": "new"
},
"group": "test"
},
{
"label": "authentik/core: run",
"command": "uv",
"args": [
"run", "run",
"ak",
"server"
], ],
"group": "build", "group": "build",
"presentation": { "presentation": {
"panel": "dedicated", "panel": "dedicated",
"group": "running" "group": "running"
} },
}, },
{ {
"label": "authentik/web: make", "label": "authentik[web]: format",
"command": "make", "command": "make",
"args": [ "args": ["web"],
"web" "group": "build",
],
"group": "build"
}, },
{ {
"label": "authentik/web: watch", "label": "authentik[web]: watch",
"command": "make", "command": "make",
"args": [ "args": ["web-watch"],
"web-watch"
],
"group": "build", "group": "build",
"presentation": { "presentation": {
"panel": "dedicated", "panel": "dedicated",
"group": "running" "group": "running"
} },
}, },
{ {
"label": "authentik: install", "label": "authentik: install",
"command": "make", "command": "make",
"args": [ "args": ["install"],
"install", "group": "build",
"-j4"
],
"group": "build"
}, },
{ {
"label": "authentik/website: make", "label": "authentik: i18n-extract",
"command": "make", "command": "poetry",
"args": [ "args": [
"website" "run",
"make",
"i18n-extract"
], ],
"group": "build" "group": "build",
}, },
{ {
"label": "authentik/website: watch", "label": "authentik[website]: format",
"command": "make", "command": "make",
"args": [ "args": ["website"],
"website-watch" "group": "build",
], },
{
"label": "authentik[website]: watch",
"command": "make",
"args": ["website-watch"],
"group": "build", "group": "build",
"presentation": { "presentation": {
"panel": "dedicated", "panel": "dedicated",
"group": "running" "group": "running"
} },
}, },
{ {
"label": "authentik/api: generate", "label": "authentik[api]: generate",
"command": "uv", "command": "poetry",
"args": [ "args": [
"run", "run",
"make", "make",
"gen" "gen"
], ],
"group": "build" "group": "build"
} },
] ]
} }

View File

@ -10,30 +10,19 @@ schemas/ @goauthentik/backend
scripts/ @goauthentik/backend scripts/ @goauthentik/backend
tests/ @goauthentik/backend tests/ @goauthentik/backend
pyproject.toml @goauthentik/backend pyproject.toml @goauthentik/backend
uv.lock @goauthentik/backend poetry.lock @goauthentik/backend
go.mod @goauthentik/backend go.mod @goauthentik/backend
go.sum @goauthentik/backend go.sum @goauthentik/backend
# Infrastructure # Infrastructure
.github/ @goauthentik/infrastructure .github/ @goauthentik/infrastructure
lifecycle/aws/ @goauthentik/infrastructure
Dockerfile @goauthentik/infrastructure Dockerfile @goauthentik/infrastructure
*Dockerfile @goauthentik/infrastructure *Dockerfile @goauthentik/infrastructure
.dockerignore @goauthentik/infrastructure .dockerignore @goauthentik/infrastructure
docker-compose.yml @goauthentik/infrastructure docker-compose.yml @goauthentik/infrastructure
Makefile @goauthentik/infrastructure
.editorconfig @goauthentik/infrastructure
CODEOWNERS @goauthentik/infrastructure
# Web packages
packages/ @goauthentik/frontend
# Web # Web
web/ @goauthentik/frontend web/ @goauthentik/frontend
tests/wdio/ @goauthentik/frontend tests/wdio/ @goauthentik/frontend
# Locale
locale/ @goauthentik/backend @goauthentik/frontend
web/xliff/ @goauthentik/backend @goauthentik/frontend
# Docs & Website # Docs & Website
website/ @goauthentik/docs website/ @goauthentik/docs
CODE_OF_CONDUCT.md @goauthentik/docs
# Security # Security
SECURITY.md @goauthentik/security @goauthentik/docs website/docs/security/ @goauthentik/security
website/docs/security/ @goauthentik/security @goauthentik/docs

View File

@ -5,7 +5,7 @@
We as members, contributors, and leaders pledge to make participation in our We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socioeconomic status, identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity nationality, personal appearance, race, religion, or sexual identity
and orientation. and orientation.

View File

@ -1 +1 @@
website/docs/developer-docs/index.md website/developer-docs/index.md

View File

@ -1,31 +1,43 @@
# syntax=docker/dockerfile:1 # syntax=docker/dockerfile:1
# Stage 1: Build webui # Stage 1: Build website
FROM --platform=${BUILDPLATFORM} docker.io/library/node:24-slim AS node-builder FROM --platform=${BUILDPLATFORM} docker.io/node:21 as website-builder
ENV NODE_ENV=production
WORKDIR /work/website
RUN --mount=type=bind,target=/work/website/package.json,src=./website/package.json \
--mount=type=bind,target=/work/website/package-lock.json,src=./website/package-lock.json \
--mount=type=cache,id=npm-website,sharing=shared,target=/root/.npm \
npm ci --include=dev
COPY ./website /work/website/
COPY ./blueprints /work/blueprints/
COPY ./SECURITY.md /work/
RUN npm run build-docs-only
# Stage 2: Build webui
FROM --platform=${BUILDPLATFORM} docker.io/node:21 as web-builder
ARG GIT_BUILD_HASH
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
ENV NODE_ENV=production ENV NODE_ENV=production
WORKDIR /work/web WORKDIR /work/web
RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \ RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \
--mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \ --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \
--mount=type=bind,target=/work/web/packages/sfe/package.json,src=./web/packages/sfe/package.json \ --mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \
--mount=type=bind,target=/work/web/scripts,src=./web/scripts \
--mount=type=cache,id=npm-ak,sharing=shared,target=/root/.npm \
npm ci --include=dev npm ci --include=dev
COPY ./package.json /work
COPY ./web /work/web/ COPY ./web /work/web/
COPY ./website /work/website/ COPY ./website /work/website/
COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api
RUN npm run build && \ RUN npm run build
npm run build:sfe
# Stage 2: Build go proxy # Stage 3: Build go proxy
FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.24-bookworm AS go-builder FROM --platform=${BUILDPLATFORM} docker.io/golang:1.22.0-bookworm AS go-builder
ARG TARGETOS ARG TARGETOS
ARG TARGETARCH ARG TARGETARCH
@ -36,11 +48,6 @@ ARG GOARCH=$TARGETARCH
WORKDIR /go/src/goauthentik.io WORKDIR /go/src/goauthentik.io
RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
dpkg --add-architecture arm64 && \
apt-get update && \
apt-get install -y --no-install-recommends crossbuild-essential-arm64 gcc-aarch64-linux-gnu
RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \ RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \
--mount=type=bind,target=/go/src/goauthentik.io/go.sum,src=./go.sum \ --mount=type=bind,target=/go/src/goauthentik.io/go.sum,src=./go.sum \
--mount=type=cache,target=/go/pkg/mod \ --mount=type=cache,target=/go/pkg/mod \
@ -49,106 +56,78 @@ RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \
COPY ./cmd /go/src/goauthentik.io/cmd COPY ./cmd /go/src/goauthentik.io/cmd
COPY ./authentik/lib /go/src/goauthentik.io/authentik/lib COPY ./authentik/lib /go/src/goauthentik.io/authentik/lib
COPY ./web/static.go /go/src/goauthentik.io/web/static.go COPY ./web/static.go /go/src/goauthentik.io/web/static.go
COPY --from=node-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt COPY --from=web-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt
COPY --from=node-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt COPY --from=web-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt
COPY ./internal /go/src/goauthentik.io/internal COPY ./internal /go/src/goauthentik.io/internal
COPY ./go.mod /go/src/goauthentik.io/go.mod COPY ./go.mod /go/src/goauthentik.io/go.mod
COPY ./go.sum /go/src/goauthentik.io/go.sum COPY ./go.sum /go/src/goauthentik.io/go.sum
ENV CGO_ENABLED=0
RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \ RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
--mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \ --mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \
if [ "$TARGETARCH" = "arm64" ]; then export CC=aarch64-linux-gnu-gcc && export CC_FOR_TARGET=gcc-aarch64-linux-gnu; fi && \ GOARM="${TARGETVARIANT#v}" go build -o /go/authentik ./cmd/server
CGO_ENABLED=1 GOFIPS140=latest GOARM="${TARGETVARIANT#v}" \
go build -o /go/authentik ./cmd/server
# Stage 3: MaxMind GeoIP # Stage 4: MaxMind GeoIP
FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v6.1 as geoip
ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN" ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN"
ENV GEOIPUPDATE_VERBOSE="1" ENV GEOIPUPDATE_VERBOSE="true"
ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID" ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID"
ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY"
USER root USER root
RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
--mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \ --mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \
mkdir -p /usr/share/GeoIP && \ mkdir -p /usr/share/GeoIP && \
/bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
# Stage 4: Download uv # Stage 5: Python dependencies
FROM ghcr.io/astral-sh/uv:0.7.17 AS uv FROM docker.io/python:3.12.2-slim-bookworm AS python-deps
# Stage 5: Base python image
FROM ghcr.io/goauthentik/fips-python:3.13.5-slim-bookworm-fips AS python-base
ENV VENV_PATH="/ak-root/.venv" \ WORKDIR /ak-root/poetry
PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \
UV_COMPILE_BYTECODE=1 \
UV_LINK_MODE=copy \
UV_NATIVE_TLS=1 \
UV_PYTHON_DOWNLOADS=0
WORKDIR /ak-root/ ENV VENV_PATH="/ak-root/venv" \
POETRY_VIRTUALENVS_CREATE=false \
COPY --from=uv /uv /uvx /bin/ PATH="/ak-root/venv/bin:$PATH"
# Stage 6: Python dependencies
FROM python-base AS python-deps
ARG TARGETARCH
ARG TARGETVARIANT
RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
ENV PATH="/root/.cargo/bin:$PATH"
RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \ RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
apt-get update && \ apt-get update && \
# Required for installing pip packages # Required for installing pip packages
apt-get install -y --no-install-recommends \ apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev libpq-dev
# Build essentials
build-essential pkg-config libffi-dev git \
# cryptography
curl \
# libxml
libxslt-dev zlib1g-dev \
# postgresql
libpq-dev \
# python-kadmin-rs
clang libkrb5-dev sccache \
# xmlsec
libltdl-dev && \
curl https://sh.rustup.rs -sSf | sh -s -- -y
ENV UV_NO_BINARY_PACKAGE="cryptography lxml python-kadmin-rs xmlsec" RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
--mount=type=bind,target=./poetry.lock,src=./poetry.lock \
--mount=type=cache,target=/root/.cache/pip \
--mount=type=cache,target=/root/.cache/pypoetry \
python -m venv /ak-root/venv/ && \
pip3 install --upgrade pip && \
pip3 install poetry && \
poetry install --only=main --no-ansi --no-interaction
RUN --mount=type=bind,target=pyproject.toml,src=pyproject.toml \ # Stage 6: Run
--mount=type=bind,target=uv.lock,src=uv.lock \ FROM docker.io/python:3.12.2-slim-bookworm AS final-image
--mount=type=bind,target=packages,src=packages \
--mount=type=cache,target=/root/.cache/uv \
uv sync --frozen --no-install-project --no-dev
# Stage 7: Run
FROM python-base AS final-image
ARG VERSION
ARG GIT_BUILD_HASH ARG GIT_BUILD_HASH
ARG VERSION
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
LABEL org.opencontainers.image.url=https://goauthentik.io LABEL org.opencontainers.image.url https://goauthentik.io
LABEL org.opencontainers.image.description="goauthentik.io Main server image, see https://goauthentik.io for more info." LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info.
LABEL org.opencontainers.image.source=https://github.com/goauthentik/authentik LABEL org.opencontainers.image.source https://github.com/goauthentik/authentik
LABEL org.opencontainers.image.version=${VERSION} LABEL org.opencontainers.image.version ${VERSION}
LABEL org.opencontainers.image.revision=${GIT_BUILD_HASH} LABEL org.opencontainers.image.revision ${GIT_BUILD_HASH}
WORKDIR / WORKDIR /
# We cannot cache this layer otherwise we'll end up with a bigger image # We cannot cache this layer otherwise we'll end up with a bigger image
RUN apt-get update && \ RUN apt-get update && \
apt-get upgrade -y && \
# Required for runtime # Required for runtime
apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates libkrb5-3 libkadm5clnt-mit12 libkdb5-10 libltdl7 libxslt1.1 && \ apt-get install -y --no-install-recommends libpq5 openssl libxmlsec1-openssl libmaxminddb0 ca-certificates && \
# Required for bootstrap & healtcheck # Required for bootstrap & healtcheck
apt-get install -y --no-install-recommends runit && \ apt-get install -y --no-install-recommends runit && \
pip3 install --no-cache-dir --upgrade pip && \
apt-get clean && \ apt-get clean && \
rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \ rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \ adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
@ -159,19 +138,18 @@ RUN apt-get update && \
COPY ./authentik/ /authentik COPY ./authentik/ /authentik
COPY ./pyproject.toml / COPY ./pyproject.toml /
COPY ./uv.lock / COPY ./poetry.lock /
COPY ./schemas /schemas COPY ./schemas /schemas
COPY ./locale /locale COPY ./locale /locale
COPY ./tests /tests COPY ./tests /tests
COPY ./manage.py / COPY ./manage.py /
COPY ./blueprints /blueprints COPY ./blueprints /blueprints
COPY ./lifecycle/ /lifecycle COPY ./lifecycle/ /lifecycle
COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf
COPY --from=go-builder /go/authentik /bin/authentik COPY --from=go-builder /go/authentik /bin/authentik
COPY ./packages/ /ak-root/packages COPY --from=python-deps /ak-root/venv /ak-root/venv
COPY --from=python-deps /ak-root/.venv /ak-root/.venv COPY --from=web-builder /work/web/dist/ /web/dist/
COPY --from=node-builder /work/web/dist/ /web/dist/ COPY --from=web-builder /work/web/authentik/ /web/authentik/
COPY --from=node-builder /work/web/authentik/ /web/authentik/ COPY --from=website-builder /work/website/help/ /website/help/
COPY --from=geoip /usr/share/GeoIP /geoip COPY --from=geoip /usr/share/GeoIP /geoip
USER 1000 USER 1000
@ -179,7 +157,9 @@ USER 1000
ENV TMPDIR=/dev/shm/ \ ENV TMPDIR=/dev/shm/ \
PYTHONDONTWRITEBYTECODE=1 \ PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \ PYTHONUNBUFFERED=1 \
GOFIPS=1 PATH="/ak-root/venv/bin:/lifecycle:$PATH" \
VENV_PATH="/ak-root/venv" \
POETRY_VIRTUALENVS_CREATE=false
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ] HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]

178
Makefile
View File

@ -1,21 +1,33 @@
.PHONY: gen dev-reset all clean test web website .PHONY: gen dev-reset all clean test web website
SHELL := /usr/bin/env bash .SHELLFLAGS += ${SHELLFLAGS} -e
.SHELLFLAGS += ${SHELLFLAGS} -e -o pipefail
PWD = $(shell pwd) PWD = $(shell pwd)
UID = $(shell id -u) UID = $(shell id -u)
GID = $(shell id -g) GID = $(shell id -g)
NPM_VERSION = $(shell python -m scripts.generate_semver) NPM_VERSION = $(shell python -m scripts.npm_version)
PY_SOURCES = authentik packages tests scripts lifecycle .github PY_SOURCES = authentik tests scripts lifecycle
DOCKER_IMAGE ?= "authentik:test" DOCKER_IMAGE ?= "authentik:test"
GEN_API_TS = gen-ts-api GEN_API_TS = "gen-ts-api"
GEN_API_PY = gen-py-api GEN_API_GO = "gen-go-api"
GEN_API_GO = gen-go-api
pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null) pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null)
pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null) pg_host := $(shell python -m authentik.lib.config postgresql.host 2>/dev/null)
pg_name := $(shell uv run python -m authentik.lib.config postgresql.name 2>/dev/null) pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)
CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
-I .github/codespell-words.txt \
-S 'web/src/locales/**' \
authentik \
internal \
cmd \
web/src \
website/src \
website/blog \
website/developer-docs \
website/docs \
website/integrations \
website/src
all: lint-fix lint test gen web ## Lint, build, and test everything all: lint-fix lint test gen web ## Lint, build, and test everything
@ -29,41 +41,45 @@ help: ## Show this help
sort sort
@echo "" @echo ""
go-test: test-go:
go test -timeout 0 -v -race -cover ./... go test -timeout 0 -v -race -cover ./...
test-docker: ## Run all tests in a docker-compose
echo "PG_PASS=$(openssl rand -base64 32)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
docker-compose pull -q
docker-compose up --no-start
docker-compose start postgresql redis
docker-compose run -u root server test-all
rm -f .env
test: ## Run the server tests and produce a coverage report (locally) test: ## Run the server tests and produce a coverage report (locally)
uv run coverage run manage.py test --keepdb authentik coverage run manage.py test --keepdb authentik
uv run coverage html coverage html
uv run coverage report coverage report
lint-fix: lint-codespell ## Lint and automatically fix errors in the python source code. Reports spelling errors. lint-fix: ## Lint and automatically fix errors in the python source code. Reports spelling errors.
uv run black $(PY_SOURCES) isort $(PY_SOURCES)
uv run ruff check --fix $(PY_SOURCES) black $(PY_SOURCES)
ruff --fix $(PY_SOURCES)
lint-codespell: ## Reports spelling errors. codespell -w $(CODESPELL_ARGS)
uv run codespell -w
lint: ## Lint the python and golang sources lint: ## Lint the python and golang sources
uv run bandit -c pyproject.toml -r $(PY_SOURCES) bandit -r $(PY_SOURCES) -x node_modules
./web/node_modules/.bin/pyright $(PY_SOURCES)
pylint $(PY_SOURCES)
golangci-lint run -v golangci-lint run -v
core-install: core-install:
uv sync --frozen poetry install
migrate: ## Run the Authentik Django server's migrations migrate: ## Run the Authentik Django server's migrations
uv run python -m lifecycle.migrate python -m lifecycle.migrate
i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation into files to send to a translation service i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation into files to send to a translation service
aws-cfn:
cd lifecycle/aws && npm run aws-cfn
run: ## Run the main authentik server process
uv run ak server
core-i18n-extract: core-i18n-extract:
uv run ak makemessages \ ak makemessages \
--add-location file \ --add-location file \
--no-obsolete \ --no-obsolete \
--ignore web \ --ignore web \
@ -86,10 +102,6 @@ dev-create-db:
dev-reset: dev-drop-db dev-create-db migrate ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state. dev-reset: dev-drop-db dev-create-db migrate ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state.
update-test-mmdb: ## Update test GeoIP and ASN Databases
curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-ASN-Test.mmdb -o ${PWD}/tests/GeoLite2-ASN-Test.mmdb
curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-City-Test.mmdb -o ${PWD}/tests/GeoLite2-City-Test.mmdb
######################### #########################
## API Schema ## API Schema
######################### #########################
@ -98,11 +110,11 @@ gen-build: ## Extract the schema from the database
AUTHENTIK_DEBUG=true \ AUTHENTIK_DEBUG=true \
AUTHENTIK_TENANTS__ENABLED=true \ AUTHENTIK_TENANTS__ENABLED=true \
AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
uv run ak make_blueprint_schema --file blueprints/schema.json ak make_blueprint_schema > blueprints/schema.json
AUTHENTIK_DEBUG=true \ AUTHENTIK_DEBUG=true \
AUTHENTIK_TENANTS__ENABLED=true \ AUTHENTIK_TENANTS__ENABLED=true \
AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
uv run ak spectacular --file schema.yml ak spectacular --file schema.yml
gen-changelog: ## (Release) generate the changelog based from the commits since the last tag gen-changelog: ## (Release) generate the changelog based from the commits since the last tag
git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
@ -122,27 +134,19 @@ gen-diff: ## (Release) generate the changelog diff between the current schema a
npx prettier --write diff.md npx prettier --write diff.md
gen-clean-ts: ## Remove generated API client for Typescript gen-clean-ts: ## Remove generated API client for Typescript
rm -rf ${PWD}/${GEN_API_TS}/ rm -rf ./${GEN_API_TS}/
rm -rf ${PWD}/web/node_modules/@goauthentik/api/ rm -rf ./web/node_modules/@goauthentik/api/
gen-clean-go: ## Remove generated API client for Go gen-clean-go: ## Remove generated API client for Go
mkdir -p ${PWD}/${GEN_API_GO} rm -rf ./${GEN_API_GO}/
ifneq ($(wildcard ${PWD}/${GEN_API_GO}/.*),)
make -C ${PWD}/${GEN_API_GO} clean
else
rm -rf ${PWD}/${GEN_API_GO}
endif
gen-clean-py: ## Remove generated API client for Python gen-clean: gen-clean-ts gen-clean-go ## Remove generated API clients
rm -rf ${PWD}/${GEN_API_PY}/
gen-clean: gen-clean-ts gen-clean-go gen-clean-py ## Remove generated API clients
gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescript into the authentik UI Application gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescript into the authentik UI Application
docker run \ docker run \
--rm -v ${PWD}:/local \ --rm -v ${PWD}:/local \
--user ${UID}:${GID} \ --user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \ docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
-i /local/schema.yml \ -i /local/schema.yml \
-g typescript-fetch \ -g typescript-fetch \
-o /local/${GEN_API_TS} \ -o /local/${GEN_API_TS} \
@ -150,36 +154,29 @@ gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescri
--additional-properties=npmVersion=${NPM_VERSION} \ --additional-properties=npmVersion=${NPM_VERSION} \
--git-repo-id authentik \ --git-repo-id authentik \
--git-user-id goauthentik --git-user-id goauthentik
mkdir -p web/node_modules/@goauthentik/api
cd ${PWD}/${GEN_API_TS} && npm link cd ./${GEN_API_TS} && npm i
cd ${PWD}/web && npm link @goauthentik/api \cp -rf ./${GEN_API_TS}/* web/node_modules/@goauthentik/api
gen-client-py: gen-clean-py ## Build and install the authentik API for Python
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
-i /local/schema.yml \
-g python \
-o /local/${GEN_API_PY} \
-c /local/scripts/api-py-config.yaml \
--additional-properties=packageVersion=${NPM_VERSION} \
--git-repo-id authentik \
--git-user-id goauthentik
gen-client-go: gen-clean-go ## Build and install the authentik API for Golang gen-client-go: gen-clean-go ## Build and install the authentik API for Golang
mkdir -p ${PWD}/${GEN_API_GO} mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates
ifeq ($(wildcard ${PWD}/${GEN_API_GO}/.*),) wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./${GEN_API_GO}/config.yaml
git clone --depth 1 https://github.com/goauthentik/client-go.git ${PWD}/${GEN_API_GO} wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./${GEN_API_GO}/templates/README.mustache
else wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./${GEN_API_GO}/templates/go.mod.mustache
cd ${PWD}/${GEN_API_GO} && git pull cp schema.yml ./${GEN_API_GO}/
endif docker run \
cp ${PWD}/schema.yml ${PWD}/${GEN_API_GO} --rm -v ${PWD}/${GEN_API_GO}:/local \
make -C ${PWD}/${GEN_API_GO} build --user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
-i /local/schema.yml \
-g go \
-o /local/ \
-c /local/config.yaml
go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO} go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO}
rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/
gen-dev-config: ## Generate a local development config file gen-dev-config: ## Generate a local development config file
uv run scripts/generate_config.py python -m scripts.generate_config
gen: gen-build gen-client-ts gen: gen-build gen-client-ts
@ -195,9 +192,6 @@ web: web-lint-fix web-lint web-check-compile ## Automatically fix formatting is
web-install: ## Install the necessary libraries to build the Authentik UI web-install: ## Install the necessary libraries to build the Authentik UI
cd web && npm ci cd web && npm ci
web-test: ## Run tests for the Authentik UI
cd web && npm run test
web-watch: ## Build and watch the Authentik UI for changes, updating automatically web-watch: ## Build and watch the Authentik UI for changes, updating automatically
rm -rf web/dist/ rm -rf web/dist/
mkdir web/dist/ mkdir web/dist/
@ -229,7 +223,7 @@ website: website-lint-fix website-build ## Automatically fix formatting issues
website-install: website-install:
cd website && npm ci cd website && npm ci
website-lint-fix: lint-codespell website-lint-fix:
cd website && npm run prettier cd website && npm run prettier
website-build: website-build:
@ -243,12 +237,8 @@ website-watch: ## Build and watch the documentation website, updating automatic
######################### #########################
docker: ## Build a docker image of the current source tree docker: ## Build a docker image of the current source tree
mkdir -p ${GEN_API_TS}
DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}
test-docker:
BUILD=true ${PWD}/scripts/test_docker.sh
######################### #########################
## CI ## CI
######################### #########################
@ -259,22 +249,26 @@ ci--meta-debug:
python -V python -V
node --version node --version
ci-pylint: ci--meta-debug
pylint $(PY_SOURCES)
ci-black: ci--meta-debug ci-black: ci--meta-debug
uv run black --check $(PY_SOURCES) black --check $(PY_SOURCES)
ci-ruff: ci--meta-debug ci-ruff: ci--meta-debug
uv run ruff check $(PY_SOURCES) ruff check $(PY_SOURCES)
ci-codespell: ci--meta-debug ci-codespell: ci--meta-debug
uv run codespell -s codespell $(CODESPELL_ARGS) -s
ci-isort: ci--meta-debug
isort --check $(PY_SOURCES)
ci-bandit: ci--meta-debug ci-bandit: ci--meta-debug
uv run bandit -r $(PY_SOURCES) bandit -r $(PY_SOURCES)
ci-pyright: ci--meta-debug
./web/node_modules/.bin/pyright $(PY_SOURCES)
ci-pending-migrations: ci--meta-debug ci-pending-migrations: ci--meta-debug
uv run ak makemigrations --check ak makemigrations --check
ci-test: ci--meta-debug
uv run coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik
uv run coverage report
uv run coverage xml

View File

@ -15,9 +15,7 @@
## What is authentik? ## What is authentik?
authentik is an open-source Identity Provider that emphasizes flexibility and versatility, with support for a wide set of protocols. authentik is an open-source Identity Provider that emphasizes flexibility and versatility. It can be seamlessly integrated into existing environments to support new protocols. authentik is also a great solution for implementing sign-up, recovery, and other similar features in your application, saving you the hassle of dealing with them.
Our [enterprise offer](https://goauthentik.io/pricing) can also be used as a self-hosted replacement for large-scale deployments of Okta/Auth0, Entra ID, Ping Identity, or other legacy IdPs for employees and B2B2C use.
## Installation ## Installation
@ -27,14 +25,14 @@ For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/h
## Screenshots ## Screenshots
| Light | Dark | | Light | Dark |
| ----------------------------------------------------------- | ---------------------------------------------------------- | | ------------------------------------------------------ | ----------------------------------------------------- |
| ![](https://docs.goauthentik.io/img/screen_apps_light.jpg) | ![](https://docs.goauthentik.io/img/screen_apps_dark.jpg) | | ![](https://goauthentik.io/img/screen_apps_light.jpg) | ![](https://goauthentik.io/img/screen_apps_dark.jpg) |
| ![](https://docs.goauthentik.io/img/screen_admin_light.jpg) | ![](https://docs.goauthentik.io/img/screen_admin_dark.jpg) | | ![](https://goauthentik.io/img/screen_admin_light.jpg) | ![](https://goauthentik.io/img/screen_admin_dark.jpg) |
## Development ## Development
See [Developer Documentation](https://docs.goauthentik.io/docs/developer-docs/?utm_source=github) See [Developer Documentation](https://goauthentik.io/developer-docs/?utm_source=github)
## Security ## Security
@ -42,4 +40,4 @@ See [SECURITY.md](SECURITY.md)
## Adoption and Contributions ## Adoption and Contributions
Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [contribution guide](https://docs.goauthentik.io/docs/developer-docs?utm_source=github). Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md).

View File

@ -2,7 +2,7 @@ authentik takes security very seriously. We follow the rules of [responsible di
## Independent audits and pentests ## Independent audits and pentests
We are committed to engaging in regular pentesting and security audits of authentik. Defining and adhering to a cadence of external testing ensures a stronger probability that our code base, our features, and our architecture is as secure and non-exploitable as possible. For more details about specific audits and pentests, refer to "Audits and Certificates" in our [Security documentation](https://docs.goauthentik.io/docs/security). In May/June of 2023 [Cure53](https://cure53.de) conducted an audit and pentest. The [results](https://cure53.de/pentest-report_authentik.pdf) are published on the [Cure53 website](https://cure53.de/#publications-2023). For more details about authentik's response to the findings of the audit refer to [2023-06 Cure53 Code audit](https://goauthentik.io/docs/security/2023-06-cure53).
## What authentik classifies as a CVE ## What authentik classifies as a CVE
@ -18,10 +18,10 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni
(.x being the latest patch release for each version) (.x being the latest patch release for each version)
| Version | Supported | | Version | Supported |
| --------- | --------- | | --- | --- |
| 2025.4.x | ✅ | | 2023.6.x | ✅ |
| 2025.6.x | ✅ | | 2023.8.x | ✅ |
## Reporting a Vulnerability ## Reporting a Vulnerability
@ -31,12 +31,12 @@ To report a vulnerability, send an email to [security@goauthentik.io](mailto:se
authentik reserves the right to reclassify CVSS as necessary. To determine severity, we will use the CVSS calculator from NVD (https://nvd.nist.gov/vuln-metrics/cvss/v3-calculator). The calculated CVSS score will then be translated into one of the following categories: authentik reserves the right to reclassify CVSS as necessary. To determine severity, we will use the CVSS calculator from NVD (https://nvd.nist.gov/vuln-metrics/cvss/v3-calculator). The calculated CVSS score will then be translated into one of the following categories:
| Score | Severity | | Score | Severity |
| ---------- | -------- | | --- | --- |
| 0.0 | None | | 0.0 | None |
| 0.1 – 3.9 | Low | | 0.1 – 3.9 | Low |
| 4.0 – 6.9 | Medium | | 4.0 – 6.9 | Medium |
| 7.0 – 8.9 | High | | 7.0 – 8.9 | High |
| 9.0 – 10.0 | Critical | | 9.0 – 10.0 | Critical |
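Since the table above fully determines how a CVSS 3.x base score is bucketed, the same thresholds can be written as a short Python helper. This is a minimal illustration of the mapping, not code from the repository:

```python
def cvss_severity(score: float) -> str:
    """Map a CVSS 3.x base score to the severity buckets from the table above."""
    if not 0.0 <= score <= 10.0:
        raise ValueError("CVSS base scores range from 0.0 to 10.0")
    if score == 0.0:
        return "None"
    if score <= 3.9:
        return "Low"
    if score <= 6.9:
        return "Medium"
    if score <= 8.9:
        return "High"
    return "Critical"


print(cvss_severity(7.5))  # "High"
```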
## Disclosure process ## Disclosure process

View File

@ -1,12 +1,13 @@
"""authentik root module""" """authentik root module"""
from os import environ from os import environ
from typing import Optional
__version__ = "2025.6.3" __version__ = "2023.10.7"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
def get_build_hash(fallback: str | None = None) -> str: def get_build_hash(fallback: Optional[str] = None) -> str:
"""Get build hash""" """Get build hash"""
build_hash = environ.get(ENV_GIT_HASH_KEY, fallback if fallback else "") build_hash = environ.get(ENV_GIT_HASH_KEY, fallback if fallback else "")
return fallback if build_hash == "" and fallback else build_hash return fallback if build_hash == "" and fallback else build_hash
@ -16,5 +17,5 @@ def get_full_version() -> str:
"""Get full version, with build hash appended""" """Get full version, with build hash appended"""
version = __version__ version = __version__
if (build_hash := get_build_hash()) != "": if (build_hash := get_build_hash()) != "":
return f"{version}+{build_hash}" version += "." + build_hash
return version return version
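A standalone sketch of the behaviour changed here: the build hash from GIT_BUILD_HASH is appended to __version__, joined with "+" on one side of this diff and with "." on the other. The hash value below is illustrative only.
from os import environ

environ.setdefault("GIT_BUILD_HASH", "abc1234")  # illustrative value

def get_full_version_sketch(version: str = "2023.10.7") -> str:
    """Append the build hash (if any) to the version, '+'-joined as on the newer side."""
    build_hash = environ.get("GIT_BUILD_HASH", "")
    return f"{version}+{build_hash}" if build_hash else version

print(get_full_version_sketch())  # -> 2023.10.7+abc1234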

View File

@ -0,0 +1,79 @@
"""authentik administration metrics"""
from datetime import timedelta
from django.db.models.functions import ExtractHour
from drf_spectacular.utils import extend_schema, extend_schema_field
from guardian.shortcuts import get_objects_for_user
from rest_framework.fields import IntegerField, SerializerMethodField
from rest_framework.permissions import IsAuthenticated
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from authentik.core.api.utils import PassiveSerializer
from authentik.events.models import EventAction
class CoordinateSerializer(PassiveSerializer):
"""Coordinates for diagrams"""
x_cord = IntegerField(read_only=True)
y_cord = IntegerField(read_only=True)
class LoginMetricsSerializer(PassiveSerializer):
"""Login Metrics per 1h"""
logins = SerializerMethodField()
logins_failed = SerializerMethodField()
authorizations = SerializerMethodField()
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins(self, _):
"""Get successful logins per 8 hours for the last 7 days"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.LOGIN
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins_failed(self, _):
"""Get failed logins per 8 hours for the last 7 days"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.LOGIN_FAILED
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_authorizations(self, _):
"""Get successful authorizations per 8 hours for the last 7 days"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.AUTHORIZE_APPLICATION
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
)
class AdministrationMetricsViewSet(APIView):
"""Login Metrics per 1h"""
permission_classes = [IsAuthenticated]
@extend_schema(responses={200: LoginMetricsSerializer(many=False)})
def get(self, request: Request) -> Response:
"""Login Metrics per 1h"""
serializer = LoginMetricsSerializer(True)
serializer.context["user"] = request.user
return Response(serializer.data)
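A hedged client-side sketch of consuming the metrics endpoint this file adds. It assumes the API is mounted at /api/v3/ and that AUTHENTIK_URL/AUTHENTIK_TOKEN point at a running instance; the field names (logins, logins_failed, authorizations, x_cord, y_cord) follow the serializer above.
import os

import requests

base = os.environ.get("AUTHENTIK_URL", "https://authentik.local")  # assumption
token = os.environ["AUTHENTIK_TOKEN"]  # assumption: API token with event read access

resp = requests.get(
    f"{base}/api/v3/admin/metrics/",
    headers={"Authorization": f"Bearer {token}"},
    timeout=10,
)
resp.raise_for_status()
metrics = resp.json()

# Each series is a list of {"x_cord": <hour>, "y_cord": <count>} points,
# three buckets per day over the last seven days.
for name in ("logins", "logins_failed", "authorizations"):
    points = metrics.get(name, [])
    total = sum(point["y_cord"] for point in points)
    print(f"{name}: {total} events across {len(points)} buckets")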

View File

@ -2,23 +2,18 @@
import platform import platform
from datetime import datetime from datetime import datetime
from ssl import OPENSSL_VERSION
from sys import version as python_version from sys import version as python_version
from typing import TypedDict from typing import TypedDict
from cryptography.hazmat.backends.openssl.backend import backend
from django.conf import settings
from django.utils.timezone import now from django.utils.timezone import now
from django.views.debug import SafeExceptionReporterFilter
from drf_spectacular.utils import extend_schema from drf_spectacular.utils import extend_schema
from gunicorn import version_info as gunicorn_version
from rest_framework.fields import SerializerMethodField from rest_framework.fields import SerializerMethodField
from rest_framework.request import Request from rest_framework.request import Request
from rest_framework.response import Response from rest_framework.response import Response
from rest_framework.views import APIView from rest_framework.views import APIView
from authentik import get_full_version
from authentik.core.api.utils import PassiveSerializer from authentik.core.api.utils import PassiveSerializer
from authentik.enterprise.license import LicenseKey
from authentik.lib.config import CONFIG from authentik.lib.config import CONFIG
from authentik.lib.utils.reflection import get_env from authentik.lib.utils.reflection import get_env
from authentik.outposts.apps import MANAGED_OUTPOST from authentik.outposts.apps import MANAGED_OUTPOST
@ -30,13 +25,11 @@ class RuntimeDict(TypedDict):
"""Runtime information""" """Runtime information"""
python_version: str python_version: str
gunicorn_version: str
environment: str environment: str
architecture: str architecture: str
platform: str platform: str
uname: str uname: str
openssl_version: str
openssl_fips_enabled: bool | None
authentik_version: str
class SystemInfoSerializer(PassiveSerializer): class SystemInfoSerializer(PassiveSerializer):
@ -54,16 +47,10 @@ class SystemInfoSerializer(PassiveSerializer):
def get_http_headers(self, request: Request) -> dict[str, str]: def get_http_headers(self, request: Request) -> dict[str, str]:
"""Get HTTP Request headers""" """Get HTTP Request headers"""
headers = {} headers = {}
raw_session = request._request.COOKIES.get(settings.SESSION_COOKIE_NAME)
for key, value in request.META.items(): for key, value in request.META.items():
if not isinstance(value, str): if not isinstance(value, str):
continue continue
actual_value = value headers[key] = value
if raw_session is not None and raw_session in actual_value:
actual_value = actual_value.replace(
raw_session, SafeExceptionReporterFilter.cleansed_substitute
)
headers[key] = actual_value
return headers return headers
def get_http_host(self, request: Request) -> str: def get_http_host(self, request: Request) -> str:
@ -77,15 +64,11 @@ class SystemInfoSerializer(PassiveSerializer):
def get_runtime(self, request: Request) -> RuntimeDict: def get_runtime(self, request: Request) -> RuntimeDict:
"""Get versions""" """Get versions"""
return { return {
"architecture": platform.machine(),
"authentik_version": get_full_version(),
"environment": get_env(),
"openssl_fips_enabled": (
backend._fips_enabled if LicenseKey.get_total().status().is_valid else None
),
"openssl_version": OPENSSL_VERSION,
"platform": platform.platform(),
"python_version": python_version, "python_version": python_version,
"gunicorn_version": ".".join(str(x) for x in gunicorn_version),
"environment": get_env(),
"architecture": platform.machine(),
"platform": platform.platform(),
"uname": " ".join(platform.uname()), "uname": " ".join(platform.uname()),
} }

View File

@ -1,7 +1,6 @@
"""authentik administration overview""" """authentik administration overview"""
from django.core.cache import cache from django.core.cache import cache
from django_tenants.utils import get_public_schema_name
from drf_spectacular.utils import extend_schema from drf_spectacular.utils import extend_schema
from packaging.version import parse from packaging.version import parse
from rest_framework.fields import SerializerMethodField from rest_framework.fields import SerializerMethodField
@ -11,10 +10,8 @@ from rest_framework.response import Response
from rest_framework.views import APIView from rest_framework.views import APIView
from authentik import __version__, get_build_hash from authentik import __version__, get_build_hash
from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version from authentik.admin.tasks import VERSION_CACHE_KEY, update_latest_version
from authentik.core.api.utils import PassiveSerializer from authentik.core.api.utils import PassiveSerializer
from authentik.outposts.models import Outpost
from authentik.tenants.utils import get_current_tenant
class VersionSerializer(PassiveSerializer): class VersionSerializer(PassiveSerializer):
@ -22,10 +19,8 @@ class VersionSerializer(PassiveSerializer):
version_current = SerializerMethodField() version_current = SerializerMethodField()
version_latest = SerializerMethodField() version_latest = SerializerMethodField()
version_latest_valid = SerializerMethodField()
build_hash = SerializerMethodField() build_hash = SerializerMethodField()
outdated = SerializerMethodField() outdated = SerializerMethodField()
outpost_outdated = SerializerMethodField()
def get_build_hash(self, _) -> str: def get_build_hash(self, _) -> str:
"""Get build hash, if version is not latest or released""" """Get build hash, if version is not latest or released"""
@ -37,31 +32,16 @@ class VersionSerializer(PassiveSerializer):
def get_version_latest(self, _) -> str: def get_version_latest(self, _) -> str:
"""Get latest version from cache""" """Get latest version from cache"""
if get_current_tenant().schema_name == get_public_schema_name():
return __version__
version_in_cache = cache.get(VERSION_CACHE_KEY) version_in_cache = cache.get(VERSION_CACHE_KEY)
if not version_in_cache: # pragma: no cover if not version_in_cache: # pragma: no cover
update_latest_version.send() update_latest_version.delay()
return __version__ return __version__
return version_in_cache return version_in_cache
def get_version_latest_valid(self, _) -> bool:
"""Check if latest version is valid"""
return cache.get(VERSION_CACHE_KEY) != VERSION_NULL
def get_outdated(self, instance) -> bool: def get_outdated(self, instance) -> bool:
"""Check if we're running the latest version""" """Check if we're running the latest version"""
return parse(self.get_version_current(instance)) < parse(self.get_version_latest(instance)) return parse(self.get_version_current(instance)) < parse(self.get_version_latest(instance))
def get_outpost_outdated(self, _) -> bool:
"""Check if any outpost is outdated/has a version mismatch"""
any_outdated = False
for outpost in Outpost.objects.all():
for state in outpost.state:
if state.version_outdated:
any_outdated = True
return any_outdated
class VersionView(APIView): class VersionView(APIView):
"""Get running and latest version.""" """Get running and latest version."""

View File

@ -1,33 +0,0 @@
from rest_framework.permissions import IsAdminUser
from rest_framework.viewsets import ReadOnlyModelViewSet
from authentik.admin.models import VersionHistory
from authentik.core.api.utils import ModelSerializer
class VersionHistorySerializer(ModelSerializer):
"""VersionHistory Serializer"""
class Meta:
model = VersionHistory
fields = [
"id",
"timestamp",
"version",
"build",
]
class VersionHistoryViewSet(ReadOnlyModelViewSet):
"""VersionHistory Viewset"""
queryset = VersionHistory.objects.all()
serializer_class = VersionHistorySerializer
permission_classes = [IsAdminUser]
filterset_fields = [
"version",
"build",
]
search_fields = ["version", "build"]
ordering = ["-timestamp"]
pagination_class = None

View File

@ -0,0 +1,26 @@
"""authentik administration overview"""
from django.conf import settings
from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework.fields import IntegerField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from authentik.rbac.permissions import HasPermission
from authentik.root.celery import CELERY_APP
class WorkerView(APIView):
"""Get currently connected worker count."""
permission_classes = [HasPermission("authentik_rbac.view_system_info")]
@extend_schema(responses=inline_serializer("Workers", fields={"count": IntegerField()}))
def get(self, request: Request) -> Response:
"""Get currently connected worker count."""
count = len(CELERY_APP.control.ping(timeout=0.5))
# In debug we run with `task_always_eager`, so tasks are run on the main process
if settings.DEBUG: # pragma: no cover
count += 1
return Response({"count": count})

View File

@ -1,13 +1,11 @@
"""authentik admin app config""" """authentik admin app config"""
from prometheus_client import Info from prometheus_client import Gauge, Info
from authentik.blueprints.apps import ManagedAppConfig from authentik.blueprints.apps import ManagedAppConfig
from authentik.lib.config import CONFIG
from authentik.lib.utils.time import fqdn_rand
from authentik.tasks.schedules.lib import ScheduleSpec
PROM_INFO = Info("authentik_version", "Currently running authentik version") PROM_INFO = Info("authentik_version", "Currently running authentik version")
GAUGE_WORKERS = Gauge("authentik_admin_workers", "Currently connected workers")
class AuthentikAdminConfig(ManagedAppConfig): class AuthentikAdminConfig(ManagedAppConfig):
@ -17,31 +15,3 @@ class AuthentikAdminConfig(ManagedAppConfig):
label = "authentik_admin" label = "authentik_admin"
verbose_name = "authentik Admin" verbose_name = "authentik Admin"
default = True default = True
@ManagedAppConfig.reconcile_global
def clear_update_notifications(self):
"""Clear update notifications on startup if the notification was for the version
we're running now."""
from packaging.version import parse
from authentik.admin.tasks import LOCAL_VERSION
from authentik.events.models import EventAction, Notification
for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE):
if "new_version" not in notification.event.context:
continue
notification_version = notification.event.context["new_version"]
if LOCAL_VERSION >= parse(notification_version):
notification.delete()
@property
def global_schedule_specs(self) -> list[ScheduleSpec]:
from authentik.admin.tasks import update_latest_version
return [
ScheduleSpec(
actor=update_latest_version,
crontab=f"{fqdn_rand('admin_latest_version')} * * * *",
paused=CONFIG.get_bool("disable_update_check"),
),
]

View File

@ -1,22 +0,0 @@
"""authentik admin models"""
from django.db import models
from django.utils.translation import gettext_lazy as _
class VersionHistory(models.Model):
id = models.BigAutoField(primary_key=True)
timestamp = models.DateTimeField()
version = models.TextField()
build = models.TextField()
class Meta:
managed = False
db_table = "authentik_version_history"
ordering = ("-timestamp",)
verbose_name = _("Version history")
verbose_name_plural = _("Version history")
default_permissions = []
def __str__(self):
return f"{self.version}.{self.build} ({self.timestamp})"

View File

@ -0,0 +1,13 @@
"""authentik admin settings"""
from celery.schedules import crontab
from authentik.lib.utils.time import fqdn_rand
CELERY_BEAT_SCHEDULE = {
"admin_latest_version": {
"task": "authentik.admin.tasks.update_latest_version",
"schedule": crontab(minute=fqdn_rand("admin_latest_version"), hour="*"),
"options": {"queue": "authentik_scheduled"},
}
}
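fqdn_rand is presumably used here to derive a stable, host-specific minute so that many installations don't all hit the update check at the same moment. A rough sketch of that idea follows; it is an assumption about the helper's behaviour, not authentik's implementation.
from hashlib import sha256
from socket import getfqdn

def fqdn_rand_sketch(seed: str, modulus: int = 60) -> int:
    """Stable pseudo-random value derived from this host's FQDN and a seed string."""
    digest = sha256(f"{getfqdn()}{seed}".encode()).hexdigest()
    return int(digest, 16) % modulus

print(fqdn_rand_sketch("admin_latest_version"))  # e.g. 37, used as the crontab minute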

View File

@ -0,0 +1,14 @@
"""admin signals"""
from django.dispatch import receiver
from authentik.admin.apps import GAUGE_WORKERS
from authentik.root.celery import CELERY_APP
from authentik.root.monitoring import monitoring_set
@receiver(monitoring_set)
def monitoring_set_workers(sender, **kwargs):
"""Set worker gauge"""
count = len(CELERY_APP.control.ping(timeout=0.5))
GAUGE_WORKERS.set(count)

View File

@ -1,24 +1,27 @@
"""authentik admin tasks""" """authentik admin tasks"""
import re
from django.core.cache import cache from django.core.cache import cache
from django.utils.translation import gettext_lazy as _ from django.core.validators import URLValidator
from django_dramatiq_postgres.middleware import CurrentTask from django.db import DatabaseError, InternalError, ProgrammingError
from dramatiq import actor
from packaging.version import parse from packaging.version import parse
from requests import RequestException from requests import RequestException
from structlog.stdlib import get_logger from structlog.stdlib import get_logger
from authentik import __version__, get_build_hash from authentik import __version__, get_build_hash
from authentik.admin.apps import PROM_INFO from authentik.admin.apps import PROM_INFO
from authentik.events.models import Event, EventAction from authentik.events.models import Event, EventAction, Notification
from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
from authentik.lib.config import CONFIG from authentik.lib.config import CONFIG
from authentik.lib.utils.http import get_http_session from authentik.lib.utils.http import get_http_session
from authentik.tasks.models import Task from authentik.root.celery import CELERY_APP
LOGGER = get_logger() LOGGER = get_logger()
VERSION_NULL = "0.0.0"
VERSION_CACHE_KEY = "authentik_latest_version" VERSION_CACHE_KEY = "authentik_latest_version"
VERSION_CACHE_TIMEOUT = 8 * 60 * 60 # 8 hours VERSION_CACHE_TIMEOUT = 8 * 60 * 60 # 8 hours
# Chop off the first ^ because we want to search the entire string
URL_FINDER = URLValidator.regex.pattern[1:]
LOCAL_VERSION = parse(__version__) LOCAL_VERSION = parse(__version__)
@ -33,12 +36,27 @@ def _set_prom_info():
) )
@actor(description=_("Update latest version info.")) @CELERY_APP.task(
def update_latest_version(): throws=(DatabaseError, ProgrammingError, InternalError),
self: Task = CurrentTask.get_task() )
def clear_update_notifications():
"""Clear update notifications on startup if the notification was for the version
we're running now."""
for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE):
if "new_version" not in notification.event.context:
continue
notification_version = notification.event.context["new_version"]
if LOCAL_VERSION >= parse(notification_version):
notification.delete()
@CELERY_APP.task(bind=True, base=SystemTask)
@prefill_task
def update_latest_version(self: SystemTask):
"""Update latest version info"""
if CONFIG.get_bool("disable_update_check"): if CONFIG.get_bool("disable_update_check"):
cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT) cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT)
self.info("Version check disabled.") self.set_status(TaskStatus.WARNING, "Version check disabled.")
return return
try: try:
response = get_http_session().get( response = get_http_session().get(
@ -48,7 +66,7 @@ def update_latest_version():
data = response.json() data = response.json()
upstream_version = data.get("stable", {}).get("version") upstream_version = data.get("stable", {}).get("version")
cache.set(VERSION_CACHE_KEY, upstream_version, VERSION_CACHE_TIMEOUT) cache.set(VERSION_CACHE_KEY, upstream_version, VERSION_CACHE_TIMEOUT)
self.info("Successfully updated latest Version") self.set_status(TaskStatus.SUCCESSFUL, "Successfully updated latest Version")
_set_prom_info() _set_prom_info()
# Check if upstream version is newer than what we're running, # Check if upstream version is newer than what we're running,
# and if no event exists yet, create one. # and if no event exists yet, create one.
@ -59,19 +77,13 @@ def update_latest_version():
context__new_version=upstream_version, context__new_version=upstream_version,
).exists(): ).exists():
return return
Event.new( event_dict = {"new_version": upstream_version}
EventAction.UPDATE_AVAILABLE, if match := re.search(URL_FINDER, data.get("stable", {}).get("changelog", "")):
message=_( event_dict["message"] = f"Changelog: {match.group()}"
"New version {version} available!".format( Event.new(EventAction.UPDATE_AVAILABLE, **event_dict).save()
version=upstream_version,
)
),
new_version=upstream_version,
changelog=data.get("stable", {}).get("changelog_url"),
).save()
except (RequestException, IndexError) as exc: except (RequestException, IndexError) as exc:
cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT) cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT)
raise exc self.set_error(exc)
_set_prom_info() _set_prom_info()
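A standalone sketch of the payload shape update_latest_version consumes from version.goauthentik.io, mirroring RESPONSE_VALID in the tests below; the local version stand-in is illustrative, and the comparison uses packaging.version just like LOCAL_VERSION above.
import requests
from packaging.version import parse

LOCAL_VERSION_SKETCH = parse("2023.10.7")  # stand-in for authentik.__version__

data = requests.get("https://version.goauthentik.io/version.json", timeout=5).json()
stable = data.get("stable", {})
upstream = stable.get("version", "0.0.0")

if parse(upstream) > LOCAL_VERSION_SKETCH:
    print(f"New version {upstream} available ({stable.get('changelog', '')})")
else:
    print("Already up to date")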

View File

@ -29,6 +29,18 @@ class TestAdminAPI(TestCase):
body = loads(response.content) body = loads(response.content)
self.assertEqual(body["version_current"], __version__) self.assertEqual(body["version_current"], __version__)
def test_workers(self):
"""Test Workers API"""
response = self.client.get(reverse("authentik_api:admin_workers"))
self.assertEqual(response.status_code, 200)
body = loads(response.content)
self.assertEqual(body["count"], 0)
def test_metrics(self):
"""Test metrics API"""
response = self.client.get(reverse("authentik_api:admin_metrics"))
self.assertEqual(response.status_code, 200)
def test_apps(self): def test_apps(self):
"""Test apps API""" """Test apps API"""
response = self.client.get(reverse("authentik_api:apps-list")) response = self.client.get(reverse("authentik_api:apps-list"))

View File

@ -1,12 +1,12 @@
"""test admin tasks""" """test admin tasks"""
from django.apps import apps
from django.core.cache import cache from django.core.cache import cache
from django.test import TestCase from django.test import TestCase
from requests_mock import Mocker from requests_mock import Mocker
from authentik.admin.tasks import ( from authentik.admin.tasks import (
VERSION_CACHE_KEY, VERSION_CACHE_KEY,
clear_update_notifications,
update_latest_version, update_latest_version,
) )
from authentik.events.models import Event, EventAction from authentik.events.models import Event, EventAction
@ -17,7 +17,6 @@ RESPONSE_VALID = {
"stable": { "stable": {
"version": "99999999.9999999", "version": "99999999.9999999",
"changelog": "See https://goauthentik.io/test", "changelog": "See https://goauthentik.io/test",
"changelog_url": "https://goauthentik.io/test",
"reason": "bugfix", "reason": "bugfix",
}, },
} }
@ -30,23 +29,23 @@ class TestAdminTasks(TestCase):
"""Test Update checker with valid response""" """Test Update checker with valid response"""
with Mocker() as mocker, CONFIG.patch("disable_update_check", False): with Mocker() as mocker, CONFIG.patch("disable_update_check", False):
mocker.get("https://version.goauthentik.io/version.json", json=RESPONSE_VALID) mocker.get("https://version.goauthentik.io/version.json", json=RESPONSE_VALID)
update_latest_version.send() update_latest_version.delay().get()
self.assertEqual(cache.get(VERSION_CACHE_KEY), "99999999.9999999") self.assertEqual(cache.get(VERSION_CACHE_KEY), "99999999.9999999")
self.assertTrue( self.assertTrue(
Event.objects.filter( Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE, action=EventAction.UPDATE_AVAILABLE,
context__new_version="99999999.9999999", context__new_version="99999999.9999999",
context__message="New version 99999999.9999999 available!", context__message="Changelog: https://goauthentik.io/test",
).exists() ).exists()
) )
# test that a consecutive check doesn't create a duplicate event # test that a consecutive check doesn't create a duplicate event
update_latest_version.send() update_latest_version.delay().get()
self.assertEqual( self.assertEqual(
len( len(
Event.objects.filter( Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE, action=EventAction.UPDATE_AVAILABLE,
context__new_version="99999999.9999999", context__new_version="99999999.9999999",
context__message="New version 99999999.9999999 available!", context__message="Changelog: https://goauthentik.io/test",
) )
), ),
1, 1,
@ -56,7 +55,7 @@ class TestAdminTasks(TestCase):
"""Test Update checker with invalid response""" """Test Update checker with invalid response"""
with Mocker() as mocker: with Mocker() as mocker:
mocker.get("https://version.goauthentik.io/version.json", status_code=400) mocker.get("https://version.goauthentik.io/version.json", status_code=400)
update_latest_version.send() update_latest_version.delay().get()
self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0") self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0")
self.assertFalse( self.assertFalse(
Event.objects.filter( Event.objects.filter(
@ -67,19 +66,17 @@ class TestAdminTasks(TestCase):
def test_version_disabled(self): def test_version_disabled(self):
"""Test Update checker while its disabled""" """Test Update checker while its disabled"""
with CONFIG.patch("disable_update_check", True): with CONFIG.patch("disable_update_check", True):
update_latest_version.send() update_latest_version.delay().get()
self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0") self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0")
def test_clear_update_notifications(self): def test_clear_update_notifications(self):
"""Test clear of previous notification""" """Test clear of previous notification"""
admin_config = apps.get_app_config("authentik_admin")
Event.objects.create( Event.objects.create(
action=EventAction.UPDATE_AVAILABLE, action=EventAction.UPDATE_AVAILABLE, context={"new_version": "99999999.9999999.9999999"}
context={"new_version": "99999999.9999999.9999999"},
) )
Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={"new_version": "1.1.1"}) Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={"new_version": "1.1.1"})
Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={}) Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={})
admin_config.clear_update_notifications() clear_update_notifications()
self.assertFalse( self.assertFalse(
Event.objects.filter( Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE, context__new_version="1.1" action=EventAction.UPDATE_AVAILABLE, context__new_version="1.1"

View File

@ -3,14 +3,20 @@
from django.urls import path from django.urls import path
from authentik.admin.api.meta import AppsViewSet, ModelViewSet from authentik.admin.api.meta import AppsViewSet, ModelViewSet
from authentik.admin.api.metrics import AdministrationMetricsViewSet
from authentik.admin.api.system import SystemView from authentik.admin.api.system import SystemView
from authentik.admin.api.version import VersionView from authentik.admin.api.version import VersionView
from authentik.admin.api.version_history import VersionHistoryViewSet from authentik.admin.api.workers import WorkerView
api_urlpatterns = [ api_urlpatterns = [
("admin/apps", AppsViewSet, "apps"), ("admin/apps", AppsViewSet, "apps"),
("admin/models", ModelViewSet, "models"), ("admin/models", ModelViewSet, "models"),
path(
"admin/metrics/",
AdministrationMetricsViewSet.as_view(),
name="admin_metrics",
),
path("admin/version/", VersionView.as_view(), name="admin_version"), path("admin/version/", VersionView.as_view(), name="admin_version"),
("admin/version/history", VersionHistoryViewSet, "version_history"), path("admin/workers/", WorkerView.as_view(), name="admin_workers"),
path("admin/system/", SystemView.as_view(), name="admin_system"), path("admin/system/", SystemView.as_view(), name="admin_system"),
] ]

View File

@ -1,13 +1,35 @@
"""authentik API AppConfig""" """authentik API AppConfig"""
from authentik.blueprints.apps import ManagedAppConfig from django.apps import AppConfig
class AuthentikAPIConfig(ManagedAppConfig): class AuthentikAPIConfig(AppConfig):
"""authentik API Config""" """authentik API Config"""
name = "authentik.api" name = "authentik.api"
label = "authentik_api" label = "authentik_api"
mountpoint = "api/" mountpoint = "api/"
verbose_name = "authentik API" verbose_name = "authentik API"
default = True
def ready(self) -> None:
from drf_spectacular.extensions import OpenApiAuthenticationExtension
from authentik.api.authentication import TokenAuthentication
# Class is defined here as it needs to be created early enough that drf-spectacular will
# find it, but also won't cause any import issues
# pylint: disable=unused-variable
class TokenSchema(OpenApiAuthenticationExtension):
"""Auth schema"""
target_class = TokenAuthentication
name = "authentik"
def get_security_definition(self, auto_schema):
"""Auth schema"""
return {
"type": "apiKey",
"in": "header",
"name": "Authorization",
"scheme": "bearer",
}

View File

@ -1,33 +1,23 @@
"""API Authentication""" """API Authentication"""
from hmac import compare_digest from hmac import compare_digest
from pathlib import Path from typing import Any, Optional
from tempfile import gettempdir
from typing import Any
from django.conf import settings from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from drf_spectacular.extensions import OpenApiAuthenticationExtension
from rest_framework.authentication import BaseAuthentication, get_authorization_header from rest_framework.authentication import BaseAuthentication, get_authorization_header
from rest_framework.exceptions import AuthenticationFailed from rest_framework.exceptions import AuthenticationFailed
from rest_framework.request import Request from rest_framework.request import Request
from structlog.stdlib import get_logger from structlog.stdlib import get_logger
from authentik.core.middleware import CTX_AUTH_VIA from authentik.core.middleware import CTX_AUTH_VIA
from authentik.core.models import Token, TokenIntents, User, UserTypes from authentik.core.models import Token, TokenIntents, User
from authentik.outposts.models import Outpost from authentik.outposts.models import Outpost
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
LOGGER = get_logger() LOGGER = get_logger()
_tmp = Path(gettempdir())
try:
with open(_tmp / "authentik-core-ipc.key") as _f:
ipc_key = _f.read()
except OSError:
ipc_key = None
def validate_auth(header: bytes) -> str | None: def validate_auth(header: bytes) -> Optional[str]:
"""Validate that the header is in a correct format, """Validate that the header is in a correct format,
returns type and credentials""" returns type and credentials"""
auth_credentials = header.decode().strip() auth_credentials = header.decode().strip()
@ -42,7 +32,7 @@ def validate_auth(header: bytes) -> str | None:
return auth_credentials return auth_credentials
def bearer_auth(raw_header: bytes) -> User | None: def bearer_auth(raw_header: bytes) -> Optional[User]:
"""raw_header in the Format of `Bearer ....`""" """raw_header in the Format of `Bearer ....`"""
user = auth_user_lookup(raw_header) user = auth_user_lookup(raw_header)
if not user: if not user:
@ -52,7 +42,7 @@ def bearer_auth(raw_header: bytes) -> User | None:
return user return user
def auth_user_lookup(raw_header: bytes) -> User | None: def auth_user_lookup(raw_header: bytes) -> Optional[User]:
"""raw_header in the Format of `Bearer ....`""" """raw_header in the Format of `Bearer ....`"""
from authentik.providers.oauth2.models import AccessToken from authentik.providers.oauth2.models import AccessToken
@ -82,15 +72,10 @@ def auth_user_lookup(raw_header: bytes) -> User | None:
if user: if user:
CTX_AUTH_VIA.set("secret_key") CTX_AUTH_VIA.set("secret_key")
return user return user
# then try to auth via secret key (for embedded outpost/etc)
user = token_ipc(auth_credentials)
if user:
CTX_AUTH_VIA.set("ipc")
return user
raise AuthenticationFailed("Token invalid/expired") raise AuthenticationFailed("Token invalid/expired")
def token_secret_key(value: str) -> User | None: def token_secret_key(value: str) -> Optional[User]:
"""Check if the token is the secret key """Check if the token is the secret key
and return the service account for the managed outpost""" and return the service account for the managed outpost"""
from authentik.outposts.apps import MANAGED_OUTPOST from authentik.outposts.apps import MANAGED_OUTPOST
@ -104,43 +89,6 @@ def token_secret_key(value: str) -> User | None:
return outpost.user return outpost.user
class IPCUser(AnonymousUser):
"""'Virtual' user for IPC communication between authentik core and the authentik router"""
username = "authentik:system"
is_active = True
is_superuser = True
@property
def type(self):
return UserTypes.INTERNAL_SERVICE_ACCOUNT
def has_perm(self, perm, obj=None):
return True
def has_perms(self, perm_list, obj=None):
return True
def has_module_perms(self, module):
return True
@property
def is_anonymous(self):
return False
@property
def is_authenticated(self):
return True
def token_ipc(value: str) -> User | None:
"""Check if the token is the secret key
and return the service account for the managed outpost"""
if not ipc_key or not compare_digest(value, ipc_key):
return None
return IPCUser()
class TokenAuthentication(BaseAuthentication): class TokenAuthentication(BaseAuthentication):
"""Token-based authentication using HTTP Bearer authentication""" """Token-based authentication using HTTP Bearer authentication"""
@ -154,14 +102,3 @@ class TokenAuthentication(BaseAuthentication):
return None return None
return (user, None) # pragma: no cover return (user, None) # pragma: no cover
class TokenSchema(OpenApiAuthenticationExtension):
"""Auth schema"""
target_class = TokenAuthentication
name = "authentik"
def get_security_definition(self, auto_schema):
"""Auth schema"""
return {"type": "http", "scheme": "bearer"}

View File

@ -0,0 +1,67 @@
"""API Authorization"""
from django.conf import settings
from django.db.models import Model
from django.db.models.query import QuerySet
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.authentication import get_authorization_header
from rest_framework.filters import BaseFilterBackend
from rest_framework.permissions import BasePermission
from rest_framework.request import Request
from authentik.api.authentication import validate_auth
from authentik.rbac.filters import ObjectFilter
class OwnerFilter(BaseFilterBackend):
"""Filter objects by their owner"""
owner_key = "user"
def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
if request.user.is_superuser:
return queryset
return queryset.filter(**{self.owner_key: request.user})
class SecretKeyFilter(DjangoFilterBackend):
"""Allow access to all objects when authenticated with secret key as token.
Replaces both DjangoFilterBackend and ObjectFilter"""
def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
auth_header = get_authorization_header(request)
token = validate_auth(auth_header)
if token and token == settings.SECRET_KEY:
return queryset
queryset = ObjectFilter().filter_queryset(request, queryset, view)
return super().filter_queryset(request, queryset, view)
class OwnerPermissions(BasePermission):
"""Authorize requests by an object's owner matching the requesting user"""
owner_key = "user"
def has_permission(self, request: Request, view) -> bool:
"""If the user is authenticated, we allow all requests here. For listing, the
object-level permissions are done by the filter backend"""
return request.user.is_authenticated
def has_object_permission(self, request: Request, view, obj: Model) -> bool:
"""Check if the object's owner matches the currently logged in user"""
if not hasattr(obj, self.owner_key):
return False
owner = getattr(obj, self.owner_key)
if owner != request.user:
return False
return True
class OwnerSuperuserPermissions(OwnerPermissions):
"""Similar to OwnerPermissions, except always allow access for superusers"""
def has_object_permission(self, request: Request, view, obj: Model) -> bool:
if request.user.is_superuser:
return True
return super().has_object_permission(request, view, obj)
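A hedged usage sketch of the classes above wired into a DRF ViewSet: non-superusers only see and modify objects whose user field points at them. The import paths for the Token model and serializer are assumptions for illustration.
from rest_framework.viewsets import ModelViewSet

from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions  # path assumed
from authentik.core.api.tokens import TokenSerializer  # hypothetical import for illustration
from authentik.core.models import Token


class TokenViewSet(ModelViewSet):
    """Tokens are scoped to their owner; superusers see everything."""

    queryset = Token.objects.all()
    serializer_class = TokenSerializer
    permission_classes = [OwnerSuperuserPermissions]
    filter_backends = [OwnerFilter]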

View File

@ -1,7 +1,7 @@
"""API Decorators""" """API Decorators"""
from collections.abc import Callable
from functools import wraps from functools import wraps
from typing import Callable, Optional
from rest_framework.request import Request from rest_framework.request import Request
from rest_framework.response import Response from rest_framework.response import Response
@ -11,26 +11,21 @@ from structlog.stdlib import get_logger
LOGGER = get_logger() LOGGER = get_logger()
def permission_required(obj_perm: str | None = None, global_perms: list[str] | None = None): def permission_required(obj_perm: Optional[str] = None, global_perms: Optional[list[str]] = None):
"""Check permissions for a single custom action""" """Check permissions for a single custom action"""
def _check_obj_perm(self: ModelViewSet, request: Request): def wrapper_outter(func: Callable):
# Check obj_perm both globally and on the specific object
# Having the global permission takes priority
if request.user.has_perm(obj_perm):
return
obj = self.get_object()
if not request.user.has_perm(obj_perm, obj):
LOGGER.debug("denying access for object", user=request.user, perm=obj_perm, obj=obj)
self.permission_denied(request)
def wrapper_outer(func: Callable):
"""Check permissions for a single custom action""" """Check permissions for a single custom action"""
@wraps(func) @wraps(func)
def wrapper(self: ModelViewSet, request: Request, *args, **kwargs) -> Response: def wrapper(self: ModelViewSet, request: Request, *args, **kwargs) -> Response:
if obj_perm: if obj_perm:
_check_obj_perm(self, request) obj = self.get_object()
if not request.user.has_perm(obj_perm, obj):
LOGGER.debug(
"denying access for object", user=request.user, perm=obj_perm, obj=obj
)
return self.permission_denied(request)
if global_perms: if global_perms:
for other_perm in global_perms: for other_perm in global_perms:
if not request.user.has_perm(other_perm): if not request.user.has_perm(other_perm):
@ -40,4 +35,4 @@ def permission_required(obj_perm: str | None = None, global_perms: list[str] | N
return wrapper return wrapper
return wrapper_outer return wrapper_outter
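A hedged usage sketch of permission_required on a custom ViewSet action, following the pattern used by the blueprints API further down: the object permission is checked against the object returned by get_object(), and every listed global permission must also be held. All names below are illustrative only.
from rest_framework.decorators import action
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet

from authentik.api.decorators import permission_required  # path as on one side of this compare


class ExampleViewSet(ModelViewSet):
    # queryset / serializer_class omitted; permissions below are illustrative

    @permission_required(
        "authentik_core.view_application",
        ["authentik_events.view_event"],
    )
    @action(detail=True, methods=["GET"])
    def metrics(self, request: Request, pk=None) -> Response:
        """Only reachable with view access to this object and to events."""
        return Response({})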

View File

@ -12,7 +12,6 @@ from drf_spectacular.settings import spectacular_settings
from drf_spectacular.types import OpenApiTypes from drf_spectacular.types import OpenApiTypes
from rest_framework.settings import api_settings from rest_framework.settings import api_settings
from authentik.api.apps import AuthentikAPIConfig
from authentik.api.pagination import PAGINATION_COMPONENT_NAME, PAGINATION_SCHEMA from authentik.api.pagination import PAGINATION_COMPONENT_NAME, PAGINATION_SCHEMA
@ -54,7 +53,7 @@ def create_component(generator: SchemaGenerator, name, schema, type_=ResolvedCom
return component return component
def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs): def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs): # noqa: W0613
"""Workaround to set a default response for endpoints. """Workaround to set a default response for endpoints.
Workaround suggested at Workaround suggested at
<https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357> <https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357>
@ -102,12 +101,3 @@ def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):
comp = result["components"]["schemas"][component] comp = result["components"]["schemas"][component]
comp["additionalProperties"] = {} comp["additionalProperties"] = {}
return result return result
def preprocess_schema_exclude_non_api(endpoints, **kwargs):
"""Filter out all API Views which are not mounted under /api"""
return [
(path, path_regex, method, callback)
for path, path_regex, method, callback in endpoints
if path.startswith("/" + AuthentikAPIConfig.mountpoint)
]

View File

@ -1,13 +1,13 @@
{% extends "base/skeleton.html" %} {% extends "base/skeleton.html" %}
{% load authentik_core %} {% load static %}
{% block title %} {% block title %}
API Browser - {{ brand.branding_title }} API Browser - {{ brand.branding_title }}
{% endblock %} {% endblock %}
{% block head %} {% block head %}
<script src="{% versioned_script 'dist/standalone/api-browser/index-%v.js' %}" type="module"></script> <script src="{% static 'dist/standalone/api-browser/index.js' %}?version={{ version }}" type="module"></script>
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)"> <meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)">
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)"> <meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)">
{% endblock %} {% endblock %}

View File

@ -25,17 +25,17 @@ class TestAPIAuth(TestCase):
def test_invalid_type(self): def test_invalid_type(self):
"""Test invalid type""" """Test invalid type"""
with self.assertRaises(AuthenticationFailed): with self.assertRaises(AuthenticationFailed):
bearer_auth(b"foo bar") bearer_auth("foo bar".encode())
def test_invalid_empty(self): def test_invalid_empty(self):
"""Test invalid type""" """Test invalid type"""
self.assertIsNone(bearer_auth(b"Bearer ")) self.assertIsNone(bearer_auth("Bearer ".encode()))
self.assertIsNone(bearer_auth(b"")) self.assertIsNone(bearer_auth("".encode()))
def test_invalid_no_token(self): def test_invalid_no_token(self):
"""Test invalid with no token""" """Test invalid with no token"""
with self.assertRaises(AuthenticationFailed): with self.assertRaises(AuthenticationFailed):
auth = b64encode(b":abc").decode() auth = b64encode(":abc".encode()).decode()
self.assertIsNone(bearer_auth(f"Basic :{auth}".encode())) self.assertIsNone(bearer_auth(f"Basic :{auth}".encode()))
def test_bearer_valid(self): def test_bearer_valid(self):

View File

@ -0,0 +1,35 @@
"""test decorators api"""
from django.urls import reverse
from guardian.shortcuts import assign_perm
from rest_framework.test import APITestCase
from authentik.core.models import Application, User
from authentik.lib.generators import generate_id
class TestAPIDecorators(APITestCase):
"""test decorators api"""
def setUp(self) -> None:
super().setUp()
self.user = User.objects.create(username="test-user")
def test_obj_perm_denied(self):
"""Test object perm denied"""
self.client.force_login(self.user)
app = Application.objects.create(name=generate_id(), slug=generate_id())
response = self.client.get(
reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
)
self.assertEqual(response.status_code, 403)
def test_other_perm_denied(self):
"""Test other perm denied"""
self.client.force_login(self.user)
app = Application.objects.create(name=generate_id(), slug=generate_id())
assign_perm("authentik_core.view_application", self.user, app)
response = self.client.get(
reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
)
self.assertEqual(response.status_code, 403)

View File

@ -1,6 +1,6 @@
"""authentik API Modelviewset tests""" """authentik API Modelviewset tests"""
from collections.abc import Callable from typing import Callable
from django.test import TestCase from django.test import TestCase
from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet
@ -26,6 +26,6 @@ def viewset_tester_factory(test_viewset: type[ModelViewSet]) -> Callable:
for _, viewset, _ in router.registry: for _, viewset, _ in router.registry:
if not issubclass(viewset, ModelViewSet | ReadOnlyModelViewSet): if not issubclass(viewset, (ModelViewSet, ReadOnlyModelViewSet)):
continue continue
setattr(TestModelViewSets, f"test_viewset_{viewset.__name__}", viewset_tester_factory(viewset)) setattr(TestModelViewSets, f"test_viewset_{viewset.__name__}", viewset_tester_factory(viewset))

View File

@ -68,11 +68,7 @@ class ConfigView(APIView):
"""Get all capabilities this server instance supports""" """Get all capabilities this server instance supports"""
caps = [] caps = []
deb_test = settings.DEBUG or settings.TEST deb_test = settings.DEBUG or settings.TEST
if ( if Path(settings.MEDIA_ROOT).is_mount() or deb_test:
CONFIG.get("storage.media.backend", "file") == "s3"
or Path(settings.STORAGES["default"]["OPTIONS"]["location"]).is_mount()
or deb_test
):
caps.append(Capabilities.CAN_SAVE_MEDIA) caps.append(Capabilities.CAN_SAVE_MEDIA)
for processor in get_context_processors(): for processor in get_context_processors():
if cap := processor.capability(): if cap := processor.capability():

View File

@ -33,7 +33,7 @@ for _authentik_app in get_apps():
app_name=_authentik_app.name, app_name=_authentik_app.name,
) )
continue continue
urls: list = api_urls.api_urlpatterns urls: list = getattr(api_urls, "api_urlpatterns")
for url in urls: for url in urls:
if isinstance(url, URLPattern): if isinstance(url, URLPattern):
_other_urls.append(url) _other_urls.append(url)

View File

@ -7,16 +7,16 @@ from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField, DateTimeField from rest_framework.fields import CharField, DateTimeField
from rest_framework.request import Request from rest_framework.request import Request
from rest_framework.response import Response from rest_framework.response import Response
from rest_framework.serializers import ListSerializer from rest_framework.serializers import ListSerializer, ModelSerializer
from rest_framework.viewsets import ModelViewSet from rest_framework.viewsets import ModelViewSet
from authentik.api.decorators import permission_required
from authentik.blueprints.models import BlueprintInstance from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.v1.importer import Importer from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.oci import OCI_PREFIX from authentik.blueprints.v1.oci import OCI_PREFIX
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
from authentik.core.api.used_by import UsedByMixin from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer from authentik.core.api.utils import JSONDictField, PassiveSerializer
from authentik.rbac.decorators import permission_required
class ManagedSerializer: class ManagedSerializer:
@ -39,7 +39,7 @@ class BlueprintInstanceSerializer(ModelSerializer):
"""Ensure the path (if set) specified is retrievable""" """Ensure the path (if set) specified is retrievable"""
if path == "" or path.startswith(OCI_PREFIX): if path == "" or path.startswith(OCI_PREFIX):
return path return path
files: list[dict] = blueprints_find_dict.send().get_result(block=True) files: list[dict] = blueprints_find_dict.delay().get()
if path not in [file["path"] for file in files]: if path not in [file["path"] for file in files]:
raise ValidationError(_("Blueprint file does not exist")) raise ValidationError(_("Blueprint file does not exist"))
return path return path
@ -51,12 +51,8 @@ class BlueprintInstanceSerializer(ModelSerializer):
context = self.instance.context if self.instance else {} context = self.instance.context if self.instance else {}
valid, logs = Importer.from_string(content, context).validate() valid, logs = Importer.from_string(content, context).validate()
if not valid: if not valid:
raise ValidationError( text_logs = "\n".join([x["event"] for x in logs])
[ raise ValidationError(_("Failed to validate blueprint: %(logs)s" % {"logs": text_logs}))
_("Failed to validate blueprint"),
*[f"- {x.event}" for x in logs],
]
)
return content return content
def validate(self, attrs: dict) -> dict: def validate(self, attrs: dict) -> dict:
@ -115,7 +111,7 @@ class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
@action(detail=False, pagination_class=None, filter_backends=[]) @action(detail=False, pagination_class=None, filter_backends=[])
def available(self, request: Request) -> Response: def available(self, request: Request) -> Response:
"""Get blueprints""" """Get blueprints"""
files: list[dict] = blueprints_find_dict.send().get_result(block=True) files: list[dict] = blueprints_find_dict.delay().get()
return Response(files) return Response(files)
@permission_required("authentik_blueprints.view_blueprintinstance") @permission_required("authentik_blueprints.view_blueprintinstance")
@ -129,5 +125,5 @@ class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
def apply(self, request: Request, *args, **kwargs) -> Response: def apply(self, request: Request, *args, **kwargs) -> Response:
"""Apply a blueprint""" """Apply a blueprint"""
blueprint = self.get_object() blueprint = self.get_object()
apply_blueprint.send_with_options(args=(blueprint.pk,), rel_obj=blueprint) apply_blueprint.delay(str(blueprint.pk)).get()
return self.retrieve(request, *args, **kwargs) return self.retrieve(request, *args, **kwargs)
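A hedged client-side sketch of the apply action above. The /api/v3/managed/blueprints/ path, the environment variables, and the instance UUID are assumptions; per the view, the endpoint responds with the refreshed blueprint instance from retrieve().
import os

import requests

base = os.environ.get("AUTHENTIK_URL", "https://authentik.local")  # assumption
token = os.environ["AUTHENTIK_TOKEN"]  # assumption
instance_uuid = "00000000-0000-0000-0000-000000000000"  # placeholder UUID

resp = requests.post(
    f"{base}/api/v3/managed/blueprints/{instance_uuid}/apply/",
    headers={"Authorization": f"Bearer {token}"},
    timeout=60,
)
resp.raise_for_status()
print(resp.json())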

View File

@ -1,26 +1,20 @@
"""authentik Blueprints app""" """authentik Blueprints app"""
from collections.abc import Callable
from importlib import import_module from importlib import import_module
from inspect import ismethod from inspect import ismethod
from django.apps import AppConfig from django.apps import AppConfig
from django.db import DatabaseError, InternalError, ProgrammingError from django.db import DatabaseError, InternalError, ProgrammingError
from dramatiq.broker import get_broker
from structlog.stdlib import BoundLogger, get_logger from structlog.stdlib import BoundLogger, get_logger
from authentik.lib.utils.time import fqdn_rand
from authentik.root.signals import startup
from authentik.tasks.schedules.lib import ScheduleSpec
class ManagedAppConfig(AppConfig): class ManagedAppConfig(AppConfig):
"""Basic reconciliation logic for apps""" """Basic reconciliation logic for apps"""
logger: BoundLogger logger: BoundLogger
RECONCILE_GLOBAL_CATEGORY: str = "global" RECONCILE_GLOBAL_PREFIX: str = "reconcile_global_"
RECONCILE_TENANT_CATEGORY: str = "tenant" RECONCILE_TENANT_PREFIX: str = "reconcile_tenant_"
def __init__(self, app_name: str, *args, **kwargs) -> None: def __init__(self, app_name: str, *args, **kwargs) -> None:
super().__init__(app_name, *args, **kwargs) super().__init__(app_name, *args, **kwargs)
@ -28,16 +22,13 @@ class ManagedAppConfig(AppConfig):
def ready(self) -> None: def ready(self) -> None:
self.import_related() self.import_related()
startup.connect(self._on_startup_callback, dispatch_uid=self.label) self.reconcile_global()
self.reconcile_tenant()
return super().ready() return super().ready()
def _on_startup_callback(self, sender, **_):
self._reconcile_global()
self._reconcile_tenant()
def import_related(self): def import_related(self):
"""Automatically import related modules which rely on just being imported """Automatically import related modules which rely on just being imported
to register themselves (mainly django signals and tasks)""" to register themselves (mainly django signals and celery tasks)"""
def import_relative(rel_module: str): def import_relative(rel_module: str):
try: try:
@ -60,8 +51,7 @@ class ManagedAppConfig(AppConfig):
meth = getattr(self, meth_name) meth = getattr(self, meth_name)
if not ismethod(meth): if not ismethod(meth):
continue continue
category = getattr(meth, "_authentik_managed_reconcile", None) if not meth_name.startswith(prefix):
if category != prefix:
continue continue
name = meth_name.replace(prefix, "") name = meth_name.replace(prefix, "")
try: try:
@ -71,29 +61,7 @@ class ManagedAppConfig(AppConfig):
except (DatabaseError, ProgrammingError, InternalError) as exc: except (DatabaseError, ProgrammingError, InternalError) as exc:
self.logger.warning("Failed to run reconcile", name=name, exc=exc) self.logger.warning("Failed to run reconcile", name=name, exc=exc)
@staticmethod def reconcile_tenant(self) -> None:
def reconcile_tenant(func: Callable):
"""Mark a function to be called on startup (for each tenant)"""
func._authentik_managed_reconcile = ManagedAppConfig.RECONCILE_TENANT_CATEGORY
return func
@staticmethod
def reconcile_global(func: Callable):
"""Mark a function to be called on startup (globally)"""
func._authentik_managed_reconcile = ManagedAppConfig.RECONCILE_GLOBAL_CATEGORY
return func
@property
def tenant_schedule_specs(self) -> list[ScheduleSpec]:
"""Get a list of schedule specs that must exist in each tenant"""
return []
@property
def global_schedule_specs(self) -> list[ScheduleSpec]:
"""Get a list of schedule specs that must exist in the default tenant"""
return []
def _reconcile_tenant(self) -> None:
"""reconcile ourselves for tenanted methods""" """reconcile ourselves for tenanted methods"""
from authentik.tenants.models import Tenant from authentik.tenants.models import Tenant
@ -104,21 +72,17 @@ class ManagedAppConfig(AppConfig):
return return
for tenant in tenants: for tenant in tenants:
with tenant: with tenant:
self._reconcile(self.RECONCILE_TENANT_CATEGORY) self._reconcile(self.RECONCILE_TENANT_PREFIX)
def _reconcile_global(self) -> None: def reconcile_global(self) -> None:
""" """
reconcile ourselves for global methods. reconcile ourselves for global methods.
Used for signals, tasks, etc. Database queries should not be made in here. Used for signals, tasks, etc. Database queries should not be made in here.
""" """
from django_tenants.utils import get_public_schema_name, schema_context from django_tenants.utils import get_public_schema_name, schema_context
try: with schema_context(get_public_schema_name()):
with schema_context(get_public_schema_name()): self._reconcile(self.RECONCILE_GLOBAL_PREFIX)
self._reconcile(self.RECONCILE_GLOBAL_CATEGORY)
except (DatabaseError, ProgrammingError, InternalError) as exc:
self.logger.debug("Failed to access database to run reconcile", exc=exc)
return
class AuthentikBlueprintsConfig(ManagedAppConfig): class AuthentikBlueprintsConfig(ManagedAppConfig):
@ -129,29 +93,17 @@ class AuthentikBlueprintsConfig(ManagedAppConfig):
verbose_name = "authentik Blueprints" verbose_name = "authentik Blueprints"
default = True default = True
def reconcile_global_load_blueprints_v1_tasks(self):
"""Load v1 tasks"""
self.import_module("authentik.blueprints.v1.tasks")
def reconcile_tenant_blueprints_discovery(self):
"""Run blueprint discovery"""
from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints
blueprints_discovery.delay()
clear_failed_blueprints.delay()
def import_models(self): def import_models(self):
super().import_models() super().import_models()
self.import_module("authentik.blueprints.v1.meta.apply_blueprint") self.import_module("authentik.blueprints.v1.meta.apply_blueprint")
@ManagedAppConfig.reconcile_global
def tasks_middlewares(self):
from authentik.blueprints.v1.tasks import BlueprintWatcherMiddleware
get_broker().add_middleware(BlueprintWatcherMiddleware())
@property
def tenant_schedule_specs(self) -> list[ScheduleSpec]:
from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints
return [
ScheduleSpec(
actor=blueprints_discovery,
crontab=f"{fqdn_rand('blueprints_v1_discover')} * * * *",
send_on_startup=True,
),
ScheduleSpec(
actor=clear_failed_blueprints,
crontab=f"{fqdn_rand('blueprints_v1_cleanup')} * * * *",
send_on_startup=True,
),
]
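A hedged sketch of how an app config opts into the reconciliation hooks above, using the decorator-based marking from one side of this diff; the app name and method bodies are illustrative only.
from authentik.blueprints.apps import ManagedAppConfig


class AuthentikExampleConfig(ManagedAppConfig):
    """Illustrative app config using startup reconciliation"""

    name = "authentik.example"
    label = "authentik_example"
    verbose_name = "authentik Example"
    default = True

    @ManagedAppConfig.reconcile_global
    def load_signals(self):
        # Runs once on startup in the public schema; avoid per-tenant DB work here.
        self.import_module("authentik.example.signals")

    @ManagedAppConfig.reconcile_tenant
    def ensure_defaults(self):
        # Runs on startup once per tenant.
        self.logger.debug("tenant reconcile ran")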

View File

@ -23,11 +23,9 @@ class Command(BaseCommand):
for blueprint_path in options.get("blueprints", []): for blueprint_path in options.get("blueprints", []):
content = BlueprintInstance(path=blueprint_path).retrieve() content = BlueprintInstance(path=blueprint_path).retrieve()
importer = Importer.from_string(content) importer = Importer.from_string(content)
valid, logs = importer.validate() valid, _ = importer.validate()
if not valid: if not valid:
self.stderr.write("Blueprint invalid") self.stderr.write("blueprint invalid")
for log in logs:
self.stderr.write(f"\t{log.logger}: {log.event}: {log.attributes}")
sys_exit(1) sys_exit(1)
importer.apply() importer.apply()

View File

@ -1,68 +0,0 @@
"""Test and debug Blueprints"""
import atexit
import readline
from pathlib import Path
from pprint import pformat
from sys import exit as sysexit
from textwrap import indent
from django.core.management.base import BaseCommand, no_translations
from structlog.stdlib import get_logger
from yaml import load
from authentik.blueprints.v1.common import BlueprintLoader, EntryInvalidError
from authentik.core.management.commands.shell import get_banner_text
from authentik.lib.utils.errors import exception_to_string
LOGGER = get_logger()
class Command(BaseCommand):
"""Test and debug Blueprints"""
lines = []
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
histfolder = Path("~").expanduser() / Path(".local/share/authentik")
histfolder.mkdir(parents=True, exist_ok=True)
histfile = histfolder / Path("blueprint_shell_history")
readline.parse_and_bind("tab: complete")
readline.parse_and_bind("set editing-mode vi")
try:
readline.read_history_file(str(histfile))
except FileNotFoundError:
pass
atexit.register(readline.write_history_file, str(histfile))
@no_translations
def handle(self, *args, **options):
"""Interactively debug blueprint files"""
self.stdout.write(get_banner_text("Blueprint shell"))
self.stdout.write("Type '.eval' to evaluate previously entered statement(s).")
def do_eval():
yaml_input = "\n".join([line for line in self.lines if line])
data = load(yaml_input, BlueprintLoader)
self.stdout.write(pformat(data))
self.lines = []
while True:
try:
line = input("> ")
if line == ".eval":
do_eval()
else:
self.lines.append(line)
except EntryInvalidError as exc:
self.stdout.write("Failed to evaluate expression:")
self.stdout.write(indent(exception_to_string(exc), prefix=" "))
except EOFError:
break
except KeyboardInterrupt:
self.stdout.write()
sysexit(0)
self.stdout.write()

View File

@ -4,14 +4,12 @@ from json import dumps
from typing import Any
from django.core.management.base import BaseCommand, no_translations
-from django.db.models import Model, fields
-from drf_jsonschema_serializer.convert import converter, field_to_converter
+from django.db.models import Model
+from drf_jsonschema_serializer.convert import field_to_converter
from rest_framework.fields import Field, JSONField, UUIDField
-from rest_framework.relations import PrimaryKeyRelatedField
from rest_framework.serializers import Serializer
from structlog.stdlib import get_logger
-from authentik import __version__
from authentik.blueprints.v1.common import BlueprintEntryDesiredState
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT, is_model_allowed
from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry
@ -20,23 +18,6 @@ from authentik.lib.models import SerializerModel
LOGGER = get_logger()
@converter
class PrimaryKeyRelatedFieldConverter:
"""Custom primary key field converter which is aware of non-integer based PKs
This is not an exhaustive fix for other non-int PKs, however in authentik we either
use UUIDs or ints"""
field_class = PrimaryKeyRelatedField
def convert(self, field: PrimaryKeyRelatedField):
model: Model = field.queryset.model
pk_field = model._meta.pk
if isinstance(pk_field, fields.UUIDField):
return {"type": "string", "format": "uuid"}
return {"type": "integer"}
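
The converter above exists because the default drf-jsonschema conversion assumes integer primary keys, while several authentik models are keyed by UUIDs. A small stand-in sketch of the branch it takes; the boolean flag below replaces the isinstance check against a real Django model's pk field:

    # Stand-in for the converter's decision: UUID primary keys become
    # string/uuid in the schema, everything else falls back to integer.
    def pk_schema(pk_is_uuid: bool) -> dict:
        if pk_is_uuid:
            return {"type": "string", "format": "uuid"}
        return {"type": "integer"}

    print(pk_schema(True))   # e.g. a model keyed by a UUIDField
    print(pk_schema(False))  # e.g. a model keyed by an integer id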
class Command(BaseCommand):
    """Generate JSON Schema for blueprints"""
@ -48,7 +29,7 @@ class Command(BaseCommand):
            "$schema": "http://json-schema.org/draft-07/schema",
            "$id": "https://goauthentik.io/blueprints/schema.json",
            "type": "object",
-            "title": f"authentik {__version__} Blueprint schema",
+            "title": "authentik Blueprint schema",
            "required": ["version", "entries"],
            "properties": {
                "version": {
@ -72,33 +53,20 @@ class Command(BaseCommand):
"additionalProperties": True, "additionalProperties": True,
}, },
"entries": { "entries": {
"anyOf": [ "type": "array",
{ "items": {
"type": "array", "oneOf": [],
"items": {"$ref": "#/$defs/blueprint_entry"}, },
},
{
"type": "object",
"additionalProperties": {
"type": "array",
"items": {"$ref": "#/$defs/blueprint_entry"},
},
},
],
}, },
}, },
"$defs": {"blueprint_entry": {"oneOf": []}}, "$defs": {},
} }
-    def add_arguments(self, parser):
-        parser.add_argument("--file", type=str)
    @no_translations
-    def handle(self, *args, file: str, **options):
+    def handle(self, *args, **options):
        """Generate JSON Schema for blueprints"""
        self.build()
-        with open(file, "w") as _schema:
-            _schema.write(dumps(self.schema, indent=4, default=Command.json_default))
+        self.stdout.write(dumps(self.schema, indent=4, default=Command.json_default))
    @staticmethod
    def json_default(value: Any) -> Any:
@ -125,21 +93,18 @@ class Command(BaseCommand):
                }
            )
            model_path = f"{model._meta.app_label}.{model._meta.model_name}"
-            self.schema["$defs"]["blueprint_entry"]["oneOf"].append(
-                self.template_entry(model_path, model, serializer)
+            self.schema["properties"]["entries"]["items"]["oneOf"].append(
+                self.template_entry(model_path, serializer)
            )

-    def template_entry(self, model_path: str, model: type[Model], serializer: Serializer) -> dict:
+    def template_entry(self, model_path: str, serializer: Serializer) -> dict:
        """Template entry for a single model"""
        model_schema = self.to_jsonschema(serializer)
        model_schema["required"] = []
        def_name = f"model_{model_path}"
        def_path = f"#/$defs/{def_name}"
        self.schema["$defs"][def_name] = model_schema
-        def_name_perm = f"model_{model_path}_permissions"
-        def_path_perm = f"#/$defs/{def_name_perm}"
-        self.schema["$defs"][def_name_perm] = self.model_permissions(model)
-        template = {
+        return {
            "type": "object",
            "required": ["model", "identifiers"],
            "properties": {
@ -147,20 +112,14 @@ class Command(BaseCommand):
"id": {"type": "string"}, "id": {"type": "string"},
"state": { "state": {
"type": "string", "type": "string",
"enum": sorted([s.value for s in BlueprintEntryDesiredState]), "enum": [s.value for s in BlueprintEntryDesiredState],
"default": "present", "default": "present",
}, },
"conditions": {"type": "array", "items": {"type": "boolean"}}, "conditions": {"type": "array", "items": {"type": "boolean"}},
"permissions": {"$ref": def_path_perm},
"attrs": {"$ref": def_path}, "attrs": {"$ref": def_path},
"identifiers": {"$ref": def_path}, "identifiers": {"$ref": def_path},
}, },
} }
# Meta models don't require identifiers, as there's no matching database model to find
if issubclass(model, BaseMetaModel):
del template["properties"]["identifiers"]
template["required"].remove("identifiers")
return template
def field_to_jsonschema(self, field: Field) -> dict: def field_to_jsonschema(self, field: Field) -> dict:
"""Convert a single field to json schema""" """Convert a single field to json schema"""
@ -207,20 +166,3 @@ class Command(BaseCommand):
if required: if required:
result["required"] = required result["required"] = required
return result return result
def model_permissions(self, model: type[Model]) -> dict:
perms = [x[0] for x in model._meta.permissions]
for action in model._meta.default_permissions:
perms.append(f"{action}_{model._meta.model_name}")
return {
"type": "array",
"items": {
"type": "object",
"required": ["permission"],
"properties": {
"permission": {"type": "string", "enum": sorted(perms)},
"user": {"type": "integer"},
"role": {"type": "string"},
},
},
}
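
Taken together, template_entry registers one $defs entry per model (plus, on the left-hand side of this diff, a parallel _permissions definition) and wraps it in an entry object that references it. A hedged sketch of the resulting shape for one hypothetical model path; the path and the state enum values are placeholders, since the real generator fills them from the registered models and BlueprintEntryDesiredState:

    # Illustrative shape only: "authentik_flows.flow" and the enum values
    # are placeholders for what the generator emits per registered model.
    entry = {
        "type": "object",
        "required": ["model", "identifiers"],
        "properties": {
            "id": {"type": "string"},
            "state": {"type": "string", "enum": ["absent", "created", "present"], "default": "present"},
            "conditions": {"type": "array", "items": {"type": "boolean"}},
            "attrs": {"$ref": "#/$defs/model_authentik_flows.flow"},
            "identifiers": {"$ref": "#/$defs/model_authentik_flows.flow"},
        },
    }
    print(entry["properties"]["identifiers"]["$ref"])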

View File

@ -29,7 +29,9 @@ def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path):
    if version != 1:
        return
    blueprint_file.seek(0)
-    instance = BlueprintInstance.objects.using(db_alias).filter(path=path).first()
+    instance: BlueprintInstance = (
+        BlueprintInstance.objects.using(db_alias).filter(path=path).first()
+    )
    rel_path = path.relative_to(Path(CONFIG.get("blueprints_dir")))
    meta = None
    if metadata:

View File

@ -3,7 +3,6 @@
from pathlib import Path
from uuid import uuid4
-from django.contrib.contenttypes.fields import GenericRelation
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.utils.translation import gettext_lazy as _
@ -72,26 +71,6 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
    enabled = models.BooleanField(default=True)
    managed_models = ArrayField(models.TextField(), default=list)
# Manual link to tasks instead of using TasksModel because of loop imports
tasks = GenericRelation(
"authentik_tasks.Task",
content_type_field="rel_obj_content_type",
object_id_field="rel_obj_id",
)
class Meta:
verbose_name = _("Blueprint Instance")
verbose_name_plural = _("Blueprint Instances")
unique_together = (
(
"name",
"path",
),
)
def __str__(self) -> str:
return f"Blueprint Instance {self.name}"
    def retrieve_oci(self) -> str:
        """Get blueprint from an OCI registry"""
        client = BlueprintOCIClient(self.path.replace(OCI_PREFIX, "https://"))
@ -110,7 +89,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
                raise BlueprintRetrievalFailed("Invalid blueprint path")
            with full_path.open("r", encoding="utf-8") as _file:
                return _file.read()
-        except OSError as exc:
+        except (IOError, OSError) as exc:
            raise BlueprintRetrievalFailed(exc) from exc

    def retrieve(self) -> str:
@ -126,3 +105,16 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
        from authentik.blueprints.api import BlueprintInstanceSerializer
        return BlueprintInstanceSerializer
def __str__(self) -> str:
return f"Blueprint Instance {self.name}"
class Meta:
verbose_name = _("Blueprint Instance")
verbose_name_plural = _("Blueprint Instances")
unique_together = (
(
"name",
"path",
),
)

View File

@ -0,0 +1,18 @@
"""blueprint Settings"""
from celery.schedules import crontab
from authentik.lib.utils.time import fqdn_rand
CELERY_BEAT_SCHEDULE = {
"blueprints_v1_discover": {
"task": "authentik.blueprints.v1.tasks.blueprints_discovery",
"schedule": crontab(minute=fqdn_rand("blueprints_v1_discover"), hour="*"),
"options": {"queue": "authentik_scheduled"},
},
"blueprints_v1_cleanup": {
"task": "authentik.blueprints.v1.tasks.clear_failed_blueprints",
"schedule": crontab(minute=fqdn_rand("blueprints_v1_cleanup"), hour="*"),
"options": {"queue": "authentik_scheduled"},
},
}

View File

@ -1,2 +0,0 @@
# Import all v1 tasks for auto task discovery
from authentik.blueprints.v1.tasks import * # noqa: F403

View File

@ -1,7 +1,7 @@
"""Blueprint helpers""" """Blueprint helpers"""
from collections.abc import Callable
from functools import wraps from functools import wraps
from typing import Callable
from django.apps import apps from django.apps import apps
@ -39,7 +39,7 @@ def reconcile_app(app_name: str):
def wrapper(*args, **kwargs): def wrapper(*args, **kwargs):
config = apps.get_app_config(app_name) config = apps.get_app_config(app_name)
if isinstance(config, ManagedAppConfig): if isinstance(config, ManagedAppConfig):
config._on_startup_callback(None) config.ready()
return func(*args, **kwargs) return func(*args, **kwargs)
return wrapper return wrapper

View File

@ -1,24 +0,0 @@
version: 1
entries:
- model: authentik_core.user
id: user
identifiers:
username: "%(id)s"
attrs:
name: "%(id)s"
- model: authentik_rbac.role
id: role
identifiers:
name: "%(id)s"
- model: authentik_flows.flow
identifiers:
slug: "%(id)s"
attrs:
designation: authentication
name: foo
title: foo
permissions:
- permission: view_flow
user: !KeyOf user
- permission: view_flow
role: !KeyOf role
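
In the fixture above, !KeyOf user and !KeyOf role point the flow's permissions block at the entries declared earlier through their id fields, so the importer can bind object-level permissions to the user and role it has just created. A stand-in sketch of that lookup with plain dictionaries; the helper and the pk values are made up for illustration and are not the importer's code:

    # Stand-in for !KeyOf resolution: look up a previously declared entry by
    # its "id" and return the primary key it received when it was applied.
    entries = {
        "user": {"model": "authentik_core.user", "pk": 42},           # made-up pk
        "role": {"model": "authentik_rbac.role", "pk": "role-uuid"},  # made-up pk
    }

    def key_of(entry_id: str):
        return entries[entry_id]["pk"]

    permissions = [
        {"permission": "view_flow", "user": key_of("user")},
        {"permission": "view_flow", "role": key_of("role")},
    ]
    print(permissions)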

View File

@ -1,8 +0,0 @@
version: 1
entries:
- model: authentik_rbac.role
identifiers:
name: "%(id)s"
attrs:
permissions:
- authentik_blueprints.view_blueprintinstance

View File

@ -1,9 +0,0 @@
version: 1
entries:
- model: authentik_core.user
identifiers:
username: "%(id)s"
attrs:
name: "%(id)s"
permissions:
- authentik_blueprints.view_blueprintinstance

View File

@ -1,11 +1,10 @@
version: 1
entries:
-  foo:
-    - identifiers:
-        name: "%(id)s"
-        slug: "%(id)s"
-      model: authentik_flows.flow
-      state: present
-      attrs:
-        designation: stage_configuration
-        title: foo
+  - identifiers:
+      name: "%(id)s"
+      slug: "%(id)s"
+    model: authentik_flows.flow
+    state: present
+    attrs:
+      designation: stage_configuration
+      title: foo

View File

@ -37,7 +37,6 @@ entries:
- attrs:
    attributes:
      env_null: !Env [bar-baz, null]
-      json_parse: !ParseJSON '{"foo": "bar"}'
      policy_pk1:
        !Format [
          "%s-%s",
@ -147,10 +146,6 @@ entries:
            ]
          ]
      nested_context: !Context context2
-      at_index_sequence: !AtIndex [!Context sequence, 0]
-      at_index_sequence_default: !AtIndex [!Context sequence, 100, "non existent"]
-      at_index_mapping: !AtIndex [!Context mapping, "key2"]
-      at_index_mapping_default: !AtIndex [!Context mapping, "invalid", "non existent"]
  identifiers:
    name: test
  conditions:
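
The removed at_index_* attributes exercise the !AtIndex tag, which pulls a single element out of a context sequence or mapping and falls back to a default when the index or key does not exist. A plain-Python stand-in for that behaviour (not the tag implementation); the sample data mirrors the values checked in the test further down:

    # Stand-in for !AtIndex [container, index, default]: index into a
    # sequence or mapping, returning the default on a miss.
    def at_index(container, index, default=None):
        try:
            return container[index]
        except (IndexError, KeyError, TypeError):
            return default

    sequence = ["foo", "bar"]
    mapping = {"key1": 1, "key2": 2}

    print(at_index(sequence, 0))                         # "foo"
    print(at_index(sequence, 100, "non existent"))       # default
    print(at_index(mapping, "key2"))                     # 2
    print(at_index(mapping, "invalid", "non existent"))  # default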

View File

@ -1,14 +0,0 @@
from django.test import TestCase
from authentik.blueprints.apps import ManagedAppConfig
from authentik.enterprise.apps import EnterpriseConfig
from authentik.lib.utils.reflection import get_apps
class TestManagedAppConfig(TestCase):
def test_apps_use_managed_app_config(self):
for app in get_apps():
if app.name.startswith("authentik.enterprise"):
self.assertIn(EnterpriseConfig, app.__class__.__bases__)
else:
self.assertIn(ManagedAppConfig, app.__class__.__bases__)

View File

@ -1,7 +1,7 @@
"""test packaged blueprints""" """test packaged blueprints"""
from collections.abc import Callable
from pathlib import Path from pathlib import Path
from typing import Callable
from django.test import TransactionTestCase from django.test import TransactionTestCase
@ -27,14 +27,13 @@ def blueprint_tester(file_name: Path) -> Callable:
base = Path("blueprints/") base = Path("blueprints/")
rel_path = Path(file_name).relative_to(base) rel_path = Path(file_name).relative_to(base)
importer = Importer.from_string(BlueprintInstance(path=str(rel_path)).retrieve()) importer = Importer.from_string(BlueprintInstance(path=str(rel_path)).retrieve())
validation, logs = importer.validate() self.assertTrue(importer.validate()[0])
self.assertTrue(validation, logs)
self.assertTrue(importer.apply()) self.assertTrue(importer.apply())
return tester return tester
for blueprint_file in Path("blueprints/").glob("**/*.yaml"): for blueprint_file in Path("blueprints/").glob("**/*.yaml"):
if "local" in str(blueprint_file) or "testing" in str(blueprint_file): if "local" in str(blueprint_file):
continue continue
setattr(TestPackaged, f"test_blueprint_{blueprint_file}", blueprint_tester(blueprint_file)) setattr(TestPackaged, f"test_blueprint_{blueprint_file}", blueprint_tester(blueprint_file))
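
This module, like the model tests below, leans on the same pattern: build a closure per input and attach it to the TestCase with setattr, so each blueprint file reports as its own test. A minimal self-contained sketch of that pattern, independent of authentik; the file names are illustrative:

    # Minimal sketch of dynamically generated test methods on a TestCase.
    import unittest


    class TestGenerated(unittest.TestCase):
        """Container for generated tests"""


    def make_tester(name: str):
        def tester(self: TestGenerated):
            self.assertTrue(name.endswith(".yaml"))

        return tester


    for file_name in ["default/flow-a.yaml", "default/flow-b.yaml"]:
        setattr(TestGenerated, f"test_blueprint_{file_name}", make_tester(file_name))

    if __name__ == "__main__":
        unittest.main()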

View File

@ -1,10 +1,11 @@
"""authentik managed models tests""" """authentik managed models tests"""
from collections.abc import Callable from typing import Callable, Type
from django.apps import apps from django.apps import apps
from django.test import TestCase from django.test import TestCase
from authentik.blueprints.v1.importer import is_model_allowed
from authentik.lib.models import SerializerModel from authentik.lib.models import SerializerModel
from authentik.providers.oauth2.models import RefreshToken from authentik.providers.oauth2.models import RefreshToken
@ -13,7 +14,7 @@ class TestModels(TestCase):
"""Test Models""" """Test Models"""
def serializer_tester_factory(test_model: type[SerializerModel]) -> Callable: def serializer_tester_factory(test_model: Type[SerializerModel]) -> Callable:
"""Test serializer""" """Test serializer"""
def tester(self: TestModels): def tester(self: TestModels):
@ -21,13 +22,10 @@ def serializer_tester_factory(test_model: type[SerializerModel]) -> Callable:
return return
model_class = test_model() model_class = test_model()
self.assertTrue(isinstance(model_class, SerializerModel)) self.assertTrue(isinstance(model_class, SerializerModel))
# Models that have subclasses don't have to have a serializer
if len(test_model.__subclasses__()) > 0:
return
self.assertIsNotNone(model_class.serializer) self.assertIsNotNone(model_class.serializer)
if model_class.serializer.Meta().model == RefreshToken: if model_class.serializer.Meta().model == RefreshToken:
return return
self.assertTrue(issubclass(test_model, model_class.serializer.Meta().model)) self.assertEqual(model_class.serializer.Meta().model, test_model)
return tester return tester
@ -36,6 +34,6 @@ for app in apps.get_app_configs():
if not app.label.startswith("authentik"): if not app.label.startswith("authentik"):
continue continue
for model in app.get_models(): for model in app.get_models():
if not issubclass(model, SerializerModel): if not is_model_allowed(model):
continue continue
setattr(TestModels, f"test_{app.label}_{model.__name__}", serializer_tester_factory(model)) setattr(TestModels, f"test_{app.label}_{model.__name__}", serializer_tester_factory(model))

View File

@ -215,11 +215,6 @@ class TestBlueprintsV1(TransactionTestCase):
                },
                "nested_context": "context-nested-value",
                "env_null": None,
-                "json_parse": {"foo": "bar"},
-                "at_index_sequence": "foo",
-                "at_index_sequence_default": "non existent",
-                "at_index_mapping": 2,
-                "at_index_mapping_default": "non existent",
            }
        ).exists()
    )

View File

@ -78,5 +78,5 @@ class TestBlueprintsV1API(APITestCase):
        self.assertEqual(res.status_code, 400)
        self.assertJSONEqual(
            res.content.decode(),
-            {"content": ["Failed to validate blueprint", "- Invalid blueprint version"]},
+            {"content": ["Failed to validate blueprint: Invalid blueprint version"]},
        )

View File

@ -1,57 +0,0 @@
"""Test blueprints v1"""
from django.test import TransactionTestCase
from guardian.shortcuts import get_perms
from authentik.blueprints.v1.importer import Importer
from authentik.core.models import User
from authentik.flows.models import Flow
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import load_fixture
from authentik.rbac.models import Role
class TestBlueprintsV1RBAC(TransactionTestCase):
"""Test Blueprints rbac attribute"""
def test_user_permission(self):
"""Test permissions"""
uid = generate_id()
import_yaml = load_fixture("fixtures/rbac_user.yaml", id=uid)
importer = Importer.from_string(import_yaml)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
user = User.objects.filter(username=uid).first()
self.assertIsNotNone(user)
self.assertTrue(user.has_perms(["authentik_blueprints.view_blueprintinstance"]))
def test_role_permission(self):
"""Test permissions"""
uid = generate_id()
import_yaml = load_fixture("fixtures/rbac_role.yaml", id=uid)
importer = Importer.from_string(import_yaml)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
role = Role.objects.filter(name=uid).first()
self.assertIsNotNone(role)
self.assertEqual(
list(role.group.permissions.all().values_list("codename", flat=True)),
["view_blueprintinstance"],
)
def test_object_permission(self):
"""Test permissions"""
uid = generate_id()
import_yaml = load_fixture("fixtures/rbac_object.yaml", id=uid)
importer = Importer.from_string(import_yaml)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
flow = Flow.objects.filter(slug=uid).first()
user = User.objects.filter(username=uid).first()
role = Role.objects.filter(name=uid).first()
self.assertIsNotNone(flow)
self.assertEqual(get_perms(user, flow), ["view_flow"])
self.assertEqual(get_perms(role.group, flow), ["view_flow"])

Some files were not shown because too many files have changed in this diff.