Compare commits


3 Commits

SHA1 Message Date
d660a392b9 punctuation fix 2024-08-09 15:57:35 -05:00
f530ce5e02 tweaks 2024-08-09 15:48:08 -05:00
d4012df59d add section about webhook cert config 2024-08-09 15:39:31 -05:00
2377 changed files with 110538 additions and 292894 deletions

View File

@ -1,5 +1,5 @@
[bumpversion]
current_version = 2025.6.3
current_version = 2024.6.3
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
@ -17,12 +17,8 @@ optional_value = final
[bumpversion:file:pyproject.toml]
[bumpversion:file:uv.lock]
[bumpversion:file:package.json]
[bumpversion:file:package-lock.json]
[bumpversion:file:docker-compose.yml]
[bumpversion:file:schema.yml]
@ -33,4 +29,4 @@ optional_value = final
[bumpversion:file:internal/constants/constants.go]
[bumpversion:file:lifecycle/aws/template.yaml]
[bumpversion:file:web/src/common/constants.ts]

View File

@ -5,10 +5,8 @@ dist/**
build/**
build_docs/**
*Dockerfile
**/*Dockerfile
blueprints/local
.git
!gen-ts-api/node_modules
!gen-ts-api/dist/**
!gen-go-api/
.venv

View File

@ -7,9 +7,6 @@ charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[*.toml]
indent_size = 2
[*.html]
indent_size = 2

View File

@ -28,11 +28,7 @@ Output of docker-compose logs or kubectl logs respectively
**Version and Deployment (please complete the following information):**
<!--
Notice: authentik supports installation via Docker, Kubernetes, and AWS CloudFormation only. Support is not available for other methods. For detailed installation and configuration instructions, please refer to the official documentation at https://docs.goauthentik.io/docs/install-config/.
-->
- authentik version: [e.g. 2025.2.0]
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]
**Additional context**

View File

@ -1,22 +0,0 @@
---
name: Documentation issue
about: Suggest an improvement or report a problem
title: ""
labels: documentation
assignees: ""
---
**Do you see an area that can be clarified or expanded, a technical inaccuracy, or a broken link? Please describe.**
A clear and concise description of what the problem is, or where the document can be improved. Ex. I believe we need more details about [...]
**Provide the URL or link to the exact page in the documentation to which you are referring.**
If there are multiple pages, list them all, and be sure to state the header or section where the content is.
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Additional context**
Add any other context or screenshots about the documentation issue here.
**Consider opening a PR!**
If the issue is one that you can fix, or even make a good pass at, we'd appreciate a PR. For more information about making a contribution to the docs, and using our Style Guide and our templates, refer to ["Writing documentation"](https://docs.goauthentik.io/docs/developer-docs/docs/writing-documentation).

View File

@ -20,12 +20,7 @@ Output of docker-compose logs or kubectl logs respectively
**Version and Deployment (please complete the following information):**
<!--
Notice: authentik supports installation via Docker, Kubernetes, and AWS CloudFormation only. Support is not available for other methods. For detailed installation and configuration instructions, please refer to the official documentation at https://docs.goauthentik.io/docs/install-config/.
-->
- authentik version: [e.g. 2025.2.0]
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]
**Additional context**

View File

@ -35,6 +35,14 @@ runs:
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```
For arm64, use these values:
```shell
AUTHENTIK_IMAGE=ghcr.io/goauthentik/dev-server
AUTHENTIK_TAG=${{ inputs.tag }}-arm64
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```
Afterwards, run the upgrade commands from the latest release notes.
</details>
<details>
@ -52,6 +60,18 @@ runs:
tag: ${{ inputs.tag }}
```
For arm64, use these values:
```yaml
authentik:
outposts:
container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
global:
image:
repository: ghcr.io/goauthentik/dev-server
tag: ${{ inputs.tag }}-arm64
```
Afterwards, run the upgrade commands from the latest release notes.
</details>
edit-mode: replace

View File

@ -9,14 +9,11 @@ inputs:
image-arch:
required: false
description: "Docker image arch"
release:
required: true
description: "True if this is a release build, false if this is a dev/PR build"
outputs:
shouldPush:
description: "Whether to push the image or not"
value: ${{ steps.ev.outputs.shouldPush }}
shouldBuild:
description: "Whether to build image or not"
value: ${{ steps.ev.outputs.shouldBuild }}
sha:
description: "sha"
@ -32,24 +29,15 @@ outputs:
imageTags:
description: "Docker image tags"
value: ${{ steps.ev.outputs.imageTags }}
imageTagsJSON:
description: "Docker image tags, as a JSON array"
value: ${{ steps.ev.outputs.imageTagsJSON }}
attestImageNames:
description: "Docker image names used for attestation"
value: ${{ steps.ev.outputs.attestImageNames }}
cacheTo:
description: "cache-to value for the docker build step"
value: ${{ steps.ev.outputs.cacheTo }}
imageNames:
description: "Docker image names"
value: ${{ steps.ev.outputs.imageNames }}
imageMainTag:
description: "Docker image main tag"
value: ${{ steps.ev.outputs.imageMainTag }}
imageMainName:
description: "Docker image main name"
value: ${{ steps.ev.outputs.imageMainName }}
imageBuildArgs:
description: "Docker image build args"
value: ${{ steps.ev.outputs.imageBuildArgs }}
runs:
using: "composite"
@ -60,8 +48,6 @@ runs:
env:
IMAGE_NAME: ${{ inputs.image-name }}
IMAGE_ARCH: ${{ inputs.image-arch }}
RELEASE: ${{ inputs.release }}
PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
REF: ${{ github.ref }}
run: |
python3 ${{ github.action_path }}/push_vars.py

View File

@ -2,20 +2,12 @@
import configparser
import os
from json import dumps
from time import time
parser = configparser.ConfigParser()
parser.read(".bumpversion.cfg")
# Decide if we should push the image or not
should_push = True
if len(os.environ.get("DOCKER_USERNAME", "")) < 1:
# Don't push if we don't have DOCKER_USERNAME, i.e. no secrets are available
should_push = False
if os.environ.get("GITHUB_REPOSITORY").lower() == "goauthentik/authentik-internal":
# Don't push on the internal repo
should_push = False
should_build = str(len(os.environ.get("DOCKER_USERNAME", "")) > 0).lower()
branch_name = os.environ["GITHUB_REF"]
if os.environ.get("GITHUB_HEAD_REF", "") != "":
@ -44,11 +36,12 @@ if is_release:
]
if not prerelease:
image_tags += [
f"{name}:latest",
f"{name}:{version_family}",
]
else:
suffix = ""
if image_arch:
if image_arch and image_arch != "amd64":
suffix = f"-{image_arch}"
for name in image_names:
image_tags += [
@ -58,43 +51,15 @@ else:
]
image_main_tag = image_tags[0].split(":")[-1]
def get_attest_image_names(image_with_tags: list[str]):
"""Attestation only for GHCR"""
image_tags = []
for image_name in set(name.split(":")[0] for name in image_with_tags):
if not image_name.startswith("ghcr.io"):
continue
image_tags.append(image_name)
return ",".join(set(image_tags))
# Generate `cache-to` param
cache_to = ""
if should_push:
_cache_tag = "buildcache"
if image_arch:
_cache_tag += f"-{image_arch}"
cache_to = f"type=registry,ref={get_attest_image_names(image_tags)}:{_cache_tag},mode=max"
image_build_args = []
if os.getenv("RELEASE", "false").lower() == "true":
image_build_args = [f"VERSION={os.getenv('REF')}"]
else:
image_build_args = [f"GIT_BUILD_HASH={sha}"]
image_build_args = "\n".join(image_build_args)
image_tags_rendered = ",".join(image_tags)
image_names_rendered = ",".join(set(name.split(":")[0] for name in image_tags))
with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
print(f"shouldPush={str(should_push).lower()}", file=_output)
print(f"shouldBuild={should_build}", file=_output)
print(f"sha={sha}", file=_output)
print(f"version={version}", file=_output)
print(f"prerelease={prerelease}", file=_output)
print(f"imageTags={','.join(image_tags)}", file=_output)
print(f"imageTagsJSON={dumps(image_tags)}", file=_output)
print(f"attestImageNames={get_attest_image_names(image_tags)}", file=_output)
print(f"imageTags={image_tags_rendered}", file=_output)
print(f"imageNames={image_names_rendered}", file=_output)
print(f"imageMainTag={image_main_tag}", file=_output)
print(f"imageMainName={image_tags[0]}", file=_output)
print(f"cacheTo={cache_to}", file=_output)
print(f"imageBuildArgs={image_build_args}", file=_output)

View File

@ -1,18 +1,7 @@
#!/bin/bash -x
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
# Non-pushing PR
GITHUB_OUTPUT=/dev/stdout \
GITHUB_REF=ref \
GITHUB_SHA=sha \
IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
GITHUB_REPOSITORY=goauthentik/authentik \
python $SCRIPT_DIR/push_vars.py
# Pushing PR/main
GITHUB_OUTPUT=/dev/stdout \
GITHUB_REF=ref \
GITHUB_SHA=sha \
IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
GITHUB_REPOSITORY=goauthentik/authentik \
DOCKER_USERNAME=foo \
python $SCRIPT_DIR/push_vars.py

View File

@ -9,22 +9,17 @@ inputs:
runs:
using: "composite"
steps:
- name: Install apt deps
- name: Install poetry & deps
shell: bash
run: |
pipx install poetry || true
sudo apt-get update
sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext libkrb5-dev krb5-kdc krb5-user krb5-admin-server
- name: Install uv
uses: astral-sh/setup-uv@v5
with:
enable-cache: true
- name: Setup python
sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
- name: Setup python and restore poetry
uses: actions/setup-python@v5
with:
python-version-file: "pyproject.toml"
- name: Install Python deps
shell: bash
run: uv sync --all-extras --dev --frozen
cache: "poetry"
- name: Setup node
uses: actions/setup-node@v4
with:
@ -35,18 +30,15 @@ runs:
uses: actions/setup-go@v5
with:
go-version-file: "go.mod"
- name: Setup docker cache
uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7
with:
key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }}
- name: Setup dependencies
shell: bash
run: |
export PSQL_TAG=${{ inputs.postgresql_version }}
docker compose -f .github/actions/setup/docker-compose.yml up -d
poetry install
cd web && npm ci
- name: Generate config
shell: uv run python {0}
shell: poetry run python {0}
run: |
from authentik.lib.generators import generate_id
from yaml import safe_dump

View File

@ -11,7 +11,7 @@ services:
- 5432:5432
restart: always
redis:
image: docker.io/library/redis:7
image: docker.io/library/redis
ports:
- 6379:6379
restart: always

View File

@ -1,32 +1,7 @@
akadmin
asgi
assertIn
authentik
authn
crate
docstrings
entra
goauthentik
gunicorn
hass
jwe
jwks
keypair
keypairs
kubernetes
oidc
ontext
openid
passwordless
plex
saml
scim
singed
slo
sso
totp
traefik
# https://github.com/codespell-project/codespell/issues/1224
upToDate
hass
warmup
webauthn
ontext
singed
assertIn

View File

@ -23,13 +23,8 @@ updates:
- package-ecosystem: npm
directories:
- "/web"
- "/web/packages/sfe"
- "/web/packages/core"
- "/web/packages/esbuild-plugin-live-reload"
- "/packages/prettier-config"
- "/packages/tsconfig"
- "/packages/docusaurus-config"
- "/packages/eslint-config"
- "/tests/wdio"
- "/web/sfe"
schedule:
interval: daily
time: "04:00"
@ -49,11 +44,9 @@ updates:
- "babel-*"
eslint:
patterns:
- "@eslint/*"
- "@typescript-eslint/*"
- "eslint-*"
- "eslint"
- "typescript-eslint"
- "eslint-*"
storybook:
patterns:
- "@storybook/*"
@ -61,22 +54,13 @@ updates:
esbuild:
patterns:
- "@esbuild/*"
- "esbuild*"
rollup:
patterns:
- "@rollup/*"
- "rollup-*"
- "rollup*"
swc:
patterns:
- "@swc/*"
- "swc-*"
wdio:
patterns:
- "@wdio/*"
goauthentik:
patterns:
- "@goauthentik/*"
- package-ecosystem: npm
directory: "/website"
schedule:
@ -91,33 +75,7 @@ updates:
docusaurus:
patterns:
- "@docusaurus/*"
build:
patterns:
- "@swc/*"
- "swc-*"
- "lightningcss*"
- "@rspack/binding*"
goauthentik:
patterns:
- "@goauthentik/*"
eslint:
patterns:
- "@eslint/*"
- "@typescript-eslint/*"
- "eslint-*"
- "eslint"
- "typescript-eslint"
- package-ecosystem: npm
directory: "/lifecycle/aws"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "lifecycle/aws:"
labels:
- dependencies
- package-ecosystem: uv
- package-ecosystem: pip
directory: "/"
schedule:
interval: daily
@ -137,15 +95,3 @@ updates:
prefix: "core:"
labels:
- dependencies
- package-ecosystem: docker-compose
directories:
# - /scripts # Maybe
- /tests/e2e
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "core:"
labels:
- dependencies

View File

@ -1,7 +1,7 @@
<!--
👋 Hi there! Welcome.
Please check the Contributing guidelines: https://docs.goauthentik.io/docs/developer-docs/#how-can-i-contribute
Please check the Contributing guidelines: https://goauthentik.io/developer-docs/#how-can-i-contribute
-->
## Details

View File

@ -1,98 +0,0 @@
# Re-usable workflow for a single-architecture build
name: Single-arch Container build
on:
workflow_call:
inputs:
image_name:
required: true
type: string
image_arch:
required: true
type: string
runs-on:
required: true
type: string
registry_dockerhub:
default: false
type: boolean
registry_ghcr:
default: false
type: boolean
release:
default: false
type: boolean
outputs:
image-digest:
value: ${{ jobs.build.outputs.image-digest }}
jobs:
build:
name: Build ${{ inputs.image_arch }}
runs-on: ${{ inputs.runs-on }}
outputs:
image-digest: ${{ steps.push.outputs.digest }}
permissions:
# Needed to upload container images to ghcr.io
packages: write
# Needed for attestation
id-token: write
attestations: write
# Needed for checkout
contents: read
steps:
- uses: actions/checkout@v4
- uses: docker/setup-qemu-action@v3.6.0
- uses: docker/setup-buildx-action@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ${{ inputs.image_name }}
image-arch: ${{ inputs.image_arch }}
release: ${{ inputs.release }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: make empty clients
if: ${{ inputs.release }}
run: |
mkdir -p ./gen-ts-api
mkdir -p ./gen-go-api
- name: generate ts client
if: ${{ !inputs.release }}
run: make gen-client-ts
- name: Build Docker Image
uses: docker/build-push-action@v6
id: push
with:
context: .
push: ${{ steps.ev.outputs.shouldPush == 'true' }}
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
build-args: |
${{ steps.ev.outputs.imageBuildArgs }}
tags: ${{ steps.ev.outputs.imageTags }}
platforms: linux/${{ inputs.image_arch }}
cache-from: type=registry,ref=${{ steps.ev.outputs.attestImageNames }}:buildcache-${{ inputs.image_arch }}
cache-to: ${{ steps.ev.outputs.cacheTo }}
- uses: actions/attest-build-provenance@v2
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true

View File

@ -1,104 +0,0 @@
# Re-usable workflow for a multi-architecture build
name: Multi-arch container build
on:
workflow_call:
inputs:
image_name:
required: true
type: string
registry_dockerhub:
default: false
type: boolean
registry_ghcr:
default: true
type: boolean
release:
default: false
type: boolean
outputs: {}
jobs:
build-server-amd64:
uses: ./.github/workflows/_reusable-docker-build-single.yaml
secrets: inherit
with:
image_name: ${{ inputs.image_name }}
image_arch: amd64
runs-on: ubuntu-latest
registry_dockerhub: ${{ inputs.registry_dockerhub }}
registry_ghcr: ${{ inputs.registry_ghcr }}
release: ${{ inputs.release }}
build-server-arm64:
uses: ./.github/workflows/_reusable-docker-build-single.yaml
secrets: inherit
with:
image_name: ${{ inputs.image_name }}
image_arch: arm64
runs-on: ubuntu-22.04-arm
registry_dockerhub: ${{ inputs.registry_dockerhub }}
registry_ghcr: ${{ inputs.registry_ghcr }}
release: ${{ inputs.release }}
get-tags:
runs-on: ubuntu-latest
needs:
- build-server-amd64
- build-server-arm64
outputs:
tags: ${{ steps.ev.outputs.imageTagsJSON }}
shouldPush: ${{ steps.ev.outputs.shouldPush }}
steps:
- uses: actions/checkout@v4
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ${{ inputs.image_name }}
merge-server:
runs-on: ubuntu-latest
if: ${{ needs.get-tags.outputs.shouldPush == 'true' }}
needs:
- get-tags
- build-server-amd64
- build-server-arm64
strategy:
fail-fast: false
matrix:
tag: ${{ fromJson(needs.get-tags.outputs.tags) }}
steps:
- uses: actions/checkout@v4
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ${{ inputs.image_name }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: int128/docker-manifest-create-action@v2
id: build
with:
tags: ${{ matrix.tag }}
sources: |
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-amd64.outputs.image-digest }}
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-arm64.outputs.image-digest }}
- uses: actions/attest-build-provenance@v2
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.build.outputs.digest }}
push-to-registry: true

View File

@ -7,7 +7,6 @@ on:
workflow_dispatch:
jobs:
build:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
permissions:
id-token: write
@ -30,6 +29,7 @@ jobs:
uses: actions/setup-python@v5
with:
python-version-file: "pyproject.toml"
cache: "poetry"
- name: Generate API Client
run: make gen-client-py
- name: Publish package

View File

@ -7,7 +7,6 @@ on:
workflow_dispatch:
jobs:
build:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
steps:
- id: generate_token
@ -41,7 +40,7 @@ jobs:
run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION
- uses: peter-evans/create-pull-request@v7
- uses: peter-evans/create-pull-request@v6
id: cpr
with:
token: ${{ steps.generate_token.outputs.token }}
@ -53,7 +52,6 @@ jobs:
signoff: true
# ID from https://api.github.com/users/authentik-automation[bot]
author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
labels: dependencies
- uses: peter-evans/enable-pull-request-automerge@v3
with:
token: ${{ steps.generate_token.outputs.token }}

View File

@ -1,46 +0,0 @@
name: authentik-ci-aws-cfn
on:
push:
branches:
- main
- next
- version-*
pull_request:
branches:
- main
- version-*
env:
POSTGRES_DB: authentik
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
jobs:
check-changes-applied:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup authentik env
uses: ./.github/actions/setup
- uses: actions/setup-node@v4
with:
node-version-file: lifecycle/aws/package.json
cache: "npm"
cache-dependency-path: lifecycle/aws/package-lock.json
- working-directory: lifecycle/aws/
run: |
npm ci
- name: Check changes have been applied
run: |
uv run make aws-cfn
git diff --exit-code
ci-aws-cfn-mark:
if: always()
needs:
- check-changes-applied
runs-on: ubuntu-latest
steps:
- uses: re-actors/alls-green@release/v1
with:
jobs: ${{ toJSON(needs) }}

View File

@ -1,29 +0,0 @@
---
name: authentik-ci-main-daily
on:
workflow_dispatch:
schedule:
# Every night at 3am
- cron: "0 3 * * *"
jobs:
test-container:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
version:
- docs
- version-2025-4
- version-2025-2
steps:
- uses: actions/checkout@v4
- run: |
current="$(pwd)"
dir="/tmp/authentik/${{ matrix.version }}"
mkdir -p $dir
cd $dir
wget https://${{ matrix.version }}.goauthentik.io/docker-compose.yml
${current}/scripts/test_docker.sh

View File

@ -34,7 +34,7 @@ jobs:
- name: Setup authentik env
uses: ./.github/actions/setup
- name: run job
run: uv run make ci-${{ matrix.job }}
run: poetry run make ci-${{ matrix.job }}
test-migrations:
runs-on: ubuntu-latest
steps:
@ -42,34 +42,24 @@ jobs:
- name: Setup authentik env
uses: ./.github/actions/setup
- name: run migrations
run: uv run python -m lifecycle.migrate
test-make-seed:
runs-on: ubuntu-latest
steps:
- id: seed
run: |
echo "seed=$(printf "%d\n" "0x$(openssl rand -hex 4)")" >> "$GITHUB_OUTPUT"
outputs:
seed: ${{ steps.seed.outputs.seed }}
run: poetry run python -m lifecycle.migrate
test-migrations-from-stable:
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }}
runs-on: ubuntu-latest
timeout-minutes: 20
needs: test-make-seed
strategy:
fail-fast: false
matrix:
psql:
- 15-alpine
- 16-alpine
- 17-alpine
run_id: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: checkout stable
run: |
# Delete all poetry envs
rm -rf /home/runner/.cache/pypoetry
# Copy current, latest config to local
cp authentik/lib/default.yml local.env.yml
cp -R .github ..
@ -82,7 +72,7 @@ jobs:
with:
postgresql_version: ${{ matrix.psql }}
- name: run migrations to stable
run: uv run python -m lifecycle.migrate
run: poetry run python -m lifecycle.migrate
- name: checkout current code
run: |
set -x
@ -90,35 +80,31 @@ jobs:
git reset --hard HEAD
git clean -d -fx .
git checkout $GITHUB_SHA
# Delete previous poetry env
rm -rf /home/runner/.cache/pypoetry/virtualenvs/*
- name: Setup authentik env (ensure latest deps are installed)
uses: ./.github/actions/setup
with:
postgresql_version: ${{ matrix.psql }}
- name: migrate to latest
run: |
uv run python -m lifecycle.migrate
poetry run python -m lifecycle.migrate
- name: run tests
env:
# Test in the main database that we just migrated from the previous stable version
AUTHENTIK_POSTGRESQL__TEST__NAME: authentik
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
CI_RUN_ID: ${{ matrix.run_id }}
CI_TOTAL_RUNS: "5"
run: |
uv run make ci-test
poetry run make test
test-unittest:
name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
name: test-unittest - PostgreSQL ${{ matrix.psql }}
runs-on: ubuntu-latest
timeout-minutes: 20
needs: test-make-seed
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
psql:
- 15-alpine
- 16-alpine
- 17-alpine
run_id: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v4
- name: Setup authentik env
@ -126,23 +112,14 @@ jobs:
with:
postgresql_version: ${{ matrix.psql }}
- name: run unittest
env:
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
CI_RUN_ID: ${{ matrix.run_id }}
CI_TOTAL_RUNS: "5"
run: |
uv run make ci-test
poetry run make test
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: unit
token: ${{ secrets.CODECOV_TOKEN }}
- if: ${{ !cancelled() }}
uses: codecov/test-results-action@v1
with:
flags: unit
file: unittest.xml
token: ${{ secrets.CODECOV_TOKEN }}
test-integration:
runs-on: ubuntu-latest
timeout-minutes: 30
@ -151,22 +128,16 @@ jobs:
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Create k8s Kind Cluster
uses: helm/kind-action@v1.12.0
uses: helm/kind-action@v1.10.0
- name: run integration
run: |
uv run coverage run manage.py test tests/integration
uv run coverage xml
poetry run coverage run manage.py test tests/integration
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: integration
token: ${{ secrets.CODECOV_TOKEN }}
- if: ${{ !cancelled() }}
uses: codecov/test-results-action@v1
with:
flags: integration
file: unittest.xml
token: ${{ secrets.CODECOV_TOKEN }}
test-e2e:
name: test-e2e (${{ matrix.job.name }})
runs-on: ubuntu-latest
@ -197,12 +168,12 @@ jobs:
uses: ./.github/actions/setup
- name: Setup e2e env (chrome, etc)
run: |
docker compose -f tests/e2e/docker-compose.yml up -d --quiet-pull
docker compose -f tests/e2e/docker-compose.yml up -d
- id: cache-web
uses: actions/cache@v4
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }}
- name: prepare web ui
if: steps.cache-web.outputs.cache-hit != 'true'
working-directory: web
@ -210,24 +181,16 @@ jobs:
npm ci
make -C .. gen-client-ts
npm run build
npm run build:sfe
- name: run e2e
run: |
uv run coverage run manage.py test ${{ matrix.job.glob }}
uv run coverage xml
poetry run coverage run manage.py test ${{ matrix.job.glob }}
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: e2e
token: ${{ secrets.CODECOV_TOKEN }}
- if: ${{ !cancelled() }}
uses: codecov/test-results-action@v1
with:
flags: e2e
file: unittest.xml
token: ${{ secrets.CODECOV_TOKEN }}
ci-core-mark:
if: always()
needs:
- lint
- test-migrations
@ -237,24 +200,70 @@ jobs:
- test-e2e
runs-on: ubuntu-latest
steps:
- uses: re-actors/alls-green@release/v1
with:
jobs: ${{ toJSON(needs) }}
- run: echo mark
build:
strategy:
fail-fast: false
matrix:
arch:
- amd64
- arm64
needs: ci-core-mark
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
# Needed to upload contianer images to ghcr.io
packages: write
# Needed for attestation
id-token: write
attestations: write
# Needed for checkout
contents: read
needs: ci-core-mark
uses: ./.github/workflows/_reusable-docker-build.yaml
secrets: inherit
with:
image_name: ${{ github.repository == 'goauthentik/authentik-internal' && 'ghcr.io/goauthentik/internal-server' || 'ghcr.io/goauthentik/dev-server' }}
release: false
timeout-minutes: 120
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ghcr.io/goauthentik/dev-server
image-arch: ${{ matrix.arch }}
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: generate ts client
run: make gen-client-ts
- name: Build Docker Image
uses: docker/build-push-action@v6
id: push
with:
context: .
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
tags: ${{ steps.ev.outputs.imageTags }}
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache
cache-to: ${{ steps.ev.outputs.shouldBuild == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache,mode=max' || '' }}
platforms: linux/${{ matrix.arch }}
- uses: actions/attest-build-provenance@v1
id: attest
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
with:
subject-name: ${{ steps.ev.outputs.imageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
pr-comment:
needs:
- build
@ -276,7 +285,7 @@ jobs:
with:
image-name: ghcr.io/goauthentik/dev-server
- name: Comment on PR
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
uses: ./.github/actions/comment-pr-instructions
with:
tag: ${{ steps.ev.outputs.imageMainTag }}

View File

@ -29,9 +29,9 @@ jobs:
- name: Generate API
run: make gen-client-go
- name: golangci-lint
uses: golangci/golangci-lint-action@v8
uses: golangci/golangci-lint-action@v6
with:
version: latest
version: v1.54.2
args: --timeout 5000s --verbose
skip-cache: true
test-unittest:
@ -49,17 +49,13 @@ jobs:
run: |
go test -timeout 0 -v -race -coverprofile=coverage.out -covermode=atomic -cover ./...
ci-outpost-mark:
if: always()
needs:
- lint-golint
- test-unittest
runs-on: ubuntu-latest
steps:
- uses: re-actors/alls-green@release/v1
with:
jobs: ${{ toJSON(needs) }}
- run: echo mark
build-container:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
timeout-minutes: 120
needs:
- ci-outpost-mark
@ -73,7 +69,7 @@ jobs:
- rac
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
# Needed to upload contianer images to ghcr.io
packages: write
# Needed for attestation
id-token: write
@ -83,7 +79,7 @@ jobs:
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.6.0
uses: docker/setup-qemu-action@v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
@ -94,7 +90,7 @@ jobs:
with:
image-name: ghcr.io/goauthentik/dev-${{ matrix.type }}
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
uses: docker/login-action@v3
with:
registry: ghcr.io
@ -108,18 +104,18 @@ jobs:
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: ${{ matrix.type }}.Dockerfile
push: ${{ steps.ev.outputs.shouldPush == 'true' }}
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
platforms: linux/amd64,linux/arm64
context: .
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && format('type=registry,ref=ghcr.io/goauthentik/dev-{0}:buildcache,mode=max', matrix.type) || '' }}
- uses: actions/attest-build-provenance@v2
cache-to: ${{ steps.ev.outputs.shouldBuild == 'true' && format('type=registry,ref=ghcr.io/goauthentik/dev-{0}:buildcache,mode=max', matrix.type) || '' }}
- uses: actions/attest-build-provenance@v1
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-name: ${{ steps.ev.outputs.imageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
build-binary:

View File

@ -24,11 +24,17 @@ jobs:
- prettier-check
project:
- web
- tests/wdio
include:
- command: tsc
project: web
- command: lit-analyse
project: web
exclude:
- command: lint:lockfile
project: tests/wdio
- command: tsc
project: tests/wdio
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
@ -39,12 +45,21 @@ jobs:
- working-directory: ${{ matrix.project }}/
run: |
npm ci
${{ matrix.extra_setup }}
- name: Generate API
run: make gen-client-ts
- name: Lint
working-directory: ${{ matrix.project }}/
run: npm run ${{ matrix.command }}
ci-web-mark:
needs:
- lint
runs-on: ubuntu-latest
steps:
- run: echo mark
build:
needs:
- ci-web-mark
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
@ -60,16 +75,6 @@ jobs:
- name: build
working-directory: web/
run: npm run build
ci-web-mark:
if: always()
needs:
- build
- lint
runs-on: ubuntu-latest
steps:
- uses: re-actors/alls-green@release/v1
with:
jobs: ${{ toJSON(needs) }}
test:
needs:
- ci-web-mark
@ -87,4 +92,4 @@ jobs:
run: make gen-client-ts
- name: test
working-directory: web/
run: npm run test || exit 0
run: npm run test

View File

@ -49,7 +49,6 @@ jobs:
matrix:
job:
- build
- build:integrations
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
@ -62,65 +61,11 @@ jobs:
- name: build
working-directory: website/
run: npm run ${{ matrix.job }}
build-container:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
# Needed for attestation
id-token: write
attestations: write
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.6.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ghcr.io/goauthentik/dev-docs
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
id: push
uses: docker/build-push-action@v6
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: website/Dockerfile
push: ${{ steps.ev.outputs.shouldPush == 'true' }}
platforms: linux/amd64,linux/arm64
context: .
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache,mode=max' || '' }}
- uses: actions/attest-build-provenance@v2
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
ci-website-mark:
if: always()
needs:
- lint
- test
- build
- build-container
runs-on: ubuntu-latest
steps:
- uses: re-actors/alls-green@release/v1
with:
jobs: ${{ toJSON(needs) }}
allowed-skips: ${{ github.repository == 'goauthentik/authentik-internal' && 'build-container' || '[]' }}
- run: echo mark

View File

@ -2,7 +2,7 @@ name: "CodeQL"
on:
push:
branches: [main, next, version*]
branches: [main, "*", next, version*]
pull_request:
branches: [main]
schedule:

View File

@ -2,7 +2,7 @@ name: authentik-gen-update-webauthn-mds
on:
workflow_dispatch:
schedule:
- cron: "30 1 1,15 * *"
- cron: '30 1 1,15 * *'
env:
POSTGRES_DB: authentik
@ -11,7 +11,6 @@ env:
jobs:
build:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
steps:
- id: generate_token
@ -24,8 +23,8 @@ jobs:
token: ${{ steps.generate_token.outputs.token }}
- name: Setup authentik env
uses: ./.github/actions/setup
- run: uv run ak update_webauthn_mds
- uses: peter-evans/create-pull-request@v7
- run: poetry run ak update_webauthn_mds
- uses: peter-evans/create-pull-request@v6
id: cpr
with:
token: ${{ steps.generate_token.outputs.token }}
@ -37,7 +36,6 @@ jobs:
signoff: true
# ID from https://api.github.com/users/authentik-automation[bot]
author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
labels: dependencies
- uses: peter-evans/enable-pull-request-automerge@v3
with:
token: ${{ steps.generate_token.outputs.token }}

View File

@ -7,7 +7,6 @@ on:
jobs:
clean-ghcr:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
name: Delete old unused container images
runs-on: ubuntu-latest
steps:

View File

@ -42,7 +42,7 @@ jobs:
with:
githubToken: ${{ steps.generate_token.outputs.token }}
compressOnly: ${{ github.event_name != 'pull_request' }}
- uses: peter-evans/create-pull-request@v7
- uses: peter-evans/create-pull-request@v6
if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
id: cpr
with:
@ -53,7 +53,6 @@ jobs:
body: ${{ steps.compress.outputs.markdown }}
delete-branch: true
signoff: true
labels: dependencies
- uses: peter-evans/enable-pull-request-automerge@v3
if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
with:

View File

@ -1,47 +0,0 @@
name: authentik-packages-npm-publish
on:
push:
branches: [main]
paths:
- packages/docusaurus-config/**
- packages/eslint-config/**
- packages/prettier-config/**
- packages/tsconfig/**
- web/packages/esbuild-plugin-live-reload/**
workflow_dispatch:
jobs:
publish:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
package:
- packages/docusaurus-config
- packages/eslint-config
- packages/prettier-config
- packages/tsconfig
- web/packages/esbuild-plugin-live-reload
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 2
- uses: actions/setup-node@v4
with:
node-version-file: ${{ matrix.package }}/package.json
registry-url: "https://registry.npmjs.org"
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c
with:
files: |
${{ matrix.package }}/package.json
- name: Publish package
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ${{ matrix.package }}
run: |
npm ci
npm run build
npm publish
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }}

View File

@ -12,7 +12,6 @@ env:
jobs:
publish-source-docs:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
@ -21,8 +20,8 @@ jobs:
uses: ./.github/actions/setup
- name: generate docs
run: |
uv run make migrate
uv run ak build_source_docs
poetry run make migrate
poetry run ak build_source_docs
- name: Publish
uses: netlify/actions/cli@master
with:

View File

@ -11,7 +11,6 @@ permissions:
jobs:
update-next:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
environment: internal-production
steps:

View File

@ -7,23 +7,9 @@ on:
jobs:
build-server:
uses: ./.github/workflows/_reusable-docker-build.yaml
secrets: inherit
permissions:
# Needed to upload container images to ghcr.io
packages: write
# Needed for attestation
id-token: write
attestations: write
with:
image_name: ghcr.io/goauthentik/server,beryju/authentik
release: true
registry_dockerhub: true
registry_ghcr: true
build-docs:
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
# Needed to upload contianer images to ghcr.io
packages: write
# Needed for attestation
id-token: write
@ -31,7 +17,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.6.0
uses: docker/setup-qemu-action@v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
@ -40,33 +26,43 @@ jobs:
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ghcr.io/goauthentik/docs
image-name: ghcr.io/goauthentik/server,beryju/authentik
- name: Docker Login Registry
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: make empty clients
run: |
mkdir -p ./gen-ts-api
mkdir -p ./gen-go-api
- name: Build Docker Image
id: push
uses: docker/build-push-action@v6
id: push
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: website/Dockerfile
push: true
platforms: linux/amd64,linux/arm64
context: .
- uses: actions/attest-build-provenance@v2
push: true
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
tags: ${{ steps.ev.outputs.imageTags }}
platforms: linux/amd64,linux/arm64
- uses: actions/attest-build-provenance@v1
id: attest
if: true
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-name: ${{ steps.ev.outputs.imageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
build-outpost:
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
# Needed to upload contianer images to ghcr.io
packages: write
# Needed for attestation
id-token: write
@ -85,7 +81,7 @@ jobs:
with:
go-version-file: "go.mod"
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.6.0
uses: docker/setup-qemu-action@v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
@ -115,16 +111,14 @@ jobs:
id: push
with:
push: true
build-args: |
VERSION=${{ github.ref }}
tags: ${{ steps.ev.outputs.imageTags }}
file: ${{ matrix.type }}.Dockerfile
platforms: linux/amd64,linux/arm64
context: .
- uses: actions/attest-build-provenance@v2
- uses: actions/attest-build-provenance@v1
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-name: ${{ steps.ev.outputs.imageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
build-outpost-binary:
@ -171,27 +165,6 @@ jobs:
file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
asset_name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
tag: ${{ github.ref }}
upload-aws-cfn-template:
permissions:
# Needed for AWS login
id-token: write
contents: read
needs:
- build-server
- build-outpost
env:
AWS_REGION: eu-central-1
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: aws-actions/configure-aws-credentials@v4
with:
role-to-assume: "arn:aws:iam::016170277896:role/github_goauthentik_authentik"
aws-region: ${{ env.AWS_REGION }}
- name: Upload template
run: |
aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.${{ github.ref }}.yaml
aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.latest.yaml
test-release:
needs:
- build-server
@ -229,13 +202,13 @@ jobs:
container=$(docker container create ${{ steps.ev.outputs.imageMainName }})
docker cp ${container}:web/ .
- name: Create a Sentry.io release
uses: getsentry/action-release@v3
uses: getsentry/action-release@v1
continue-on-error: true
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: authentik-security-inc
SENTRY_PROJECT: authentik
with:
release: authentik@${{ steps.ev.outputs.version }}
version: authentik@${{ steps.ev.outputs.version }}
sourcemaps: "./web/dist"
url_prefix: "~/static/dist"

View File

@ -14,7 +14,16 @@ jobs:
- uses: actions/checkout@v4
- name: Pre-release test
run: |
make test-docker
echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env
docker buildx install
mkdir -p ./gen-ts-api
docker build -t testing:latest .
echo "AUTHENTIK_IMAGE=testing" >> .env
echo "AUTHENTIK_TAG=latest" >> .env
docker compose up --no-start
docker compose start postgresql redis
docker compose run -u root server test-all
- id: generate_token
uses: tibdex/github-app-token@v2
with:

View File

@ -1,21 +0,0 @@
name: "authentik-repo-mirror-cleanup"
on:
workflow_dispatch:
jobs:
to_internal:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- if: ${{ env.MIRROR_KEY != '' }}
uses: BeryJu/repository-mirroring-action@5cf300935bc2e068f73ea69bcc411a8a997208eb
with:
target_repo_url: git@github.com:goauthentik/authentik-internal.git
ssh_private_key: ${{ secrets.GH_MIRROR_KEY }}
args: --tags --force --prune
env:
MIRROR_KEY: ${{ secrets.GH_MIRROR_KEY }}

View File

@ -1,20 +0,0 @@
name: "authentik-repo-mirror"
on: [push, delete]
jobs:
to_internal:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- if: ${{ env.MIRROR_KEY != '' }}
uses: BeryJu/repository-mirroring-action@5cf300935bc2e068f73ea69bcc411a8a997208eb
with:
target_repo_url: git@github.com:goauthentik/authentik-internal.git
ssh_private_key: ${{ secrets.GH_MIRROR_KEY }}
args: --tags --force
env:
MIRROR_KEY: ${{ secrets.GH_MIRROR_KEY }}

View File

@ -1,8 +1,8 @@
name: "authentik-repo-stale"
name: 'authentik-repo-stale'
on:
schedule:
- cron: "30 1 * * *"
- cron: '30 1 * * *'
workflow_dispatch:
permissions:
@ -11,7 +11,6 @@ permissions:
jobs:
stale:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
steps:
- id: generate_token
@ -25,7 +24,7 @@ jobs:
days-before-stale: 60
days-before-close: 7
exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing
stale-issue-label: status/stale
stale-issue-label: wontfix
stale-issue-message: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you

View File

@ -1,27 +0,0 @@
name: authentik-semgrep
on:
workflow_dispatch: {}
pull_request: {}
push:
branches:
- main
- master
paths:
- .github/workflows/semgrep.yml
schedule:
# random HH:MM to avoid a load spike on GitHub Actions at 00:00
- cron: '12 15 * * *'
jobs:
semgrep:
name: semgrep/ci
runs-on: ubuntu-latest
permissions:
contents: read
env:
SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }}
container:
image: semgrep/semgrep
if: (github.actor != 'dependabot[bot]')
steps:
- uses: actions/checkout@v4
- run: semgrep ci

View File

@ -1,13 +1,9 @@
---
name: authentik-translate-extract-compile
name: authentik-backend-translate-extract-compile
on:
schedule:
- cron: "0 0 * * *" # every day at midnight
workflow_dispatch:
pull_request:
branches:
- main
- version-*
env:
POSTGRES_DB: authentik
@ -16,35 +12,27 @@ env:
jobs:
compile:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
steps:
- id: generate_token
if: ${{ github.event_name != 'pull_request' }}
uses: tibdex/github-app-token@v2
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v4
if: ${{ github.event_name != 'pull_request' }}
with:
token: ${{ steps.generate_token.outputs.token }}
- uses: actions/checkout@v4
if: ${{ github.event_name == 'pull_request' }}
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Generate API
run: make gen-client-ts
- name: run extract
run: |
uv run make i18n-extract
poetry run make i18n-extract
- name: run compile
run: |
uv run ak compilemessages
poetry run ak compilemessages
make web-check-compile
- name: Create Pull Request
if: ${{ github.event_name != 'pull_request' }}
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v6
with:
token: ${{ steps.generate_token.outputs.token }}
branch: extract-compile-backend-translation
@ -53,6 +41,3 @@ jobs:
body: "core, web: update translations"
delete-branch: true
signoff: true
labels: dependencies
# ID from https://api.github.com/users/authentik-automation[bot]
author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>

View File

@ -15,7 +15,6 @@ jobs:
runs-on: ubuntu-latest
if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}}
steps:
- uses: actions/checkout@v4
- id: generate_token
uses: tibdex/github-app-token@v2
with:
@ -26,13 +25,23 @@ jobs:
env:
GH_TOKEN: ${{ steps.generate_token.outputs.token }}
run: |
title=$(gh pr view ${{ github.event.pull_request.number }} --json "title" -q ".title")
title=$(curl -q -L \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${GH_TOKEN}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} | jq -r .title)
echo "title=${title}" >> "$GITHUB_OUTPUT"
- name: Rename
env:
GH_TOKEN: ${{ steps.generate_token.outputs.token }}
run: |
gh pr edit ${{ github.event.pull_request.number }} -t "translate: ${{ steps.title.outputs.title }}" --add-label dependencies
curl -L \
-X PATCH \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${GH_TOKEN}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} \
-d "{\"title\":\"translate: ${{ steps.title.outputs.title }}\"}"
- uses: peter-evans/enable-pull-request-automerge@v3
with:
token: ${{ steps.generate_token.outputs.token }}

.gitignore vendored (8 changed lines)
View File

@ -11,10 +11,6 @@ local_settings.py
db.sqlite3
media
# Node
node_modules
# If your build process includes running collectstatic, then you probably don't need or want to include staticfiles/
# in your Git repository. Update and uncomment the following line accordingly.
# <django-project-name>/staticfiles/
@ -37,7 +33,6 @@ eggs/
lib64/
parts/
dist/
out/
sdist/
var/
wheels/
@ -214,6 +209,3 @@ source_docs/
### Golang ###
/vendor/
### Docker ###
docker-compose.override.yml

View File

@ -1,47 +0,0 @@
# Prettier Ignorefile
## Static Files
**/LICENSE
authentik/stages/**/*
## Build asset directories
coverage
dist
out
.docusaurus
website/docs/developer-docs/api/**/*
## Environment
*.env
## Secrets
*.secrets
## Yarn
.yarn/**/*
## Node
node_modules
coverage
## Configs
*.log
*.yaml
*.yml
# Templates
# TODO: Rename affected files to *.template.* or similar.
*.html
*.mdx
*.md
## Import order matters
poly.ts
src/locale-codes.ts
src/locales/
# Storybook
storybook-static/
.storybook/css-import-maps*

View File

@ -2,7 +2,6 @@
"recommendations": [
"bashmish.es6-string-css",
"bpruitt-goddard.mermaid-markdown-syntax-highlighting",
"charliermarsh.ruff",
"dbaeumer.vscode-eslint",
"EditorConfig.EditorConfig",
"esbenp.prettier-vscode",
@ -11,12 +10,12 @@
"Gruntfuggly.todo-tree",
"mechatroner.rainbow-csv",
"ms-python.black-formatter",
"ms-python.black-formatter",
"ms-python.debugpy",
"charliermarsh.ruff",
"ms-python.python",
"ms-python.vscode-pylance",
"ms-python.black-formatter",
"redhat.vscode-yaml",
"Tobermory.es6-string-html",
"unifiedjs.vscode-mdx",
"unifiedjs.vscode-mdx"
]
}

.vscode/launch.json vendored (66 changed lines)
View File

@ -2,76 +2,26 @@
"version": "0.2.0",
"configurations": [
{
"name": "Debug: Attach Server Core",
"type": "debugpy",
"name": "Python: PDB attach Server",
"type": "python",
"request": "attach",
"connect": {
"host": "localhost",
"port": 9901
"port": 6800
},
"pathMappings": [
{
"localRoot": "${workspaceFolder}",
"remoteRoot": "."
}
],
"justMyCode": true,
"django": true
},
{
"name": "Debug: Attach Worker",
"type": "debugpy",
"name": "Python: PDB attach Worker",
"type": "python",
"request": "attach",
"connect": {
"host": "localhost",
"port": 9901
"port": 6900
},
"pathMappings": [
{
"localRoot": "${workspaceFolder}",
"remoteRoot": "."
}
],
"justMyCode": true,
"django": true
},
{
"name": "Debug: Start Server Router",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/server",
"cwd": "${workspaceFolder}"
},
{
"name": "Debug: Start LDAP Outpost",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/ldap",
"cwd": "${workspaceFolder}"
},
{
"name": "Debug: Start Proxy Outpost",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/proxy",
"cwd": "${workspaceFolder}"
},
{
"name": "Debug: Start RAC Outpost",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/rac",
"cwd": "${workspaceFolder}"
},
{
"name": "Debug: Start Radius Outpost",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/radius",
"cwd": "${workspaceFolder}"
}
]
}

.vscode/settings.json vendored (32 changed lines)
View File

@ -1,4 +1,25 @@
{
"cSpell.words": [
"akadmin",
"asgi",
"authentik",
"authn",
"entra",
"goauthentik",
"jwks",
"kubernetes",
"oidc",
"openid",
"passwordless",
"plex",
"saml",
"scim",
"slo",
"sso",
"totp",
"traefik",
"webauthn"
],
"todo-tree.tree.showCountsInTree": true,
"todo-tree.tree.showBadges": true,
"yaml.customTags": [
@ -6,19 +27,16 @@
"!Context scalar",
"!Enumerate sequence",
"!Env scalar",
"!Env sequence",
"!Find sequence",
"!Format sequence",
"!If sequence",
"!Index scalar",
"!KeyOf scalar",
"!Value scalar",
"!AtIndex scalar",
"!ParseJSON scalar"
"!Value scalar"
],
"typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.importModuleSpecifierEnding": "index",
"typescript.tsdk": "./node_modules/typescript/lib",
"typescript.tsdk": "./web/node_modules/typescript/lib",
"typescript.enablePromptUseWorkspaceTsdk": true,
"yaml.schemas": {
"./blueprints/schema.json": "blueprints/**/*.yaml"
@ -32,5 +50,7 @@
}
],
"go.testFlags": ["-count=1"],
"github-actions.workflows.pinned.workflows": [".github/workflows/ci-main.yml"]
"github-actions.workflows.pinned.workflows": [
".github/workflows/ci-main.yml"
]
}

.vscode/tasks.json vendored (46 changed lines)
View File

@ -3,13 +3,8 @@
"tasks": [
{
"label": "authentik/core: make",
"command": "uv",
"args": [
"run",
"make",
"lint-fix",
"lint"
],
"command": "poetry",
"args": ["run", "make", "lint-fix", "lint"],
"presentation": {
"panel": "new"
},
@ -17,12 +12,8 @@
},
{
"label": "authentik/core: run",
"command": "uv",
"args": [
"run",
"ak",
"server"
],
"command": "poetry",
"args": ["run", "ak", "server"],
"group": "build",
"presentation": {
"panel": "dedicated",
@ -32,17 +23,13 @@
{
"label": "authentik/web: make",
"command": "make",
"args": [
"web"
],
"args": ["web"],
"group": "build"
},
{
"label": "authentik/web: watch",
"command": "make",
"args": [
"web-watch"
],
"args": ["web-watch"],
"group": "build",
"presentation": {
"panel": "dedicated",
@ -52,26 +39,19 @@
{
"label": "authentik: install",
"command": "make",
"args": [
"install",
"-j4"
],
"args": ["install", "-j4"],
"group": "build"
},
{
"label": "authentik/website: make",
"command": "make",
"args": [
"website"
],
"args": ["website"],
"group": "build"
},
{
"label": "authentik/website: watch",
"command": "make",
"args": [
"website-watch"
],
"args": ["website-watch"],
"group": "build",
"presentation": {
"panel": "dedicated",
@ -80,12 +60,8 @@
},
{
"label": "authentik/api: generate",
"command": "uv",
"args": [
"run",
"make",
"gen"
],
"command": "poetry",
"args": ["run", "make", "gen"],
"group": "build"
}
]

View File

@ -10,30 +10,19 @@ schemas/ @goauthentik/backend
scripts/ @goauthentik/backend
tests/ @goauthentik/backend
pyproject.toml @goauthentik/backend
uv.lock @goauthentik/backend
poetry.lock @goauthentik/backend
go.mod @goauthentik/backend
go.sum @goauthentik/backend
# Infrastructure
.github/ @goauthentik/infrastructure
lifecycle/aws/ @goauthentik/infrastructure
Dockerfile @goauthentik/infrastructure
*Dockerfile @goauthentik/infrastructure
.dockerignore @goauthentik/infrastructure
docker-compose.yml @goauthentik/infrastructure
Makefile @goauthentik/infrastructure
.editorconfig @goauthentik/infrastructure
CODEOWNERS @goauthentik/infrastructure
# Web packages
packages/ @goauthentik/frontend
# Web
web/ @goauthentik/frontend
tests/wdio/ @goauthentik/frontend
# Locale
locale/ @goauthentik/backend @goauthentik/frontend
web/xliff/ @goauthentik/backend @goauthentik/frontend
# Docs & Website
website/ @goauthentik/docs
CODE_OF_CONDUCT.md @goauthentik/docs
# Security
SECURITY.md @goauthentik/security @goauthentik/docs
website/docs/security/ @goauthentik/security @goauthentik/docs
website/docs/security/ @goauthentik/security

View File

@ -5,7 +5,7 @@
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socioeconomic status,
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.

View File

@ -1 +1 @@
website/docs/developer-docs/index.md
website/developer-docs/index.md

View File

@ -1,7 +1,26 @@
# syntax=docker/dockerfile:1
# Stage 1: Build webui
FROM --platform=${BUILDPLATFORM} docker.io/library/node:24-slim AS node-builder
# Stage 1: Build website
FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 as website-builder
ENV NODE_ENV=production
WORKDIR /work/website
RUN --mount=type=bind,target=/work/website/package.json,src=./website/package.json \
--mount=type=bind,target=/work/website/package-lock.json,src=./website/package-lock.json \
--mount=type=cache,id=npm-website,sharing=shared,target=/root/.npm \
npm ci --include=dev
COPY ./website /work/website/
COPY ./blueprints /work/blueprints/
COPY ./schema.yml /work/
COPY ./SECURITY.md /work/
RUN npm run build-bundled
# Stage 2: Build webui
FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 as web-builder
ARG GIT_BUILD_HASH
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
@ -13,7 +32,7 @@ RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \
--mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \
--mount=type=bind,target=/work/web/packages/sfe/package.json,src=./web/packages/sfe/package.json \
--mount=type=bind,target=/work/web/scripts,src=./web/scripts \
--mount=type=cache,id=npm-ak,sharing=shared,target=/root/.npm \
--mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \
npm ci --include=dev
COPY ./package.json /work
@ -21,11 +40,10 @@ COPY ./web /work/web/
COPY ./website /work/website/
COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api
RUN npm run build && \
npm run build:sfe
RUN npm run build
# Stage 2: Build go proxy
FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.24-bookworm AS go-builder
# Stage 3: Build go proxy
FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/oss/go/microsoft/golang:1.22-fips-bookworm AS go-builder
ARG TARGETOS
ARG TARGETARCH
@ -49,8 +67,8 @@ RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \
COPY ./cmd /go/src/goauthentik.io/cmd
COPY ./authentik/lib /go/src/goauthentik.io/authentik/lib
COPY ./web/static.go /go/src/goauthentik.io/web/static.go
COPY --from=node-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt
COPY --from=node-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt
COPY --from=web-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt
COPY --from=web-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt
COPY ./internal /go/src/goauthentik.io/internal
COPY ./go.mod /go/src/goauthentik.io/go.mod
COPY ./go.sum /go/src/goauthentik.io/go.sum
@ -58,96 +76,71 @@ COPY ./go.sum /go/src/goauthentik.io/go.sum
RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
--mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \
if [ "$TARGETARCH" = "arm64" ]; then export CC=aarch64-linux-gnu-gcc && export CC_FOR_TARGET=gcc-aarch64-linux-gnu; fi && \
CGO_ENABLED=1 GOFIPS140=latest GOARM="${TARGETVARIANT#v}" \
CGO_ENABLED=1 GOEXPERIMENT="systemcrypto" GOFLAGS="-tags=requirefips" GOARM="${TARGETVARIANT#v}" \
go build -o /go/authentik ./cmd/server
# Stage 3: MaxMind GeoIP
FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip
# Stage 4: MaxMind GeoIP
FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.0.1 as geoip
ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN"
ENV GEOIPUPDATE_VERBOSE="1"
ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID"
ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY"
USER root
RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
--mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \
mkdir -p /usr/share/GeoIP && \
/bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
# Stage 4: Download uv
FROM ghcr.io/astral-sh/uv:0.7.17 AS uv
# Stage 5: Base python image
FROM ghcr.io/goauthentik/fips-python:3.13.5-slim-bookworm-fips AS python-base
# Stage 5: Python dependencies
FROM ghcr.io/goauthentik/fips-python:3.12.5-slim-bookworm-fips-full AS python-deps
ENV VENV_PATH="/ak-root/.venv" \
PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \
UV_COMPILE_BYTECODE=1 \
UV_LINK_MODE=copy \
UV_NATIVE_TLS=1 \
UV_PYTHON_DOWNLOADS=0
WORKDIR /ak-root/poetry
WORKDIR /ak-root/
COPY --from=uv /uv /uvx /bin/
# Stage 6: Python dependencies
FROM python-base AS python-deps
ARG TARGETARCH
ARG TARGETVARIANT
ENV VENV_PATH="/ak-root/venv" \
POETRY_VIRTUALENVS_CREATE=false \
PATH="/ak-root/venv/bin:$PATH"
RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
ENV PATH="/root/.cargo/bin:$PATH"
RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
apt-get update && \
# Required for installing pip packages
apt-get install -y --no-install-recommends \
# Build essentials
build-essential pkg-config libffi-dev git \
# cryptography
curl \
# libxml
libxslt-dev zlib1g-dev \
# postgresql
libpq-dev \
# python-kadmin-rs
clang libkrb5-dev sccache \
# xmlsec
libltdl-dev && \
curl https://sh.rustup.rs -sSf | sh -s -- -y
apt-get install -y --no-install-recommends build-essential pkg-config libpq-dev
ENV UV_NO_BINARY_PACKAGE="cryptography lxml python-kadmin-rs xmlsec"
RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
--mount=type=bind,target=./poetry.lock,src=./poetry.lock \
--mount=type=cache,target=/root/.cache/pip \
--mount=type=cache,target=/root/.cache/pypoetry \
python -m venv /ak-root/venv/ && \
bash -c "source ${VENV_PATH}/bin/activate && \
pip3 install --upgrade pip && \
pip3 install poetry && \
poetry install --only=main --no-ansi --no-interaction --no-root && \
pip install --force-reinstall /wheels/*"
RUN --mount=type=bind,target=pyproject.toml,src=pyproject.toml \
--mount=type=bind,target=uv.lock,src=uv.lock \
--mount=type=cache,target=/root/.cache/uv \
uv sync --frozen --no-install-project --no-dev
# Stage 6: Run
FROM ghcr.io/goauthentik/fips-python:3.12.5-slim-bookworm-fips-full AS final-image
# Stage 7: Run
FROM python-base AS final-image
ARG VERSION
ARG GIT_BUILD_HASH
ARG VERSION
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
LABEL org.opencontainers.image.url=https://goauthentik.io
LABEL org.opencontainers.image.description="goauthentik.io Main server image, see https://goauthentik.io for more info."
LABEL org.opencontainers.image.source=https://github.com/goauthentik/authentik
LABEL org.opencontainers.image.version=${VERSION}
LABEL org.opencontainers.image.revision=${GIT_BUILD_HASH}
LABEL org.opencontainers.image.url https://goauthentik.io
LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info.
LABEL org.opencontainers.image.source https://github.com/goauthentik/authentik
LABEL org.opencontainers.image.version ${VERSION}
LABEL org.opencontainers.image.revision ${GIT_BUILD_HASH}
WORKDIR /
# We cannot cache this layer otherwise we'll end up with a bigger image
RUN apt-get update && \
apt-get upgrade -y && \
# Required for runtime
apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates libkrb5-3 libkadm5clnt-mit12 libkdb5-10 libltdl7 libxslt1.1 && \
apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates && \
# Required for bootstrap & healthcheck
apt-get install -y --no-install-recommends runit && \
pip3 install --no-cache-dir --upgrade pip && \
apt-get clean && \
rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
@ -158,18 +151,18 @@ RUN apt-get update && \
COPY ./authentik/ /authentik
COPY ./pyproject.toml /
COPY ./uv.lock /
COPY ./poetry.lock /
COPY ./schemas /schemas
COPY ./locale /locale
COPY ./tests /tests
COPY ./manage.py /
COPY ./blueprints /blueprints
COPY ./lifecycle/ /lifecycle
COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf
COPY --from=go-builder /go/authentik /bin/authentik
COPY --from=python-deps /ak-root/.venv /ak-root/.venv
COPY --from=node-builder /work/web/dist/ /web/dist/
COPY --from=node-builder /work/web/authentik/ /web/authentik/
COPY --from=python-deps /ak-root/venv /ak-root/venv
COPY --from=web-builder /work/web/dist/ /web/dist/
COPY --from=web-builder /work/web/authentik/ /web/authentik/
COPY --from=website-builder /work/website/build/ /website/help/
COPY --from=geoip /usr/share/GeoIP /geoip
USER 1000
@ -177,7 +170,11 @@ USER 1000
ENV TMPDIR=/dev/shm/ \
PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \
GOFIPS=1
PATH="/ak-root/venv/bin:/lifecycle:$PATH" \
VENV_PATH="/ak-root/venv" \
POETRY_VIRTUALENVS_CREATE=false
ENV GOFIPS=1
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]

Makefile
View File

@ -1,21 +1,35 @@
.PHONY: gen dev-reset all clean test web website
SHELL := /usr/bin/env bash
.SHELLFLAGS += ${SHELLFLAGS} -e -o pipefail
.SHELLFLAGS += ${SHELLFLAGS} -e
PWD = $(shell pwd)
UID = $(shell id -u)
GID = $(shell id -g)
NPM_VERSION = $(shell python -m scripts.generate_semver)
NPM_VERSION = $(shell python -m scripts.npm_version)
PY_SOURCES = authentik tests scripts lifecycle .github
DOCKER_IMAGE ?= "authentik:test"
GEN_API_TS = gen-ts-api
GEN_API_PY = gen-py-api
GEN_API_GO = gen-go-api
GEN_API_TS = "gen-ts-api"
GEN_API_PY = "gen-py-api"
GEN_API_GO = "gen-go-api"
pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null)
pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null)
pg_name := $(shell uv run python -m authentik.lib.config postgresql.name 2>/dev/null)
pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null)
pg_host := $(shell python -m authentik.lib.config postgresql.host 2>/dev/null)
pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)
CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
-I .github/codespell-words.txt \
-S 'web/src/locales/**' \
-S 'website/developer-docs/api/reference/**' \
authentik \
internal \
cmd \
web/src \
website/src \
website/blog \
website/developer-docs \
website/docs \
website/integrations \
website/src
all: lint-fix lint test gen web ## Lint, build, and test everything
@ -29,41 +43,44 @@ help: ## Show this help
sort
@echo ""
go-test:
test-go:
go test -timeout 0 -v -race -cover ./...
test-docker: ## Run all tests in a docker-compose
echo "PG_PASS=$(shell openssl rand 32 | base64 -w 0)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(shell openssl rand 32 | base64 -w 0)" >> .env
docker compose pull -q
docker compose up --no-start
docker compose start postgresql redis
docker compose run -u root server test-all
rm -f .env
test: ## Run the server tests and produce a coverage report (locally)
uv run coverage run manage.py test --keepdb authentik
uv run coverage html
uv run coverage report
coverage run manage.py test --keepdb authentik
coverage html
coverage report
lint-fix: lint-codespell ## Lint and automatically fix errors in the python source code. Reports spelling errors.
uv run black $(PY_SOURCES)
uv run ruff check --fix $(PY_SOURCES)
black $(PY_SOURCES)
ruff check --fix $(PY_SOURCES)
lint-codespell: ## Reports spelling errors.
uv run codespell -w
codespell -w $(CODESPELL_ARGS)
lint: ## Lint the python and golang sources
uv run bandit -c pyproject.toml -r $(PY_SOURCES)
bandit -r $(PY_SOURCES) -x web/node_modules -x tests/wdio/node_modules -x website/node_modules
golangci-lint run -v
core-install:
uv sync --frozen
poetry install
migrate: ## Run the Authentik Django server's migrations
uv run python -m lifecycle.migrate
python -m lifecycle.migrate
i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation into files to send to a translation service
aws-cfn:
cd lifecycle/aws && npm run aws-cfn
run: ## Run the main authentik server process
uv run ak server
core-i18n-extract:
uv run ak makemessages \
ak makemessages \
--add-location file \
--no-obsolete \
--ignore web \
@ -86,10 +103,6 @@ dev-create-db:
dev-reset: dev-drop-db dev-create-db migrate ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state.
update-test-mmdb: ## Update test GeoIP and ASN Databases
curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-ASN-Test.mmdb -o ${PWD}/tests/GeoLite2-ASN-Test.mmdb
curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-City-Test.mmdb -o ${PWD}/tests/GeoLite2-City-Test.mmdb
#########################
## API Schema
#########################
@ -98,11 +111,11 @@ gen-build: ## Extract the schema from the database
AUTHENTIK_DEBUG=true \
AUTHENTIK_TENANTS__ENABLED=true \
AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
uv run ak make_blueprint_schema --file blueprints/schema.json
ak make_blueprint_schema > blueprints/schema.json
AUTHENTIK_DEBUG=true \
AUTHENTIK_TENANTS__ENABLED=true \
AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
uv run ak spectacular --file schema.yml
ak spectacular --file schema.yml
gen-changelog: ## (Release) generate the changelog based from the commits since the last tag
git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
@ -122,19 +135,14 @@ gen-diff: ## (Release) generate the changelog diff between the current schema a
npx prettier --write diff.md
gen-clean-ts: ## Remove generated API client for Typescript
rm -rf ${PWD}/${GEN_API_TS}/
rm -rf ${PWD}/web/node_modules/@goauthentik/api/
rm -rf ./${GEN_API_TS}/
rm -rf ./web/node_modules/@goauthentik/api/
gen-clean-go: ## Remove generated API client for Go
mkdir -p ${PWD}/${GEN_API_GO}
ifneq ($(wildcard ${PWD}/${GEN_API_GO}/.*),)
make -C ${PWD}/${GEN_API_GO} clean
else
rm -rf ${PWD}/${GEN_API_GO}
endif
rm -rf ./${GEN_API_GO}/
gen-clean-py: ## Remove generated API client for Python
rm -rf ${PWD}/${GEN_API_PY}/
rm -rf ./${GEN_API_PY}/
gen-clean: gen-clean-ts gen-clean-go gen-clean-py ## Remove generated API clients
@ -142,7 +150,7 @@ gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescri
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
-i /local/schema.yml \
-g typescript-fetch \
-o /local/${GEN_API_TS} \
@ -150,15 +158,15 @@ gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescri
--additional-properties=npmVersion=${NPM_VERSION} \
--git-repo-id authentik \
--git-user-id goauthentik
cd ${PWD}/${GEN_API_TS} && npm link
cd ${PWD}/web && npm link @goauthentik/api
mkdir -p web/node_modules/@goauthentik/api
cd ./${GEN_API_TS} && npm i
\cp -rf ./${GEN_API_TS}/* web/node_modules/@goauthentik/api
gen-client-py: gen-clean-py ## Build and install the authentik API for Python
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
docker.io/openapitools/openapi-generator-cli:v7.4.0 generate \
-i /local/schema.yml \
-g python \
-o /local/${GEN_API_PY} \
@ -166,20 +174,27 @@ gen-client-py: gen-clean-py ## Build and install the authentik API for Python
--additional-properties=packageVersion=${NPM_VERSION} \
--git-repo-id authentik \
--git-user-id goauthentik
pip install ./${GEN_API_PY}
gen-client-go: gen-clean-go ## Build and install the authentik API for Golang
mkdir -p ${PWD}/${GEN_API_GO}
ifeq ($(wildcard ${PWD}/${GEN_API_GO}/.*),)
git clone --depth 1 https://github.com/goauthentik/client-go.git ${PWD}/${GEN_API_GO}
else
cd ${PWD}/${GEN_API_GO} && git pull
endif
cp ${PWD}/schema.yml ${PWD}/${GEN_API_GO}
make -C ${PWD}/${GEN_API_GO} build
mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates
wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./${GEN_API_GO}/config.yaml
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./${GEN_API_GO}/templates/README.mustache
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./${GEN_API_GO}/templates/go.mod.mustache
cp schema.yml ./${GEN_API_GO}/
docker run \
--rm -v ${PWD}/${GEN_API_GO}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
-i /local/schema.yml \
-g go \
-o /local/ \
-c /local/config.yaml
go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO}
rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/
gen-dev-config: ## Generate a local development config file
uv run scripts/generate_config.py
python -m scripts.generate_config
gen: gen-build gen-client-ts
@ -195,9 +210,6 @@ web: web-lint-fix web-lint web-check-compile ## Automatically fix formatting is
web-install: ## Install the necessary libraries to build the Authentik UI
cd web && npm ci
web-test: ## Run tests for the Authentik UI
cd web && npm run test
web-watch: ## Build and watch the Authentik UI for changes, updating automatically
rm -rf web/dist/
mkdir web/dist/
@ -246,9 +258,6 @@ docker: ## Build a docker image of the current source tree
mkdir -p ${GEN_API_TS}
DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}
test-docker:
BUILD=true ${PWD}/scripts/test_docker.sh
#########################
## CI
#########################
@ -260,21 +269,16 @@ ci--meta-debug:
node --version
ci-black: ci--meta-debug
uv run black --check $(PY_SOURCES)
black --check $(PY_SOURCES)
ci-ruff: ci--meta-debug
uv run ruff check $(PY_SOURCES)
ruff check $(PY_SOURCES)
ci-codespell: ci--meta-debug
uv run codespell -s
codespell $(CODESPELL_ARGS) -s
ci-bandit: ci--meta-debug
uv run bandit -r $(PY_SOURCES)
bandit -r $(PY_SOURCES)
ci-pending-migrations: ci--meta-debug
uv run ak makemigrations --check
ci-test: ci--meta-debug
uv run coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik
uv run coverage report
uv run coverage xml
ak makemigrations --check

View File

@ -15,9 +15,7 @@
## What is authentik?
authentik is an open-source Identity Provider that emphasizes flexibility and versatility, with support for a wide set of protocols.
Our [enterprise offer](https://goauthentik.io/pricing) can also be used as a self-hosted replacement for large-scale deployments of Okta/Auth0, Entra ID, Ping Identity, or other legacy IdPs for employees and B2B2C use.
authentik is an open-source Identity Provider that emphasizes flexibility and versatility. It can be seamlessly integrated into existing environments to support new protocols. authentik is also a great solution for implementing sign-up, recovery, and other similar features in your application, saving you the hassle of dealing with them.
## Installation
@ -34,7 +32,7 @@ For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/h
## Development
See [Developer Documentation](https://docs.goauthentik.io/docs/developer-docs/?utm_source=github)
See [Developer Documentation](https://goauthentik.io/developer-docs/?utm_source=github)
## Security
@ -42,4 +40,4 @@ See [SECURITY.md](SECURITY.md)
## Adoption and Contributions
Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [contribution guide](https://docs.goauthentik.io/docs/developer-docs?utm_source=github).
Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md).

View File

@ -2,7 +2,7 @@ authentik takes security very seriously. We follow the rules of [responsible di
## Independent audits and pentests
We are committed to engaging in regular pentesting and security audits of authentik. Defining and adhering to a cadence of external testing ensures a stronger probability that our code base, our features, and our architecture are as secure and non-exploitable as possible. For more details about specific audits and pentests, refer to "Audits and Certificates" in our [Security documentation](https://docs.goauthentik.io/docs/security).
In May/June of 2023 [Cure53](https://cure53.de) conducted an audit and pentest. The [results](https://cure53.de/pentest-report_authentik.pdf) are published on the [Cure53 website](https://cure53.de/#publications-2023). For more details about authentik's response to the findings of the audit refer to [2023-06 Cure53 Code audit](https://goauthentik.io/docs/security/2023-06-cure53).
## What authentik classifies as a CVE
@ -18,10 +18,10 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni
(.x being the latest patch release for each version)
| Version | Supported |
| --------- | --------- |
| 2025.4.x | ✅ |
| 2025.6.x | ✅ |
| Version | Supported |
| -------- | --------- |
| 2024.4.x | ✅ |
| 2024.6.x | ✅ |
## Reporting a Vulnerability

View File

@ -2,7 +2,7 @@
from os import environ
__version__ = "2025.6.3"
__version__ = "2024.6.3"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
@ -16,5 +16,5 @@ def get_full_version() -> str:
"""Get full version, with build hash appended"""
version = __version__
if (build_hash := get_build_hash()) != "":
return f"{version}+{build_hash}"
version += "." + build_hash
return version

View File

@ -0,0 +1,79 @@
"""authentik administration metrics"""
from datetime import timedelta
from django.db.models.functions import ExtractHour
from drf_spectacular.utils import extend_schema, extend_schema_field
from guardian.shortcuts import get_objects_for_user
from rest_framework.fields import IntegerField, SerializerMethodField
from rest_framework.permissions import IsAuthenticated
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from authentik.core.api.utils import PassiveSerializer
from authentik.events.models import EventAction
class CoordinateSerializer(PassiveSerializer):
"""Coordinates for diagrams"""
x_cord = IntegerField(read_only=True)
y_cord = IntegerField(read_only=True)
class LoginMetricsSerializer(PassiveSerializer):
"""Login Metrics per 1h"""
logins = SerializerMethodField()
logins_failed = SerializerMethodField()
authorizations = SerializerMethodField()
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins(self, _):
"""Get successful logins per 8 hours for the last 7 days"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.LOGIN
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins_failed(self, _):
"""Get failed logins per 8 hours for the last 7 days"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.LOGIN_FAILED
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_authorizations(self, _):
"""Get successful authorizations per 8 hours for the last 7 days"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.AUTHORIZE_APPLICATION
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
)
class AdministrationMetricsViewSet(APIView):
"""Login Metrics per 1h"""
permission_classes = [IsAuthenticated]
@extend_schema(responses={200: LoginMetricsSerializer(many=False)})
def get(self, request: Request) -> Response:
"""Login Metrics per 1h"""
serializer = LoginMetricsSerializer(True)
serializer.context["user"] = request.user
return Response(serializer.data)
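
The serializer above delegates the hourly bucketing to an internal queryset helper (get_events_per). As a rough, generic illustration only, not authentik's actual implementation, the same kind of per-hour aggregation can be written with plain Django ORM annotations; the model and field names below are hypothetical.

from datetime import timedelta

from django.db.models import Count, QuerySet
from django.db.models.functions import TruncHour
from django.utils.timezone import now


def events_per_hour(queryset: QuerySet, days: int = 7) -> list[dict]:
    """Generic sketch: count events per hour-long bucket over the last `days` days."""
    since = now() - timedelta(days=days)
    return list(
        queryset.filter(created__gte=since)   # `created` is a hypothetical timestamp field
        .annotate(hour=TruncHour("created"))  # collapse each event onto its hour bucket
        .values("hour")
        .annotate(count=Count("pk"))
        .order_by("hour")
    )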

View File

@ -7,9 +7,7 @@ from sys import version as python_version
from typing import TypedDict
from cryptography.hazmat.backends.openssl.backend import backend
from django.conf import settings
from django.utils.timezone import now
from django.views.debug import SafeExceptionReporterFilter
from drf_spectacular.utils import extend_schema
from rest_framework.fields import SerializerMethodField
from rest_framework.request import Request
@ -54,16 +52,10 @@ class SystemInfoSerializer(PassiveSerializer):
def get_http_headers(self, request: Request) -> dict[str, str]:
"""Get HTTP Request headers"""
headers = {}
raw_session = request._request.COOKIES.get(settings.SESSION_COOKIE_NAME)
for key, value in request.META.items():
if not isinstance(value, str):
continue
actual_value = value
if raw_session is not None and raw_session in actual_value:
actual_value = actual_value.replace(
raw_session, SafeExceptionReporterFilter.cleansed_substitute
)
headers[key] = actual_value
headers[key] = value
return headers
def get_http_host(self, request: Request) -> str:

View File

@ -1,7 +1,6 @@
"""authentik administration overview"""
from django.core.cache import cache
from django_tenants.utils import get_public_schema_name
from drf_spectacular.utils import extend_schema
from packaging.version import parse
from rest_framework.fields import SerializerMethodField
@ -13,8 +12,6 @@ from rest_framework.views import APIView
from authentik import __version__, get_build_hash
from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version
from authentik.core.api.utils import PassiveSerializer
from authentik.outposts.models import Outpost
from authentik.tenants.utils import get_current_tenant
class VersionSerializer(PassiveSerializer):
@ -25,7 +22,6 @@ class VersionSerializer(PassiveSerializer):
version_latest_valid = SerializerMethodField()
build_hash = SerializerMethodField()
outdated = SerializerMethodField()
outpost_outdated = SerializerMethodField()
def get_build_hash(self, _) -> str:
"""Get build hash, if version is not latest or released"""
@ -37,8 +33,6 @@ class VersionSerializer(PassiveSerializer):
def get_version_latest(self, _) -> str:
"""Get latest version from cache"""
if get_current_tenant().schema_name == get_public_schema_name():
return __version__
version_in_cache = cache.get(VERSION_CACHE_KEY)
if not version_in_cache: # pragma: no cover
update_latest_version.delay()
@ -53,15 +47,6 @@ class VersionSerializer(PassiveSerializer):
"""Check if we're running the latest version"""
return parse(self.get_version_current(instance)) < parse(self.get_version_latest(instance))
def get_outpost_outdated(self, _) -> bool:
"""Check if any outpost is outdated/has a version mismatch"""
any_outdated = False
for outpost in Outpost.objects.all():
for state in outpost.state:
if state.version_outdated:
any_outdated = True
return any_outdated
class VersionView(APIView):
"""Get running and latest version."""

View File

@ -1,33 +0,0 @@
from rest_framework.permissions import IsAdminUser
from rest_framework.viewsets import ReadOnlyModelViewSet
from authentik.admin.models import VersionHistory
from authentik.core.api.utils import ModelSerializer
class VersionHistorySerializer(ModelSerializer):
"""VersionHistory Serializer"""
class Meta:
model = VersionHistory
fields = [
"id",
"timestamp",
"version",
"build",
]
class VersionHistoryViewSet(ReadOnlyModelViewSet):
"""VersionHistory Viewset"""
queryset = VersionHistory.objects.all()
serializer_class = VersionHistorySerializer
permission_classes = [IsAdminUser]
filterset_fields = [
"version",
"build",
]
search_fields = ["version", "build"]
ordering = ["-timestamp"]
pagination_class = None

View File

@ -1,16 +1,12 @@
"""authentik administration overview"""
from socket import gethostname
from django.conf import settings
from drf_spectacular.utils import extend_schema, inline_serializer
from packaging.version import parse
from rest_framework.fields import BooleanField, CharField
from rest_framework.fields import IntegerField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from authentik import get_full_version
from authentik.rbac.permissions import HasPermission
from authentik.root.celery import CELERY_APP
@ -20,38 +16,11 @@ class WorkerView(APIView):
permission_classes = [HasPermission("authentik_rbac.view_system_info")]
@extend_schema(
responses=inline_serializer(
"Worker",
fields={
"worker_id": CharField(),
"version": CharField(),
"version_matching": BooleanField(),
},
many=True,
)
)
@extend_schema(responses=inline_serializer("Workers", fields={"count": IntegerField()}))
def get(self, request: Request) -> Response:
"""Get currently connected worker count."""
raw: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5)
our_version = parse(get_full_version())
response = []
for worker in raw:
key = list(worker.keys())[0]
version = worker[key].get("version")
version_matching = False
if version:
version_matching = parse(version) == our_version
response.append(
{"worker_id": key, "version": version, "version_matching": version_matching}
)
count = len(CELERY_APP.control.ping(timeout=0.5))
# In debug we run with `task_always_eager`, so tasks are run on the main process
if settings.DEBUG: # pragma: no cover
response.append(
{
"worker_id": f"authentik-debug@{gethostname()}",
"version": get_full_version(),
"version_matching": True,
}
)
return Response(response)
count += 1
return Response({"count": count})

View File

@ -1,10 +1,11 @@
"""authentik admin app config"""
from prometheus_client import Info
from prometheus_client import Gauge, Info
from authentik.blueprints.apps import ManagedAppConfig
PROM_INFO = Info("authentik_version", "Currently running authentik version")
GAUGE_WORKERS = Gauge("authentik_admin_workers", "Currently connected workers")
class AuthentikAdminConfig(ManagedAppConfig):
@ -14,19 +15,3 @@ class AuthentikAdminConfig(ManagedAppConfig):
label = "authentik_admin"
verbose_name = "authentik Admin"
default = True
@ManagedAppConfig.reconcile_global
def clear_update_notifications(self):
"""Clear update notifications on startup if the notification was for the version
we're running now."""
from packaging.version import parse
from authentik.admin.tasks import LOCAL_VERSION
from authentik.events.models import EventAction, Notification
for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE):
if "new_version" not in notification.event.context:
continue
notification_version = notification.event.context["new_version"]
if LOCAL_VERSION >= parse(notification_version):
notification.delete()

View File

@ -1,22 +0,0 @@
"""authentik admin models"""
from django.db import models
from django.utils.translation import gettext_lazy as _
class VersionHistory(models.Model):
id = models.BigAutoField(primary_key=True)
timestamp = models.DateTimeField()
version = models.TextField()
build = models.TextField()
class Meta:
managed = False
db_table = "authentik_version_history"
ordering = ("-timestamp",)
verbose_name = _("Version history")
verbose_name_plural = _("Version history")
default_permissions = []
def __str__(self):
return f"{self.version}.{self.build} ({self.timestamp})"

View File

@ -1,7 +1,6 @@
"""authentik admin settings"""
from celery.schedules import crontab
from django_tenants.utils import get_public_schema_name
from authentik.lib.utils.time import fqdn_rand
@ -9,7 +8,6 @@ CELERY_BEAT_SCHEDULE = {
"admin_latest_version": {
"task": "authentik.admin.tasks.update_latest_version",
"schedule": crontab(minute=fqdn_rand("admin_latest_version"), hour="*"),
"tenant_schemas": [get_public_schema_name()],
"options": {"queue": "authentik_scheduled"},
}
}

View File

@ -1,35 +1,14 @@
"""admin signals"""
from django.dispatch import receiver
from packaging.version import parse
from prometheus_client import Gauge
from authentik import get_full_version
from authentik.admin.apps import GAUGE_WORKERS
from authentik.root.celery import CELERY_APP
from authentik.root.monitoring import monitoring_set
GAUGE_WORKERS = Gauge(
"authentik_admin_workers",
"Currently connected workers, their versions and if they are the same version as authentik",
["version", "version_matched"],
)
_version = parse(get_full_version())
@receiver(monitoring_set)
def monitoring_set_workers(sender, **kwargs):
"""Set worker gauge"""
raw: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5)
worker_version_count = {}
for worker in raw:
key = list(worker.keys())[0]
version = worker[key].get("version")
version_matching = False
if version:
version_matching = parse(version) == _version
worker_version_count.setdefault(version, {"count": 0, "matching": version_matching})
worker_version_count[version]["count"] += 1
for version, stats in worker_version_count.items():
GAUGE_WORKERS.labels(version, stats["matching"]).set(stats["count"])
count = len(CELERY_APP.control.ping(timeout=0.5))
GAUGE_WORKERS.set(count)
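
The labelled-gauge variant above groups workers by version before exporting the count. A small self-contained sketch of how a labelled prometheus_client Gauge behaves; the metric and label values are chosen purely for illustration:

from prometheus_client import CollectorRegistry, Gauge, generate_latest

registry = CollectorRegistry()
workers_gauge = Gauge(
    "example_workers",
    "Connected workers, by version and whether it matches the server",
    ["version", "version_matched"],
    registry=registry,
)

# Every distinct label combination becomes its own time series.
workers_gauge.labels(version="2024.6.3", version_matched="True").set(3)
workers_gauge.labels(version="2024.6.1", version_matched="False").set(1)

print(generate_latest(registry).decode())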

View File

@ -1,14 +1,17 @@
"""authentik admin tasks"""
import re
from django.core.cache import cache
from django.utils.translation import gettext_lazy as _
from django.core.validators import URLValidator
from django.db import DatabaseError, InternalError, ProgrammingError
from packaging.version import parse
from requests import RequestException
from structlog.stdlib import get_logger
from authentik import __version__, get_build_hash
from authentik.admin.apps import PROM_INFO
from authentik.events.models import Event, EventAction
from authentik.events.models import Event, EventAction, Notification
from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
from authentik.lib.config import CONFIG
from authentik.lib.utils.http import get_http_session
@ -18,6 +21,8 @@ LOGGER = get_logger()
VERSION_NULL = "0.0.0"
VERSION_CACHE_KEY = "authentik_latest_version"
VERSION_CACHE_TIMEOUT = 8 * 60 * 60 # 8 hours
# Chop off the first ^ because we want to search the entire string
URL_FINDER = URLValidator.regex.pattern[1:]
LOCAL_VERSION = parse(__version__)
@ -32,6 +37,20 @@ def _set_prom_info():
)
@CELERY_APP.task(
throws=(DatabaseError, ProgrammingError, InternalError),
)
def clear_update_notifications():
"""Clear update notifications on startup if the notification was for the version
we're running now."""
for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE):
if "new_version" not in notification.event.context:
continue
notification_version = notification.event.context["new_version"]
if LOCAL_VERSION >= parse(notification_version):
notification.delete()
@CELERY_APP.task(bind=True, base=SystemTask)
@prefill_task
def update_latest_version(self: SystemTask):
@ -59,16 +78,10 @@ def update_latest_version(self: SystemTask):
context__new_version=upstream_version,
).exists():
return
Event.new(
EventAction.UPDATE_AVAILABLE,
message=_(
"New version {version} available!".format(
version=upstream_version,
)
),
new_version=upstream_version,
changelog=data.get("stable", {}).get("changelog_url"),
).save()
event_dict = {"new_version": upstream_version}
if match := re.search(URL_FINDER, data.get("stable", {}).get("changelog", "")):
event_dict["message"] = f"Changelog: {match.group()}"
Event.new(EventAction.UPDATE_AVAILABLE, **event_dict).save()
except (RequestException, IndexError) as exc:
cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT)
self.set_error(exc)
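
The URL_FINDER trick above reuses Django's URLValidator pattern with the leading ^ removed, so re.search can locate a URL inside a changelog string. A short, hedged sketch of the same idea in isolation (the URL sits at the end of the sample text, matching how the changelog strings look in the tests below):

import re

from django.core.validators import URLValidator

# URLValidator.regex is anchored with a leading ^; dropping it lets re.search
# find a URL that is embedded in, rather than starting, the string.
URL_FINDER = URLValidator.regex.pattern[1:]

changelog = "See https://goauthentik.io/test"
if match := re.search(URL_FINDER, changelog):
    print(f"Changelog: {match.group()}")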

View File

@ -34,7 +34,12 @@ class TestAdminAPI(TestCase):
response = self.client.get(reverse("authentik_api:admin_workers"))
self.assertEqual(response.status_code, 200)
body = loads(response.content)
self.assertEqual(len(body), 0)
self.assertEqual(body["count"], 0)
def test_metrics(self):
"""Test metrics API"""
response = self.client.get(reverse("authentik_api:admin_metrics"))
self.assertEqual(response.status_code, 200)
def test_apps(self):
"""Test apps API"""

View File

@ -1,12 +1,12 @@
"""test admin tasks"""
from django.apps import apps
from django.core.cache import cache
from django.test import TestCase
from requests_mock import Mocker
from authentik.admin.tasks import (
VERSION_CACHE_KEY,
clear_update_notifications,
update_latest_version,
)
from authentik.events.models import Event, EventAction
@ -17,7 +17,6 @@ RESPONSE_VALID = {
"stable": {
"version": "99999999.9999999",
"changelog": "See https://goauthentik.io/test",
"changelog_url": "https://goauthentik.io/test",
"reason": "bugfix",
},
}
@ -36,7 +35,7 @@ class TestAdminTasks(TestCase):
Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE,
context__new_version="99999999.9999999",
context__message="New version 99999999.9999999 available!",
context__message="Changelog: https://goauthentik.io/test",
).exists()
)
# test that a consecutive check doesn't create a duplicate event
@ -46,7 +45,7 @@ class TestAdminTasks(TestCase):
Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE,
context__new_version="99999999.9999999",
context__message="New version 99999999.9999999 available!",
context__message="Changelog: https://goauthentik.io/test",
)
),
1,
@ -72,13 +71,12 @@ class TestAdminTasks(TestCase):
def test_clear_update_notifications(self):
"""Test clear of previous notification"""
admin_config = apps.get_app_config("authentik_admin")
Event.objects.create(
action=EventAction.UPDATE_AVAILABLE, context={"new_version": "99999999.9999999.9999999"}
)
Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={"new_version": "1.1.1"})
Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={})
admin_config.clear_update_notifications()
clear_update_notifications()
self.assertFalse(
Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE, context__new_version="1.1"

View File

@ -3,16 +3,20 @@
from django.urls import path
from authentik.admin.api.meta import AppsViewSet, ModelViewSet
from authentik.admin.api.metrics import AdministrationMetricsViewSet
from authentik.admin.api.system import SystemView
from authentik.admin.api.version import VersionView
from authentik.admin.api.version_history import VersionHistoryViewSet
from authentik.admin.api.workers import WorkerView
api_urlpatterns = [
("admin/apps", AppsViewSet, "apps"),
("admin/models", ModelViewSet, "models"),
path(
"admin/metrics/",
AdministrationMetricsViewSet.as_view(),
name="admin_metrics",
),
path("admin/version/", VersionView.as_view(), name="admin_version"),
("admin/version/history", VersionHistoryViewSet, "version_history"),
path("admin/workers/", WorkerView.as_view(), name="admin_workers"),
path("admin/system/", SystemView.as_view(), name="admin_system"),
]

View File

@ -1,13 +1,12 @@
"""authentik API AppConfig"""
from authentik.blueprints.apps import ManagedAppConfig
from django.apps import AppConfig
class AuthentikAPIConfig(ManagedAppConfig):
class AuthentikAPIConfig(AppConfig):
"""authentik API Config"""
name = "authentik.api"
label = "authentik_api"
mountpoint = "api/"
verbose_name = "authentik API"
default = True

View File

@ -1,12 +1,9 @@
"""API Authentication"""
from hmac import compare_digest
from pathlib import Path
from tempfile import gettempdir
from typing import Any
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from drf_spectacular.extensions import OpenApiAuthenticationExtension
from rest_framework.authentication import BaseAuthentication, get_authorization_header
from rest_framework.exceptions import AuthenticationFailed
@ -14,17 +11,11 @@ from rest_framework.request import Request
from structlog.stdlib import get_logger
from authentik.core.middleware import CTX_AUTH_VIA
from authentik.core.models import Token, TokenIntents, User, UserTypes
from authentik.core.models import Token, TokenIntents, User
from authentik.outposts.models import Outpost
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
LOGGER = get_logger()
_tmp = Path(gettempdir())
try:
with open(_tmp / "authentik-core-ipc.key") as _f:
ipc_key = _f.read()
except OSError:
ipc_key = None
def validate_auth(header: bytes) -> str | None:
@ -82,11 +73,6 @@ def auth_user_lookup(raw_header: bytes) -> User | None:
if user:
CTX_AUTH_VIA.set("secret_key")
return user
# then try to auth via secret key (for embedded outpost/etc)
user = token_ipc(auth_credentials)
if user:
CTX_AUTH_VIA.set("ipc")
return user
raise AuthenticationFailed("Token invalid/expired")
@ -104,43 +90,6 @@ def token_secret_key(value: str) -> User | None:
return outpost.user
class IPCUser(AnonymousUser):
"""'Virtual' user for IPC communication between authentik core and the authentik router"""
username = "authentik:system"
is_active = True
is_superuser = True
@property
def type(self):
return UserTypes.INTERNAL_SERVICE_ACCOUNT
def has_perm(self, perm, obj=None):
return True
def has_perms(self, perm_list, obj=None):
return True
def has_module_perms(self, module):
return True
@property
def is_anonymous(self):
return False
@property
def is_authenticated(self):
return True
def token_ipc(value: str) -> User | None:
"""Check if the token is the secret key
and return the service account for the managed outpost"""
if not ipc_key or not compare_digest(value, ipc_key):
return None
return IPCUser()
class TokenAuthentication(BaseAuthentication):
"""Token-based authentication using HTTP Bearer authentication"""

View File

@ -0,0 +1,67 @@
"""API Authorization"""
from django.conf import settings
from django.db.models import Model
from django.db.models.query import QuerySet
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.authentication import get_authorization_header
from rest_framework.filters import BaseFilterBackend
from rest_framework.permissions import BasePermission
from rest_framework.request import Request
from authentik.api.authentication import validate_auth
from authentik.rbac.filters import ObjectFilter
class OwnerFilter(BaseFilterBackend):
"""Filter objects by their owner"""
owner_key = "user"
def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
if request.user.is_superuser:
return queryset
return queryset.filter(**{self.owner_key: request.user})
class SecretKeyFilter(DjangoFilterBackend):
"""Allow access to all objects when authenticated with secret key as token.
Replaces both DjangoFilterBackend and ObjectFilter"""
def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
auth_header = get_authorization_header(request)
token = validate_auth(auth_header)
if token and token == settings.SECRET_KEY:
return queryset
queryset = ObjectFilter().filter_queryset(request, queryset, view)
return super().filter_queryset(request, queryset, view)
class OwnerPermissions(BasePermission):
"""Authorize requests by an object's owner matching the requesting user"""
owner_key = "user"
def has_permission(self, request: Request, view) -> bool:
"""If the user is authenticated, we allow all requests here. For listing, the
object-level permissions are done by the filter backend"""
return request.user.is_authenticated
def has_object_permission(self, request: Request, view, obj: Model) -> bool:
"""Check if the object's owner matches the currently logged in user"""
if not hasattr(obj, self.owner_key):
return False
owner = getattr(obj, self.owner_key)
if owner != request.user:
return False
return True
class OwnerSuperuserPermissions(OwnerPermissions):
"""Similar to OwnerPermissions, except always allow access for superusers"""
def has_object_permission(self, request: Request, view, obj: Model) -> bool:
if request.user.is_superuser:
return True
return super().has_object_permission(request, view, obj)
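
For context, a hedged sketch of how these filter and permission classes are typically attached to a DRF viewset. The model, serializer, and their import paths are placeholders rather than authentik code:

from rest_framework.viewsets import ModelViewSet

# Placeholder application code; substitute a real model/serializer pair.
from myapp.models import Note
from myapp.serializers import NoteSerializer

# OwnerFilter and OwnerSuperuserPermissions are the classes defined above.


class NoteViewSet(ModelViewSet):
    """Owners list and modify only their own objects; superusers bypass the owner check."""

    queryset = Note.objects.all()
    serializer_class = NoteSerializer
    filter_backends = [OwnerFilter]                   # restricts list results to request.user's objects
    permission_classes = [OwnerSuperuserPermissions]  # per-object owner check with superuser bypass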

View File

@ -54,7 +54,7 @@ def create_component(generator: SchemaGenerator, name, schema, type_=ResolvedCom
return component
def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):
def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs): # noqa: W0613
"""Workaround to set a default response for endpoints.
Workaround suggested at
<https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357>

View File

@ -7,7 +7,7 @@ API Browser - {{ brand.branding_title }}
{% endblock %}
{% block head %}
<script src="{% versioned_script 'dist/standalone/api-browser/index-%v.js' %}" type="module"></script>
{% versioned_script "dist/standalone/api-browser/index-%v.js" %}
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)">
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)">
{% endblock %}

View File

@ -7,7 +7,7 @@ from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField, DateTimeField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer
from rest_framework.serializers import ListSerializer, ModelSerializer
from rest_framework.viewsets import ModelViewSet
from authentik.blueprints.models import BlueprintInstance
@ -15,7 +15,7 @@ from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.oci import OCI_PREFIX
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer
from authentik.core.api.utils import JSONDictField, PassiveSerializer
from authentik.rbac.decorators import permission_required
@ -51,11 +51,9 @@ class BlueprintInstanceSerializer(ModelSerializer):
context = self.instance.context if self.instance else {}
valid, logs = Importer.from_string(content, context).validate()
if not valid:
text_logs = "\n".join([x["event"] for x in logs])
raise ValidationError(
[
_("Failed to validate blueprint"),
*[f"- {x.event}" for x in logs],
]
_("Failed to validate blueprint: {logs}".format_map({"logs": text_logs}))
)
return content

View File

@ -1,68 +0,0 @@
"""Test and debug Blueprints"""
import atexit
import readline
from pathlib import Path
from pprint import pformat
from sys import exit as sysexit
from textwrap import indent
from django.core.management.base import BaseCommand, no_translations
from structlog.stdlib import get_logger
from yaml import load
from authentik.blueprints.v1.common import BlueprintLoader, EntryInvalidError
from authentik.core.management.commands.shell import get_banner_text
from authentik.lib.utils.errors import exception_to_string
LOGGER = get_logger()
class Command(BaseCommand):
"""Test and debug Blueprints"""
lines = []
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
histfolder = Path("~").expanduser() / Path(".local/share/authentik")
histfolder.mkdir(parents=True, exist_ok=True)
histfile = histfolder / Path("blueprint_shell_history")
readline.parse_and_bind("tab: complete")
readline.parse_and_bind("set editing-mode vi")
try:
readline.read_history_file(str(histfile))
except FileNotFoundError:
pass
atexit.register(readline.write_history_file, str(histfile))
@no_translations
def handle(self, *args, **options):
"""Interactively debug blueprint files"""
self.stdout.write(get_banner_text("Blueprint shell"))
self.stdout.write("Type '.eval' to evaluate previously entered statement(s).")
def do_eval():
yaml_input = "\n".join([line for line in self.lines if line])
data = load(yaml_input, BlueprintLoader)
self.stdout.write(pformat(data))
self.lines = []
while True:
try:
line = input("> ")
if line == ".eval":
do_eval()
else:
self.lines.append(line)
except EntryInvalidError as exc:
self.stdout.write("Failed to evaluate expression:")
self.stdout.write(indent(exception_to_string(exc), prefix=" "))
except EOFError:
break
except KeyboardInterrupt:
self.stdout.write()
sysexit(0)
self.stdout.write()

View File

@ -72,33 +72,20 @@ class Command(BaseCommand):
"additionalProperties": True,
},
"entries": {
"anyOf": [
{
"type": "array",
"items": {"$ref": "#/$defs/blueprint_entry"},
},
{
"type": "object",
"additionalProperties": {
"type": "array",
"items": {"$ref": "#/$defs/blueprint_entry"},
},
},
],
"type": "array",
"items": {
"oneOf": [],
},
},
},
"$defs": {"blueprint_entry": {"oneOf": []}},
"$defs": {},
}
def add_arguments(self, parser):
parser.add_argument("--file", type=str)
@no_translations
def handle(self, *args, file: str, **options):
def handle(self, *args, **options):
"""Generate JSON Schema for blueprints"""
self.build()
with open(file, "w") as _schema:
_schema.write(dumps(self.schema, indent=4, default=Command.json_default))
self.stdout.write(dumps(self.schema, indent=4, default=Command.json_default))
@staticmethod
def json_default(value: Any) -> Any:
@ -125,7 +112,7 @@ class Command(BaseCommand):
}
)
model_path = f"{model._meta.app_label}.{model._meta.model_name}"
self.schema["$defs"]["blueprint_entry"]["oneOf"].append(
self.schema["properties"]["entries"]["items"]["oneOf"].append(
self.template_entry(model_path, model, serializer)
)
@ -139,7 +126,7 @@ class Command(BaseCommand):
def_name_perm = f"model_{model_path}_permissions"
def_path_perm = f"#/$defs/{def_name_perm}"
self.schema["$defs"][def_name_perm] = self.model_permissions(model)
template = {
return {
"type": "object",
"required": ["model", "identifiers"],
"properties": {
@ -147,7 +134,7 @@ class Command(BaseCommand):
"id": {"type": "string"},
"state": {
"type": "string",
"enum": sorted([s.value for s in BlueprintEntryDesiredState]),
"enum": [s.value for s in BlueprintEntryDesiredState],
"default": "present",
},
"conditions": {"type": "array", "items": {"type": "boolean"}},
@ -156,11 +143,6 @@ class Command(BaseCommand):
"identifiers": {"$ref": def_path},
},
}
# Meta models don't require identifiers, as there's no matching database model to find
if issubclass(model, BaseMetaModel):
del template["properties"]["identifiers"]
template["required"].remove("identifiers")
return template
def field_to_jsonschema(self, field: Field) -> dict:
"""Convert a single field to json schema"""
@ -218,7 +200,7 @@ class Command(BaseCommand):
"type": "object",
"required": ["permission"],
"properties": {
"permission": {"type": "string", "enum": sorted(perms)},
"permission": {"type": "string", "enum": perms},
"user": {"type": "integer"},
"role": {"type": "string"},
},

View File

@ -29,7 +29,9 @@ def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path):
if version != 1:
return
blueprint_file.seek(0)
instance = BlueprintInstance.objects.using(db_alias).filter(path=path).first()
instance: BlueprintInstance = (
BlueprintInstance.objects.using(db_alias).filter(path=path).first()
)
rel_path = path.relative_to(Path(CONFIG.get("blueprints_dir")))
meta = None
if metadata:

View File

@ -1,11 +1,10 @@
version: 1
entries:
foo:
- identifiers:
name: "%(id)s"
slug: "%(id)s"
model: authentik_flows.flow
state: present
attrs:
designation: stage_configuration
title: foo
- identifiers:
name: "%(id)s"
slug: "%(id)s"
model: authentik_flows.flow
state: present
attrs:
designation: stage_configuration
title: foo

View File

@ -37,7 +37,6 @@ entries:
- attrs:
attributes:
env_null: !Env [bar-baz, null]
json_parse: !ParseJSON '{"foo": "bar"}'
policy_pk1:
!Format [
"%s-%s",
@ -147,10 +146,6 @@ entries:
]
]
nested_context: !Context context2
at_index_sequence: !AtIndex [!Context sequence, 0]
at_index_sequence_default: !AtIndex [!Context sequence, 100, "non existent"]
at_index_mapping: !AtIndex [!Context mapping, "key2"]
at_index_mapping_default: !AtIndex [!Context mapping, "invalid", "non existent"]
identifiers:
name: test
conditions:

View File

@ -1,14 +0,0 @@
from django.test import TestCase
from authentik.blueprints.apps import ManagedAppConfig
from authentik.enterprise.apps import EnterpriseConfig
from authentik.lib.utils.reflection import get_apps
class TestManagedAppConfig(TestCase):
def test_apps_use_managed_app_config(self):
for app in get_apps():
if app.name.startswith("authentik.enterprise"):
self.assertIn(EnterpriseConfig, app.__class__.__bases__)
else:
self.assertIn(ManagedAppConfig, app.__class__.__bases__)

View File

@ -27,14 +27,13 @@ def blueprint_tester(file_name: Path) -> Callable:
base = Path("blueprints/")
rel_path = Path(file_name).relative_to(base)
importer = Importer.from_string(BlueprintInstance(path=str(rel_path)).retrieve())
validation, logs = importer.validate()
self.assertTrue(validation, logs)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
return tester
for blueprint_file in Path("blueprints/").glob("**/*.yaml"):
if "local" in str(blueprint_file) or "testing" in str(blueprint_file):
if "local" in str(blueprint_file):
continue
setattr(TestPackaged, f"test_blueprint_{blueprint_file}", blueprint_tester(blueprint_file))

View File

@ -5,6 +5,7 @@ from collections.abc import Callable
from django.apps import apps
from django.test import TestCase
from authentik.blueprints.v1.importer import is_model_allowed
from authentik.lib.models import SerializerModel
from authentik.providers.oauth2.models import RefreshToken
@ -21,13 +22,10 @@ def serializer_tester_factory(test_model: type[SerializerModel]) -> Callable:
return
model_class = test_model()
self.assertTrue(isinstance(model_class, SerializerModel))
# Models that have subclasses don't have to have a serializer
if len(test_model.__subclasses__()) > 0:
return
self.assertIsNotNone(model_class.serializer)
if model_class.serializer.Meta().model == RefreshToken:
return
self.assertTrue(issubclass(test_model, model_class.serializer.Meta().model))
self.assertEqual(model_class.serializer.Meta().model, test_model)
return tester
@ -36,6 +34,6 @@ for app in apps.get_app_configs():
if not app.label.startswith("authentik"):
continue
for model in app.get_models():
if not issubclass(model, SerializerModel):
if not is_model_allowed(model):
continue
setattr(TestModels, f"test_{app.label}_{model.__name__}", serializer_tester_factory(model))

View File

@ -215,11 +215,6 @@ class TestBlueprintsV1(TransactionTestCase):
},
"nested_context": "context-nested-value",
"env_null": None,
"json_parse": {"foo": "bar"},
"at_index_sequence": "foo",
"at_index_sequence_default": "non existent",
"at_index_mapping": 2,
"at_index_mapping_default": "non existent",
}
).exists()
)

View File

@ -78,5 +78,5 @@ class TestBlueprintsV1API(APITestCase):
self.assertEqual(res.status_code, 400)
self.assertJSONEqual(
res.content.decode(),
{"content": ["Failed to validate blueprint", "- Invalid blueprint version"]},
{"content": ["Failed to validate blueprint: Invalid blueprint version"]},
)

View File

@ -6,7 +6,6 @@ from copy import copy
from dataclasses import asdict, dataclass, field, is_dataclass
from enum import Enum
from functools import reduce
from json import JSONDecodeError, loads
from operator import ixor
from os import getenv
from typing import Any, Literal, Union
@ -25,10 +24,6 @@ from authentik.lib.sentry import SentryIgnoredException
from authentik.policies.models import PolicyBindingModel
class UNSET:
"""Used to test whether a key has not been set."""
def get_attrs(obj: SerializerModel) -> dict[str, Any]:
"""Get object's attributes via their serializer, and convert it to a normal dict"""
serializer: Serializer = obj.serializer(obj)
@ -165,7 +160,9 @@ class BlueprintEntry:
"""Get the blueprint model, with yaml tags resolved if present"""
return str(self.tag_resolver(self.model, blueprint))
def get_permissions(self, blueprint: "Blueprint") -> Generator[BlueprintEntryPermission]:
def get_permissions(
self, blueprint: "Blueprint"
) -> Generator[BlueprintEntryPermission, None, None]:
"""Get permissions of this entry, with all yaml tags resolved"""
for perm in self.permissions:
yield BlueprintEntryPermission(
@ -192,25 +189,15 @@ class Blueprint:
"""Dataclass used for a full export"""
version: int = field(default=1)
entries: list[BlueprintEntry] | dict[str, list[BlueprintEntry]] = field(default_factory=list)
entries: list[BlueprintEntry] = field(default_factory=list)
context: dict = field(default_factory=dict)
metadata: BlueprintMetadata | None = field(default=None)
def iter_entries(self) -> Iterable[BlueprintEntry]:
if isinstance(self.entries, dict):
for _section, entries in self.entries.items():
yield from entries
else:
yield from self.entries
class YAMLTag:
"""Base class for all YAML Tags"""
def __repr__(self) -> str:
return str(self.resolve(BlueprintEntry(""), Blueprint()))
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
"""Implement yaml tag logic"""
raise NotImplementedError
@ -234,7 +221,7 @@ class KeyOf(YAMLTag):
self.id_from = node.value
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
for _entry in blueprint.iter_entries():
for _entry in blueprint.entries:
if _entry.id == self.id_from and _entry._state.instance:
# Special handling for PolicyBindingModels, as they'll have a different PK
# which is used when creating policy bindings
@ -292,22 +279,6 @@ class Context(YAMLTag):
return value
class ParseJSON(YAMLTag):
"""Parse JSON from context/env/etc value"""
raw: str
def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
super().__init__()
self.raw = node.value
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
try:
return loads(self.raw)
except JSONDecodeError as exc:
raise EntryInvalidError.from_entry(exc, entry) from exc
class Format(YAMLTag):
"""Format a string"""
@ -585,53 +556,6 @@ class Value(EnumeratedItem):
raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry) from exc
class AtIndex(YAMLTag):
"""Get value at index of a sequence or mapping"""
obj: YAMLTag | dict | list | tuple
attribute: int | str | YAMLTag
default: Any | UNSET
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.obj = loader.construct_object(node.value[0])
self.attribute = loader.construct_object(node.value[1])
if len(node.value) == 2: # noqa: PLR2004
self.default = UNSET
else:
self.default = loader.construct_object(node.value[2])
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
if isinstance(self.obj, YAMLTag):
obj = self.obj.resolve(entry, blueprint)
else:
obj = self.obj
if isinstance(self.attribute, YAMLTag):
attribute = self.attribute.resolve(entry, blueprint)
else:
attribute = self.attribute
if isinstance(obj, list | tuple):
try:
return obj[attribute]
except TypeError as exc:
raise EntryInvalidError.from_entry(
f"Invalid index for list: {attribute}", entry
) from exc
except IndexError as exc:
if self.default is UNSET:
raise EntryInvalidError.from_entry(
f"Index out of range: {attribute}", entry
) from exc
return self.default
if attribute in obj:
return obj[attribute]
else:
if self.default is UNSET:
raise EntryInvalidError.from_entry(f"Key does not exist: {attribute}", entry)
return self.default
class BlueprintDumper(SafeDumper):
"""Dump dataclasses to yaml"""
@ -682,8 +606,6 @@ class BlueprintLoader(SafeLoader):
self.add_constructor("!Enumerate", Enumerate)
self.add_constructor("!Value", Value)
self.add_constructor("!Index", Index)
self.add_constructor("!AtIndex", AtIndex)
self.add_constructor("!ParseJSON", ParseJSON)
class EntryInvalidError(SentryIgnoredException):

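Note on the !AtIndex and !ParseJSON tags in the hunks above: !AtIndex returns the element of a list/tuple or mapping at the given index or key, raising an entry error unless a third argument supplies a default, and !ParseJSON simply runs json.loads on its value. A minimal standalone sketch of those semantics, assuming nothing of authentik's internals (the names UNSET and at_index below are illustrative only):

    # Standalone sketch of the lookup rule behind !AtIndex; not the authentik API.
    from json import loads

    class UNSET:
        """Sentinel meaning 'no default was supplied'."""

    def at_index(obj, attribute, default=UNSET):
        """Return obj[attribute], falling back to default only if one was given."""
        if isinstance(obj, (list, tuple)):
            try:
                return obj[attribute]
            except (TypeError, IndexError):
                if default is UNSET:
                    raise
                return default
        if attribute in obj:
            return obj[attribute]
        if default is UNSET:
            raise KeyError(attribute)
        return default

    # !ParseJSON is effectively json.loads applied to the tag's scalar value:
    print(at_index(loads('["a", "b", "c"]'), 1))              # b
    print(at_index({"key": "value"}, "missing", "fallback"))  # fallback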
View File

@ -36,7 +36,6 @@ from authentik.core.models import (
GroupSourceConnection,
PropertyMapping,
Provider,
Session,
Source,
User,
UserSourceConnection,
@ -51,11 +50,7 @@ from authentik.enterprise.providers.microsoft_entra.models import (
MicrosoftEntraProviderGroup,
MicrosoftEntraProviderUser,
)
from authentik.enterprise.providers.ssf.models import StreamEvent
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import (
EndpointDevice,
EndpointDeviceConnection,
)
from authentik.enterprise.providers.rac.models import ConnectionToken
from authentik.events.logs import LogEvent, capture_logs
from authentik.events.models import SystemTask
from authentik.events.utils import cleanse_dict
@ -66,13 +61,7 @@ from authentik.lib.utils.reflection import get_apps
from authentik.outposts.models import OutpostServiceConnection
from authentik.policies.models import Policy, PolicyBindingModel
from authentik.policies.reputation.models import Reputation
from authentik.providers.oauth2.models import (
AccessToken,
AuthorizationCode,
DeviceToken,
RefreshToken,
)
from authentik.providers.rac.models import ConnectionToken
from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken
from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser
from authentik.rbac.models import Role
from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser
@ -80,7 +69,7 @@ from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType
from authentik.tenants.models import Tenant
# Context set when the serializer is created in a blueprint context
# Update website/docs/customize/blueprints/v1/models.md when used
# Update website/developer-docs/blueprints/v1/models.md when used
SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry"
@ -109,7 +98,6 @@ def excluded_models() -> list[type[Model]]:
Policy,
PolicyBindingModel,
# Classes that have other dependencies
Session,
AuthenticatedSession,
# Classes which are only internally managed
# FIXME: these shouldn't need to be explicitly listed, but rather based off of a mixin
@ -131,10 +119,6 @@ def excluded_models() -> list[type[Model]]:
GoogleWorkspaceProviderGroup,
MicrosoftEntraProviderUser,
MicrosoftEntraProviderGroup,
EndpointDevice,
EndpointDeviceConnection,
DeviceToken,
StreamEvent,
)
@ -303,11 +287,7 @@ class Importer:
serializer_kwargs = {}
model_instance = existing_models.first()
if (
not isinstance(model(), BaseMetaModel)
and model_instance
and entry.state != BlueprintEntryDesiredState.MUST_CREATED
):
if not isinstance(model(), BaseMetaModel) and model_instance:
self.logger.debug(
"Initialise serializer with instance",
model=model,
@ -317,12 +297,11 @@ class Importer:
serializer_kwargs["instance"] = model_instance
serializer_kwargs["partial"] = True
elif model_instance and entry.state == BlueprintEntryDesiredState.MUST_CREATED:
msg = (
f"State is set to {BlueprintEntryDesiredState.MUST_CREATED.value} "
"and object exists already",
)
raise EntryInvalidError.from_entry(
ValidationError({k: msg for k in entry.identifiers.keys()}, "unique"),
(
f"State is set to {BlueprintEntryDesiredState.MUST_CREATED} "
"and object exists already",
),
entry,
)
else:
@ -384,7 +363,7 @@ class Importer:
def _apply_models(self, raise_errors=False) -> bool:
"""Apply (create/update) models yaml"""
self.__pk_map = {}
for entry in self._import.iter_entries():
for entry in self._import.entries:
model_app_label, model_name = entry.get_model(self._import).split(".")
try:
model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
@ -450,7 +429,7 @@ class Importer:
orig_import = deepcopy(self._import)
if self._import.version != 1:
self.logger.warning("Invalid blueprint version")
return False, [LogEvent("Invalid blueprint version", log_level="warning", logger=None)]
return False, [{"event": "Invalid blueprint version"}]
with (
transaction_rollback(),
capture_logs() as logs,

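The importer hunks above change how MUST_CREATED entries are rejected when a matching object already exists: the failure is raised as a validation error keyed by each identifier rather than a bare tuple. A rough, self-contained sketch of that branch (plain Python with illustrative names, not the real Importer):

    # Rough sketch of the create-vs-update decision shown above; "must_created"
    # and the dict-based error are stand-ins for the real classes.
    def serializer_kwargs_for(identifiers: dict, existing, state: str) -> dict:
        if existing is not None and state != "must_created":
            # Existing object and no creation constraint: update it in place.
            return {"instance": existing, "partial": True}
        if existing is not None and state == "must_created":
            # The blueprint demands creation but the object already exists:
            # report one error per identifier so the response pinpoints them.
            msg = "state is set to must_created and object exists already"
            raise ValueError({key: msg for key in identifiers})
        # No existing object: plain create.
        return {}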
View File

@ -47,7 +47,7 @@ class MetaModelRegistry:
models = apps.get_models()
for _, value in self.models.items():
models.append(value)
return sorted(models, key=str)
return models
def get_model(self, app_label: str, model_id: str) -> type[Model]:
"""Get model checks if any virtual models are registered, and falls back

View File

@ -159,7 +159,7 @@ def blueprints_discovery(self: SystemTask, path: str | None = None):
check_blueprint_v1_file(blueprint)
count += 1
self.set_status(
TaskStatus.SUCCESSFUL, _("Successfully imported {count} files.".format(count=count))
TaskStatus.SUCCESSFUL, _("Successfully imported %(count)d files." % {"count": count})
)

View File

@ -14,10 +14,10 @@ from rest_framework.response import Response
from rest_framework.validators import UniqueValidator
from rest_framework.viewsets import ModelViewSet
from authentik.api.authorization import SecretKeyFilter
from authentik.brands.models import Brand
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer, PassiveSerializer
from authentik.rbac.filters import SecretKeyFilter
from authentik.tenants.utils import get_current_tenant
@ -49,8 +49,6 @@ class BrandSerializer(ModelSerializer):
"branding_title",
"branding_logo",
"branding_favicon",
"branding_custom_css",
"branding_default_flow_background",
"flow_authentication",
"flow_invalidation",
"flow_recovery",
@ -59,7 +57,6 @@ class BrandSerializer(ModelSerializer):
"flow_device_code",
"default_application",
"web_certificate",
"client_certificates",
"attributes",
]
extra_kwargs = {
@ -87,9 +84,8 @@ class CurrentBrandSerializer(PassiveSerializer):
matched_domain = CharField(source="domain")
branding_title = CharField()
branding_logo = CharField(source="branding_logo_url")
branding_favicon = CharField(source="branding_favicon_url")
branding_custom_css = CharField()
branding_logo = CharField()
branding_favicon = CharField()
ui_footer_links = ListField(
child=FooterLinkSerializer(),
read_only=True,
@ -121,7 +117,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet):
"domain",
"branding_title",
"web_certificate__name",
"client_certificates__name",
]
filterset_fields = [
"brand_uuid",
@ -130,7 +125,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet):
"branding_title",
"branding_logo",
"branding_favicon",
"branding_default_flow_background",
"flow_authentication",
"flow_invalidation",
"flow_recovery",
@ -138,7 +132,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet):
"flow_user_settings",
"flow_device_code",
"web_certificate",
"client_certificates",
]
ordering = ["domain"]

View File

@ -1,9 +1,9 @@
"""authentik brands app"""
from authentik.blueprints.apps import ManagedAppConfig
from django.apps import AppConfig
class AuthentikBrandsConfig(ManagedAppConfig):
class AuthentikBrandsConfig(AppConfig):
"""authentik Brand app"""
name = "authentik.brands"
@ -12,4 +12,3 @@ class AuthentikBrandsConfig(ManagedAppConfig):
mountpoints = {
"authentik.brands.urls_root": "",
}
default = True

View File

@ -4,7 +4,7 @@ from collections.abc import Callable
from django.http.request import HttpRequest
from django.http.response import HttpResponse
from django.utils.translation import override
from django.utils.translation import activate
from authentik.brands.utils import get_brand_for_request
@ -18,14 +18,10 @@ class BrandMiddleware:
self.get_response = get_response
def __call__(self, request: HttpRequest) -> HttpResponse:
locale_to_set = None
if not hasattr(request, "brand"):
brand = get_brand_for_request(request)
request.brand = brand
locale = brand.default_locale
if locale != "":
locale_to_set = locale
if locale_to_set:
with override(locale_to_set):
return self.get_response(request)
activate(locale)
return self.get_response(request)

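The middleware change above swaps translation.activate() for a scoped translation.override(): the brand's default locale only applies while the current request is handled and the previous locale is restored afterwards, so it cannot leak into the next request served by the same worker. A minimal sketch of that pattern (illustrative, not the actual BrandMiddleware):

    # Minimal sketch of scoping a per-request locale with override().
    from django.utils import translation

    def handle_request(get_response, request, brand_locale: str | None):
        if brand_locale:
            # The locale is active only inside this block and reset on exit.
            with translation.override(brand_locale):
                return get_response(request)
        return get_response(request)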
View File

@ -1,35 +0,0 @@
# Generated by Django 5.0.12 on 2025-02-22 01:51
from pathlib import Path
from django.db import migrations, models
from django.apps.registry import Apps
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
def migrate_custom_css(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
Brand = apps.get_model("authentik_brands", "brand")
db_alias = schema_editor.connection.alias
path = Path("/web/dist/custom.css")
if not path.exists():
return
css = path.read_text()
Brand.objects.using(db_alias).all().update(branding_custom_css=css)
class Migration(migrations.Migration):
dependencies = [
("authentik_brands", "0007_brand_default_application"),
]
operations = [
migrations.AddField(
model_name="brand",
name="branding_custom_css",
field=models.TextField(blank=True, default=""),
),
migrations.RunPython(migrate_custom_css),
]

View File

@ -1,18 +0,0 @@
# Generated by Django 5.0.13 on 2025-03-19 22:54
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_brands", "0008_brand_branding_custom_css"),
]
operations = [
migrations.AddField(
model_name="brand",
name="branding_default_flow_background",
field=models.TextField(default="/static/dist/assets/images/flow_background.jpg"),
),
]

View File

@ -1,37 +0,0 @@
# Generated by Django 5.1.9 on 2025-05-19 15:09
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_brands", "0009_brand_branding_default_flow_background"),
("authentik_crypto", "0004_alter_certificatekeypair_name"),
]
operations = [
migrations.AddField(
model_name="brand",
name="client_certificates",
field=models.ManyToManyField(
blank=True,
default=None,
help_text="Certificates used for client authentication.",
to="authentik_crypto.certificatekeypair",
),
),
migrations.AlterField(
model_name="brand",
name="web_certificate",
field=models.ForeignKey(
default=None,
help_text="Web Certificate used by the authentik Core webserver.",
null=True,
on_delete=django.db.models.deletion.SET_DEFAULT,
related_name="+",
to="authentik_crypto.certificatekeypair",
),
),
]

View File

@ -10,7 +10,6 @@ from structlog.stdlib import get_logger
from authentik.crypto.models import CertificateKeyPair
from authentik.flows.models import Flow
from authentik.lib.config import CONFIG
from authentik.lib.models import SerializerModel
LOGGER = get_logger()
@ -33,10 +32,6 @@ class Brand(SerializerModel):
branding_logo = models.TextField(default="/static/dist/assets/icons/icon_left_brand.svg")
branding_favicon = models.TextField(default="/static/dist/assets/icons/icon.png")
branding_custom_css = models.TextField(default="", blank=True)
branding_default_flow_background = models.TextField(
default="/static/dist/assets/images/flow_background.jpg"
)
flow_authentication = models.ForeignKey(
Flow, null=True, on_delete=models.SET_NULL, related_name="brand_authentication"
@ -73,34 +68,9 @@ class Brand(SerializerModel):
default=None,
on_delete=models.SET_DEFAULT,
help_text=_("Web Certificate used by the authentik Core webserver."),
related_name="+",
)
client_certificates = models.ManyToManyField(
CertificateKeyPair,
default=None,
blank=True,
help_text=_("Certificates used for client authentication."),
)
attributes = models.JSONField(default=dict, blank=True)
def branding_logo_url(self) -> str:
"""Get branding_logo with the correct prefix"""
if self.branding_logo.startswith("/static"):
return CONFIG.get("web.path", "/")[:-1] + self.branding_logo
return self.branding_logo
def branding_favicon_url(self) -> str:
"""Get branding_favicon with the correct prefix"""
if self.branding_favicon.startswith("/static"):
return CONFIG.get("web.path", "/")[:-1] + self.branding_favicon
return self.branding_favicon
def branding_default_flow_background_url(self) -> str:
"""Get branding_default_flow_background with the correct prefix"""
if self.branding_default_flow_background.startswith("/static"):
return CONFIG.get("web.path", "/")[:-1] + self.branding_default_flow_background
return self.branding_default_flow_background
@property
def serializer(self) -> Serializer:
from authentik.brands.api import BrandSerializer

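The branding_*_url helpers added above only rewrite paths that point at bundled static assets; anything else, such as an absolute URL, is returned unchanged. A standalone illustration of that rule, assuming the configured web path always ends with a trailing slash (CONFIG is replaced by a plain argument here):

    # Standalone illustration of the prefix rule used by branding_logo_url and friends.
    def prefixed(value: str, web_path: str = "/") -> str:
        if value.startswith("/static"):
            # web_path ends with "/", so drop it before concatenating to avoid
            # producing "//static/...".
            return web_path[:-1] + value
        return value

    assert prefixed("/static/dist/assets/icons/icon.png", "/authentik/") == (
        "/authentik/static/dist/assets/icons/icon.png"
    )
    assert prefixed("https://goauthentik.io/img/icon.png") == (
        "https://goauthentik.io/img/icon.png"
    )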
View File

@ -24,7 +24,6 @@ class TestBrands(APITestCase):
"branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
"branding_favicon": "/static/dist/assets/icons/icon.png",
"branding_title": "authentik",
"branding_custom_css": "",
"matched_domain": brand.domain,
"ui_footer_links": [],
"ui_theme": Themes.AUTOMATIC,
@ -44,7 +43,6 @@ class TestBrands(APITestCase):
"branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
"branding_favicon": "/static/dist/assets/icons/icon.png",
"branding_title": "custom",
"branding_custom_css": "",
"matched_domain": "bar.baz",
"ui_footer_links": [],
"ui_theme": Themes.AUTOMATIC,
@ -61,7 +59,6 @@ class TestBrands(APITestCase):
"branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
"branding_favicon": "/static/dist/assets/icons/icon.png",
"branding_title": "authentik",
"branding_custom_css": "",
"matched_domain": "fallback",
"ui_footer_links": [],
"ui_theme": Themes.AUTOMATIC,
@ -124,38 +121,3 @@ class TestBrands(APITestCase):
"subject": None,
},
)
def test_branding_url(self):
"""Test branding attributes return correct values"""
brand = create_test_brand()
brand.branding_default_flow_background = "https://goauthentik.io/img/icon.png"
brand.branding_favicon = "https://goauthentik.io/img/icon.png"
brand.branding_logo = "https://goauthentik.io/img/icon.png"
brand.save()
self.assertEqual(
brand.branding_default_flow_background_url(), "https://goauthentik.io/img/icon.png"
)
self.assertJSONEqual(
self.client.get(reverse("authentik_api:brand-current")).content.decode(),
{
"branding_logo": "https://goauthentik.io/img/icon.png",
"branding_favicon": "https://goauthentik.io/img/icon.png",
"branding_title": "authentik",
"branding_custom_css": "",
"matched_domain": brand.domain,
"ui_footer_links": [],
"ui_theme": Themes.AUTOMATIC,
"default_locale": "",
},
)
def test_custom_css(self):
"""Test custom_css"""
brand = create_test_brand()
brand.branding_custom_css = """* {
font-family: "Foo bar";
}"""
brand.save()
res = self.client.get(reverse("authentik_core:if-user"))
self.assertEqual(res.status_code, 200)
self.assertIn(brand.branding_custom_css, res.content.decode())

View File

@ -5,12 +5,10 @@ from typing import Any
from django.db.models import F, Q
from django.db.models import Value as V
from django.http.request import HttpRequest
from django.utils.html import _json_script_escapes
from django.utils.safestring import mark_safe
from sentry_sdk import get_current_span
from authentik import get_full_version
from authentik.brands.models import Brand
from authentik.lib.sentry import get_http_meta
from authentik.tenants.models import Tenant
_q_default = Q(default=True)
@ -34,14 +32,13 @@ def context_processor(request: HttpRequest) -> dict[str, Any]:
"""Context Processor that injects brand object into every template"""
brand = getattr(request, "brand", DEFAULT_BRAND)
tenant = getattr(request, "tenant", Tenant())
# similarly to `json_script` we escape everything HTML-related, however django
# only directly exposes this as a function that also wraps it in a <script> tag
# which we dont want for CSS
brand_css = mark_safe(str(brand.branding_custom_css).translate(_json_script_escapes)) # nosec
trace = ""
span = get_current_span()
if span:
trace = span.to_traceparent()
return {
"brand": brand,
"brand_css": brand_css,
"footer_links": tenant.footer_links,
"html_meta": {**get_http_meta()},
"sentry_trace": trace,
"version": get_full_version(),
}

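The brand_css value built above reuses Django's json_script escaping table so the raw CSS can be inlined into a template without being able to close the surrounding tag. A small self-contained equivalent that avoids the private _json_script_escapes import (the mapping below mirrors what that table does):

    # Self-contained equivalent of the escaping used for brand_css above.
    _escapes = {ord("<"): "\\u003C", ord(">"): "\\u003E", ord("&"): "\\u0026"}

    css = '.brand::after { content: "</style><script>alert(1)</script>"; }'
    # "<", ">" and "&" become unicode escapes, so the value cannot terminate
    # the <style> element it is rendered into.
    print(css.translate(_escapes))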
View File

@ -1,58 +0,0 @@
"""Application Roles API Viewset"""
from django.http import HttpRequest
from django.utils.translation import gettext_lazy as _
from rest_framework.exceptions import ValidationError
from rest_framework.viewsets import ModelViewSet
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.core.models import (
Application,
ApplicationEntitlement,
)
class ApplicationEntitlementSerializer(ModelSerializer):
"""ApplicationEntitlement Serializer"""
def validate_app(self, app: Application) -> Application:
"""Ensure user has permission to view"""
request: HttpRequest = self.context.get("request")
if not request and SERIALIZER_CONTEXT_BLUEPRINT in self.context:
return app
user = request.user
if user.has_perm("view_application", app) or user.has_perm(
"authentik_core.view_application"
):
return app
raise ValidationError(_("User does not have access to application."), code="invalid")
class Meta:
model = ApplicationEntitlement
fields = [
"pbm_uuid",
"name",
"app",
"attributes",
]
class ApplicationEntitlementViewSet(UsedByMixin, ModelViewSet):
"""ApplicationEntitlement Viewset"""
queryset = ApplicationEntitlement.objects.all()
serializer_class = ApplicationEntitlementSerializer
search_fields = [
"pbm_uuid",
"name",
"app",
"attributes",
]
filterset_fields = [
"pbm_uuid",
"name",
"app",
]
ordering = ["name"]

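validate_app above accepts the application if the requesting user holds either the object-level view permission on that specific application or the model-wide one; when there is no request at all (blueprint import), the check is skipped. A compact sketch of that decision, with an illustrative helper name and the permission codenames taken from the diff:

    # Compact sketch of the access rule in validate_app above.
    def may_use_application(request, app) -> bool:
        if request is None:
            # No request: the serializer is running inside a blueprint import.
            return True
        user = request.user
        return user.has_perm("view_application", app) or user.has_perm(
            "authentik_core.view_application"
        )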
View File

@ -2,9 +2,11 @@
from collections.abc import Iterator
from copy import copy
from datetime import timedelta
from django.core.cache import cache
from django.db.models import QuerySet
from django.db.models.functions import ExtractHour
from django.shortcuts import get_object_or_404
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
@ -18,6 +20,7 @@ from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger
from authentik.admin.api.metrics import CoordinateSerializer
from authentik.api.pagination import Pagination
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.providers import ProviderSerializer
@ -25,6 +28,7 @@ from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.core.models import Application, User
from authentik.events.logs import LogEventSerializer, capture_logs
from authentik.events.models import EventAction
from authentik.lib.utils.file import (
FilePathSerializer,
FileUploadSerializer,
@ -42,7 +46,7 @@ LOGGER = get_logger()
def user_app_cache_key(user_pk: str, page_number: int | None = None) -> str:
"""Cache key where application list for user is saved"""
key = f"{CACHE_PREFIX}app_access/{user_pk}"
key = f"{CACHE_PREFIX}/app_access/{user_pk}"
if page_number:
key += f"/{page_number}"
return key
@ -317,3 +321,18 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
"""Set application icon (as URL)"""
app: Application = self.get_object()
return set_file_url(request, app, "meta_icon")
@permission_required("authentik_core.view_application", ["authentik_events.view_event"])
@extend_schema(responses={200: CoordinateSerializer(many=True)})
@action(detail=True, pagination_class=None, filter_backends=[])
def metrics(self, request: Request, slug: str):
"""Metrics for application logins"""
app = self.get_object()
return Response(
get_objects_for_user(request.user, "authentik_events.view_event").filter(
action=EventAction.AUTHORIZE_APPLICATION,
context__authorized_application__pk=app.pk.hex,
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
)

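The cache key change above drops the stray slash between the prefix and app_access; a per-page suffix is appended only when a page number is passed. A standalone version with an assumed prefix value for illustration:

    # Standalone version of the per-user, per-page cache key shown above; the
    # CACHE_PREFIX value here is assumed.
    CACHE_PREFIX = "goauthentik.io/core/"

    def user_app_cache_key(user_pk: str, page_number: int | None = None) -> str:
        key = f"{CACHE_PREFIX}app_access/{user_pk}"
        if page_number:
            key += f"/{page_number}"
        return key

    print(user_app_cache_key("ab12"))     # goauthentik.io/core/app_access/ab12
    print(user_app_cache_key("ab12", 2))  # goauthentik.io/core/app_access/ab12/2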
View File

@ -2,13 +2,16 @@
from typing import TypedDict
from django_filters.rest_framework import DjangoFilterBackend
from guardian.utils import get_anonymous_user
from rest_framework import mixins
from rest_framework.fields import SerializerMethodField
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.request import Request
from rest_framework.serializers import CharField, DateTimeField, IPAddressField
from rest_framework.viewsets import GenericViewSet
from ua_parser import user_agent_parser
from authentik.api.authorization import OwnerSuperuserPermissions
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.core.models import AuthenticatedSession
@ -55,11 +58,6 @@ class UserAgentDict(TypedDict):
class AuthenticatedSessionSerializer(ModelSerializer):
"""AuthenticatedSession Serializer"""
expires = DateTimeField(source="session.expires", read_only=True)
last_ip = IPAddressField(source="session.last_ip", read_only=True)
last_user_agent = CharField(source="session.last_user_agent", read_only=True)
last_used = DateTimeField(source="session.last_used", read_only=True)
current = SerializerMethodField()
user_agent = SerializerMethodField()
geo_ip = SerializerMethodField()
@ -68,19 +66,19 @@ class AuthenticatedSessionSerializer(ModelSerializer):
def get_current(self, instance: AuthenticatedSession) -> bool:
"""Check if session is currently active session"""
request: Request = self.context["request"]
return request._request.session.session_key == instance.session.session_key
return request._request.session.session_key == instance.session_key
def get_user_agent(self, instance: AuthenticatedSession) -> UserAgentDict:
"""Get parsed user agent"""
return user_agent_parser.Parse(instance.session.last_user_agent)
return user_agent_parser.Parse(instance.last_user_agent)
def get_geo_ip(self, instance: AuthenticatedSession) -> GeoIPDict | None: # pragma: no cover
"""Get GeoIP Data"""
return GEOIP_CONTEXT_PROCESSOR.city_dict(instance.session.last_ip)
return GEOIP_CONTEXT_PROCESSOR.city_dict(instance.last_ip)
def get_asn(self, instance: AuthenticatedSession) -> ASNDict | None: # pragma: no cover
"""Get ASN Data"""
return ASN_CONTEXT_PROCESSOR.asn_dict(instance.session.last_ip)
return ASN_CONTEXT_PROCESSOR.asn_dict(instance.last_ip)
class Meta:
model = AuthenticatedSession
@ -96,7 +94,6 @@ class AuthenticatedSessionSerializer(ModelSerializer):
"last_used",
"expires",
]
extra_args = {"uuid": {"read_only": True}}
class AuthenticatedSessionViewSet(
@ -108,10 +105,16 @@ class AuthenticatedSessionViewSet(
):
"""AuthenticatedSession Viewset"""
lookup_field = "uuid"
queryset = AuthenticatedSession.objects.select_related("session").all()
queryset = AuthenticatedSession.objects.all()
serializer_class = AuthenticatedSessionSerializer
search_fields = ["user__username", "session__last_ip", "session__last_user_agent"]
filterset_fields = ["user__username", "session__last_ip", "session__last_user_agent"]
search_fields = ["user__username", "last_ip", "last_user_agent"]
filterset_fields = ["user__username", "last_ip", "last_user_agent"]
ordering = ["user__username"]
owner_field = "user"
permission_classes = [OwnerSuperuserPermissions]
filter_backends = [DjangoFilterBackend, OrderingFilter, SearchFilter]
def get_queryset(self):
user = self.request.user if self.request else get_anonymous_user()
if user.is_superuser:
return super().get_queryset()
return super().get_queryset().filter(user=user.pk)

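In the serializer above, expiry, IP and user agent now live on a related session row and are exposed via source="session.<field>"; the queryset adds select_related("session") so listing sessions does not trigger one extra query per row. A minimal DRF sketch of that pattern with illustrative model and serializer names:

    # Minimal DRF sketch of exposing fields from a related object the way the
    # serializer above does.
    from rest_framework import serializers

    class SessionInfoSerializer(serializers.Serializer):
        expires = serializers.DateTimeField(source="session.expires", read_only=True)
        last_ip = serializers.IPAddressField(source="session.last_ip", read_only=True)
        last_user_agent = serializers.CharField(source="session.last_user_agent", read_only=True)

    # In the viewset, prefetch the relation once for the whole page:
    # queryset = AuthenticatedSession.objects.select_related("session")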
View File

@ -1,59 +1,39 @@
"""Authenticator Devices API Views"""
from drf_spectacular.utils import extend_schema
from guardian.shortcuts import get_objects_for_user
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, extend_schema
from rest_framework.fields import (
BooleanField,
CharField,
DateTimeField,
IntegerField,
SerializerMethodField,
)
from rest_framework.permissions import IsAuthenticated
from rest_framework.permissions import IsAdminUser, IsAuthenticated
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ViewSet
from authentik.core.api.users import ParamUserSerializer
from authentik.core.api.utils import MetaNameSerializer
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice
from authentik.stages.authenticator import device_classes, devices_for_user
from authentik.stages.authenticator.models import Device
from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
class DeviceSerializer(MetaNameSerializer):
"""Serializer for authenticator devices"""
"""Serializer for Duo authenticator devices"""
pk = CharField()
pk = IntegerField()
name = CharField()
type = SerializerMethodField()
confirmed = BooleanField()
created = DateTimeField(read_only=True)
last_updated = DateTimeField(read_only=True)
last_used = DateTimeField(read_only=True, allow_null=True)
extra_description = SerializerMethodField()
external_id = SerializerMethodField()
def get_type(self, instance: Device) -> str:
"""Get type of device"""
return instance._meta.label
def get_extra_description(self, instance: Device) -> str | None:
"""Get extra description"""
if isinstance(instance, WebAuthnDevice):
return instance.device_type.description if instance.device_type else None
if isinstance(instance, EndpointDevice):
return instance.data.get("deviceSignals", {}).get("deviceModel")
return None
def get_external_id(self, instance: Device) -> str | None:
"""Get external Device ID"""
if isinstance(instance, WebAuthnDevice):
return instance.device_type.aaguid if instance.device_type else None
if isinstance(instance, EndpointDevice):
return instance.data.get("deviceSignals", {}).get("deviceModel")
return None
class DeviceViewSet(ViewSet):
"""Viewset for authenticator devices"""
@ -61,6 +41,7 @@ class DeviceViewSet(ViewSet):
serializer_class = DeviceSerializer
permission_classes = [IsAuthenticated]
@extend_schema(responses={200: DeviceSerializer(many=True)})
def list(self, request: Request) -> Response:
"""Get all devices for current user"""
devices = devices_for_user(request.user)
@ -71,22 +52,27 @@ class AdminDeviceViewSet(ViewSet):
"""Viewset for authenticator devices"""
serializer_class = DeviceSerializer
permission_classes = []
permission_classes = [IsAdminUser]
def get_devices(self, **kwargs):
"""Get all devices in all child classes"""
for model in device_classes():
device_set = get_objects_for_user(
self.request.user, f"{model._meta.app_label}.view_{model._meta.model_name}", model
).filter(**kwargs)
device_set = model.objects.filter(**kwargs)
yield from device_set
@extend_schema(
parameters=[ParamUserSerializer],
parameters=[
OpenApiParameter(
name="user",
location=OpenApiParameter.QUERY,
type=OpenApiTypes.INT,
)
],
responses={200: DeviceSerializer(many=True)},
)
def list(self, request: Request) -> Response:
"""Get all devices for current user"""
args = ParamUserSerializer(data=request.query_params)
args.is_valid(raise_exception=True)
return Response(DeviceSerializer(self.get_devices(**args.validated_data), many=True).data)
kwargs = {}
if "user" in request.query_params:
kwargs = {"user": request.query_params["user"]}
return Response(DeviceSerializer(self.get_devices(**kwargs), many=True).data)

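The admin device listing above now filters each device class through django-guardian's get_objects_for_user instead of returning every object, and validates the user query parameter through a serializer. A sketch of the permission-scoped iteration, assuming guardian is installed (the function name and device_models argument are illustrative):

    # Sketch of permission-scoped device listing as in get_devices above.
    from guardian.shortcuts import get_objects_for_user

    def devices_visible_to(user, device_models, **filters):
        """Yield devices of every class that the user is allowed to view."""
        for model in device_models:
            perm = f"{model._meta.app_label}.view_{model._meta.model_name}"
            # Object-level filtering: only devices the user has view rights on.
            yield from get_objects_for_user(user, perm, model).filter(**filters)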
Some files were not shown because too many files have changed in this diff.