Compare commits


1 Commit

Commit e86a5cf5a7: retry (Signed-off-by: Jens Langhammer <jens@goauthentik.io>), 2025-01-21 16:27:54 +01:00
723 changed files with 15087 additions and 45626 deletions

View File

@ -1,16 +1,16 @@
[bumpversion]
current_version = 2025.2.1
current_version = 2024.12.2
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
serialize =
serialize =
{major}.{minor}.{patch}-{rc_t}{rc_n}
{major}.{minor}.{patch}
message = release: {new_version}
tag_name = version/{new_version}
[bumpversion:part:rc_t]
values =
values =
rc
final
optional_value = final
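For orientation, a small standalone Python sketch (not part of the diff) of how the parse/serialize pair in this config round-trips both plain and release-candidate versions; the regex is copied from the hunk above, assuming the doubled backslash is an extraction artifact and the real config uses a single "\d":

    import re

    # Parse pattern from the [bumpversion] section above.
    PARSE = re.compile(
        r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
        r"(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?"
    )

    for version in ("2024.12.2", "2025.2.1-rc1"):
        parts = PARSE.fullmatch(version).groupdict()
        # serialize: {major}.{minor}.{patch}-{rc_t}{rc_n} when an rc part
        # is present, otherwise just {major}.{minor}.{patch}
        if parts["rc_t"]:
            rebuilt = "{major}.{minor}.{patch}-{rc_t}{rc_n}".format(**parts)
        else:
            rebuilt = "{major}.{minor}.{patch}".format(**parts)
        assert rebuilt == version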

View File

@ -28,11 +28,7 @@ Output of docker-compose logs or kubectl logs respectively
**Version and Deployment (please complete the following information):**
<!--
Notice: authentik supports installation via Docker, Kubernetes, and AWS CloudFormation only. Support is not available for other methods. For detailed installation and configuration instructions, please refer to the official documentation at https://docs.goauthentik.io/docs/install-config/.
-->
- authentik version: [e.g. 2025.2.0]
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]
**Additional context**

View File

@ -20,12 +20,7 @@ Output of docker-compose logs or kubectl logs respectively
**Version and Deployment (please complete the following information):**
<!--
Notice: authentik supports installation via Docker, Kubernetes, and AWS CloudFormation only. Support is not available for other methods. For detailed installation and configuration instructions, please refer to the official documentation at https://docs.goauthentik.io/docs/install-config/.
-->
- authentik version: [e.g. 2025.2.0]
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]
**Additional context**

View File

@ -9,9 +9,6 @@ inputs:
image-arch:
required: false
description: "Docker image arch"
release:
required: true
description: "True if this is a release build, false if this is a dev/PR build"
outputs:
shouldPush:
@ -47,9 +44,6 @@ outputs:
imageMainName:
description: "Docker image main name"
value: ${{ steps.ev.outputs.imageMainName }}
imageBuildArgs:
description: "Docker image build args"
value: ${{ steps.ev.outputs.imageBuildArgs }}
runs:
using: "composite"
@ -60,8 +54,6 @@ runs:
env:
IMAGE_NAME: ${{ inputs.image-name }}
IMAGE_ARCH: ${{ inputs.image-arch }}
RELEASE: ${{ inputs.release }}
PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
REF: ${{ github.ref }}
run: |
python3 ${{ github.action_path }}/push_vars.py

View File

@ -80,13 +80,6 @@ if should_push:
cache_to = f"type=registry,ref={get_attest_image_names(image_tags)}:{_cache_tag},mode=max"
image_build_args = []
if os.getenv("RELEASE", "false").lower() == "true":
image_build_args = [f"VERSION={os.getenv('REF')}"]
else:
image_build_args = [f"GIT_BUILD_HASH={sha}"]
image_build_args = "\n".join(image_build_args)
with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
print(f"shouldPush={str(should_push).lower()}", file=_output)
print(f"sha={sha}", file=_output)
@ -98,4 +91,3 @@ with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
print(f"imageMainTag={image_main_tag}", file=_output)
print(f"imageMainName={image_tags[0]}", file=_output)
print(f"cacheTo={cache_to}", file=_output)
print(f"imageBuildArgs={image_build_args}", file=_output)

View File

@ -30,16 +30,12 @@ runs:
uses: actions/setup-go@v5
with:
go-version-file: "go.mod"
- name: Setup docker cache
uses: ScribeMD/docker-cache@0.5.0
with:
key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }}
- name: Setup dependencies
shell: bash
run: |
export PSQL_TAG=${{ inputs.postgresql_version }}
docker compose -f .github/actions/setup/docker-compose.yml up -d
poetry sync
poetry install --sync
cd web && npm ci
- name: Generate config
shell: poetry run python {0}

View File

@ -11,7 +11,7 @@ services:
- 5432:5432
restart: always
redis:
image: docker.io/library/redis:7
image: docker.io/library/redis
ports:
- 6379:6379
restart: always

View File

@ -1,32 +1,7 @@
akadmin
asgi
assertIn
authentik
authn
crate
docstrings
entra
goauthentik
gunicorn
hass
jwe
jwks
keypair
keypairs
kubernetes
oidc
ontext
openid
passwordless
plex
saml
scim
singed
slo
sso
totp
traefik
# https://github.com/codespell-project/codespell/issues/1224
upToDate
hass
warmup
webauthn
ontext
singed
assertIn

View File

@ -82,12 +82,6 @@ updates:
docusaurus:
patterns:
- "@docusaurus/*"
build:
patterns:
- "@swc/*"
- "swc-*"
- "lightningcss*"
- "@rspack/binding*"
- package-ecosystem: npm
directory: "/lifecycle/aws"
schedule:

View File

@ -40,7 +40,7 @@ jobs:
attestations: write
steps:
- uses: actions/checkout@v4
- uses: docker/setup-qemu-action@v3.6.0
- uses: docker/setup-qemu-action@v3.3.0
- uses: docker/setup-buildx-action@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
@ -50,7 +50,6 @@ jobs:
with:
image-name: ${{ inputs.image_name }}
image-arch: ${{ inputs.image_arch }}
release: ${{ inputs.release }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@v3
@ -77,19 +76,18 @@ jobs:
id: push
with:
context: .
push: ${{ steps.ev.outputs.shouldPush == 'true' }}
push: true
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
build-args: |
${{ steps.ev.outputs.imageBuildArgs }}
VERSION=${{ github.ref }}
tags: ${{ steps.ev.outputs.imageTags }}
platforms: linux/${{ inputs.image_arch }}
cache-from: type=registry,ref=${{ steps.ev.outputs.attestImageNames }}:buildcache-${{ inputs.image_arch }}
cache-to: ${{ steps.ev.outputs.cacheTo }}
- uses: actions/attest-build-provenance@v2
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}

View File

@ -46,7 +46,6 @@ jobs:
- build-server-arm64
outputs:
tags: ${{ steps.ev.outputs.imageTagsJSON }}
shouldPush: ${{ steps.ev.outputs.shouldPush }}
steps:
- uses: actions/checkout@v4
- name: prepare variables
@ -58,7 +57,6 @@ jobs:
image-name: ${{ inputs.image_name }}
merge-server:
runs-on: ubuntu-latest
if: ${{ needs.get-tags.outputs.shouldPush == 'true' }}
needs:
- get-tags
- build-server-amd64

View File

@ -1,28 +0,0 @@
---
name: authentik-ci-main-daily
on:
workflow_dispatch:
schedule:
# Every night at 3am
- cron: "0 3 * * *"
jobs:
test-container:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
version:
- docs
- version-2025-2
- version-2024-12
steps:
- uses: actions/checkout@v4
- run: |
current="$(pwd)"
dir="/tmp/authentik/${{ matrix.version }}"
mkdir -p $dir
cd $dir
wget https://${{ matrix.version }}.goauthentik.io/docker-compose.yml
${current}/scripts/test_docker.sh

View File

@ -43,26 +43,15 @@ jobs:
uses: ./.github/actions/setup
- name: run migrations
run: poetry run python -m lifecycle.migrate
test-make-seed:
runs-on: ubuntu-latest
steps:
- id: seed
run: |
echo "seed=$(printf "%d\n" "0x$(openssl rand -hex 4)")" >> "$GITHUB_OUTPUT"
outputs:
seed: ${{ steps.seed.outputs.seed }}
test-migrations-from-stable:
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }}
runs-on: ubuntu-latest
timeout-minutes: 20
needs: test-make-seed
strategy:
fail-fast: false
matrix:
psql:
- 15-alpine
- 16-alpine
run_id: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v4
with:
@ -104,23 +93,18 @@ jobs:
env:
# Test in the main database that we just migrated from the previous stable version
AUTHENTIK_POSTGRESQL__TEST__NAME: authentik
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
CI_RUN_ID: ${{ matrix.run_id }}
CI_TOTAL_RUNS: "5"
run: |
poetry run make ci-test
poetry run make test
test-unittest:
name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
name: test-unittest - PostgreSQL ${{ matrix.psql }}
runs-on: ubuntu-latest
timeout-minutes: 20
needs: test-make-seed
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
psql:
- 15-alpine
- 16-alpine
run_id: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v4
- name: Setup authentik env
@ -128,12 +112,9 @@ jobs:
with:
postgresql_version: ${{ matrix.psql }}
- name: run unittest
env:
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
CI_RUN_ID: ${{ matrix.run_id }}
CI_TOTAL_RUNS: "5"
run: |
poetry run make ci-test
poetry run make test
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v5
with:
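The removed test-make-seed job existed so that every matrix job (run_id 1 to 5) reused one shuffle seed per workflow run; a rough Python equivalent of its shell one-liner, shown only to clarify what the seed is:

    import secrets

    # Shell original: printf "%d\n" "0x$(openssl rand -hex 4)"
    # i.e. four random bytes rendered as a decimal integer.
    seed = int(secrets.token_hex(4), 16)
    print(f"seed={seed}")  # written to $GITHUB_OUTPUT by the removed job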

View File

@ -82,7 +82,7 @@ jobs:
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.6.0
uses: docker/setup-qemu-action@v3.3.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables

View File

@ -9,17 +9,9 @@ jobs:
build-server:
uses: ./.github/workflows/_reusable-docker-build.yaml
secrets: inherit
permissions:
# Needed to upload container images to ghcr.io
packages: write
# Needed for attestation
id-token: write
attestations: write
with:
image_name: ghcr.io/goauthentik/server,beryju/authentik
release: true
registry_dockerhub: true
registry_ghcr: true
build-outpost:
runs-on: ubuntu-latest
permissions:
@ -42,7 +34,7 @@ jobs:
with:
go-version-file: "go.mod"
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.6.0
uses: docker/setup-qemu-action@v3.3.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
@ -186,7 +178,7 @@ jobs:
container=$(docker container create ${{ steps.ev.outputs.imageMainName }})
docker cp ${container}:web/ .
- name: Create a Sentry.io release
uses: getsentry/action-release@v3
uses: getsentry/action-release@v1
continue-on-error: true
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}

View File

@ -14,7 +14,16 @@ jobs:
- uses: actions/checkout@v4
- name: Pre-release test
run: |
make test-docker
echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env
docker buildx install
mkdir -p ./gen-ts-api
docker build -t testing:latest .
echo "AUTHENTIK_IMAGE=testing" >> .env
echo "AUTHENTIK_TAG=latest" >> .env
docker compose up --no-start
docker compose start postgresql redis
docker compose run -u root server test-all
- id: generate_token
uses: tibdex/github-app-token@v2
with:

View File

@ -1,8 +1,8 @@
name: "authentik-repo-stale"
name: 'authentik-repo-stale'
on:
schedule:
- cron: "30 1 * * *"
- cron: '30 1 * * *'
workflow_dispatch:
permissions:
@ -25,7 +25,7 @@ jobs:
days-before-stale: 60
days-before-close: 7
exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing
stale-issue-label: status/stale
stale-issue-label: wontfix
stale-issue-message: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you

View File

@ -1,13 +1,9 @@
---
name: authentik-translate-extract-compile
name: authentik-backend-translate-extract-compile
on:
schedule:
- cron: "0 0 * * *" # every day at midnight
workflow_dispatch:
pull_request:
branches:
- main
- version-*
env:
POSTGRES_DB: authentik
@ -19,21 +15,15 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: generate_token
if: ${{ github.event_name != 'pull_request' }}
uses: tibdex/github-app-token@v2
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v4
if: ${{ github.event_name != 'pull_request' }}
with:
token: ${{ steps.generate_token.outputs.token }}
- uses: actions/checkout@v4
if: ${{ github.event_name == 'pull_request' }}
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Generate API
run: make gen-client-ts
- name: run extract
run: |
poetry run make i18n-extract
@ -42,7 +32,6 @@ jobs:
poetry run ak compilemessages
make web-check-compile
- name: Create Pull Request
if: ${{ github.event_name != 'pull_request' }}
uses: peter-evans/create-pull-request@v7
with:
token: ${{ steps.generate_token.outputs.token }}

.gitignore vendored (3 changed lines)
View File

@ -209,6 +209,3 @@ source_docs/
### Golang ###
/vendor/
### Docker ###
docker-compose.override.yml

View File

@ -2,7 +2,6 @@
"recommendations": [
"bashmish.es6-string-css",
"bpruitt-goddard.mermaid-markdown-syntax-highlighting",
"charliermarsh.ruff",
"dbaeumer.vscode-eslint",
"EditorConfig.EditorConfig",
"esbenp.prettier-vscode",
@ -11,12 +10,12 @@
"Gruntfuggly.todo-tree",
"mechatroner.rainbow-csv",
"ms-python.black-formatter",
"ms-python.black-formatter",
"ms-python.debugpy",
"charliermarsh.ruff",
"ms-python.python",
"ms-python.vscode-pylance",
"ms-python.black-formatter",
"redhat.vscode-yaml",
"Tobermory.es6-string-html",
"unifiedjs.vscode-mdx",
"unifiedjs.vscode-mdx"
]
}

.vscode/launch.json vendored (66 changed lines)
View File

@ -2,76 +2,26 @@
"version": "0.2.0",
"configurations": [
{
"name": "Debug: Attach Server Core",
"type": "debugpy",
"name": "Python: PDB attach Server",
"type": "python",
"request": "attach",
"connect": {
"host": "localhost",
"port": 9901
"port": 6800
},
"pathMappings": [
{
"localRoot": "${workspaceFolder}",
"remoteRoot": "."
}
],
"justMyCode": true,
"django": true
},
{
"name": "Debug: Attach Worker",
"type": "debugpy",
"name": "Python: PDB attach Worker",
"type": "python",
"request": "attach",
"connect": {
"host": "localhost",
"port": 9901
"port": 6900
},
"pathMappings": [
{
"localRoot": "${workspaceFolder}",
"remoteRoot": "."
}
],
"justMyCode": true,
"django": true
},
{
"name": "Debug: Start Server Router",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/server",
"cwd": "${workspaceFolder}"
},
{
"name": "Debug: Start LDAP Outpost",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/ldap",
"cwd": "${workspaceFolder}"
},
{
"name": "Debug: Start Proxy Outpost",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/proxy",
"cwd": "${workspaceFolder}"
},
{
"name": "Debug: Start RAC Outpost",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/rac",
"cwd": "${workspaceFolder}"
},
{
"name": "Debug: Start Radius Outpost",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/radius",
"cwd": "${workspaceFolder}"
}
]
}

.vscode/settings.json vendored (22 changed lines)
View File

@ -1,4 +1,26 @@
{
"cSpell.words": [
"akadmin",
"asgi",
"authentik",
"authn",
"entra",
"goauthentik",
"jwe",
"jwks",
"kubernetes",
"oidc",
"openid",
"passwordless",
"plex",
"saml",
"scim",
"slo",
"sso",
"totp",
"traefik",
"webauthn"
],
"todo-tree.tree.showCountsInTree": true,
"todo-tree.tree.showBadges": true,
"yaml.customTags": [

View File

@ -5,7 +5,7 @@
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socioeconomic status,
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.

View File

@ -3,8 +3,7 @@
# Stage 1: Build website
FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS website-builder
ENV NODE_ENV=production \
GIT_UNAVAILABLE=true
ENV NODE_ENV=production
WORKDIR /work/website
@ -95,7 +94,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
# Stage 5: Python dependencies
FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS python-deps
FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips AS python-deps
ARG TARGETARCH
ARG TARGETVARIANT
@ -133,14 +132,13 @@ RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
. "$HOME/.cargo/env" && \
python -m venv /ak-root/venv/ && \
bash -c "source ${VENV_PATH}/bin/activate && \
pip3 install --upgrade pip poetry && \
pip3 install --upgrade pip && \
pip3 install poetry && \
poetry config --local installer.no-binary cryptography,xmlsec,lxml,python-kadmin-rs && \
poetry install --only=main --no-ansi --no-interaction --no-root && \
pip uninstall cryptography -y && \
poetry install --only=main --no-ansi --no-interaction --no-root"
# Stage 6: Run
FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS final-image
FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips AS final-image
ARG VERSION
ARG GIT_BUILD_HASH
@ -152,39 +150,34 @@ LABEL org.opencontainers.image.source=https://github.com/goauthentik/authentik
LABEL org.opencontainers.image.version=${VERSION}
LABEL org.opencontainers.image.revision=${GIT_BUILD_HASH}
WORKDIR /
# We cannot cache this layer otherwise we'll end up with a bigger image
RUN apt-get update && \
apt-get upgrade -y && \
# Required for runtime
apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates libkrb5-3 libkadm5clnt-mit12 libkdb5-10 libltdl7 libxslt1.1 && \
# Required for bootstrap & healtcheck
apt-get install -y --no-install-recommends runit && \
pip3 install --no-cache-dir --upgrade pip && \
apt-get clean && \
rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
adduser --system --no-create-home --uid 1000 --group --home /ak-root authentik && \
mkdir -p /certs /media /blueprints && \
mkdir -p /authentik/.ssh && \
mkdir -p /ak-root && \
chown authentik:authentik /certs /media /authentik/.ssh /ak-root
mkdir -p /ak-root/authentik/.ssh && \
chown authentik:authentik /certs /media /ak-root/authentik/.ssh /ak-root
COPY ./authentik/ /authentik
COPY ./pyproject.toml /
COPY ./poetry.lock /
COPY ./schemas /schemas
COPY ./locale /locale
COPY ./tests /tests
COPY ./manage.py /
COPY ./authentik/ /ak-root/authentik
COPY ./pyproject.toml /ak-root
COPY ./poetry.lock /ak-root
COPY ./schemas /ak-root/schemas
COPY ./locale /ak-root/locale
COPY ./tests /ak-root/tests
COPY ./manage.py /ak-root
COPY ./blueprints /blueprints
COPY ./lifecycle/ /lifecycle
COPY ./lifecycle/ /ak-root/lifecycle
COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf
COPY --from=go-builder /go/authentik /bin/authentik
COPY --from=python-deps /ak-root/venv /ak-root/venv
COPY --from=web-builder /work/web/dist/ /web/dist/
COPY --from=web-builder /work/web/authentik/ /web/authentik/
COPY --from=website-builder /work/website/build/ /website/help/
COPY --from=web-builder /work/web/dist/ /ak-root/web/dist/
COPY --from=web-builder /work/web/authentik/ /ak-root/web/authentik/
COPY --from=website-builder /work/website/build/ /ak-root/website/help/
COPY --from=geoip /usr/share/GeoIP /geoip
USER 1000
@ -192,11 +185,13 @@ USER 1000
ENV TMPDIR=/dev/shm/ \
PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \
PATH="/ak-root/venv/bin:/lifecycle:$PATH" \
PATH="/ak-root/venv/bin:/ak-root/lifecycle:$PATH" \
VENV_PATH="/ak-root/venv" \
POETRY_VIRTUALENVS_CREATE=false \
GOFIPS=1
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]
WORKDIR /ak-root
ENTRYPOINT [ "dumb-init", "--", "ak" ]

View File

@ -4,7 +4,7 @@
PWD = $(shell pwd)
UID = $(shell id -u)
GID = $(shell id -g)
NPM_VERSION = $(shell python -m scripts.generate_semver)
NPM_VERSION = $(shell python -m scripts.npm_version)
PY_SOURCES = authentik tests scripts lifecycle .github
DOCKER_IMAGE ?= "authentik:test"
@ -12,9 +12,23 @@ GEN_API_TS = "gen-ts-api"
GEN_API_PY = "gen-py-api"
GEN_API_GO = "gen-go-api"
pg_user := $(shell poetry run python -m authentik.lib.config postgresql.user 2>/dev/null)
pg_host := $(shell poetry run python -m authentik.lib.config postgresql.host 2>/dev/null)
pg_name := $(shell poetry run python -m authentik.lib.config postgresql.name 2>/dev/null)
pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null)
pg_host := $(shell python -m authentik.lib.config postgresql.host 2>/dev/null)
pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)
CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
-I .github/codespell-words.txt \
-S 'web/src/locales/**' \
-S 'website/docs/developer-docs/api/reference/**' \
authentik \
internal \
cmd \
web/src \
website/src \
website/blog \
website/docs \
website/integrations \
website/src
all: lint-fix lint test gen web ## Lint, build, and test everything
@ -31,27 +45,36 @@ help: ## Show this help
go-test:
go test -timeout 0 -v -race -cover ./...
test-docker: ## Run all tests in a docker-compose
echo "PG_PASS=$(shell openssl rand 32 | base64 -w 0)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(shell openssl rand 32 | base64 -w 0)" >> .env
docker compose pull -q
docker compose up --no-start
docker compose start postgresql redis
docker compose run -u root server test-all
rm -f .env
test: ## Run the server tests and produce a coverage report (locally)
poetry run coverage run manage.py test --keepdb authentik
poetry run coverage html
poetry run coverage report
coverage run manage.py test --keepdb authentik
coverage html
coverage report
lint-fix: lint-codespell ## Lint and automatically fix errors in the python source code. Reports spelling errors.
poetry run black $(PY_SOURCES)
poetry run ruff check --fix $(PY_SOURCES)
black $(PY_SOURCES)
ruff check --fix $(PY_SOURCES)
lint-codespell: ## Reports spelling errors.
poetry run codespell -w
codespell -w $(CODESPELL_ARGS)
lint: ## Lint the python and golang sources
poetry run bandit -c pyproject.toml -r $(PY_SOURCES)
bandit -r $(PY_SOURCES) -x web/node_modules -x tests/wdio/node_modules -x website/node_modules
golangci-lint run -v
core-install:
poetry install
migrate: ## Run the Authentik Django server's migrations
poetry run python -m lifecycle.migrate
python -m lifecycle.migrate
i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation into files to send to a translation service
@ -59,7 +82,7 @@ aws-cfn:
cd lifecycle/aws && npm run aws-cfn
core-i18n-extract:
poetry run ak makemessages \
ak makemessages \
--add-location file \
--no-obsolete \
--ignore web \
@ -90,11 +113,11 @@ gen-build: ## Extract the schema from the database
AUTHENTIK_DEBUG=true \
AUTHENTIK_TENANTS__ENABLED=true \
AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
poetry run ak make_blueprint_schema > blueprints/schema.json
ak make_blueprint_schema > blueprints/schema.json
AUTHENTIK_DEBUG=true \
AUTHENTIK_TENANTS__ENABLED=true \
AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
poetry run ak spectacular --file schema.yml
ak spectacular --file schema.yml
gen-changelog: ## (Release) generate the changelog based from the commits since the last tag
git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
@ -129,7 +152,7 @@ gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescri
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
-i /local/schema.yml \
-g typescript-fetch \
-o /local/${GEN_API_TS} \
@ -145,7 +168,7 @@ gen-client-py: gen-clean-py ## Build and install the authentik API for Python
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
docker.io/openapitools/openapi-generator-cli:v7.4.0 generate \
-i /local/schema.yml \
-g python \
-o /local/${GEN_API_PY} \
@ -173,7 +196,7 @@ gen-client-go: gen-clean-go ## Build and install the authentik API for Golang
rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/
gen-dev-config: ## Generate a local development config file
poetry run scripts/generate_config.py
python -m scripts.generate_config
gen: gen-build gen-client-ts
@ -240,9 +263,6 @@ docker: ## Build a docker image of the current source tree
mkdir -p ${GEN_API_TS}
DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}
test-docker:
BUILD=true ./scripts/test_docker.sh
#########################
## CI
#########################
@ -254,21 +274,16 @@ ci--meta-debug:
node --version
ci-black: ci--meta-debug
poetry run black --check $(PY_SOURCES)
black --check $(PY_SOURCES)
ci-ruff: ci--meta-debug
poetry run ruff check $(PY_SOURCES)
ruff check $(PY_SOURCES)
ci-codespell: ci--meta-debug
poetry run codespell -s
codespell $(CODESPELL_ARGS) -s
ci-bandit: ci--meta-debug
poetry run bandit -r $(PY_SOURCES)
bandit -r $(PY_SOURCES)
ci-pending-migrations: ci--meta-debug
poetry run ak makemigrations --check
ci-test: ci--meta-debug
poetry run coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik
poetry run coverage report
poetry run coverage xml
ak makemigrations --check

View File

@ -2,7 +2,7 @@ authentik takes security very seriously. We follow the rules of [responsible di
## Independent audits and pentests
We are committed to engaging in regular pentesting and security audits of authentik. Defining and adhering to a cadence of external testing ensures a stronger probability that our code base, our features, and our architecture is as secure and non-exploitable as possible. For more details about specific audits and pentests, refer to "Audits and Certificates" in our [Security documentation](https://docs.goauthentik.io/docs/security).
We are committed to engaging in regular pentesting and security audits of authentik. Defining and adhering to a cadence of external testing ensures a stronger probability that our code base, our features, and our architecture is as secure and non-exploitable as possible. For more details about specfic audits and pentests, refer to "Audits and Certificates" in our [Security documentation](https://docs.goauthentik.io/docs/security).
## What authentik classifies as a CVE
@ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni
| Version | Supported |
| --------- | --------- |
| 2024.10.x | ✅ |
| 2024.12.x | ✅ |
| 2025.2.x | ✅ |
## Reporting a Vulnerability

View File

@ -2,7 +2,7 @@
from os import environ
__version__ = "2025.2.1"
__version__ = "2024.12.2"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

View File

@ -50,7 +50,7 @@ from authentik.enterprise.providers.microsoft_entra.models import (
MicrosoftEntraProviderGroup,
MicrosoftEntraProviderUser,
)
from authentik.enterprise.providers.ssf.models import StreamEvent
from authentik.enterprise.providers.rac.models import ConnectionToken
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import (
EndpointDevice,
EndpointDeviceConnection,
@ -71,7 +71,6 @@ from authentik.providers.oauth2.models import (
DeviceToken,
RefreshToken,
)
from authentik.providers.rac.models import ConnectionToken
from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser
from authentik.rbac.models import Role
from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser
@ -132,7 +131,6 @@ def excluded_models() -> list[type[Model]]:
EndpointDevice,
EndpointDeviceConnection,
DeviceToken,
StreamEvent,
)

View File

@ -3,7 +3,6 @@
from django.utils.translation import gettext_lazy as _
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, extend_schema
from guardian.shortcuts import get_objects_for_user
from rest_framework.fields import (
BooleanField,
CharField,
@ -17,6 +16,7 @@ from rest_framework.viewsets import ViewSet
from authentik.core.api.utils import MetaNameSerializer
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice
from authentik.rbac.decorators import permission_required
from authentik.stages.authenticator import device_classes, devices_for_user
from authentik.stages.authenticator.models import Device
from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
@ -73,9 +73,7 @@ class AdminDeviceViewSet(ViewSet):
def get_devices(self, **kwargs):
"""Get all devices in all child classes"""
for model in device_classes():
device_set = get_objects_for_user(
self.request.user, f"{model._meta.app_label}.view_{model._meta.model_name}", model
).filter(**kwargs)
device_set = model.objects.filter(**kwargs)
yield from device_set
@extend_schema(
@ -88,6 +86,10 @@ class AdminDeviceViewSet(ViewSet):
],
responses={200: DeviceSerializer(many=True)},
)
@permission_required(
None,
[f"{model._meta.app_label}.view_{model._meta.model_name}" for model in device_classes()],
)
def list(self, request: Request) -> Response:
"""Get all devices for current user"""
kwargs = {}
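The two variants in this hunk differ in where access control happens: the version removed by this commit filtered each device queryset per object via django-guardian, while the restored version combines a view-level permission_required decorator with a plain model filter. A hedged sketch of the per-object variant, assuming a guardian-enabled Django setup:

    from guardian.shortcuts import get_objects_for_user

    def devices_visible_to(user, model, **filters):
        # Only return devices the user holds the view permission for,
        # checked per object rather than once per view.
        perm = f"{model._meta.app_label}.view_{model._meta.model_name}"
        return get_objects_for_user(user, perm, model).filter(**filters)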

View File

@ -4,7 +4,6 @@ from json import loads
from django.db.models import Prefetch
from django.http import Http404
from django.utils.translation import gettext as _
from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
from django_filters.filterset import FilterSet
from drf_spectacular.utils import (
@ -82,37 +81,9 @@ class GroupSerializer(ModelSerializer):
if not self.instance or not parent:
return parent
if str(parent.group_uuid) == str(self.instance.group_uuid):
raise ValidationError(_("Cannot set group as parent of itself."))
raise ValidationError("Cannot set group as parent of itself.")
return parent
def validate_is_superuser(self, superuser: bool):
"""Ensure that the user creating this group has permissions to set the superuser flag"""
request: Request = self.context.get("request", None)
if not request:
return superuser
# If we're updating an instance, and the state hasn't changed, we don't need to check perms
if self.instance and superuser == self.instance.is_superuser:
return superuser
user: User = request.user
perm = (
"authentik_core.enable_group_superuser"
if superuser
else "authentik_core.disable_group_superuser"
)
has_perm = user.has_perm(perm)
if self.instance and not has_perm:
has_perm = user.has_perm(perm, self.instance)
if not has_perm:
raise ValidationError(
_(
(
"User does not have permission to set "
"superuser status to {superuser_status}."
).format_map({"superuser_status": superuser})
)
)
return superuser
class Meta:
model = Group
fields = [
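The validate_is_superuser method removed above follows a common DRF plus guardian pattern: skip the check when the flag does not change, then accept either a global or an object-level permission. A condensed, hedged sketch of that pattern using the permission names from the diff (not a drop-in replacement for the serializer method):

    from rest_framework.exceptions import ValidationError

    def check_superuser_change(user, instance, superuser: bool):
        # No state change on an existing group: nothing to check.
        if instance is not None and superuser == instance.is_superuser:
            return superuser
        perm = (
            "authentik_core.enable_group_superuser"
            if superuser
            else "authentik_core.disable_group_superuser"
        )
        allowed = user.has_perm(perm) or (
            instance is not None and user.has_perm(perm, instance)
        )
        if not allowed:
            raise ValidationError(
                f"User does not have permission to set superuser status to {superuser}."
            )
        return superuser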

View File

@ -5,7 +5,6 @@ from collections.abc import Iterable
from drf_spectacular.utils import OpenApiResponse, extend_schema
from rest_framework import mixins
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField, ReadOnlyField, SerializerMethodField
from rest_framework.parsers import MultiPartParser
from rest_framework.request import Request
@ -86,7 +85,7 @@ class SourceViewSet(
serializer_class = SourceSerializer
lookup_field = "slug"
search_fields = ["slug", "name"]
filterset_fields = ["slug", "name", "managed", "pbm_uuid"]
filterset_fields = ["slug", "name", "managed"]
def get_queryset(self): # pragma: no cover
return Source.objects.select_subclasses()
@ -155,17 +154,6 @@ class SourceViewSet(
matching_sources.append(source_settings.validated_data)
return Response(matching_sources)
def destroy(self, request: Request, *args, **kwargs):
"""Prevent deletion of built-in sources"""
instance: Source = self.get_object()
if instance.managed == Source.MANAGED_INBUILT:
raise ValidationError(
{"detail": "Built-in sources cannot be deleted"}, code="protected"
)
return super().destroy(request, *args, **kwargs)
class UserSourceConnectionSerializer(SourceSerializer):
"""User source connection"""

View File

@ -236,11 +236,9 @@ class UserSerializer(ModelSerializer):
"path",
"type",
"uuid",
"password_change_date",
]
extra_kwargs = {
"name": {"allow_blank": True},
"password_change_date": {"read_only": True},
}

View File

@ -32,5 +32,5 @@ class AuthentikCoreConfig(ManagedAppConfig):
"name": "authentik Built-in",
"slug": "authentik-built-in",
},
managed=Source.MANAGED_INBUILT,
managed="goauthentik.io/sources/inbuilt",
)

View File

@ -5,7 +5,6 @@ from typing import TextIO
from daphne.management.commands.runserver import Command as RunServer
from daphne.server import Server
from authentik.lib.debug import start_debug_server
from authentik.root.signals import post_startup, pre_startup, startup
@ -14,7 +13,6 @@ class SignalServer(Server):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
start_debug_server()
def ready_callable():
pre_startup.send(sender=self)

View File

@ -9,7 +9,6 @@ from django.db import close_old_connections
from structlog.stdlib import get_logger
from authentik.lib.config import CONFIG
from authentik.lib.debug import start_debug_server
from authentik.root.celery import CELERY_APP
LOGGER = get_logger()
@ -29,7 +28,10 @@ class Command(BaseCommand):
def handle(self, **options):
LOGGER.debug("Celery options", **options)
close_old_connections()
start_debug_server()
if CONFIG.get_bool("remote_debug"):
import debugpy
debugpy.listen(("0.0.0.0", 6900)) # nosec
worker: Worker = CELERY_APP.Worker(
no_color=False,
quiet=True,

View File

@ -1,26 +0,0 @@
# Generated by Django 5.0.11 on 2025-01-30 23:55
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0042_authenticatedsession_authentik_c_expires_08251d_idx_and_more"),
]
operations = [
migrations.AlterModelOptions(
name="group",
options={
"permissions": [
("add_user_to_group", "Add user to group"),
("remove_user_from_group", "Remove user from group"),
("enable_group_superuser", "Enable superuser status"),
("disable_group_superuser", "Disable superuser status"),
],
"verbose_name": "Group",
"verbose_name_plural": "Groups",
},
),
]

View File

@ -204,8 +204,6 @@ class Group(SerializerModel, AttributesMixin):
permissions = [
("add_user_to_group", _("Add user to group")),
("remove_user_from_group", _("Remove user from group")),
("enable_group_superuser", _("Enable superuser status")),
("disable_group_superuser", _("Disable superuser status")),
]
def __str__(self):
@ -601,14 +599,6 @@ class Application(SerializerModel, PolicyBindingModel):
return None
return candidates[-1]
def backchannel_provider_for[T: Provider](self, provider_type: type[T], **kwargs) -> T | None:
"""Get Backchannel provider for a specific type"""
providers = self.backchannel_providers.filter(
**{f"{provider_type._meta.model_name}__isnull": False},
**kwargs,
)
return getattr(providers.first(), provider_type._meta.model_name)
def __str__(self):
return str(self.name)
@ -678,8 +668,6 @@ class SourceGroupMatchingModes(models.TextChoices):
class Source(ManagedModel, SerializerModel, PolicyBindingModel):
"""Base Authentication source, i.e. an OAuth Provider, SAML Remote or LDAP Server"""
MANAGED_INBUILT = "goauthentik.io/sources/inbuilt"
name = models.TextField(help_text=_("Source's display Name."))
slug = models.SlugField(help_text=_("Internal source name, used in URLs."), unique=True)

View File

@ -35,7 +35,8 @@ from authentik.flows.planner import (
FlowPlanner,
)
from authentik.flows.stage import StageView
from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_GET
from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_GET, SESSION_KEY_PLAN
from authentik.lib.utils.urls import redirect_with_qs
from authentik.lib.views import bad_request_message
from authentik.policies.denied import AccessDeniedResponse
from authentik.policies.utils import delete_none_values
@ -46,9 +47,8 @@ from authentik.stages.user_write.stage import PLAN_CONTEXT_USER_PATH
LOGGER = get_logger()
PLAN_CONTEXT_SOURCE_GROUPS = "source_groups"
SESSION_KEY_SOURCE_FLOW_STAGES = "authentik/flows/source_flow_stages"
SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token" # nosec
PLAN_CONTEXT_SOURCE_GROUPS = "source_groups"
class MessageStage(StageView):
@ -219,28 +219,28 @@ class SourceFlowManager:
}
)
flow_context.update(self.policy_context)
if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session:
token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN)
self._logger.info("Replacing source flow with overridden flow", flow=token.flow.slug)
plan = token.plan
plan.context[PLAN_CONTEXT_IS_RESTORED] = token
plan.context.update(flow_context)
for stage in self.get_stages_to_append(flow):
plan.append_stage(stage)
if stages:
for stage in stages:
plan.append_stage(stage)
self.request.session[SESSION_KEY_PLAN] = plan
flow_slug = token.flow.slug
token.delete()
return redirect_with_qs(
"authentik_core:if-flow",
self.request.GET,
flow_slug=flow_slug,
)
flow_context.setdefault(PLAN_CONTEXT_REDIRECT, final_redirect)
if not flow:
# We only check for the flow token here if we don't have a flow, otherwise we rely on
# SESSION_KEY_SOURCE_FLOW_STAGES to delegate the usage of this token and dynamically add
# stages that deal with this token to return to another flow
if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session:
token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN)
self._logger.info(
"Replacing source flow with overridden flow", flow=token.flow.slug
)
plan = token.plan
plan.context[PLAN_CONTEXT_IS_RESTORED] = token
plan.context.update(flow_context)
for stage in self.get_stages_to_append(flow):
plan.append_stage(stage)
if stages:
for stage in stages:
plan.append_stage(stage)
redirect = plan.to_redirect(self.request, token.flow)
token.delete()
return redirect
return bad_request_message(
self.request,
_("Configured flow does not exist."),
@ -259,8 +259,6 @@ class SourceFlowManager:
if stages:
for stage in stages:
plan.append_stage(stage)
for stage in self.request.session.get(SESSION_KEY_SOURCE_FLOW_STAGES, []):
plan.append_stage(stage)
return plan.to_redirect(self.request, flow)
def handle_auth(
@ -297,8 +295,6 @@ class SourceFlowManager:
# When request isn't authenticated we jump straight to auth
if not self.request.user.is_authenticated:
return self.handle_auth(connection)
# When an override flow token exists we actually still use a flow for link
# to continue the existing flow we came from
if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session:
return self._prepare_flow(None, connection)
connection.save()

View File

@ -67,8 +67,6 @@ def clean_expired_models(self: SystemTask):
raise ImproperlyConfigured(
"Invalid session_storage setting, allowed values are db and cache"
)
if CONFIG.get("session_storage", "cache") == "db":
DBSessionStore.clear_expired()
LOGGER.debug("Expired sessions", model=AuthenticatedSession, amount=amount)
messages.append(f"Expired {amount} {AuthenticatedSession._meta.verbose_name_plural}")

View File

@ -11,7 +11,6 @@
build: "{{ build }}",
api: {
base: "{{ base_url }}",
relBase: "{{ base_url_rel }}",
},
};
window.addEventListener("DOMContentLoaded", function () {

View File

@ -8,8 +8,6 @@
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
{# Darkreader breaks the site regardless of theme as its not compatible with webcomponents, and we default to a dark theme based on preferred colour-scheme #}
<meta name="darkreader-lock">
<title>{% block title %}{% trans title|default:brand.branding_title %}{% endblock %}</title>
<link rel="icon" href="{{ brand.branding_favicon_url }}">
<link rel="shortcut icon" href="{{ brand.branding_favicon_url }}">

View File

@ -4,7 +4,7 @@ from django.urls.base import reverse
from guardian.shortcuts import assign_perm
from rest_framework.test import APITestCase
from authentik.core.models import Group
from authentik.core.models import Group, User
from authentik.core.tests.utils import create_test_admin_user, create_test_user
from authentik.lib.generators import generate_id
@ -14,7 +14,7 @@ class TestGroupsAPI(APITestCase):
def setUp(self) -> None:
self.login_user = create_test_user()
self.user = create_test_user()
self.user = User.objects.create(username="test-user")
def test_list_with_users(self):
"""Test listing with users"""
@ -109,57 +109,3 @@ class TestGroupsAPI(APITestCase):
},
)
self.assertEqual(res.status_code, 400)
def test_superuser_no_perm(self):
"""Test creating a superuser group without permission"""
assign_perm("authentik_core.add_group", self.login_user)
self.client.force_login(self.login_user)
res = self.client.post(
reverse("authentik_api:group-list"),
data={"name": generate_id(), "is_superuser": True},
)
self.assertEqual(res.status_code, 400)
self.assertJSONEqual(
res.content,
{"is_superuser": ["User does not have permission to set superuser status to True."]},
)
def test_superuser_update_no_perm(self):
"""Test updating a superuser group without permission"""
group = Group.objects.create(name=generate_id(), is_superuser=True)
assign_perm("view_group", self.login_user, group)
assign_perm("change_group", self.login_user, group)
self.client.force_login(self.login_user)
res = self.client.patch(
reverse("authentik_api:group-detail", kwargs={"pk": group.pk}),
data={"is_superuser": False},
)
self.assertEqual(res.status_code, 400)
self.assertJSONEqual(
res.content,
{"is_superuser": ["User does not have permission to set superuser status to False."]},
)
def test_superuser_update_no_change(self):
"""Test updating a superuser group without permission
and without changing the superuser status"""
group = Group.objects.create(name=generate_id(), is_superuser=True)
assign_perm("view_group", self.login_user, group)
assign_perm("change_group", self.login_user, group)
self.client.force_login(self.login_user)
res = self.client.patch(
reverse("authentik_api:group-detail", kwargs={"pk": group.pk}),
data={"name": generate_id(), "is_superuser": True},
)
self.assertEqual(res.status_code, 200)
def test_superuser_create(self):
"""Test creating a superuser group with permission"""
assign_perm("authentik_core.add_group", self.login_user)
assign_perm("authentik_core.enable_group_superuser", self.login_user)
self.client.force_login(self.login_user)
res = self.client.post(
reverse("authentik_api:group-list"),
data={"name": generate_id(), "is_superuser": True},
)
self.assertEqual(res.status_code, 201)

View File

@ -55,7 +55,7 @@ class RedirectToAppLaunch(View):
)
except FlowNonApplicableException:
raise Http404 from None
plan.append_stage(in_memory_stage(RedirectToAppStage))
plan.insert_stage(in_memory_stage(RedirectToAppStage))
return plan.to_redirect(request, flow)

View File

@ -53,7 +53,6 @@ class InterfaceView(TemplateView):
kwargs["build"] = get_build_hash()
kwargs["url_kwargs"] = self.kwargs
kwargs["base_url"] = self.request.build_absolute_uri(CONFIG.get("web.path", "/"))
kwargs["base_url_rel"] = CONFIG.get("web.path", "/")
return super().get_context_data(**kwargs)

View File

@ -97,8 +97,6 @@ class EnterpriseAuditMiddleware(AuditMiddleware):
thread_kwargs: dict | None = None,
**_,
):
if not self.enabled:
return super().post_save_handler(request, sender, instance, created, thread_kwargs, **_)
if not should_log_model(instance):
return None
thread_kwargs = {}
@ -124,8 +122,6 @@ class EnterpriseAuditMiddleware(AuditMiddleware):
):
thread_kwargs = {}
m2m_field = None
if not self.enabled:
return super().m2m_changed_handler(request, sender, instance, action, thread_kwargs)
# For the audit log we don't care about `pre_` or `post_` so we trim that part off
_, _, action_direction = action.partition("_")
# resolve the "through" model to an actual field

View File

@ -37,7 +37,6 @@ class GoogleWorkspaceProviderSerializer(EnterpriseRequiredMixin, ProviderSeriali
"user_delete_action",
"group_delete_action",
"default_group_email_domain",
"dry_run",
]
extra_kwargs = {}

View File

@ -8,10 +8,9 @@ from httplib2 import HttpLib2Error, HttpLib2ErrorWithResponse
from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider
from authentik.lib.sync.outgoing import HTTP_CONFLICT
from authentik.lib.sync.outgoing.base import SAFE_METHODS, BaseOutgoingSyncClient
from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
from authentik.lib.sync.outgoing.exceptions import (
BadRequestSyncException,
DryRunRejected,
NotFoundSyncException,
ObjectExistsSyncException,
StopSync,
@ -44,8 +43,6 @@ class GoogleWorkspaceSyncClient[TModel: Model, TConnection: Model, TSchema: dict
self.domains.append(domain_name)
def _request(self, request: HttpRequest):
if self.provider.dry_run and request.method.upper() not in SAFE_METHODS:
raise DryRunRejected(request.uri, request.method, request.body)
try:
response = request.execute()
except GoogleAuthError as exc:
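The dry_run handling removed here (and in the matching Microsoft Entra client later in this diff) short-circuits every mutating call before it reaches the remote API. A minimal standalone sketch of the guard; SAFE_METHODS is assumed to be the usual read-only verbs, and DryRunRejected stands in for the exception imported above:

    class DryRunRejected(Exception):
        """Raised instead of performing a write while dry_run is enabled."""

    SAFE_METHODS = {"GET", "HEAD", "OPTIONS"}  # assumption: read-only verbs

    def guard_dry_run(provider, method: str, url: str, body=None):
        # Reject anything that would modify the remote system while the
        # provider is in dry-run mode; reads are still allowed.
        if provider.dry_run and method.upper() not in SAFE_METHODS:
            raise DryRunRejected(f"{method} {url} rejected by dry_run (body={body!r})")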

View File

@ -1,24 +0,0 @@
# Generated by Django 5.0.12 on 2025-02-24 19:43
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"authentik_providers_google_workspace",
"0003_googleworkspaceprovidergroup_attributes_and_more",
),
]
operations = [
migrations.AddField(
model_name="googleworkspaceprovider",
name="dry_run",
field=models.BooleanField(
default=False,
help_text="When enabled, provider will not modify or create objects in the remote system.",
),
),
]

View File

@ -36,7 +36,6 @@ class MicrosoftEntraProviderSerializer(EnterpriseRequiredMixin, ProviderSerializ
"filter_group",
"user_delete_action",
"group_delete_action",
"dry_run",
]
extra_kwargs = {}

View File

@ -3,7 +3,6 @@ from collections.abc import Coroutine
from dataclasses import asdict
from typing import Any
import httpx
from azure.core.exceptions import (
ClientAuthenticationError,
ServiceRequestError,
@ -13,7 +12,6 @@ from azure.identity.aio import ClientSecretCredential
from django.db.models import Model
from django.http import HttpResponseBadRequest, HttpResponseNotFound
from kiota_abstractions.api_error import APIError
from kiota_abstractions.request_information import RequestInformation
from kiota_authentication_azure.azure_identity_authentication_provider import (
AzureIdentityAuthenticationProvider,
)
@ -23,15 +21,13 @@ from msgraph.generated.models.o_data_errors.o_data_error import ODataError
from msgraph.graph_request_adapter import GraphRequestAdapter, options
from msgraph.graph_service_client import GraphServiceClient
from msgraph_core import GraphClientFactory
from opentelemetry import trace
from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider
from authentik.events.utils import sanitize_item
from authentik.lib.sync.outgoing import HTTP_CONFLICT
from authentik.lib.sync.outgoing.base import SAFE_METHODS, BaseOutgoingSyncClient
from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
from authentik.lib.sync.outgoing.exceptions import (
BadRequestSyncException,
DryRunRejected,
NotFoundSyncException,
ObjectExistsSyncException,
StopSync,
@ -39,24 +35,20 @@ from authentik.lib.sync.outgoing.exceptions import (
)
class AuthentikRequestAdapter(GraphRequestAdapter):
def __init__(self, auth_provider, provider: MicrosoftEntraProvider, client=None):
super().__init__(auth_provider, client)
self._provider = provider
def get_request_adapter(
credentials: ClientSecretCredential, scopes: list[str] | None = None
) -> GraphRequestAdapter:
if scopes:
auth_provider = AzureIdentityAuthenticationProvider(credentials=credentials, scopes=scopes)
else:
auth_provider = AzureIdentityAuthenticationProvider(credentials=credentials)
async def get_http_response_message(
self,
request_info: RequestInformation,
parent_span: trace.Span,
claims: str = "",
) -> httpx.Response:
if self._provider.dry_run and request_info.http_method.value.upper() not in SAFE_METHODS:
raise DryRunRejected(
url=request_info.url,
method=request_info.http_method.value,
body=request_info.content.decode("utf-8"),
)
return await super().get_http_response_message(request_info, parent_span, claims=claims)
return GraphRequestAdapter(
auth_provider=auth_provider,
client=GraphClientFactory.create_with_default_middleware(
options=options, client=KiotaClientFactory.get_default_client()
),
)
class MicrosoftEntraSyncClient[TModel: Model, TConnection: Model, TSchema: dict](
@ -71,27 +63,9 @@ class MicrosoftEntraSyncClient[TModel: Model, TConnection: Model, TSchema: dict]
self.credentials = provider.microsoft_credentials()
self.__prefetch_domains()
def get_request_adapter(
self, credentials: ClientSecretCredential, scopes: list[str] | None = None
) -> AuthentikRequestAdapter:
if scopes:
auth_provider = AzureIdentityAuthenticationProvider(
credentials=credentials, scopes=scopes
)
else:
auth_provider = AzureIdentityAuthenticationProvider(credentials=credentials)
return AuthentikRequestAdapter(
auth_provider=auth_provider,
provider=self.provider,
client=GraphClientFactory.create_with_default_middleware(
options=options, client=KiotaClientFactory.get_default_client()
),
)
@property
def client(self):
return GraphServiceClient(request_adapter=self.get_request_adapter(**self.credentials))
return GraphServiceClient(request_adapter=get_request_adapter(**self.credentials))
def _request[T](self, request: Coroutine[Any, Any, T]) -> T:
try:

View File

@ -1,24 +0,0 @@
# Generated by Django 5.0.12 on 2025-02-24 19:43
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"authentik_providers_microsoft_entra",
"0002_microsoftentraprovidergroup_attributes_and_more",
),
]
operations = [
migrations.AddField(
model_name="microsoftentraprovider",
name="dry_run",
field=models.BooleanField(
default=False,
help_text="When enabled, provider will not modify or create objects in the remote system.",
),
),
]

View File

@ -32,6 +32,7 @@ class MicrosoftEntraUserTests(APITestCase):
@apply_blueprint("system/providers-microsoft-entra.yaml")
def setUp(self) -> None:
# Delete all users and groups as the mocked HTTP responses only return one ID
# which will cause errors with multiple users
Tenant.objects.update(avatars="none")
@ -96,38 +97,6 @@ class MicrosoftEntraUserTests(APITestCase):
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
user_create.assert_called_once()
def test_user_create_dry_run(self):
"""Test user creation (dry run)"""
self.provider.dry_run = True
self.provider.save()
uid = generate_id()
with (
patch(
"authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
MagicMock(return_value={"credentials": self.creds}),
),
patch(
"msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
AsyncMock(
return_value=OrganizationCollectionResponse(
value=[
Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
]
)
),
),
):
user = User.objects.create(
username=uid,
name=f"{uid} {uid}",
email=f"{uid}@goauthentik.io",
)
microsoft_user = MicrosoftEntraProviderUser.objects.filter(
provider=self.provider, user=user
).first()
self.assertIsNone(microsoft_user)
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
def test_user_not_created(self):
"""Test without property mappings, no group is created"""
self.provider.property_mappings.clear()

View File

@ -6,12 +6,13 @@ from rest_framework.viewsets import GenericViewSet
from authentik.core.api.groups import GroupMemberSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.providers.rac.api.endpoints import EndpointSerializer
from authentik.providers.rac.api.providers import RACProviderSerializer
from authentik.providers.rac.models import ConnectionToken
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.providers.rac.api.endpoints import EndpointSerializer
from authentik.enterprise.providers.rac.api.providers import RACProviderSerializer
from authentik.enterprise.providers.rac.models import ConnectionToken
class ConnectionTokenSerializer(ModelSerializer):
class ConnectionTokenSerializer(EnterpriseRequiredMixin, ModelSerializer):
"""ConnectionToken Serializer"""
provider_obj = RACProviderSerializer(source="provider", read_only=True)

View File

@ -14,9 +14,10 @@ from structlog.stdlib import get_logger
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.core.models import Provider
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.providers.rac.api.providers import RACProviderSerializer
from authentik.enterprise.providers.rac.models import Endpoint
from authentik.policies.engine import PolicyEngine
from authentik.providers.rac.api.providers import RACProviderSerializer
from authentik.providers.rac.models import Endpoint
from authentik.rbac.filters import ObjectFilter
LOGGER = get_logger()
@ -27,7 +28,7 @@ def user_endpoint_cache_key(user_pk: str) -> str:
return f"goauthentik.io/providers/rac/endpoint_access/{user_pk}"
class EndpointSerializer(ModelSerializer):
class EndpointSerializer(EnterpriseRequiredMixin, ModelSerializer):
"""Endpoint Serializer"""
provider_obj = RACProviderSerializer(source="provider", read_only=True)

View File

@ -10,7 +10,7 @@ from rest_framework.viewsets import ModelViewSet
from authentik.core.api.property_mappings import PropertyMappingSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import JSONDictField
from authentik.providers.rac.models import RACPropertyMapping
from authentik.enterprise.providers.rac.models import RACPropertyMapping
class RACPropertyMappingSerializer(PropertyMappingSerializer):

View File

@ -5,10 +5,11 @@ from rest_framework.viewsets import ModelViewSet
from authentik.core.api.providers import ProviderSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.providers.rac.models import RACProvider
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.providers.rac.models import RACProvider
class RACProviderSerializer(ProviderSerializer):
class RACProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
"""RACProvider Serializer"""
outpost_set = ListField(child=CharField(), read_only=True, source="outpost_set.all")

View File

@ -0,0 +1,14 @@
"""RAC app config"""
from authentik.enterprise.apps import EnterpriseConfig
class AuthentikEnterpriseProviderRAC(EnterpriseConfig):
"""authentik enterprise rac app config"""
name = "authentik.enterprise.providers.rac"
label = "authentik_providers_rac"
verbose_name = "authentik Enterprise.Providers.RAC"
default = True
mountpoint = ""
ws_mountpoint = "authentik.enterprise.providers.rac.urls"

View File

@ -7,22 +7,22 @@ from channels.generic.websocket import AsyncWebsocketConsumer
from django.http.request import QueryDict
from structlog.stdlib import BoundLogger, get_logger
from authentik.enterprise.providers.rac.models import ConnectionToken, RACProvider
from authentik.outposts.consumer import OUTPOST_GROUP_INSTANCE
from authentik.outposts.models import Outpost, OutpostState, OutpostType
from authentik.providers.rac.models import ConnectionToken, RACProvider
# Global broadcast group, which messages are sent to when the outpost connects back
# to authentik for a specific connection
# The `RACClientConsumer` consumer adds itself to this group on connection,
# and removes itself once it has been assigned a specific outpost channel
RAC_CLIENT_GROUP = "group_rac_client"
RAC_CLIENT_GROUP = "group_enterprise_rac_client"
# A group for all connections in a given authentik session ID
# A disconnect message is sent to this group when the session expires/is deleted
RAC_CLIENT_GROUP_SESSION = "group_rac_client_%(session)s"
RAC_CLIENT_GROUP_SESSION = "group_enterprise_rac_client_%(session)s"
# A group for all connections with a specific token, which in almost all cases
# is just one connection, however this is used to disconnect the connection
# when the token is deleted
RAC_CLIENT_GROUP_TOKEN = "group_rac_token_%(token)s" # nosec
RAC_CLIENT_GROUP_TOKEN = "group_enterprise_rac_token_%(token)s" # nosec
# Step 1: Client connects to this websocket endpoint
# Step 2: We prepare all the connection args for Guac

View File

@ -3,7 +3,7 @@
from channels.exceptions import ChannelFull
from channels.generic.websocket import AsyncWebsocketConsumer
from authentik.providers.rac.consumer_client import RAC_CLIENT_GROUP
from authentik.enterprise.providers.rac.consumer_client import RAC_CLIENT_GROUP
class RACOutpostConsumer(AsyncWebsocketConsumer):

View File

@ -74,7 +74,7 @@ class RACProvider(Provider):
@property
def serializer(self) -> type[Serializer]:
from authentik.providers.rac.api.providers import RACProviderSerializer
from authentik.enterprise.providers.rac.api.providers import RACProviderSerializer
return RACProviderSerializer
@ -100,7 +100,7 @@ class Endpoint(SerializerModel, PolicyBindingModel):
@property
def serializer(self) -> type[Serializer]:
from authentik.providers.rac.api.endpoints import EndpointSerializer
from authentik.enterprise.providers.rac.api.endpoints import EndpointSerializer
return EndpointSerializer
@ -129,7 +129,7 @@ class RACPropertyMapping(PropertyMapping):
@property
def serializer(self) -> type[Serializer]:
from authentik.providers.rac.api.property_mappings import (
from authentik.enterprise.providers.rac.api.property_mappings import (
RACPropertyMappingSerializer,
)

View File

@ -4,17 +4,18 @@ from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer
from django.contrib.auth.signals import user_logged_out
from django.core.cache import cache
from django.db.models.signals import post_delete, post_save, pre_delete
from django.db.models import Model
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from django.http import HttpRequest
from authentik.core.models import User
from authentik.providers.rac.api.endpoints import user_endpoint_cache_key
from authentik.providers.rac.consumer_client import (
from authentik.enterprise.providers.rac.api.endpoints import user_endpoint_cache_key
from authentik.enterprise.providers.rac.consumer_client import (
RAC_CLIENT_GROUP_SESSION,
RAC_CLIENT_GROUP_TOKEN,
)
from authentik.providers.rac.models import ConnectionToken, Endpoint
from authentik.enterprise.providers.rac.models import ConnectionToken, Endpoint
@receiver(user_logged_out)
@ -45,8 +46,12 @@ def pre_delete_connection_token_disconnect(sender, instance: ConnectionToken, **
)
@receiver([post_save, post_delete], sender=Endpoint)
def post_save_post_delete_endpoint(**_):
"""Clear user's endpoint cache upon endpoint creation or deletion"""
@receiver(post_save, sender=Endpoint)
def post_save_endpoint(sender: type[Model], instance, created: bool, **_):
"""Clear user's endpoint cache upon endpoint creation"""
if not created: # pragma: no cover
return
# Delete user endpoint cache
keys = cache.keys(user_endpoint_cache_key("*"))
cache.delete_many(keys)

View File

@ -3,7 +3,7 @@
{% load authentik_core %}
{% block head %}
<script src="{% versioned_script 'dist/rac/index-%v.js' %}" type="module"></script>
<script src="{% versioned_script 'dist/enterprise/rac/index-%v.js' %}" type="module"></script>
<meta name="theme-color" content="#18191a" media="(prefers-color-scheme: dark)">
<meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)">
<link rel="icon" href="{{ tenant.branding_favicon_url }}">

View File

@ -1,9 +1,16 @@
"""Test RAC Provider"""
from datetime import timedelta
from time import mktime
from unittest.mock import MagicMock, patch
from django.urls import reverse
from django.utils.timezone import now
from rest_framework.test import APITestCase
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.enterprise.license import LicenseKey
from authentik.enterprise.models import License
from authentik.lib.generators import generate_id
@ -13,8 +20,21 @@ class TestAPI(APITestCase):
def setUp(self) -> None:
self.user = create_test_admin_user()
@patch(
"authentik.enterprise.license.LicenseKey.validate",
MagicMock(
return_value=LicenseKey(
aud="",
exp=int(mktime((now() + timedelta(days=3000)).timetuple())),
name=generate_id(),
internal_users=100,
external_users=100,
)
),
)
def test_create(self):
"""Test creation of RAC Provider"""
License.objects.create(key=generate_id())
self.client.force_login(self.user)
response = self.client.post(
reverse("authentik_api:racprovider-list"),

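The same LicenseKey.validate patch recurs in the RAC view tests further down; a minimal sketch of factoring it into a reusable helper, using only the fields already constructed above (the helper name is hypothetical).

from datetime import timedelta
from time import mktime
from unittest.mock import MagicMock, patch

from django.utils.timezone import now

from authentik.enterprise.license import LicenseKey
from authentik.lib.generators import generate_id


def patch_valid_license():
    """Hypothetical helper returning a patcher that makes license validation pass."""
    return patch(
        "authentik.enterprise.license.LicenseKey.validate",
        MagicMock(
            return_value=LicenseKey(
                aud="",
                exp=int(mktime((now() + timedelta(days=3000)).timetuple())),
                name=generate_id(),
                internal_users=100,
                external_users=100,
            )
        ),
    )

Each test method can then be decorated with @patch_valid_license() instead of repeating the block.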
View File

@ -5,10 +5,10 @@ from rest_framework.test import APITestCase
from authentik.core.models import Application
from authentik.core.tests.utils import create_test_admin_user
from authentik.enterprise.providers.rac.models import Endpoint, Protocols, RACProvider
from authentik.lib.generators import generate_id
from authentik.policies.dummy.models import DummyPolicy
from authentik.policies.models import PolicyBinding
from authentik.providers.rac.models import Endpoint, Protocols, RACProvider
class TestEndpointsAPI(APITestCase):

View File

@ -4,14 +4,14 @@ from django.test import TransactionTestCase
from authentik.core.models import Application, AuthenticatedSession
from authentik.core.tests.utils import create_test_admin_user
from authentik.lib.generators import generate_id
from authentik.providers.rac.models import (
from authentik.enterprise.providers.rac.models import (
ConnectionToken,
Endpoint,
Protocols,
RACPropertyMapping,
RACProvider,
)
from authentik.lib.generators import generate_id
class TestModels(TransactionTestCase):

View File

@ -1,17 +1,23 @@
"""RAC Views tests"""
from datetime import timedelta
from json import loads
from time import mktime
from unittest.mock import MagicMock, patch
from django.urls import reverse
from django.utils.timezone import now
from rest_framework.test import APITestCase
from authentik.core.models import Application
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.enterprise.license import LicenseKey
from authentik.enterprise.models import License
from authentik.enterprise.providers.rac.models import Endpoint, Protocols, RACProvider
from authentik.lib.generators import generate_id
from authentik.policies.denied import AccessDeniedResponse
from authentik.policies.dummy.models import DummyPolicy
from authentik.policies.models import PolicyBinding
from authentik.providers.rac.models import Endpoint, Protocols, RACProvider
class TestRACViews(APITestCase):
@ -33,8 +39,21 @@ class TestRACViews(APITestCase):
provider=self.provider,
)
@patch(
"authentik.enterprise.license.LicenseKey.validate",
MagicMock(
return_value=LicenseKey(
aud="",
exp=int(mktime((now() + timedelta(days=3000)).timetuple())),
name=generate_id(),
internal_users=100,
external_users=100,
)
),
)
def test_no_policy(self):
"""Test request"""
License.objects.create(key=generate_id())
self.client.force_login(self.user)
response = self.client.get(
reverse(
@ -51,6 +70,18 @@ class TestRACViews(APITestCase):
final_response = self.client.get(next_url)
self.assertEqual(final_response.status_code, 200)
@patch(
"authentik.enterprise.license.LicenseKey.validate",
MagicMock(
return_value=LicenseKey(
aud="",
exp=int(mktime((now() + timedelta(days=3000)).timetuple())),
name=generate_id(),
internal_users=100,
external_users=100,
)
),
)
def test_app_deny(self):
"""Test request (deny on app level)"""
PolicyBinding.objects.create(
@ -58,6 +89,7 @@ class TestRACViews(APITestCase):
policy=DummyPolicy.objects.create(name="deny", result=False, wait_min=1, wait_max=2),
order=0,
)
License.objects.create(key=generate_id())
self.client.force_login(self.user)
response = self.client.get(
reverse(
@ -67,6 +99,18 @@ class TestRACViews(APITestCase):
)
self.assertIsInstance(response, AccessDeniedResponse)
@patch(
"authentik.enterprise.license.LicenseKey.validate",
MagicMock(
return_value=LicenseKey(
aud="",
exp=int(mktime((now() + timedelta(days=3000)).timetuple())),
name=generate_id(),
internal_users=100,
external_users=100,
)
),
)
def test_endpoint_deny(self):
"""Test request (deny on endpoint level)"""
PolicyBinding.objects.create(
@ -74,6 +118,7 @@ class TestRACViews(APITestCase):
policy=DummyPolicy.objects.create(name="deny", result=False, wait_min=1, wait_max=2),
order=0,
)
License.objects.create(key=generate_id())
self.client.force_login(self.user)
response = self.client.get(
reverse(

View File

@ -4,14 +4,14 @@ from channels.auth import AuthMiddleware
from channels.sessions import CookieMiddleware
from django.urls import path
from authentik.enterprise.providers.rac.api.connection_tokens import ConnectionTokenViewSet
from authentik.enterprise.providers.rac.api.endpoints import EndpointViewSet
from authentik.enterprise.providers.rac.api.property_mappings import RACPropertyMappingViewSet
from authentik.enterprise.providers.rac.api.providers import RACProviderViewSet
from authentik.enterprise.providers.rac.consumer_client import RACClientConsumer
from authentik.enterprise.providers.rac.consumer_outpost import RACOutpostConsumer
from authentik.enterprise.providers.rac.views import RACInterface, RACStartView
from authentik.outposts.channels import TokenOutpostMiddleware
from authentik.providers.rac.api.connection_tokens import ConnectionTokenViewSet
from authentik.providers.rac.api.endpoints import EndpointViewSet
from authentik.providers.rac.api.property_mappings import RACPropertyMappingViewSet
from authentik.providers.rac.api.providers import RACProviderViewSet
from authentik.providers.rac.consumer_client import RACClientConsumer
from authentik.providers.rac.consumer_outpost import RACOutpostConsumer
from authentik.providers.rac.views import RACInterface, RACStartView
from authentik.root.asgi_middleware import SessionMiddleware
from authentik.root.middleware import ChannelsLoggingMiddleware

View File

@ -10,6 +10,8 @@ from django.utils.translation import gettext as _
from authentik.core.models import Application, AuthenticatedSession
from authentik.core.views.interface import InterfaceView
from authentik.enterprise.policy import EnterprisePolicyAccessView
from authentik.enterprise.providers.rac.models import ConnectionToken, Endpoint, RACProvider
from authentik.events.models import Event, EventAction
from authentik.flows.challenge import RedirectChallenge
from authentik.flows.exceptions import FlowNonApplicableException
@ -18,11 +20,9 @@ from authentik.flows.planner import PLAN_CONTEXT_APPLICATION, FlowPlanner
from authentik.flows.stage import RedirectStage
from authentik.lib.utils.time import timedelta_from_string
from authentik.policies.engine import PolicyEngine
from authentik.policies.views import PolicyAccessView
from authentik.providers.rac.models import ConnectionToken, Endpoint, RACProvider
class RACStartView(PolicyAccessView):
class RACStartView(EnterprisePolicyAccessView):
"""Start a RAC connection by checking access and creating a connection token"""
endpoint: Endpoint
@ -46,7 +46,7 @@ class RACStartView(PolicyAccessView):
)
except FlowNonApplicableException:
raise Http404 from None
plan.append_stage(
plan.insert_stage(
in_memory_stage(
RACFinalStage,
application=self.application,

View File

@ -1,64 +0,0 @@
"""SSF Provider API Views"""
from django.urls import reverse
from rest_framework.fields import SerializerMethodField
from rest_framework.request import Request
from rest_framework.viewsets import ModelViewSet
from authentik.core.api.providers import ProviderSerializer
from authentik.core.api.tokens import TokenSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.providers.ssf.models import SSFProvider
class SSFProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
"""SSFProvider Serializer"""
ssf_url = SerializerMethodField()
token_obj = TokenSerializer(source="token", required=False, read_only=True)
def get_ssf_url(self, instance: SSFProvider) -> str | None:
request: Request = self._context.get("request")
if not request:
return None
if not instance.backchannel_application:
return None
return request.build_absolute_uri(
reverse(
"authentik_providers_ssf:configuration",
kwargs={
"application_slug": instance.backchannel_application.slug,
},
)
)
class Meta:
model = SSFProvider
fields = [
"pk",
"name",
"component",
"verbose_name",
"verbose_name_plural",
"meta_model_name",
"signing_key",
"token_obj",
"oidc_auth_providers",
"ssf_url",
"event_retention",
]
extra_kwargs = {}
class SSFProviderViewSet(UsedByMixin, ModelViewSet):
"""SSFProvider Viewset"""
queryset = SSFProvider.objects.all()
serializer_class = SSFProviderSerializer
filterset_fields = {
"application": ["isnull"],
"name": ["iexact"],
}
search_fields = ["name"]
ordering = ["name"]

View File

@ -1,37 +0,0 @@
"""SSF Stream API Views"""
from rest_framework.viewsets import ReadOnlyModelViewSet
from authentik.core.api.utils import ModelSerializer
from authentik.enterprise.providers.ssf.api.providers import SSFProviderSerializer
from authentik.enterprise.providers.ssf.models import Stream
class SSFStreamSerializer(ModelSerializer):
"""SSFStream Serializer"""
provider_obj = SSFProviderSerializer(source="provider", read_only=True)
class Meta:
model = Stream
fields = [
"pk",
"provider",
"provider_obj",
"delivery_method",
"endpoint_url",
"events_requested",
"format",
"aud",
"iss",
]
class SSFStreamViewSet(ReadOnlyModelViewSet):
"""SSFStream Viewset"""
queryset = Stream.objects.all()
serializer_class = SSFStreamSerializer
filterset_fields = ["provider", "endpoint_url", "delivery_method"]
search_fields = ["provider__name", "endpoint_url"]
ordering = ["provider", "uuid"]

View File

@ -1,13 +0,0 @@
"""SSF app config"""
from authentik.enterprise.apps import EnterpriseConfig
class AuthentikEnterpriseProviderSSF(EnterpriseConfig):
"""authentik enterprise ssf app config"""
name = "authentik.enterprise.providers.ssf"
label = "authentik_providers_ssf"
verbose_name = "authentik Enterprise.Providers.SSF"
default = True
mountpoint = ""

View File

@ -1,201 +0,0 @@
# Generated by Django 5.0.11 on 2025-02-05 16:20
import authentik.lib.utils.time
import django.contrib.postgres.fields
import django.db.models.deletion
import uuid
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
("authentik_core", "0042_authenticatedsession_authentik_c_expires_08251d_idx_and_more"),
("authentik_crypto", "0004_alter_certificatekeypair_name"),
("authentik_providers_oauth2", "0027_accesstoken_authentik_p_expires_9f24a5_idx_and_more"),
]
operations = [
migrations.CreateModel(
name="SSFProvider",
fields=[
(
"provider_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="authentik_core.provider",
),
),
(
"event_retention",
models.TextField(
default="days=30",
validators=[authentik.lib.utils.time.timedelta_string_validator],
),
),
(
"oidc_auth_providers",
models.ManyToManyField(
blank=True, default=None, to="authentik_providers_oauth2.oauth2provider"
),
),
(
"signing_key",
models.ForeignKey(
help_text="Key used to sign the SSF Events.",
on_delete=django.db.models.deletion.CASCADE,
to="authentik_crypto.certificatekeypair",
verbose_name="Signing Key",
),
),
(
"token",
models.ForeignKey(
default=None,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="authentik_core.token",
),
),
],
options={
"verbose_name": "Shared Signals Framework Provider",
"verbose_name_plural": "Shared Signals Framework Providers",
"permissions": [("add_stream", "Add stream to SSF provider")],
},
bases=("authentik_core.provider",),
),
migrations.CreateModel(
name="Stream",
fields=[
(
"uuid",
models.UUIDField(
default=uuid.uuid4, editable=False, primary_key=True, serialize=False
),
),
(
"delivery_method",
models.TextField(
choices=[
(
"https://schemas.openid.net/secevent/risc/delivery-method/push",
"Risc Push",
),
(
"https://schemas.openid.net/secevent/risc/delivery-method/poll",
"Risc Poll",
),
]
),
),
("endpoint_url", models.TextField(null=True)),
(
"events_requested",
django.contrib.postgres.fields.ArrayField(
base_field=models.TextField(
choices=[
(
"https://schemas.openid.net/secevent/caep/event-type/session-revoked",
"Caep Session Revoked",
),
(
"https://schemas.openid.net/secevent/caep/event-type/credential-change",
"Caep Credential Change",
),
(
"https://schemas.openid.net/secevent/ssf/event-type/verification",
"Set Verification",
),
]
),
default=list,
size=None,
),
),
("format", models.TextField()),
(
"aud",
django.contrib.postgres.fields.ArrayField(
base_field=models.TextField(), default=list, size=None
),
),
("iss", models.TextField()),
(
"provider",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="authentik_providers_ssf.ssfprovider",
),
),
],
options={
"verbose_name": "SSF Stream",
"verbose_name_plural": "SSF Streams",
"default_permissions": ["change", "delete", "view"],
},
),
migrations.CreateModel(
name="StreamEvent",
fields=[
("created", models.DateTimeField(auto_now_add=True)),
("last_updated", models.DateTimeField(auto_now=True)),
("expires", models.DateTimeField(default=None, null=True)),
("expiring", models.BooleanField(default=True)),
(
"uuid",
models.UUIDField(
default=uuid.uuid4, editable=False, primary_key=True, serialize=False
),
),
(
"status",
models.TextField(
choices=[
("pending_new", "Pending New"),
("pending_failed", "Pending Failed"),
("sent", "Sent"),
]
),
),
(
"type",
models.TextField(
choices=[
(
"https://schemas.openid.net/secevent/caep/event-type/session-revoked",
"Caep Session Revoked",
),
(
"https://schemas.openid.net/secevent/caep/event-type/credential-change",
"Caep Credential Change",
),
(
"https://schemas.openid.net/secevent/ssf/event-type/verification",
"Set Verification",
),
]
),
),
("payload", models.JSONField(default=dict)),
(
"stream",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="authentik_providers_ssf.stream",
),
),
],
options={
"verbose_name": "SSF Stream Event",
"verbose_name_plural": "SSF Stream Events",
"ordering": ("-created",),
},
),
]

View File

@ -1,178 +0,0 @@
from datetime import datetime
from functools import cached_property
from uuid import uuid4
from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.templatetags.static import static
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
from jwt import encode
from authentik.core.models import BackchannelProvider, ExpiringModel, Token
from authentik.crypto.models import CertificateKeyPair
from authentik.lib.models import CreatedUpdatedModel
from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator
from authentik.providers.oauth2.models import JWTAlgorithms, OAuth2Provider
class EventTypes(models.TextChoices):
"""SSF Event types supported by authentik"""
CAEP_SESSION_REVOKED = "https://schemas.openid.net/secevent/caep/event-type/session-revoked"
CAEP_CREDENTIAL_CHANGE = "https://schemas.openid.net/secevent/caep/event-type/credential-change"
SET_VERIFICATION = "https://schemas.openid.net/secevent/ssf/event-type/verification"
class DeliveryMethods(models.TextChoices):
"""SSF Delivery methods"""
RISC_PUSH = "https://schemas.openid.net/secevent/risc/delivery-method/push"
RISC_POLL = "https://schemas.openid.net/secevent/risc/delivery-method/poll"
class SSFEventStatus(models.TextChoices):
"""SSF Event status"""
PENDING_NEW = "pending_new"
PENDING_FAILED = "pending_failed"
SENT = "sent"
class SSFProvider(BackchannelProvider):
"""Shared Signals Framework provider to allow applications to
receive user events from authentik."""
signing_key = models.ForeignKey(
CertificateKeyPair,
verbose_name=_("Signing Key"),
on_delete=models.CASCADE,
help_text=_("Key used to sign the SSF Events."),
)
oidc_auth_providers = models.ManyToManyField(OAuth2Provider, blank=True, default=None)
token = models.ForeignKey(Token, on_delete=models.CASCADE, null=True, default=None)
event_retention = models.TextField(
default="days=30",
validators=[timedelta_string_validator],
)
@cached_property
def jwt_key(self) -> tuple[PrivateKeyTypes, str]:
"""Get either the configured certificate or the client secret"""
key: CertificateKeyPair = self.signing_key
private_key = key.private_key
if isinstance(private_key, RSAPrivateKey):
return private_key, JWTAlgorithms.RS256
if isinstance(private_key, EllipticCurvePrivateKey):
return private_key, JWTAlgorithms.ES256
raise ValueError(f"Invalid private key type: {type(private_key)}")
@property
def service_account_identifier(self) -> str:
return f"ak-providers-ssf-{self.pk}"
@property
def serializer(self):
from authentik.enterprise.providers.ssf.api.providers import SSFProviderSerializer
return SSFProviderSerializer
@property
def icon_url(self) -> str | None:
return static("authentik/sources/ssf.svg")
@property
def component(self) -> str:
return "ak-provider-ssf-form"
class Meta:
verbose_name = _("Shared Signals Framework Provider")
verbose_name_plural = _("Shared Signals Framework Providers")
permissions = [
# This overrides the default "add_stream" permission of the Stream object,
# as the user requesting to add a stream must have the permission on the provider
("add_stream", _("Add stream to SSF provider")),
]
class Stream(models.Model):
"""SSF Stream"""
uuid = models.UUIDField(default=uuid4, primary_key=True, editable=False)
provider = models.ForeignKey(SSFProvider, on_delete=models.CASCADE)
delivery_method = models.TextField(choices=DeliveryMethods.choices)
endpoint_url = models.TextField(null=True)
events_requested = ArrayField(models.TextField(choices=EventTypes.choices), default=list)
format = models.TextField()
aud = ArrayField(models.TextField(), default=list)
iss = models.TextField()
class Meta:
verbose_name = _("SSF Stream")
verbose_name_plural = _("SSF Streams")
default_permissions = ["change", "delete", "view"]
def __str__(self) -> str:
return "SSF Stream"
def prepare_event_payload(self, type: EventTypes, event_data: dict, **kwargs) -> dict:
jti = uuid4()
_now = now()
return {
"uuid": jti,
"stream_id": str(self.pk),
"type": type,
"expiring": True,
"status": SSFEventStatus.PENDING_NEW,
"expires": _now + timedelta_from_string(self.provider.event_retention),
"payload": {
"jti": jti.hex,
"aud": self.aud,
"iat": int(datetime.now().timestamp()),
"iss": self.iss,
"events": {type: event_data},
**kwargs,
},
}
def encode(self, data: dict) -> str:
headers = {}
if self.provider.signing_key:
headers["kid"] = self.provider.signing_key.kid
key, alg = self.provider.jwt_key
return encode(data, key, algorithm=alg, headers=headers)
class StreamEvent(CreatedUpdatedModel, ExpiringModel):
"""Single stream event to be sent"""
uuid = models.UUIDField(default=uuid4, primary_key=True, editable=False)
stream = models.ForeignKey(Stream, on_delete=models.CASCADE)
status = models.TextField(choices=SSFEventStatus.choices)
type = models.TextField(choices=EventTypes.choices)
payload = models.JSONField(default=dict)
def expire_action(self, *args, **kwargs):
"""Only allow automatic cleanup of successfully sent event"""
if self.status != SSFEventStatus.SENT:
return
return super().expire_action(*args, **kwargs)
def __str__(self):
return f"Stream event {self.type}"
class Meta:
verbose_name = _("SSF Stream Event")
verbose_name_plural = _("SSF Stream Events")
ordering = ("-created",)
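
A minimal sketch of how a receiving service might verify a Security Event Token produced by Stream.encode above, assuming PyJWT (the same library whose encode is used here) and that the receiver already holds the provider's public key and the iss/aud values it registered; the function name is illustrative.

import jwt  # PyJWT


def verify_set(set_token: str, public_key, issuer: str, audience: str) -> dict:
    """Decode a pushed SET and verify its signature, issuer and audience."""
    return jwt.decode(
        set_token,
        key=public_key,
        # Matches the JWTAlgorithms returned by SSFProvider.jwt_key
        algorithms=["RS256", "ES256"],
        issuer=issuer,
        audience=audience,
    )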

View File

@ -1,193 +0,0 @@
from hashlib import sha256
from django.contrib.auth.signals import user_logged_out
from django.db.models import Model
from django.db.models.signals import post_delete, post_save, pre_delete
from django.dispatch import receiver
from django.http.request import HttpRequest
from guardian.shortcuts import assign_perm
from authentik.core.models import (
USER_PATH_SYSTEM_PREFIX,
AuthenticatedSession,
Token,
TokenIntents,
User,
UserTypes,
)
from authentik.core.signals import password_changed
from authentik.enterprise.providers.ssf.models import (
EventTypes,
SSFProvider,
)
from authentik.enterprise.providers.ssf.tasks import send_ssf_event
from authentik.events.middleware import audit_ignore
from authentik.stages.authenticator.models import Device
from authentik.stages.authenticator_duo.models import DuoDevice
from authentik.stages.authenticator_static.models import StaticDevice
from authentik.stages.authenticator_totp.models import TOTPDevice
from authentik.stages.authenticator_webauthn.models import (
UNKNOWN_DEVICE_TYPE_AAGUID,
WebAuthnDevice,
)
USER_PATH_PROVIDERS_SSF = USER_PATH_SYSTEM_PREFIX + "/providers/ssf"
@receiver(post_save, sender=SSFProvider)
def ssf_providers_post_save(sender: type[Model], instance: SSFProvider, created: bool, **_):
"""Create service account before provider is saved"""
identifier = instance.service_account_identifier
user, _ = User.objects.update_or_create(
username=identifier,
defaults={
"name": f"SSF Provider {instance.name} Service-Account",
"type": UserTypes.INTERNAL_SERVICE_ACCOUNT,
"path": USER_PATH_PROVIDERS_SSF,
},
)
assign_perm("add_stream", user, instance)
token, token_created = Token.objects.update_or_create(
identifier=identifier,
defaults={
"user": user,
"intent": TokenIntents.INTENT_API,
"expiring": False,
"managed": f"goauthentik.io/providers/ssf/{instance.pk}",
},
)
if created or token_created:
with audit_ignore():
instance.token = token
instance.save()
@receiver(user_logged_out)
def ssf_user_logged_out_session_revoked(sender, request: HttpRequest, user: User, **_):
"""Session revoked trigger (user logged out)"""
if not request.session or not request.session.session_key or not user:
return
send_ssf_event(
EventTypes.CAEP_SESSION_REVOKED,
{
"initiating_entity": "user",
},
sub_id={
"format": "complex",
"session": {
"format": "opaque",
"id": sha256(request.session.session_key.encode("ascii")).hexdigest(),
},
"user": {
"format": "email",
"email": user.email,
},
},
request=request,
)
@receiver(pre_delete, sender=AuthenticatedSession)
def ssf_user_session_delete_session_revoked(sender, instance: AuthenticatedSession, **_):
"""Session revoked trigger (users' session has been deleted)
As this signal is also triggered with a regular logout, we can't be sure
if the session has been deleted by an admin or by the user themselves."""
send_ssf_event(
EventTypes.CAEP_SESSION_REVOKED,
{
"initiating_entity": "user",
},
sub_id={
"format": "complex",
"session": {
"format": "opaque",
"id": sha256(instance.session_key.encode("ascii")).hexdigest(),
},
"user": {
"format": "email",
"email": instance.user.email,
},
},
)
@receiver(password_changed)
def ssf_password_changed_cred_change(sender, user: User, password: str | None, **_):
"""Credential change trigger (password changed)"""
send_ssf_event(
EventTypes.CAEP_CREDENTIAL_CHANGE,
{
"credential_type": "password",
"change_type": "revoke" if password is None else "update",
},
sub_id={
"format": "complex",
"user": {
"format": "email",
"email": user.email,
},
},
)
device_type_map = {
StaticDevice: "pin",
TOTPDevice: "pin",
WebAuthnDevice: "fido-u2f",
DuoDevice: "app",
}
@receiver(post_save)
def ssf_device_post_save(sender: type[Model], instance: Device, created: bool, **_):
if not isinstance(instance, Device):
return
if not instance.confirmed:
return
device_type = device_type_map.get(instance.__class__)
data = {
"credential_type": device_type,
"change_type": "create" if created else "update",
"friendly_name": instance.name,
}
if isinstance(instance, WebAuthnDevice) and instance.aaguid != UNKNOWN_DEVICE_TYPE_AAGUID:
data["fido2_aaguid"] = instance.aaguid
send_ssf_event(
EventTypes.CAEP_CREDENTIAL_CHANGE,
data,
sub_id={
"format": "complex",
"user": {
"format": "email",
"email": instance.user.email,
},
},
)
@receiver(post_delete)
def ssf_device_post_delete(sender: type[Model], instance: Device, **_):
if not isinstance(instance, Device):
return
if not instance.confirmed:
return
device_type = device_type_map.get(instance.__class__)
data = {
"credential_type": device_type,
"change_type": "delete",
"friendly_name": instance.name,
}
if isinstance(instance, WebAuthnDevice) and instance.aaguid != UNKNOWN_DEVICE_TYPE_AAGUID:
data["fido2_aaguid"] = instance.aaguid
send_ssf_event(
EventTypes.CAEP_CREDENTIAL_CHANGE,
data,
sub_id={
"format": "complex",
"user": {
"format": "email",
"email": instance.user.email,
},
},
)

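For reference, the WebAuthn post_save receiver above ends up sending a CAEP credential-change event whose data and sub_id look roughly like this (all concrete values are illustrative):

illustrative_data = {
    "credential_type": "fido-u2f",  # from device_type_map for WebAuthnDevice
    "change_type": "create",        # "update" on re-save, "delete" in the delete receiver
    "friendly_name": "My security key",
    "fido2_aaguid": "2fc0579f-8113-47ea-b116-bb5a8db9202a",  # only when the AAGUID is known
}
illustrative_sub_id = {
    "format": "complex",
    "user": {"format": "email", "email": "user@example.com"},
}
# send_ssf_event(EventTypes.CAEP_CREDENTIAL_CHANGE, illustrative_data, sub_id=illustrative_sub_id)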
View File

@ -1,136 +0,0 @@
from celery import group
from django.http import HttpRequest
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
from requests.exceptions import RequestException
from structlog.stdlib import get_logger
from authentik.core.models import User
from authentik.enterprise.providers.ssf.models import (
DeliveryMethods,
EventTypes,
SSFEventStatus,
Stream,
StreamEvent,
)
from authentik.events.logs import LogEvent
from authentik.events.models import TaskStatus
from authentik.events.system_tasks import SystemTask
from authentik.lib.utils.http import get_http_session
from authentik.lib.utils.time import timedelta_from_string
from authentik.policies.engine import PolicyEngine
from authentik.root.celery import CELERY_APP
session = get_http_session()
LOGGER = get_logger()
def send_ssf_event(
event_type: EventTypes,
data: dict,
stream_filter: dict | None = None,
request: HttpRequest | None = None,
**extra_data,
):
"""Wrapper to send an SSF event to multiple streams"""
payload = []
if not stream_filter:
stream_filter = {}
stream_filter["events_requested__contains"] = [event_type]
if request and hasattr(request, "request_id"):
extra_data.setdefault("txn", request.request_id)
for stream in Stream.objects.filter(**stream_filter):
event_data = stream.prepare_event_payload(event_type, data, **extra_data)
payload.append((str(stream.uuid), event_data))
return _send_ssf_event.delay(payload)
def _check_app_access(stream_uuid: str, event_data: dict) -> bool:
"""Check if event is related to user and if so, check
if the user has access to the application"""
stream = Stream.objects.filter(pk=stream_uuid).first()
if not stream:
return False
# `event_data` is a dict version of a StreamEvent
sub_id = event_data.get("payload", {}).get("sub_id", {})
email = sub_id.get("user", {}).get("email", None)
if not email:
return True
user = User.objects.filter(email=email).first()
if not user:
return True
engine = PolicyEngine(stream.provider.backchannel_application, user)
engine.use_cache = False
engine.build()
return engine.passing
@CELERY_APP.task()
def _send_ssf_event(event_data: list[tuple[str, dict]]):
tasks = []
for stream, data in event_data:
if not _check_app_access(stream, data):
continue
event = StreamEvent.objects.create(**data)
tasks.extend(send_single_ssf_event(stream, str(event.uuid)))
main_task = group(*tasks)
main_task()
def send_single_ssf_event(stream_id: str, evt_id: str):
stream = Stream.objects.filter(pk=stream_id).first()
if not stream:
return
event = StreamEvent.objects.filter(pk=evt_id).first()
if not event:
return
if event.status == SSFEventStatus.SENT:
return
if stream.delivery_method == DeliveryMethods.RISC_PUSH:
return [ssf_push_event.si(str(event.pk))]
return []
@CELERY_APP.task(bind=True, base=SystemTask)
def ssf_push_event(self: SystemTask, event_id: str):
self.save_on_success = False
event = StreamEvent.objects.filter(pk=event_id).first()
if not event:
return
self.set_uid(event_id)
if event.status == SSFEventStatus.SENT:
self.set_status(TaskStatus.SUCCESSFUL)
return
try:
response = session.post(
event.stream.endpoint_url,
data=event.stream.encode(event.payload),
headers={"Content-Type": "application/secevent+jwt", "Accept": "application/json"},
)
response.raise_for_status()
event.status = SSFEventStatus.SENT
event.save()
self.set_status(TaskStatus.SUCCESSFUL)
return
except RequestException as exc:
LOGGER.warning("Failed to send SSF event", exc=exc)
self.set_status(TaskStatus.ERROR)
attrs = {}
        if exc.response is not None:
attrs["response"] = {
"content": exc.response.text,
"status": exc.response.status_code,
}
self.set_error(
exc,
LogEvent(
_("Failed to send request"),
log_level="warning",
logger=self.__name__,
attributes=attrs,
),
)
# Re-up the expiry of the stream event
event.expires = now() + timedelta_from_string(event.stream.provider.event_retention)
event.status = SSFEventStatus.PENDING_FAILED
event.save()
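
ssf_push_event above POSTs the encoded SET with Content-Type application/secevent+jwt and retries on any non-2xx response; a minimal sketch of a receiving endpoint at the stream's endpoint_url, assuming Django on the receiver side and the verify_set helper sketched earlier (PUBLIC_KEY, ISSUER and AUDIENCE are placeholders).

from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt


@csrf_exempt
def set_receiver(request):
    """Sketch: accept a pushed SET and acknowledge it so the event is marked as sent."""
    if request.content_type != "application/secevent+jwt":
        return HttpResponse(status=415)
    set_token = request.body.decode("ascii")
    claims = verify_set(set_token, PUBLIC_KEY, issuer=ISSUER, audience=AUDIENCE)  # placeholders
    # Act on claims["events"] here, e.g. revoke local sessions or credentials
    return HttpResponse(status=202)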

View File

@ -1,46 +0,0 @@
import json
from django.urls import reverse
from rest_framework.test import APITestCase
from authentik.core.models import Application
from authentik.core.tests.utils import create_test_cert
from authentik.enterprise.providers.ssf.models import (
SSFProvider,
)
from authentik.lib.generators import generate_id
class TestConfiguration(APITestCase):
def setUp(self):
self.application = Application.objects.create(name=generate_id(), slug=generate_id())
self.provider = SSFProvider.objects.create(
name=generate_id(),
signing_key=create_test_cert(),
backchannel_application=self.application,
)
def test_config_fetch(self):
"""test SSF configuration (unauthenticated)"""
res = self.client.get(
reverse(
"authentik_providers_ssf:configuration",
kwargs={"application_slug": self.application.slug},
),
)
self.assertEqual(res.status_code, 200)
content = json.loads(res.content)
self.assertEqual(content["spec_version"], "1_0-ID2")
def test_config_fetch_authenticated(self):
"""test SSF configuration (authenticated)"""
res = self.client.get(
reverse(
"authentik_providers_ssf:configuration",
kwargs={"application_slug": self.application.slug},
),
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
)
self.assertEqual(res.status_code, 200)
content = json.loads(res.content)
self.assertEqual(content["spec_version"], "1_0-ID2")

View File

@ -1,51 +0,0 @@
"""JWKS tests"""
import base64
import json
from cryptography.hazmat.backends import default_backend
from cryptography.x509 import load_der_x509_certificate
from django.test import TestCase
from django.urls.base import reverse
from jwt import PyJWKSet
from authentik.core.models import Application
from authentik.core.tests.utils import create_test_cert
from authentik.enterprise.providers.ssf.models import SSFProvider
from authentik.lib.generators import generate_id
class TestJWKS(TestCase):
"""Test JWKS view"""
def test_rs256(self):
"""Test JWKS request with RS256"""
provider = SSFProvider.objects.create(
name=generate_id(),
signing_key=create_test_cert(),
)
app = Application.objects.create(name=generate_id(), slug=generate_id())
app.backchannel_providers.add(provider)
response = self.client.get(
reverse("authentik_providers_ssf:jwks", kwargs={"application_slug": app.slug})
)
body = json.loads(response.content.decode())
self.assertEqual(len(body["keys"]), 1)
PyJWKSet.from_dict(body)
key = body["keys"][0]
load_der_x509_certificate(base64.b64decode(key["x5c"][0]), default_backend()).public_key()
def test_es256(self):
"""Test JWKS request with ES256"""
provider = SSFProvider.objects.create(
name=generate_id(),
signing_key=create_test_cert(),
)
app = Application.objects.create(name=generate_id(), slug=generate_id())
app.backchannel_providers.add(provider)
response = self.client.get(
reverse("authentik_providers_ssf:jwks", kwargs={"application_slug": app.slug})
)
body = json.loads(response.content.decode())
self.assertEqual(len(body["keys"]), 1)
PyJWKSet.from_dict(body)

View File

@ -1,168 +0,0 @@
from uuid import uuid4
from django.urls import reverse
from rest_framework.test import APITestCase
from authentik.core.models import Application, Group
from authentik.core.tests.utils import (
create_test_cert,
create_test_user,
)
from authentik.enterprise.providers.ssf.models import (
EventTypes,
SSFEventStatus,
SSFProvider,
Stream,
StreamEvent,
)
from authentik.lib.generators import generate_id
from authentik.policies.models import PolicyBinding
from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
class TestSignals(APITestCase):
"""Test individual SSF Signals"""
def setUp(self):
self.application = Application.objects.create(name=generate_id(), slug=generate_id())
self.provider = SSFProvider.objects.create(
name=generate_id(),
signing_key=create_test_cert(),
backchannel_application=self.application,
)
res = self.client.post(
reverse(
"authentik_providers_ssf:stream",
kwargs={"application_slug": self.application.slug},
),
data={
"iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
"aud": ["https://app.authentik.company"],
"delivery": {
"method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
"endpoint_url": "https://app.authentik.company",
},
"events_requested": [
"https://schemas.openid.net/secevent/caep/event-type/credential-change",
"https://schemas.openid.net/secevent/caep/event-type/session-revoked",
],
"format": "iss_sub",
},
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
)
self.assertEqual(res.status_code, 201, res.content)
def test_signal_logout(self):
"""Test user logout"""
user = create_test_user()
self.client.force_login(user)
self.client.logout()
stream = Stream.objects.filter(provider=self.provider).first()
self.assertIsNotNone(stream)
event = StreamEvent.objects.filter(stream=stream).first()
self.assertIsNotNone(event)
self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
event_payload = event.payload["events"][
"https://schemas.openid.net/secevent/caep/event-type/session-revoked"
]
self.assertEqual(event_payload["initiating_entity"], "user")
self.assertEqual(event.payload["sub_id"]["format"], "complex")
self.assertEqual(event.payload["sub_id"]["session"]["format"], "opaque")
self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)
def test_signal_password_change(self):
"""Test user password change"""
user = create_test_user()
self.client.force_login(user)
user.set_password(generate_id())
user.save()
stream = Stream.objects.filter(provider=self.provider).first()
self.assertIsNotNone(stream)
event = StreamEvent.objects.filter(stream=stream).first()
self.assertIsNotNone(event)
self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
event_payload = event.payload["events"][
"https://schemas.openid.net/secevent/caep/event-type/credential-change"
]
self.assertEqual(event_payload["change_type"], "update")
self.assertEqual(event_payload["credential_type"], "password")
self.assertEqual(event.payload["sub_id"]["format"], "complex")
self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)
def test_signal_authenticator_added(self):
"""Test authenticator creation signal"""
user = create_test_user()
self.client.force_login(user)
dev = WebAuthnDevice.objects.create(
user=user,
name=generate_id(),
credential_id=generate_id(),
public_key=generate_id(),
aaguid=str(uuid4()),
)
stream = Stream.objects.filter(provider=self.provider).first()
self.assertIsNotNone(stream)
event = StreamEvent.objects.filter(stream=stream).exclude().first()
self.assertIsNotNone(event)
self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
event_payload = event.payload["events"][
"https://schemas.openid.net/secevent/caep/event-type/credential-change"
]
self.assertEqual(event_payload["change_type"], "create")
self.assertEqual(event_payload["fido2_aaguid"], dev.aaguid)
self.assertEqual(event_payload["friendly_name"], dev.name)
self.assertEqual(event_payload["credential_type"], "fido-u2f")
self.assertEqual(event.payload["sub_id"]["format"], "complex")
self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)
def test_signal_authenticator_deleted(self):
"""Test authenticator deletion signal"""
user = create_test_user()
self.client.force_login(user)
dev = WebAuthnDevice.objects.create(
user=user,
name=generate_id(),
credential_id=generate_id(),
public_key=generate_id(),
aaguid=str(uuid4()),
)
dev.delete()
stream = Stream.objects.filter(provider=self.provider).first()
self.assertIsNotNone(stream)
event = StreamEvent.objects.filter(stream=stream).exclude().first()
self.assertIsNotNone(event)
self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
event_payload = event.payload["events"][
"https://schemas.openid.net/secevent/caep/event-type/credential-change"
]
self.assertEqual(event_payload["change_type"], "delete")
self.assertEqual(event_payload["fido2_aaguid"], dev.aaguid)
self.assertEqual(event_payload["friendly_name"], dev.name)
self.assertEqual(event_payload["credential_type"], "fido-u2f")
self.assertEqual(event.payload["sub_id"]["format"], "complex")
self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)
def test_signal_policy_ignore(self):
"""Test event not being created for user that doesn't have access to the application"""
PolicyBinding.objects.create(
target=self.application, group=Group.objects.create(name=generate_id()), order=0
)
user = create_test_user()
self.client.force_login(user)
user.set_password(generate_id())
user.save()
stream = Stream.objects.filter(provider=self.provider).first()
self.assertIsNotNone(stream)
event = StreamEvent.objects.filter(
stream=stream, type=EventTypes.CAEP_CREDENTIAL_CHANGE
).first()
self.assertIsNone(event)

View File

@ -1,154 +0,0 @@
import json
from dataclasses import asdict
from django.urls import reverse
from django.utils import timezone
from rest_framework.test import APITestCase
from authentik.core.models import Application
from authentik.core.tests.utils import create_test_admin_user, create_test_cert, create_test_flow
from authentik.enterprise.providers.ssf.models import (
SSFEventStatus,
SSFProvider,
Stream,
StreamEvent,
)
from authentik.lib.generators import generate_id
from authentik.providers.oauth2.id_token import IDToken
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider
class TestStream(APITestCase):
def setUp(self):
self.application = Application.objects.create(name=generate_id(), slug=generate_id())
self.provider = SSFProvider.objects.create(
name=generate_id(),
signing_key=create_test_cert(),
backchannel_application=self.application,
)
def test_stream_add_token(self):
"""test stream add (token auth)"""
res = self.client.post(
reverse(
"authentik_providers_ssf:stream",
kwargs={"application_slug": self.application.slug},
),
data={
"iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
"aud": ["https://app.authentik.company"],
"delivery": {
"method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
"endpoint_url": "https://app.authentik.company",
},
"events_requested": [
"https://schemas.openid.net/secevent/caep/event-type/credential-change",
"https://schemas.openid.net/secevent/caep/event-type/session-revoked",
],
"format": "iss_sub",
},
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
)
self.assertEqual(res.status_code, 201)
stream = Stream.objects.filter(provider=self.provider).first()
self.assertIsNotNone(stream)
event = StreamEvent.objects.filter(stream=stream).first()
self.assertIsNotNone(event)
self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
self.assertEqual(
event.payload["events"],
{"https://schemas.openid.net/secevent/ssf/event-type/verification": {"state": None}},
)
def test_stream_add_poll(self):
"""test stream add - poll method"""
res = self.client.post(
reverse(
"authentik_providers_ssf:stream",
kwargs={"application_slug": self.application.slug},
),
data={
"iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
"aud": ["https://app.authentik.company"],
"delivery": {
"method": "https://schemas.openid.net/secevent/risc/delivery-method/poll",
},
"events_requested": [
"https://schemas.openid.net/secevent/caep/event-type/credential-change",
"https://schemas.openid.net/secevent/caep/event-type/session-revoked",
],
"format": "iss_sub",
},
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
)
self.assertEqual(res.status_code, 400)
self.assertJSONEqual(
res.content,
{"delivery": {"method": ["Polling for SSF events is not currently supported."]}},
)
def test_stream_add_oidc(self):
"""test stream add (oidc auth)"""
provider = OAuth2Provider.objects.create(
name=generate_id(),
authorization_flow=create_test_flow(),
)
self.application.provider = provider
self.application.save()
user = create_test_admin_user()
token = AccessToken.objects.create(
provider=provider,
user=user,
token=generate_id(),
auth_time=timezone.now(),
_scope="openid user profile",
_id_token=json.dumps(
asdict(
IDToken("foo", "bar"),
)
),
)
res = self.client.post(
reverse(
"authentik_providers_ssf:stream",
kwargs={"application_slug": self.application.slug},
),
data={
"iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
"aud": ["https://app.authentik.company"],
"delivery": {
"method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
"endpoint_url": "https://app.authentik.company",
},
"events_requested": [
"https://schemas.openid.net/secevent/caep/event-type/credential-change",
"https://schemas.openid.net/secevent/caep/event-type/session-revoked",
],
"format": "iss_sub",
},
HTTP_AUTHORIZATION=f"Bearer {token.token}",
)
self.assertEqual(res.status_code, 201)
stream = Stream.objects.filter(provider=self.provider).first()
self.assertIsNotNone(stream)
event = StreamEvent.objects.filter(stream=stream).first()
self.assertIsNotNone(event)
self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
self.assertEqual(
event.payload["events"],
{"https://schemas.openid.net/secevent/ssf/event-type/verification": {"state": None}},
)
def test_stream_delete(self):
"""delete stream"""
stream = Stream.objects.create(provider=self.provider)
res = self.client.delete(
reverse(
"authentik_providers_ssf:stream",
kwargs={"application_slug": self.application.slug},
),
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
)
self.assertEqual(res.status_code, 204)
self.assertFalse(Stream.objects.filter(pk=stream.pk).exists())

View File

@ -1,32 +0,0 @@
"""SSF provider URLs"""
from django.urls import path
from authentik.enterprise.providers.ssf.api.providers import SSFProviderViewSet
from authentik.enterprise.providers.ssf.api.streams import SSFStreamViewSet
from authentik.enterprise.providers.ssf.views.configuration import ConfigurationView
from authentik.enterprise.providers.ssf.views.jwks import JWKSview
from authentik.enterprise.providers.ssf.views.stream import StreamView
urlpatterns = [
path(
"application/ssf/<slug:application_slug>/ssf-jwks/",
JWKSview.as_view(),
name="jwks",
),
path(
".well-known/ssf-configuration/<slug:application_slug>",
ConfigurationView.as_view(),
name="configuration",
),
path(
"application/ssf/<slug:application_slug>/stream/",
StreamView.as_view(),
name="stream",
),
]
api_urlpatterns = [
("providers/ssf", SSFProviderViewSet),
("ssf/streams", SSFStreamViewSet),
]

View File

@ -1,66 +0,0 @@
"""SSF Token auth"""
from typing import TYPE_CHECKING, Any
from django.db.models import Q
from rest_framework.authentication import BaseAuthentication, get_authorization_header
from rest_framework.request import Request
from authentik.core.models import Token, TokenIntents, User
from authentik.enterprise.providers.ssf.models import SSFProvider
from authentik.providers.oauth2.models import AccessToken
if TYPE_CHECKING:
from authentik.enterprise.providers.ssf.views.base import SSFView
class SSFTokenAuth(BaseAuthentication):
"""SSF Token auth"""
view: "SSFView"
def __init__(self, view: "SSFView") -> None:
super().__init__()
self.view = view
def check_token(self, key: str) -> Token | None:
"""Check that a token exists, is not expired, and is assigned to the correct provider"""
token = Token.filter_not_expired(key=key, intent=TokenIntents.INTENT_API).first()
if not token:
return None
provider: SSFProvider = token.ssfprovider_set.first()
if not provider:
return None
self.view.application = provider.backchannel_application
self.view.provider = provider
return token
def check_jwt(self, jwt: str) -> AccessToken | None:
"""Check JWT-based authentication, this supports tokens issued either by providers
configured directly in the provider, and by providers assigned to the application
that the SSF provider is a backchannel provider of."""
token = AccessToken.filter_not_expired(token=jwt, revoked=False).first()
if not token:
return None
ssf_provider = SSFProvider.objects.filter(
Q(oidc_auth_providers__in=[token.provider])
| Q(backchannel_application__provider__in=[token.provider]),
).first()
if not ssf_provider:
return None
self.view.application = ssf_provider.backchannel_application
self.view.provider = ssf_provider
return token
def authenticate(self, request: Request) -> tuple[User, Any] | None:
auth = get_authorization_header(request).decode()
auth_type, _, key = auth.partition(" ")
if auth_type != "Bearer":
return None
token = self.check_token(key)
if token:
return (token.user, token)
jwt_token = self.check_jwt(key)
if jwt_token:
            return (jwt_token.user, jwt_token)
return None

View File

@ -1,23 +0,0 @@
from django.http import HttpRequest
from rest_framework.permissions import IsAuthenticated
from rest_framework.views import APIView
from structlog.stdlib import BoundLogger, get_logger
from authentik.core.models import Application
from authentik.enterprise.providers.ssf.models import SSFProvider
from authentik.enterprise.providers.ssf.views.auth import SSFTokenAuth
class SSFView(APIView):
application: Application
provider: SSFProvider
logger: BoundLogger
permission_classes = [IsAuthenticated]
def setup(self, request: HttpRequest, *args, **kwargs) -> None:
self.logger = get_logger().bind()
super().setup(request, *args, **kwargs)
def get_authenticators(self):
return [SSFTokenAuth(self)]

View File

@ -1,55 +0,0 @@
from django.http import Http404, HttpRequest, HttpResponse, JsonResponse
from django.shortcuts import get_object_or_404
from django.urls import reverse
from rest_framework.permissions import AllowAny
from authentik.core.models import Application
from authentik.enterprise.providers.ssf.models import DeliveryMethods, SSFProvider
from authentik.enterprise.providers.ssf.views.base import SSFView
class ConfigurationView(SSFView):
"""SSF configuration endpoint"""
permission_classes = [AllowAny]
def get_authenticators(self):
return []
def get(self, request: HttpRequest, application_slug: str, *args, **kwargs) -> HttpResponse:
application = get_object_or_404(Application, slug=application_slug)
provider = application.backchannel_provider_for(SSFProvider)
if not provider:
raise Http404
data = {
"spec_version": "1_0-ID2",
"issuer": self.request.build_absolute_uri(
reverse(
"authentik_providers_ssf:configuration",
kwargs={
"application_slug": application.slug,
},
)
),
"jwks_uri": self.request.build_absolute_uri(
reverse(
"authentik_providers_ssf:jwks",
kwargs={
"application_slug": application.slug,
},
)
),
"configuration_endpoint": self.request.build_absolute_uri(
reverse(
"authentik_providers_ssf:stream",
kwargs={
"application_slug": application.slug,
},
)
),
"delivery_methods_supported": [
DeliveryMethods.RISC_PUSH,
],
"authorization_schemes": [{"spec_urn": "urn:ietf:rfc:6749"}],
}
return JsonResponse(data)
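
Since the SSF app is mounted at the root, this configuration document is served unauthenticated under the usual well-known path; a minimal fetch sketch, assuming the requests library and placeholder host and application slug.

import requests

AUTHENTIK_HOST = "https://authentik.company"  # placeholder
APP_SLUG = "my-ssf-app"                       # placeholder: backchannel application slug

config = requests.get(
    f"{AUTHENTIK_HOST}/.well-known/ssf-configuration/{APP_SLUG}", timeout=10
).json()
print(config["issuer"], config["jwks_uri"], config["configuration_endpoint"])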

View File

@ -1,31 +0,0 @@
from django.http import Http404, HttpRequest, HttpResponse, JsonResponse
from django.shortcuts import get_object_or_404
from django.views import View
from authentik.core.models import Application
from authentik.crypto.models import CertificateKeyPair
from authentik.enterprise.providers.ssf.models import SSFProvider
from authentik.providers.oauth2.views.jwks import JWKSView as OAuthJWKSView
class JWKSview(View):
"""SSF JWKS endpoint, similar to the OAuth2 provider's endpoint"""
def get(self, request: HttpRequest, application_slug: str) -> HttpResponse:
"""Show JWK Key data for Provider"""
application = get_object_or_404(Application, slug=application_slug)
provider = application.backchannel_provider_for(SSFProvider)
if not provider:
raise Http404
signing_key: CertificateKeyPair = provider.signing_key
response_data = {}
jwk = OAuthJWKSView.get_jwk_for_key(signing_key, "sig")
if jwk:
response_data["keys"] = [jwk]
response = JsonResponse(response_data)
response["Access-Control-Allow-Origin"] = "*"
return response

View File

@ -1,130 +0,0 @@
from django.http import HttpRequest
from django.urls import reverse
from rest_framework.exceptions import PermissionDenied, ValidationError
from rest_framework.fields import CharField, ChoiceField, ListField, SerializerMethodField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ModelSerializer
from structlog.stdlib import get_logger
from authentik.core.api.utils import PassiveSerializer
from authentik.enterprise.providers.ssf.models import (
DeliveryMethods,
EventTypes,
SSFProvider,
Stream,
)
from authentik.enterprise.providers.ssf.tasks import send_ssf_event
from authentik.enterprise.providers.ssf.views.base import SSFView
LOGGER = get_logger()
class StreamDeliverySerializer(PassiveSerializer):
method = ChoiceField(choices=[(x.value, x.value) for x in DeliveryMethods])
endpoint_url = CharField(required=False)
def validate_method(self, method: DeliveryMethods):
"""Currently only push is supported"""
if method == DeliveryMethods.RISC_POLL:
raise ValidationError("Polling for SSF events is not currently supported.")
return method
def validate(self, attrs: dict) -> dict:
if attrs["method"] == DeliveryMethods.RISC_PUSH:
if not attrs.get("endpoint_url"):
raise ValidationError("Endpoint URL is required when using push.")
return attrs
class StreamSerializer(ModelSerializer):
delivery = StreamDeliverySerializer()
events_requested = ListField(
child=ChoiceField(choices=[(x.value, x.value) for x in EventTypes])
)
format = CharField()
aud = ListField(child=CharField())
def create(self, validated_data):
provider: SSFProvider = validated_data["provider"]
request: HttpRequest = self.context["request"]
iss = request.build_absolute_uri(
reverse(
"authentik_providers_ssf:configuration",
kwargs={
"application_slug": provider.backchannel_application.slug,
},
)
)
# Ensure that streams always get SET verification events sent to them
validated_data["events_requested"].append(EventTypes.SET_VERIFICATION)
return super().create(
{
"delivery_method": validated_data["delivery"]["method"],
"endpoint_url": validated_data["delivery"].get("endpoint_url"),
"format": validated_data["format"],
"provider": validated_data["provider"],
"events_requested": validated_data["events_requested"],
"aud": validated_data["aud"],
"iss": iss,
}
)
class Meta:
model = Stream
fields = [
"delivery",
"events_requested",
"format",
"aud",
]
class StreamResponseSerializer(PassiveSerializer):
stream_id = CharField(source="pk")
iss = CharField()
aud = ListField(child=CharField())
delivery = SerializerMethodField()
format = CharField()
events_requested = ListField(child=CharField())
events_supported = SerializerMethodField()
events_delivered = ListField(child=CharField(), source="events_requested")
def get_delivery(self, instance: Stream) -> StreamDeliverySerializer:
return {
"method": instance.delivery_method,
"endpoint_url": instance.endpoint_url,
}
def get_events_supported(self, instance: Stream) -> list[str]:
return [x.value for x in EventTypes]
class StreamView(SSFView):
def post(self, request: Request, *args, **kwargs) -> Response:
stream = StreamSerializer(data=request.data, context={"request": request})
stream.is_valid(raise_exception=True)
if not request.user.has_perm("authentik_providers_ssf.add_stream", self.provider):
raise PermissionDenied(
"User does not have permission to create stream for this provider."
)
instance: Stream = stream.save(provider=self.provider)
send_ssf_event(
EventTypes.SET_VERIFICATION,
{
"state": None,
},
stream_filter={"pk": instance.uuid},
sub_id={"format": "opaque", "id": str(instance.uuid)},
)
response = StreamResponseSerializer(instance=instance, context={"request": request}).data
return Response(response, status=201)
def delete(self, request: Request, *args, **kwargs) -> Response:
streams = Stream.objects.filter(provider=self.provider)
# Technically this parameter is required by the spec...
if "stream_id" in request.query_params:
            streams = streams.filter(pk=request.query_params["stream_id"])
streams.delete()
return Response(status=204)
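
A minimal sketch of registering a push stream against StreamView, mirroring the payload used in the tests above; host, slug, token and receiver URL are placeholders, and the bearer token can be either the provider's API token or an OAuth2 access token accepted by SSFTokenAuth.

import requests

AUTHENTIK_HOST = "https://authentik.company"  # placeholder
APP_SLUG = "my-ssf-app"                       # placeholder
TOKEN = "..."                                 # placeholder: API token or access token

resp = requests.post(
    f"{AUTHENTIK_HOST}/application/ssf/{APP_SLUG}/stream/",
    json={
        "delivery": {
            "method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
            "endpoint_url": "https://receiver.example.com/set",  # placeholder receiver
        },
        "events_requested": [
            "https://schemas.openid.net/secevent/caep/event-type/credential-change",
            "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
        ],
        "format": "iss_sub",
        "aud": ["https://receiver.example.com"],
    },
    headers={"Authorization": f"Bearer {TOKEN}"},
    timeout=10,
)
resp.raise_for_status()  # 201; the response body includes the generated stream_id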

View File

@ -16,7 +16,7 @@ TENANT_APPS = [
"authentik.enterprise.audit",
"authentik.enterprise.providers.google_workspace",
"authentik.enterprise.providers.microsoft_entra",
"authentik.enterprise.providers.ssf",
"authentik.enterprise.providers.rac",
"authentik.enterprise.stages.authenticator_endpoint_gdtc",
"authentik.enterprise.stages.source",
]

Some files were not shown because too many files have changed in this diff.