Compare commits

..

10 Commits

Author SHA1 Message Date
7d40e00263 root: deny unauthenticated websocket messages consumer
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2025-02-27 20:31:32 +01:00
42501f6d1e only send messages for stuff non-redirecting
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2025-02-27 17:46:17 +01:00
2759b1c089 gen api for translate
Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2025-02-27 17:32:37 +01:00
ce6d76babe fix-tests
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2025-02-27 17:28:29 +01:00
5cc2bd5b36 wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2025-02-27 17:10:33 +01:00
bad8a8ead5 wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2025-02-27 17:08:46 +01:00
1f7a2d5194 I WROTE JS AND IT WORKED FIRST TIME
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2025-02-27 16:33:58 +01:00
5e328403d6 wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2025-02-27 15:31:30 +01:00
f03e56af93 Merge branch 'main' into flow-no-websocket 2025-02-27 14:50:24 +01:00
516aa9d9b1 web/flow: remove websocket connection
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2025-02-27 14:49:22 +01:00
668 changed files with 13191 additions and 56490 deletions

View File

@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2025.2.4
+current_version = 2025.2.1
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
@@ -17,8 +17,6 @@ optional_value = final
 [bumpversion:file:pyproject.toml]
-[bumpversion:file:uv.lock]
 [bumpversion:file:package.json]
 [bumpversion:file:docker-compose.yml]

View File

@@ -1,22 +0,0 @@
----
-name: Documentation issue
-about: Suggest an improvement or report a problem
-title: ""
-labels: documentation
-assignees: ""
----
-**Do you see an area that can be clarified or expanded, a technical inaccuracy, or a broken link? Please describe.**
-A clear and concise description of what the problem is, or where the document can be improved. Ex. I believe we need more details about [...]
-**Provide the URL or link to the exact page in the documentation to which you are referring.**
-If there are multiple pages, list them all, and be sure to state the header or section where the content is.
-**Describe the solution you'd like**
-A clear and concise description of what you want to happen.
-**Additional context**
-Add any other context or screenshots about the documentation issue here.
-**Consider opening a PR!**
-If the issue is one that you can fix, or even make a good pass at, we'd appreciate a PR. For more information about making a contribution to the docs, and using our Style Guide and our templates, refer to ["Writing documentation"](https://docs.goauthentik.io/docs/developer-docs/docs/writing-documentation).

View File

@@ -44,6 +44,7 @@ if is_release:
 ]
 if not prerelease:
 image_tags += [
+f"{name}:latest",
 f"{name}:{version_family}",
 ]
 else:

View File

@@ -9,22 +9,17 @@ inputs:
 runs:
 using: "composite"
 steps:
-- name: Install apt deps
+- name: Install poetry & deps
 shell: bash
 run: |
+pipx install poetry || true
 sudo apt-get update
 sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext libkrb5-dev krb5-kdc krb5-user krb5-admin-server
-- name: Install uv
+- name: Setup python and restore poetry
-uses: astral-sh/setup-uv@v5
-with:
-enable-cache: true
-- name: Setup python
 uses: actions/setup-python@v5
 with:
 python-version-file: "pyproject.toml"
-- name: Install Python deps
+cache: "poetry"
-shell: bash
-run: uv sync --all-extras --dev --frozen
 - name: Setup node
 uses: actions/setup-node@v4
 with:
@@ -35,18 +30,15 @@ runs:
 uses: actions/setup-go@v5
 with:
 go-version-file: "go.mod"
-- name: Setup docker cache
-uses: ScribeMD/docker-cache@0.5.0
-with:
-key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }}
 - name: Setup dependencies
 shell: bash
 run: |
 export PSQL_TAG=${{ inputs.postgresql_version }}
 docker compose -f .github/actions/setup/docker-compose.yml up -d
+poetry sync
 cd web && npm ci
 - name: Generate config
-shell: uv run python {0}
+shell: poetry run python {0}
 run: |
 from authentik.lib.generators import generate_id
 from yaml import safe_dump

View File

@@ -11,7 +11,7 @@ services:
 - 5432:5432
 restart: always
 redis:
-image: docker.io/library/redis:7
+image: docker.io/library/redis
 ports:
 - 6379:6379
 restart: always

View File

@@ -1,32 +1,7 @@
-akadmin
-asgi
-assertIn
-authentik
-authn
-crate
-docstrings
-entra
-goauthentik
-gunicorn
-hass
-jwe
-jwks
 keypair
 keypairs
-kubernetes
+hass
-oidc
-ontext
-openid
-passwordless
-plex
-saml
-scim
-singed
-slo
-sso
-totp
-traefik
-# https://github.com/codespell-project/codespell/issues/1224
-upToDate
 warmup
-webauthn
+ontext
+singed
+assertIn

View File

@@ -82,12 +82,6 @@ updates:
 docusaurus:
 patterns:
 - "@docusaurus/*"
-build:
-patterns:
-- "@swc/*"
-- "swc-*"
-- "lightningcss*"
-- "@rspack/binding*"
 - package-ecosystem: npm
 directory: "/lifecycle/aws"
 schedule:
@@ -98,7 +92,7 @@
 prefix: "lifecycle/aws:"
 labels:
 - dependencies
-- package-ecosystem: uv
+- package-ecosystem: pip
 directory: "/"
 schedule:
 interval: daily

View File

@@ -40,7 +40,7 @@ jobs:
 attestations: write
 steps:
 - uses: actions/checkout@v4
-- uses: docker/setup-qemu-action@v3.6.0
+- uses: docker/setup-qemu-action@v3.5.0
 - uses: docker/setup-buildx-action@v3
 - name: prepare variables
 uses: ./.github/actions/docker-push-variables

View File

@@ -30,6 +30,7 @@ jobs:
 uses: actions/setup-python@v5
 with:
 python-version-file: "pyproject.toml"
+cache: "poetry"
 - name: Generate API Client
 run: make gen-client-py
 - name: Publish package

View File

@@ -33,7 +33,7 @@ jobs:
 npm ci
 - name: Check changes have been applied
 run: |
-uv run make aws-cfn
+poetry run make aws-cfn
 git diff --exit-code
 ci-aws-cfn-mark:
 if: always()

View File

@@ -15,8 +15,8 @@ jobs:
 matrix:
 version:
 - docs
-- version-2025-2
 - version-2024-12
+- version-2024-10
 steps:
 - uses: actions/checkout@v4
 - run: |

View File

@@ -34,7 +34,7 @@ jobs:
 - name: Setup authentik env
 uses: ./.github/actions/setup
 - name: run job
-run: uv run make ci-${{ matrix.job }}
+run: poetry run make ci-${{ matrix.job }}
 test-migrations:
 runs-on: ubuntu-latest
 steps:
@@ -42,7 +42,7 @@
 - name: Setup authentik env
 uses: ./.github/actions/setup
 - name: run migrations
-run: uv run python -m lifecycle.migrate
+run: poetry run python -m lifecycle.migrate
 test-make-seed:
 runs-on: ubuntu-latest
 steps:
@@ -69,21 +69,19 @@
 fetch-depth: 0
 - name: checkout stable
 run: |
+# Delete all poetry envs
+rm -rf /home/runner/.cache/pypoetry
 # Copy current, latest config to local
-# Temporarly comment the .github backup while migrating to uv
 cp authentik/lib/default.yml local.env.yml
-# cp -R .github ..
+cp -R .github ..
 cp -R scripts ..
 git checkout $(git tag --sort=version:refname | grep '^version/' | grep -vE -- '-rc[0-9]+$' | tail -n1)
-# rm -rf .github/ scripts/
+rm -rf .github/ scripts/
-# mv ../.github ../scripts .
+mv ../.github ../scripts .
-rm -rf scripts/
-mv ../scripts .
 - name: Setup authentik env (stable)
 uses: ./.github/actions/setup
 with:
 postgresql_version: ${{ matrix.psql }}
-continue-on-error: true
 - name: run migrations to stable
 run: poetry run python -m lifecycle.migrate
 - name: checkout current code
@@ -93,13 +91,15 @@
 git reset --hard HEAD
 git clean -d -fx .
 git checkout $GITHUB_SHA
+# Delete previous poetry env
+rm -rf /home/runner/.cache/pypoetry/virtualenvs/*
 - name: Setup authentik env (ensure latest deps are installed)
 uses: ./.github/actions/setup
 with:
 postgresql_version: ${{ matrix.psql }}
 - name: migrate to latest
 run: |
-uv run python -m lifecycle.migrate
+poetry run python -m lifecycle.migrate
 - name: run tests
 env:
 # Test in the main database that we just migrated from the previous stable version
@@ -108,7 +108,7 @@
 CI_RUN_ID: ${{ matrix.run_id }}
 CI_TOTAL_RUNS: "5"
 run: |
-uv run make ci-test
+poetry run make ci-test
 test-unittest:
 name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
 runs-on: ubuntu-latest
@@ -133,7 +133,7 @@
 CI_RUN_ID: ${{ matrix.run_id }}
 CI_TOTAL_RUNS: "5"
 run: |
-uv run make ci-test
+poetry run make ci-test
 - if: ${{ always() }}
 uses: codecov/codecov-action@v5
 with:
@@ -156,8 +156,8 @@
 uses: helm/kind-action@v1.12.0
 - name: run integration
 run: |
-uv run coverage run manage.py test tests/integration
+poetry run coverage run manage.py test tests/integration
-uv run coverage xml
+poetry run coverage xml
 - if: ${{ always() }}
 uses: codecov/codecov-action@v5
 with:
@@ -214,8 +214,8 @@
 npm run build
 - name: run e2e
 run: |
-uv run coverage run manage.py test ${{ matrix.job.glob }}
+poetry run coverage run manage.py test ${{ matrix.job.glob }}
-uv run coverage xml
+poetry run coverage xml
 - if: ${{ always() }}
 uses: codecov/codecov-action@v5
 with:

View File

@@ -29,7 +29,7 @@ jobs:
 - name: Generate API
 run: make gen-client-go
 - name: golangci-lint
-uses: golangci/golangci-lint-action@v7
+uses: golangci/golangci-lint-action@v6
 with:
 version: latest
 args: --timeout 5000s --verbose
@@ -82,7 +82,7 @@
 with:
 ref: ${{ github.event.pull_request.head.sha }}
 - name: Set up QEMU
-uses: docker/setup-qemu-action@v3.6.0
+uses: docker/setup-qemu-action@v3.5.0
 - name: Set up Docker Buildx
 uses: docker/setup-buildx-action@v3
 - name: prepare variables

View File

@@ -2,7 +2,7 @@ name: authentik-gen-update-webauthn-mds
 on:
 workflow_dispatch:
 schedule:
-- cron: "30 1 1,15 * *"
+- cron: '30 1 1,15 * *'
 env:
 POSTGRES_DB: authentik
@@ -24,7 +24,7 @@
 token: ${{ steps.generate_token.outputs.token }}
 - name: Setup authentik env
 uses: ./.github/actions/setup
-- run: uv run ak update_webauthn_mds
+- run: poetry run ak update_webauthn_mds
 - uses: peter-evans/create-pull-request@v7
 id: cpr
 with:

View File

@@ -1,45 +0,0 @@
-name: authentik-packages-npm-publish
-on:
-push:
-branches: [main]
-paths:
-- packages/docusaurus-config
-- packages/eslint-config
-- packages/prettier-config
-- packages/tsconfig
-workflow_dispatch:
-jobs:
-publish:
-if: ${{ github.repository != 'goauthentik/authentik-internal' }}
-runs-on: ubuntu-latest
-strategy:
-fail-fast: false
-matrix:
-package:
-- docusaurus-config
-- eslint-config
-- prettier-config
-- tsconfig
-steps:
-- uses: actions/checkout@v4
-with:
-fetch-depth: 2
-- uses: actions/setup-node@v4
-with:
-node-version-file: packages/${{ matrix.package }}/package.json
-registry-url: "https://registry.npmjs.org"
-- name: Get changed files
-id: changed-files
-uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c
-with:
-files: |
-packages/${{ matrix.package }}/package.json
-- name: Publish package
-if: steps.changed-files.outputs.any_changed == 'true'
-working-directory: packages/${{ matrix.package}}
-run: |
-npm ci
-npm run build
-npm publish
-env:
-NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }}

View File

@@ -21,8 +21,8 @@ jobs:
 uses: ./.github/actions/setup
 - name: generate docs
 run: |
-uv run make migrate
+poetry run make migrate
-uv run ak build_source_docs
+poetry run ak build_source_docs
 - name: Publish
 uses: netlify/actions/cli@master
 with:

View File

@@ -42,7 +42,7 @@ jobs:
 with:
 go-version-file: "go.mod"
 - name: Set up QEMU
-uses: docker/setup-qemu-action@v3.6.0
+uses: docker/setup-qemu-action@v3.5.0
 - name: Set up Docker Buildx
 uses: docker/setup-buildx-action@v3
 - name: prepare variables
@@ -186,7 +186,7 @@
 container=$(docker container create ${{ steps.ev.outputs.imageMainName }})
 docker cp ${container}:web/ .
 - name: Create a Sentry.io release
-uses: getsentry/action-release@v3
+uses: getsentry/action-release@v1
 continue-on-error: true
 env:
 SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}

View File

@@ -1,27 +0,0 @@
-name: authentik-semgrep
-on:
-workflow_dispatch: {}
-pull_request: {}
-push:
-branches:
-- main
-- master
-paths:
-- .github/workflows/semgrep.yml
-schedule:
-# random HH:MM to avoid a load spike on GitHub Actions at 00:00
-- cron: '12 15 * * *'
-jobs:
-semgrep:
-name: semgrep/ci
-runs-on: ubuntu-latest
-permissions:
-contents: read
-env:
-SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }}
-container:
-image: semgrep/semgrep
-if: (github.actor != 'dependabot[bot]')
-steps:
-- uses: actions/checkout@v4
-- run: semgrep ci

View File

@@ -36,10 +36,10 @@ jobs:
 run: make gen-client-ts
 - name: run extract
 run: |
-uv run make i18n-extract
+poetry run make i18n-extract
 - name: run compile
 run: |
-uv run ak compilemessages
+poetry run ak compilemessages
 make web-check-compile
 - name: Create Pull Request
 if: ${{ github.event_name != 'pull_request' }}

.gitignore (5 changes)
View File

@@ -11,10 +11,6 @@ local_settings.py
 db.sqlite3
 media
-# Node
-node_modules
 # If your build process includes running collectstatic, then you probably don't need or want to include staticfiles/
 # in your Git repository. Update and uncomment the following line accordingly.
 # <django-project-name>/staticfiles/
@@ -37,7 +33,6 @@ eggs/
 lib64/
 parts/
 dist/
-out/
 sdist/
 var/
 wheels/

View File

@@ -1,47 +0,0 @@
-# Prettier Ignorefile
-## Static Files
-**/LICENSE
-authentik/stages/**/*
-## Build asset directories
-coverage
-dist
-out
-.docusaurus
-website/docs/developer-docs/api/**/*
-## Environment
-*.env
-## Secrets
-*.secrets
-## Yarn
-.yarn/**/*
-## Node
-node_modules
-coverage
-## Configs
-*.log
-*.yaml
-*.yml
-# Templates
-# TODO: Rename affected files to *.template.* or similar.
-*.html
-*.mdx
-*.md
-## Import order matters
-poly.ts
-src/locale-codes.ts
-src/locales/
-# Storybook
-storybook-static/
-.storybook/css-import-maps*

.vscode/settings.json (22 changes)
View File

@@ -1,4 +1,26 @@
 {
+"cSpell.words": [
+"akadmin",
+"asgi",
+"authentik",
+"authn",
+"entra",
+"goauthentik",
+"jwe",
+"jwks",
+"kubernetes",
+"oidc",
+"openid",
+"passwordless",
+"plex",
+"saml",
+"scim",
+"slo",
+"sso",
+"totp",
+"traefik",
+"webauthn"
+],
 "todo-tree.tree.showCountsInTree": true,
 "todo-tree.tree.showBadges": true,
 "yaml.customTags": [

.vscode/tasks.json (46 changes)
View File

@@ -3,13 +3,8 @@
 "tasks": [
 {
 "label": "authentik/core: make",
-"command": "uv",
+"command": "poetry",
-"args": [
+"args": ["run", "make", "lint-fix", "lint"],
-"run",
-"make",
-"lint-fix",
-"lint"
-],
 "presentation": {
 "panel": "new"
 },
@@ -17,12 +12,8 @@
 },
 {
 "label": "authentik/core: run",
-"command": "uv",
+"command": "poetry",
-"args": [
+"args": ["run", "ak", "server"],
-"run",
-"ak",
-"server"
-],
 "group": "build",
 "presentation": {
 "panel": "dedicated",
@@ -32,17 +23,13 @@
 {
 "label": "authentik/web: make",
 "command": "make",
-"args": [
+"args": ["web"],
-"web"
-],
 "group": "build"
 },
 {
 "label": "authentik/web: watch",
 "command": "make",
-"args": [
+"args": ["web-watch"],
-"web-watch"
-],
 "group": "build",
 "presentation": {
 "panel": "dedicated",
@@ -52,26 +39,19 @@
 {
 "label": "authentik: install",
 "command": "make",
-"args": [
+"args": ["install", "-j4"],
-"install",
-"-j4"
-],
 "group": "build"
 },
 {
 "label": "authentik/website: make",
 "command": "make",
-"args": [
+"args": ["website"],
-"website"
-],
 "group": "build"
 },
 {
 "label": "authentik/website: watch",
 "command": "make",
-"args": [
+"args": ["website-watch"],
-"website-watch"
-],
 "group": "build",
 "presentation": {
 "panel": "dedicated",
@@ -80,12 +60,8 @@
 },
 {
 "label": "authentik/api: generate",
-"command": "uv",
+"command": "poetry",
-"args": [
+"args": ["run", "make", "gen"],
-"run",
-"make",
-"gen"
-],
 "group": "build"
 }
 ]

View File

@@ -10,7 +10,7 @@ schemas/ @goauthentik/backend
 scripts/ @goauthentik/backend
 tests/ @goauthentik/backend
 pyproject.toml @goauthentik/backend
-uv.lock @goauthentik/backend
+poetry.lock @goauthentik/backend
 go.mod @goauthentik/backend
 go.sum @goauthentik/backend
 # Infrastructure
@@ -23,8 +23,6 @@ docker-compose.yml @goauthentik/infrastructure
 Makefile @goauthentik/infrastructure
 .editorconfig @goauthentik/infrastructure
 CODEOWNERS @goauthentik/infrastructure
-# Web packages
-packages/ @goauthentik/frontend
 # Web
 web/ @goauthentik/frontend
 tests/wdio/ @goauthentik/frontend

View File

@@ -5,7 +5,7 @@
 We as members, contributors, and leaders pledge to make participation in our
 community a harassment-free experience for everyone, regardless of age, body
 size, visible or invisible disability, ethnicity, sex characteristics, gender
-identity and expression, level of experience, education, socioeconomic status,
+identity and expression, level of experience, education, socio-economic status,
 nationality, personal appearance, race, religion, or sexual identity
 and orientation.

View File

@@ -43,7 +43,7 @@ COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api
 RUN npm run build
 # Stage 3: Build go proxy
-FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.24-bookworm AS go-builder
+FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/oss/go/microsoft/golang:1.23-fips-bookworm AS go-builder
 ARG TARGETOS
 ARG TARGETARCH
@@ -76,7 +76,7 @@ COPY ./go.sum /go/src/goauthentik.io/go.sum
 RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
 --mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \
 if [ "$TARGETARCH" = "arm64" ]; then export CC=aarch64-linux-gnu-gcc && export CC_FOR_TARGET=gcc-aarch64-linux-gnu; fi && \
-CGO_ENABLED=1 GOFIPS140=latest GOARM="${TARGETVARIANT#v}" \
+CGO_ENABLED=1 GOEXPERIMENT="systemcrypto" GOFLAGS="-tags=requirefips" GOARM="${TARGETVARIANT#v}" \
 go build -o /go/authentik ./cmd/server
 # Stage 4: MaxMind GeoIP
@@ -93,59 +93,53 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
 mkdir -p /usr/share/GeoIP && \
 /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
-# Stage 5: Download uv
+# Stage 5: Python dependencies
-FROM ghcr.io/astral-sh/uv:0.6.14 AS uv
+FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS python-deps
-# Stage 6: Base python image
-FROM ghcr.io/goauthentik/fips-python:3.12.10-slim-bookworm-fips AS python-base
-ENV VENV_PATH="/ak-root/.venv" \
-PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \
-UV_COMPILE_BYTECODE=1 \
-UV_LINK_MODE=copy \
-UV_NATIVE_TLS=1 \
-UV_PYTHON_DOWNLOADS=0
-WORKDIR /ak-root/
-COPY --from=uv /uv /uvx /bin/
-# Stage 7: Python dependencies
-FROM python-base AS python-deps
 ARG TARGETARCH
 ARG TARGETVARIANT
-RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
+WORKDIR /ak-root/poetry
-ENV PATH="/root/.cargo/bin:$PATH"
+ENV VENV_PATH="/ak-root/venv" \
+POETRY_VIRTUALENVS_CREATE=false \
+PATH="/ak-root/venv/bin:$PATH"
+RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
 RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
 apt-get update && \
 # Required for installing pip packages
+apt-get install -y --no-install-recommends build-essential pkg-config libpq-dev libkrb5-dev
+RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
+--mount=type=bind,target=./poetry.lock,src=./poetry.lock \
+--mount=type=cache,target=/root/.cache/pip \
+--mount=type=cache,target=/root/.cache/pypoetry \
+pip install --no-cache cffi && \
+apt-get update && \
 apt-get install -y --no-install-recommends \
-# Build essentials
-build-essential pkg-config libffi-dev git \
-# cryptography
-curl \
-# libxml
-libxslt-dev zlib1g-dev \
-# postgresql
-libpq-dev \
-# python-kadmin-rs
-clang libkrb5-dev sccache \
-# xmlsec
-libltdl-dev && \
-curl https://sh.rustup.rs -sSf | sh -s -- -y
+build-essential libffi-dev \
+# Required for cryptography
+curl pkg-config \
+# Required for lxml
+libxslt-dev zlib1g-dev \
+# Required for xmlsec
+libltdl-dev \
+# Required for kadmin
+sccache clang && \
+curl https://sh.rustup.rs -sSf | sh -s -- -y && \
+. "$HOME/.cargo/env" && \
+python -m venv /ak-root/venv/ && \
+bash -c "source ${VENV_PATH}/bin/activate && \
+pip3 install --upgrade pip poetry && \
+poetry config --local installer.no-binary cryptography,xmlsec,lxml,python-kadmin-rs && \
+poetry install --only=main --no-ansi --no-interaction --no-root && \
+pip uninstall cryptography -y && \
+poetry install --only=main --no-ansi --no-interaction --no-root"
-ENV UV_NO_BINARY_PACKAGE="cryptography lxml python-kadmin-rs xmlsec"
+# Stage 6: Run
+FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS final-image
-RUN --mount=type=bind,target=pyproject.toml,src=pyproject.toml \
---mount=type=bind,target=uv.lock,src=uv.lock \
---mount=type=cache,target=/root/.cache/uv \
-uv sync --frozen --no-install-project --no-dev
-# Stage 8: Run
-FROM python-base AS final-image
 ARG VERSION
 ARG GIT_BUILD_HASH
@@ -177,7 +171,7 @@ RUN apt-get update && \
 COPY ./authentik/ /authentik
 COPY ./pyproject.toml /
-COPY ./uv.lock /
+COPY ./poetry.lock /
 COPY ./schemas /schemas
 COPY ./locale /locale
 COPY ./tests /tests
@@ -186,7 +180,7 @@ COPY ./blueprints /blueprints
 COPY ./lifecycle/ /lifecycle
 COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf
 COPY --from=go-builder /go/authentik /bin/authentik
-COPY --from=python-deps /ak-root/.venv /ak-root/.venv
+COPY --from=python-deps /ak-root/venv /ak-root/venv
 COPY --from=web-builder /work/web/dist/ /web/dist/
 COPY --from=web-builder /work/web/authentik/ /web/authentik/
 COPY --from=website-builder /work/website/build/ /website/help/
@@ -197,6 +191,9 @@ USER 1000
 ENV TMPDIR=/dev/shm/ \
 PYTHONDONTWRITEBYTECODE=1 \
 PYTHONUNBUFFERED=1 \
+PATH="/ak-root/venv/bin:/lifecycle:$PATH" \
+VENV_PATH="/ak-root/venv" \
+POETRY_VIRTUALENVS_CREATE=false \
 GOFIPS=1
 HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]

View File

@@ -4,17 +4,34 @@
 PWD = $(shell pwd)
 UID = $(shell id -u)
 GID = $(shell id -g)
-NPM_VERSION = $(shell python -m scripts.generate_semver)
+NPM_VERSION = $(shell python -m scripts.npm_version)
 PY_SOURCES = authentik tests scripts lifecycle .github
+GO_SOURCES = cmd internal
+WEB_SOURCES = web/src web/packages
 DOCKER_IMAGE ?= "authentik:test"
 GEN_API_TS = "gen-ts-api"
 GEN_API_PY = "gen-py-api"
 GEN_API_GO = "gen-go-api"
-pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null)
+pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null)
-pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null)
+pg_host := $(shell python -m authentik.lib.config postgresql.host 2>/dev/null)
-pg_name := $(shell uv run python -m authentik.lib.config postgresql.name 2>/dev/null)
+pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)
+CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
+-I .github/codespell-words.txt \
+-S 'web/src/locales/**' \
+-S 'website/docs/developer-docs/api/reference/**' \
+-S '**/node_modules/**' \
+-S '**/dist/**' \
+$(PY_SOURCES) \
+$(GO_SOURCES) \
+$(WEB_SOURCES) \
+website/src \
+website/blog \
+website/docs \
+website/integrations \
+website/src
 all: lint-fix lint test gen web ## Lint, build, and test everything
@@ -32,37 +49,34 @@ go-test:
 go test -timeout 0 -v -race -cover ./...
 test: ## Run the server tests and produce a coverage report (locally)
-uv run coverage run manage.py test --keepdb authentik
+coverage run manage.py test --keepdb authentik
-uv run coverage html
+coverage html
-uv run coverage report
+coverage report
 lint-fix: lint-codespell ## Lint and automatically fix errors in the python source code. Reports spelling errors.
-uv run black $(PY_SOURCES)
+black $(PY_SOURCES)
-uv run ruff check --fix $(PY_SOURCES)
+ruff check --fix $(PY_SOURCES)
 lint-codespell: ## Reports spelling errors.
-uv run codespell -w
+codespell -w $(CODESPELL_ARGS)
 lint: ## Lint the python and golang sources
-uv run bandit -c pyproject.toml -r $(PY_SOURCES)
+bandit -r $(PY_SOURCES) -x web/node_modules -x tests/wdio/node_modules -x website/node_modules
 golangci-lint run -v
 core-install:
-uv sync --frozen
+poetry install
 migrate: ## Run the Authentik Django server's migrations
-uv run python -m lifecycle.migrate
+python -m lifecycle.migrate
 i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation into files to send to a translation service
 aws-cfn:
 cd lifecycle/aws && npm run aws-cfn
-run: ## Run the main authentik server process
-uv run ak server
 core-i18n-extract:
-uv run ak makemessages \
+ak makemessages \
 --add-location file \
 --no-obsolete \
 --ignore web \
@@ -93,11 +107,11 @@ gen-build: ## Extract the schema from the database
 AUTHENTIK_DEBUG=true \
 AUTHENTIK_TENANTS__ENABLED=true \
 AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
-uv run ak make_blueprint_schema > blueprints/schema.json
+ak make_blueprint_schema > blueprints/schema.json
 AUTHENTIK_DEBUG=true \
 AUTHENTIK_TENANTS__ENABLED=true \
 AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
-uv run ak spectacular --file schema.yml
+ak spectacular --file schema.yml
 gen-changelog: ## (Release) generate the changelog based from the commits since the last tag
 git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
@@ -148,7 +162,7 @@ gen-client-py: gen-clean-py ## Build and install the authentik API for Python
 docker run \
 --rm -v ${PWD}:/local \
 --user ${UID}:${GID} \
-docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
+docker.io/openapitools/openapi-generator-cli:v7.4.0 generate \
 -i /local/schema.yml \
 -g python \
 -o /local/${GEN_API_PY} \
@@ -176,7 +190,7 @@ gen-client-go: gen-clean-go ## Build and install the authentik API for Golang
 rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/
 gen-dev-config: ## Generate a local development config file
-uv run scripts/generate_config.py
+python -m scripts.generate_config
 gen: gen-build gen-client-ts
@@ -257,21 +271,21 @@ ci--meta-debug:
 node --version
 ci-black: ci--meta-debug
-uv run black --check $(PY_SOURCES)
+black --check $(PY_SOURCES)
 ci-ruff: ci--meta-debug
-uv run ruff check $(PY_SOURCES)
+ruff check $(PY_SOURCES)
 ci-codespell: ci--meta-debug
-uv run codespell -s
+codespell $(CODESPELL_ARGS) -s
 ci-bandit: ci--meta-debug
-uv run bandit -r $(PY_SOURCES)
+bandit -r $(PY_SOURCES)
 ci-pending-migrations: ci--meta-debug
-uv run ak makemigrations --check
+ak makemigrations --check
 ci-test: ci--meta-debug
-uv run coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik
+coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik
-uv run coverage report
+coverage report
-uv run coverage xml
+coverage xml

View File

@@ -2,7 +2,7 @@ authentik takes security very seriously. We follow the rules of [responsible di
 ## Independent audits and pentests
-We are committed to engaging in regular pentesting and security audits of authentik. Defining and adhering to a cadence of external testing ensures a stronger probability that our code base, our features, and our architecture is as secure and non-exploitable as possible. For more details about specific audits and pentests, refer to "Audits and Certificates" in our [Security documentation](https://docs.goauthentik.io/docs/security).
+We are committed to engaging in regular pentesting and security audits of authentik. Defining and adhering to a cadence of external testing ensures a stronger probability that our code base, our features, and our architecture is as secure and non-exploitable as possible. For more details about specfic audits and pentests, refer to "Audits and Certificates" in our [Security documentation](https://docs.goauthentik.io/docs/security).
 ## What authentik classifies as a CVE

View File

@@ -2,7 +2,7 @@
 from os import environ
-__version__ = "2025.2.4"
+__version__ = "2025.2.1"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

View File

@@ -59,7 +59,7 @@ class SystemInfoSerializer(PassiveSerializer):
 if not isinstance(value, str):
 continue
 actual_value = value
-if raw_session is not None and raw_session in actual_value:
+if raw_session in actual_value:
 actual_value = actual_value.replace(
 raw_session, SafeExceptionReporterFilter.cleansed_substitute
 )

View File

@@ -36,7 +36,6 @@ from authentik.core.models import (
 GroupSourceConnection,
 PropertyMapping,
 Provider,
-Session,
 Source,
 User,
 UserSourceConnection,
@@ -109,7 +108,6 @@ def excluded_models() -> list[type[Model]]:
 Policy,
 PolicyBindingModel,
 # Classes that have other dependencies
-Session,
 AuthenticatedSession,
 # Classes which are only internally managed
 # FIXME: these shouldn't need to be explicitly listed, but rather based off of a mixin

View File

@@ -49,8 +49,6 @@ class BrandSerializer(ModelSerializer):
 "branding_title",
 "branding_logo",
 "branding_favicon",
-"branding_custom_css",
-"branding_default_flow_background",
 "flow_authentication",
 "flow_invalidation",
 "flow_recovery",
@@ -88,7 +86,6 @@ class CurrentBrandSerializer(PassiveSerializer):
 branding_title = CharField()
 branding_logo = CharField(source="branding_logo_url")
 branding_favicon = CharField(source="branding_favicon_url")
-branding_custom_css = CharField()
 ui_footer_links = ListField(
 child=FooterLinkSerializer(),
 read_only=True,
@@ -128,7 +125,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet):
 "branding_title",
 "branding_logo",
 "branding_favicon",
-"branding_default_flow_background",
 "flow_authentication",
 "flow_invalidation",
 "flow_recovery",

View File

@@ -1,35 +0,0 @@
-# Generated by Django 5.0.12 on 2025-02-22 01:51
-from pathlib import Path
-from django.db import migrations, models
-from django.apps.registry import Apps
-from django.db.backends.base.schema import BaseDatabaseSchemaEditor
-def migrate_custom_css(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
-Brand = apps.get_model("authentik_brands", "brand")
-db_alias = schema_editor.connection.alias
-path = Path("/web/dist/custom.css")
-if not path.exists():
-return
-css = path.read_text()
-Brand.objects.using(db_alias).update(branding_custom_css=css)
-class Migration(migrations.Migration):
-dependencies = [
-("authentik_brands", "0007_brand_default_application"),
-]
-operations = [
-migrations.AddField(
-model_name="brand",
-name="branding_custom_css",
-field=models.TextField(blank=True, default=""),
-),
-migrations.RunPython(migrate_custom_css),
-]

View File

@@ -1,18 +0,0 @@
-# Generated by Django 5.0.13 on 2025-03-19 22:54
-from django.db import migrations, models
-class Migration(migrations.Migration):
-dependencies = [
-("authentik_brands", "0008_brand_branding_custom_css"),
-]
-operations = [
-migrations.AddField(
-model_name="brand",
-name="branding_default_flow_background",
-field=models.TextField(default="/static/dist/assets/images/flow_background.jpg"),
-),
-]

View File

@@ -33,10 +33,6 @@ class Brand(SerializerModel):
 branding_logo = models.TextField(default="/static/dist/assets/icons/icon_left_brand.svg")
 branding_favicon = models.TextField(default="/static/dist/assets/icons/icon.png")
-branding_custom_css = models.TextField(default="", blank=True)
-branding_default_flow_background = models.TextField(
-default="/static/dist/assets/images/flow_background.jpg"
-)
 flow_authentication = models.ForeignKey(
 Flow, null=True, on_delete=models.SET_NULL, related_name="brand_authentication"
@@ -88,12 +84,6 @@ class Brand(SerializerModel):
 return CONFIG.get("web.path", "/")[:-1] + self.branding_favicon
 return self.branding_favicon
-def branding_default_flow_background_url(self) -> str:
-"""Get branding_default_flow_background with the correct prefix"""
-if self.branding_default_flow_background.startswith("/static"):
-return CONFIG.get("web.path", "/")[:-1] + self.branding_default_flow_background
-return self.branding_default_flow_background
 @property
 def serializer(self) -> Serializer:
 from authentik.brands.api import BrandSerializer

View File

@@ -24,7 +24,6 @@ class TestBrands(APITestCase):
 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
 "branding_favicon": "/static/dist/assets/icons/icon.png",
 "branding_title": "authentik",
-"branding_custom_css": "",
 "matched_domain": brand.domain,
 "ui_footer_links": [],
 "ui_theme": Themes.AUTOMATIC,
@@ -44,7 +43,6 @@
 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
 "branding_favicon": "/static/dist/assets/icons/icon.png",
 "branding_title": "custom",
-"branding_custom_css": "",
 "matched_domain": "bar.baz",
 "ui_footer_links": [],
 "ui_theme": Themes.AUTOMATIC,
@@ -61,7 +59,6 @@
 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
 "branding_favicon": "/static/dist/assets/icons/icon.png",
 "branding_title": "authentik",
-"branding_custom_css": "",
 "matched_domain": "fallback",
 "ui_footer_links": [],
 "ui_theme": Themes.AUTOMATIC,
@@ -124,27 +121,3 @@
 "subject": None,
 },
 )
-def test_branding_url(self):
-"""Test branding attributes return correct values"""
-brand = create_test_brand()
-brand.branding_default_flow_background = "https://goauthentik.io/img/icon.png"
-brand.branding_favicon = "https://goauthentik.io/img/icon.png"
-brand.branding_logo = "https://goauthentik.io/img/icon.png"
-brand.save()
-self.assertEqual(
-brand.branding_default_flow_background_url(), "https://goauthentik.io/img/icon.png"
-)
-self.assertJSONEqual(
-self.client.get(reverse("authentik_api:brand-current")).content.decode(),
-{
-"branding_logo": "https://goauthentik.io/img/icon.png",
-"branding_favicon": "https://goauthentik.io/img/icon.png",
-"branding_title": "authentik",
-"branding_custom_css": "",
-"matched_domain": brand.domain,
-"ui_footer_links": [],
-"ui_theme": Themes.AUTOMATIC,
-"default_locale": "",
-},
-)

View File

@@ -46,7 +46,7 @@ LOGGER = get_logger()
 def user_app_cache_key(user_pk: str, page_number: int | None = None) -> str:
 """Cache key where application list for user is saved"""
-key = f"{CACHE_PREFIX}app_access/{user_pk}"
+key = f"{CACHE_PREFIX}/app_access/{user_pk}"
 if page_number:
 key += f"/{page_number}"
 return key

View File

@@ -5,7 +5,6 @@ from typing import TypedDict
 from rest_framework import mixins
 from rest_framework.fields import SerializerMethodField
 from rest_framework.request import Request
-from rest_framework.serializers import CharField, DateTimeField, IPAddressField
 from rest_framework.viewsets import GenericViewSet
 from ua_parser import user_agent_parser
@@ -55,11 +54,6 @@ class UserAgentDict(TypedDict):
 class AuthenticatedSessionSerializer(ModelSerializer):
 """AuthenticatedSession Serializer"""
-expires = DateTimeField(source="session.expires", read_only=True)
-last_ip = IPAddressField(source="session.last_ip", read_only=True)
-last_user_agent = CharField(source="session.last_user_agent", read_only=True)
-last_used = DateTimeField(source="session.last_used", read_only=True)
 current = SerializerMethodField()
 user_agent = SerializerMethodField()
 geo_ip = SerializerMethodField()
@@ -68,19 +62,19 @@ class AuthenticatedSessionSerializer(ModelSerializer):
 def get_current(self, instance: AuthenticatedSession) -> bool:
 """Check if session is currently active session"""
 request: Request = self.context["request"]
-return request._request.session.session_key == instance.session.session_key
+return request._request.session.session_key == instance.session_key
 def get_user_agent(self, instance: AuthenticatedSession) -> UserAgentDict:
 """Get parsed user agent"""
-return user_agent_parser.Parse(instance.session.last_user_agent)
+return user_agent_parser.Parse(instance.last_user_agent)
 def get_geo_ip(self, instance: AuthenticatedSession) -> GeoIPDict | None: # pragma: no cover
 """Get GeoIP Data"""
-return GEOIP_CONTEXT_PROCESSOR.city_dict(instance.session.last_ip)
+return GEOIP_CONTEXT_PROCESSOR.city_dict(instance.last_ip)
 def get_asn(self, instance: AuthenticatedSession) -> ASNDict | None: # pragma: no cover
 """Get ASN Data"""
-return ASN_CONTEXT_PROCESSOR.asn_dict(instance.session.last_ip)
+return ASN_CONTEXT_PROCESSOR.asn_dict(instance.last_ip)
 class Meta:
 model = AuthenticatedSession
@@ -96,7 +90,6 @@ class AuthenticatedSessionSerializer(ModelSerializer):
 "last_used",
 "expires",
 ]
-extra_args = {"uuid": {"read_only": True}}
 class AuthenticatedSessionViewSet(
@@ -108,10 +101,9 @@ class AuthenticatedSessionViewSet(
 ):
 """AuthenticatedSession Viewset"""
-lookup_field = "uuid"
+queryset = AuthenticatedSession.objects.all()
-queryset = AuthenticatedSession.objects.select_related("session").all()
 serializer_class = AuthenticatedSessionSerializer
-search_fields = ["user__username", "session__last_ip", "session__last_user_agent"]
+search_fields = ["user__username", "last_ip", "last_user_agent"]
-filterset_fields = ["user__username", "session__last_ip", "session__last_user_agent"]
+filterset_fields = ["user__username", "last_ip", "last_user_agent"]
 ordering = ["user__username"]
 owner_field = "user"

View File

@@ -5,7 +5,6 @@ from collections.abc import Iterable
 from drf_spectacular.utils import OpenApiResponse, extend_schema
 from rest_framework import mixins
 from rest_framework.decorators import action
-from rest_framework.exceptions import ValidationError
 from rest_framework.fields import CharField, ReadOnlyField, SerializerMethodField
 from rest_framework.parsers import MultiPartParser
 from rest_framework.request import Request
@@ -155,17 +154,6 @@ class SourceViewSet(
 matching_sources.append(source_settings.validated_data)
 return Response(matching_sources)
-def destroy(self, request: Request, *args, **kwargs):
-"""Prevent deletion of built-in sources"""
-instance: Source = self.get_object()
-if instance.managed == Source.MANAGED_INBUILT:
-raise ValidationError(
-{"detail": "Built-in sources cannot be deleted"}, code="protected"
-)
-return super().destroy(request, *args, **kwargs)
 class UserSourceConnectionSerializer(SourceSerializer):
 """User source connection"""
@@ -179,13 +167,10 @@ class UserSourceConnectionSerializer(SourceSerializer):
 "user",
 "source",
 "source_obj",
-"identifier",
 "created",
-"last_updated",
 ]
 extra_kwargs = {
 "created": {"read_only": True},
-"last_updated": {"read_only": True},
 }
@@ -202,7 +187,7 @@ class UserSourceConnectionViewSet(
 queryset = UserSourceConnection.objects.all()
 serializer_class = UserSourceConnectionSerializer
 filterset_fields = ["user", "source__slug"]
-search_fields = ["user__username", "source__slug", "identifier"]
+search_fields = ["source__slug"]
 ordering = ["source__slug", "pk"]
 owner_field = "user"
@@ -221,11 +206,9 @@ class GroupSourceConnectionSerializer(SourceSerializer):
 "source_obj",
 "identifier",
 "created",
-"last_updated",
 ]
 extra_kwargs = {
 "created": {"read_only": True},
-"last_updated": {"read_only": True},
 }
@@ -242,5 +225,6 @@ class GroupSourceConnectionViewSet(
 queryset = GroupSourceConnection.objects.all()
 serializer_class = GroupSourceConnectionSerializer
 filterset_fields = ["group", "source__slug"]
-search_fields = ["group__name", "source__slug", "identifier"]
+search_fields = ["source__slug"]
 ordering = ["source__slug", "pk"]
+owner_field = "user"

View File

@@ -6,6 +6,8 @@ from typing import Any
 from django.contrib.auth import update_session_auth_hash
 from django.contrib.auth.models import Permission
+from django.contrib.sessions.backends.cache import KEY_PREFIX
+from django.core.cache import cache
 from django.db.models.functions import ExtractHour
 from django.db.transaction import atomic
 from django.db.utils import IntegrityError
@@ -69,8 +71,8 @@ from authentik.core.middleware import (
 from authentik.core.models import (
 USER_ATTRIBUTE_TOKEN_EXPIRING,
 USER_PATH_SERVICE_ACCOUNT,
+AuthenticatedSession,
 Group,
-Session,
 Token,
 TokenIntents,
 User,
@@ -224,7 +226,6 @@ class UserSerializer(ModelSerializer):
 "name",
 "is_active",
 "last_login",
-"date_joined",
 "is_superuser",
 "groups",
 "groups_obj",
@@ -239,7 +240,6 @@
 ]
 extra_kwargs = {
 "name": {"allow_blank": True},
-"date_joined": {"read_only": True},
 "password_change_date": {"read_only": True},
 }
@@ -373,7 +373,7 @@ class UsersFilter(FilterSet):
 method="filter_attributes",
 )
-is_superuser = BooleanFilter(field_name="ak_groups", method="filter_is_superuser")
+is_superuser = BooleanFilter(field_name="ak_groups", lookup_expr="is_superuser")
 uuid = UUIDFilter(field_name="uuid")
 path = CharFilter(field_name="path")
@@ -391,11 +391,6 @@
 queryset=Group.objects.all().order_by("name"),
 )
-def filter_is_superuser(self, queryset, name, value):
-if value:
-return queryset.filter(ak_groups__is_superuser=True).distinct()
-return queryset.exclude(ak_groups__is_superuser=True).distinct()
 def filter_attributes(self, queryset, name, value):
 """Filter attributes by query args"""
 try:
@@ -772,6 +767,9 @@ class UserViewSet(UsedByMixin, ModelViewSet):
 response = super().partial_update(request, *args, **kwargs)
 instance: User = self.get_object()
 if not instance.is_active:
-Session.objects.filter(authenticatedsession__user=instance).delete()
+sessions = AuthenticatedSession.objects.filter(user=instance)
+session_ids = sessions.values_list("session_key", flat=True)
+cache.delete_many(f"{KEY_PREFIX}{session}" for session in session_ids)
+sessions.delete()
 LOGGER.debug("Deleted user's sessions", user=instance.username)
 return response

View File

@@ -32,5 +32,5 @@ class AuthentikCoreConfig(ManagedAppConfig):
 "name": "authentik Built-in",
 "slug": "authentik-built-in",
 },
-managed=Source.MANAGED_INBUILT,
+managed="goauthentik.io/sources/inbuilt",
 )

View File

@@ -24,15 +24,6 @@ class InbuiltBackend(ModelBackend):
 self.set_method("password", request)
 return user
-async def aauthenticate(
-self, request: HttpRequest, username: str | None, password: str | None, **kwargs: Any
-) -> User | None:
-user = await super().aauthenticate(request, username=username, password=password, **kwargs)
-if not user:
-return None
-self.set_method("password", request)
-return user
 def set_method(self, method: str, request: HttpRequest | None, **kwargs):
 """Set method data on current flow, if possbiel"""
 if not request:

View File

@@ -1,15 +0,0 @@
-"""Change user type"""
-from importlib import import_module
-from django.conf import settings
-from authentik.tenants.management import TenantCommand
-class Command(TenantCommand):
-"""Delete all sessions"""
-def handle_per_tenant(self, **options):
-engine = import_module(settings.SESSION_ENGINE)
-engine.SessionStore.clear_expired()

View File

@ -2,14 +2,9 @@
from collections.abc import Callable from collections.abc import Callable
from contextvars import ContextVar from contextvars import ContextVar
from functools import partial
from uuid import uuid4 from uuid import uuid4
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpRequest, HttpResponse from django.http import HttpRequest, HttpResponse
from django.utils.deprecation import MiddlewareMixin
from django.utils.functional import SimpleLazyObject
from django.utils.translation import override from django.utils.translation import override
from sentry_sdk.api import set_tag from sentry_sdk.api import set_tag
from structlog.contextvars import STRUCTLOG_KEY_PREFIX from structlog.contextvars import STRUCTLOG_KEY_PREFIX
@ -25,40 +20,6 @@ CTX_HOST = ContextVar[str | None](STRUCTLOG_KEY_PREFIX + "host", default=None)
CTX_AUTH_VIA = ContextVar[str | None](STRUCTLOG_KEY_PREFIX + KEY_AUTH_VIA, default=None) CTX_AUTH_VIA = ContextVar[str | None](STRUCTLOG_KEY_PREFIX + KEY_AUTH_VIA, default=None)
def get_user(request):
if not hasattr(request, "_cached_user"):
user = None
if (authenticated_session := request.session.get("authenticatedsession", None)) is not None:
user = authenticated_session.user
request._cached_user = user or AnonymousUser()
return request._cached_user
async def aget_user(request):
if not hasattr(request, "_cached_user"):
user = None
if (
authenticated_session := await request.session.aget("authenticatedsession", None)
) is not None:
user = authenticated_session.user
request._cached_user = user or AnonymousUser()
return request._cached_user
class AuthenticationMiddleware(MiddlewareMixin):
def process_request(self, request):
if not hasattr(request, "session"):
raise ImproperlyConfigured(
"The Django authentication middleware requires session "
"middleware to be installed. Edit your MIDDLEWARE setting to "
"insert "
"'authentik.root.middleware.SessionMiddleware' before "
"'authentik.core.middleware.AuthenticationMiddleware'."
)
request.user = SimpleLazyObject(lambda: get_user(request))
request.auser = partial(aget_user, request)
class ImpersonateMiddleware: class ImpersonateMiddleware:
"""Middleware to impersonate users""" """Middleware to impersonate users"""

View File

@ -1,19 +0,0 @@
# Generated by Django 5.0.13 on 2025-04-07 14:04
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0043_alter_group_options"),
]
operations = [
migrations.AddField(
model_name="usersourceconnection",
name="new_identifier",
field=models.TextField(default=""),
preserve_default=False,
),
]

View File

@ -1,30 +0,0 @@
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0044_usersourceconnection_new_identifier"),
("authentik_sources_kerberos", "0003_migrate_userkerberossourceconnection_identifier"),
("authentik_sources_oauth", "0009_migrate_useroauthsourceconnection_identifier"),
("authentik_sources_plex", "0005_migrate_userplexsourceconnection_identifier"),
("authentik_sources_saml", "0019_migrate_usersamlsourceconnection_identifier"),
]
operations = [
migrations.RenameField(
model_name="usersourceconnection",
old_name="new_identifier",
new_name="identifier",
),
migrations.AddIndex(
model_name="usersourceconnection",
index=models.Index(fields=["identifier"], name="authentik_c_identif_59226f_idx"),
),
migrations.AddIndex(
model_name="usersourceconnection",
index=models.Index(
fields=["source", "identifier"], name="authentik_c_source__649e04_idx"
),
),
]

View File

@ -1,238 +0,0 @@
# Generated by Django 5.0.11 on 2025-01-27 12:58
import uuid
import pickle # nosec
from django.core import signing
from django.contrib.auth import BACKEND_SESSION_KEY, HASH_SESSION_KEY, SESSION_KEY
from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings
from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.utils.timezone import now, timedelta
from authentik.lib.migrations import progress_bar
from authentik.root.middleware import ClientIPMiddleware
SESSION_CACHE_ALIAS = "default"
class PickleSerializer:
"""
Simple wrapper around pickle to be used in signing.dumps()/loads() and
cache backends.
"""
def __init__(self, protocol=None):
self.protocol = pickle.HIGHEST_PROTOCOL if protocol is None else protocol
def dumps(self, obj):
"""Pickle data to be stored in redis"""
return pickle.dumps(obj, self.protocol)
def loads(self, data):
"""Unpickle data to be loaded from redis"""
return pickle.loads(data) # nosec
def _migrate_session(
apps,
db_alias,
session_key,
session_data,
expires,
):
Session = apps.get_model("authentik_core", "Session")
OldAuthenticatedSession = apps.get_model("authentik_core", "OldAuthenticatedSession")
AuthenticatedSession = apps.get_model("authentik_core", "AuthenticatedSession")
old_auth_session = (
OldAuthenticatedSession.objects.using(db_alias).filter(session_key=session_key).first()
)
args = {
"session_key": session_key,
"expires": expires,
"last_ip": ClientIPMiddleware.default_ip,
"last_user_agent": "",
"session_data": {},
}
for k, v in session_data.items():
if k == "authentik/stages/user_login/last_ip":
args["last_ip"] = v
elif k in ["last_user_agent", "last_used"]:
args[k] = v
elif k in [SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY]:
pass
else:
args["session_data"][k] = v
if old_auth_session:
args["last_user_agent"] = old_auth_session.last_user_agent
args["last_used"] = old_auth_session.last_used
args["session_data"] = pickle.dumps(args["session_data"])
session = Session.objects.using(db_alias).create(**args)
if old_auth_session:
AuthenticatedSession.objects.using(db_alias).create(
session=session,
user=old_auth_session.user,
)
def migrate_redis_sessions(apps, schema_editor):
from django.core.cache import caches
db_alias = schema_editor.connection.alias
cache = caches[SESSION_CACHE_ALIAS]
# Not a redis cache, skipping
if not hasattr(cache, "keys"):
return
print("\nMigrating Redis sessions to database, this might take a couple of minutes...")
for key, session_data in progress_bar(cache.get_many(cache.keys(f"{KEY_PREFIX}*")).items()):
_migrate_session(
apps=apps,
db_alias=db_alias,
session_key=key.removeprefix(KEY_PREFIX),
session_data=session_data,
expires=now() + timedelta(seconds=cache.ttl(key)),
)
def migrate_database_sessions(apps, schema_editor):
DjangoSession = apps.get_model("sessions", "Session")
db_alias = schema_editor.connection.alias
print("\nMigration database sessions, this might take a couple of minutes...")
for django_session in progress_bar(DjangoSession.objects.using(db_alias).all()):
session_data = signing.loads(
django_session.session_data,
salt="django.contrib.sessions.SessionStore",
serializer=PickleSerializer,
)
_migrate_session(
apps=apps,
db_alias=db_alias,
session_key=django_session.session_key,
session_data=session_data,
expires=django_session.expire_date,
)
class Migration(migrations.Migration):
dependencies = [
("sessions", "0001_initial"),
("authentik_core", "0045_rename_new_identifier_usersourceconnection_identifier_and_more"),
("authentik_providers_oauth2", "0027_accesstoken_authentik_p_expires_9f24a5_idx_and_more"),
("authentik_providers_rac", "0006_connectiontoken_authentik_p_expires_91f148_idx_and_more"),
]
operations = [
# Rename AuthenticatedSession to OldAuthenticatedSession
migrations.RenameModel(
old_name="AuthenticatedSession",
new_name="OldAuthenticatedSession",
),
migrations.RenameIndex(
model_name="oldauthenticatedsession",
new_name="authentik_c_expires_cf4f72_idx",
old_name="authentik_c_expires_08251d_idx",
),
migrations.RenameIndex(
model_name="oldauthenticatedsession",
new_name="authentik_c_expirin_c1f17f_idx",
old_name="authentik_c_expirin_9cd839_idx",
),
migrations.RenameIndex(
model_name="oldauthenticatedsession",
new_name="authentik_c_expirin_e04f5d_idx",
old_name="authentik_c_expirin_195a84_idx",
),
migrations.RenameIndex(
model_name="oldauthenticatedsession",
new_name="authentik_c_session_a44819_idx",
old_name="authentik_c_session_d0f005_idx",
),
migrations.RunSQL(
sql="ALTER INDEX authentik_core_authenticatedsession_user_id_5055b6cf RENAME TO authentik_core_oldauthenticatedsession_user_id_5055b6cf",
reverse_sql="ALTER INDEX authentik_core_oldauthenticatedsession_user_id_5055b6cf RENAME TO authentik_core_authenticatedsession_user_id_5055b6cf",
),
# Create new Session and AuthenticatedSession models
migrations.CreateModel(
name="Session",
fields=[
(
"session_key",
models.CharField(
max_length=40, primary_key=True, serialize=False, verbose_name="session key"
),
),
("expires", models.DateTimeField(default=None, null=True)),
("expiring", models.BooleanField(default=True)),
("session_data", models.BinaryField(verbose_name="session data")),
("last_ip", models.GenericIPAddressField()),
("last_user_agent", models.TextField(blank=True)),
("last_used", models.DateTimeField(auto_now=True)),
],
options={
"default_permissions": [],
"verbose_name": "Session",
"verbose_name_plural": "Sessions",
},
),
migrations.AddIndex(
model_name="session",
index=models.Index(fields=["expires"], name="authentik_c_expires_d2f607_idx"),
),
migrations.AddIndex(
model_name="session",
index=models.Index(fields=["expiring"], name="authentik_c_expirin_7c2cfb_idx"),
),
migrations.AddIndex(
model_name="session",
index=models.Index(
fields=["expiring", "expires"], name="authentik_c_expirin_1ab2e4_idx"
),
),
migrations.AddIndex(
model_name="session",
index=models.Index(
fields=["expires", "session_key"], name="authentik_c_expires_c49143_idx"
),
),
migrations.CreateModel(
name="AuthenticatedSession",
fields=[
(
"session",
models.OneToOneField(
on_delete=django.db.models.deletion.CASCADE,
primary_key=True,
serialize=False,
to="authentik_core.session",
),
),
("uuid", models.UUIDField(default=uuid.uuid4, unique=True)),
(
"user",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
options={
"verbose_name": "Authenticated Session",
"verbose_name_plural": "Authenticated Sessions",
},
),
migrations.RunPython(
code=migrate_redis_sessions,
reverse_code=migrations.RunPython.noop,
),
migrations.RunPython(
code=migrate_database_sessions,
reverse_code=migrations.RunPython.noop,
),
]
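Toy illustration (constants and helper name invented, assuming Django's default auth session keys) of the key-routing rule _migrate_session applies: auth bookkeeping keys are dropped, known per-session attributes become model fields, everything else stays as opaque session_data.

AUTH_KEYS = {"_auth_user_id", "_auth_user_backend", "_auth_user_hash"}
FIELD_KEYS = {"authentik/stages/user_login/last_ip", "last_user_agent", "last_used"}


def route_session_keys(session_data: dict) -> tuple[dict, dict]:
    fields, payload = {}, {}
    for key, value in session_data.items():
        if key in AUTH_KEYS:
            continue  # re-created on next login, not worth migrating
        if key in FIELD_KEYS:
            fields[key] = value
        else:
            payload[key] = value
    return fields, payload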

View File

@ -1,18 +0,0 @@
# Generated by Django 5.0.11 on 2025-01-27 13:02
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0046_session_and_more"),
("authentik_providers_rac", "0007_migrate_session"),
("authentik_providers_oauth2", "0028_migrate_session"),
]
operations = [
migrations.DeleteModel(
name="OldAuthenticatedSession",
),
]

View File

@ -1,7 +1,6 @@
"""authentik core models""" """authentik core models"""
from datetime import datetime from datetime import datetime
from enum import StrEnum
from hashlib import sha256 from hashlib import sha256
from typing import Any, Optional, Self from typing import Any, Optional, Self
from uuid import uuid4 from uuid import uuid4
@ -10,7 +9,6 @@ from deepmerge import always_merger
from django.contrib.auth.hashers import check_password from django.contrib.auth.hashers import check_password
from django.contrib.auth.models import AbstractUser from django.contrib.auth.models import AbstractUser
from django.contrib.auth.models import UserManager as DjangoUserManager from django.contrib.auth.models import UserManager as DjangoUserManager
from django.contrib.sessions.base_session import AbstractBaseSession
from django.db import models from django.db import models
from django.db.models import Q, QuerySet, options from django.db.models import Q, QuerySet, options
from django.db.models.constants import LOOKUP_SEP from django.db.models.constants import LOOKUP_SEP
@ -648,30 +646,19 @@ class SourceUserMatchingModes(models.TextChoices):
"""Different modes a source can handle new/returning users""" """Different modes a source can handle new/returning users"""
IDENTIFIER = "identifier", _("Use the source-specific identifier") IDENTIFIER = "identifier", _("Use the source-specific identifier")
EMAIL_LINK = ( EMAIL_LINK = "email_link", _(
"email_link", "Link to a user with identical email address. Can have security implications "
_( "when a source doesn't validate email addresses."
"Link to a user with identical email address. Can have security implications "
"when a source doesn't validate email addresses."
),
) )
EMAIL_DENY = ( EMAIL_DENY = "email_deny", _(
"email_deny", "Use the user's email address, but deny enrollment when the email address already exists."
_(
"Use the user's email address, but deny enrollment when the email address already "
"exists."
),
) )
USERNAME_LINK = ( USERNAME_LINK = "username_link", _(
"username_link", "Link to a user with identical username. Can have security implications "
_( "when a username is used with another source."
"Link to a user with identical username. Can have security implications "
"when a username is used with another source."
),
) )
USERNAME_DENY = ( USERNAME_DENY = "username_deny", _(
"username_deny", "Use the user's username, but deny enrollment when the username already exists."
_("Use the user's username, but deny enrollment when the username already exists."),
) )
@ -679,24 +666,18 @@ class SourceGroupMatchingModes(models.TextChoices):
"""Different modes a source can handle new/returning groups""" """Different modes a source can handle new/returning groups"""
IDENTIFIER = "identifier", _("Use the source-specific identifier") IDENTIFIER = "identifier", _("Use the source-specific identifier")
NAME_LINK = ( NAME_LINK = "name_link", _(
"name_link", "Link to a group with identical name. Can have security implications "
_( "when a group name is used with another source."
"Link to a group with identical name. Can have security implications "
"when a group name is used with another source."
),
) )
NAME_DENY = ( NAME_DENY = "name_deny", _(
"name_deny", "Use the group name, but deny enrollment when the name already exists."
_("Use the group name, but deny enrollment when the name already exists."),
) )
class Source(ManagedModel, SerializerModel, PolicyBindingModel): class Source(ManagedModel, SerializerModel, PolicyBindingModel):
"""Base Authentication source, i.e. an OAuth Provider, SAML Remote or LDAP Server""" """Base Authentication source, i.e. an OAuth Provider, SAML Remote or LDAP Server"""
MANAGED_INBUILT = "goauthentik.io/sources/inbuilt"
name = models.TextField(help_text=_("Source's display Name.")) name = models.TextField(help_text=_("Source's display Name."))
slug = models.SlugField(help_text=_("Internal source name, used in URLs."), unique=True) slug = models.SlugField(help_text=_("Internal source name, used in URLs."), unique=True)
@ -747,7 +728,8 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
choices=SourceGroupMatchingModes.choices, choices=SourceGroupMatchingModes.choices,
default=SourceGroupMatchingModes.IDENTIFIER, default=SourceGroupMatchingModes.IDENTIFIER,
help_text=_( help_text=_(
"How the source determines if an existing group should be used or a new group created." "How the source determines if an existing group should be used or "
"a new group created."
), ),
) )
@ -777,17 +759,11 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
@property @property
def component(self) -> str: def component(self) -> str:
"""Return component used to edit this object""" """Return component used to edit this object"""
if self.managed == self.MANAGED_INBUILT:
return ""
raise NotImplementedError raise NotImplementedError
@property @property
def property_mapping_type(self) -> "type[PropertyMapping]": def property_mapping_type(self) -> "type[PropertyMapping]":
"""Return property mapping type used by this object""" """Return property mapping type used by this object"""
if self.managed == self.MANAGED_INBUILT:
from authentik.core.models import PropertyMapping
return PropertyMapping
raise NotImplementedError raise NotImplementedError
def ui_login_button(self, request: HttpRequest) -> UILoginButton | None: def ui_login_button(self, request: HttpRequest) -> UILoginButton | None:
@ -802,14 +778,10 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
def get_base_user_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]: def get_base_user_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]:
"""Get base properties for a user to build final properties upon.""" """Get base properties for a user to build final properties upon."""
if self.managed == self.MANAGED_INBUILT:
return {}
raise NotImplementedError raise NotImplementedError
def get_base_group_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]: def get_base_group_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]:
"""Get base properties for a group to build final properties upon.""" """Get base properties for a group to build final properties upon."""
if self.managed == self.MANAGED_INBUILT:
return {}
raise NotImplementedError raise NotImplementedError
def __str__(self): def __str__(self):
@ -840,7 +812,6 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel):
user = models.ForeignKey(User, on_delete=models.CASCADE) user = models.ForeignKey(User, on_delete=models.CASCADE)
source = models.ForeignKey(Source, on_delete=models.CASCADE) source = models.ForeignKey(Source, on_delete=models.CASCADE)
identifier = models.TextField()
objects = InheritanceManager() objects = InheritanceManager()
@ -854,10 +825,6 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel):
class Meta: class Meta:
unique_together = (("user", "source"),) unique_together = (("user", "source"),)
indexes = (
models.Index(fields=("identifier",)),
models.Index(fields=("source", "identifier")),
)
class GroupSourceConnection(SerializerModel, CreatedUpdatedModel): class GroupSourceConnection(SerializerModel, CreatedUpdatedModel):
@ -1028,75 +995,45 @@ class PropertyMapping(SerializerModel, ManagedModel):
verbose_name_plural = _("Property Mappings") verbose_name_plural = _("Property Mappings")
class Session(ExpiringModel, AbstractBaseSession): class AuthenticatedSession(ExpiringModel):
"""User session with extra fields for fast access""" """Additional session class for authenticated users. Augments the standard django session
to achieve the following:
- Make it queryable by user
- Have a direct connection to user objects
- Allow users to view their own sessions and terminate them
- Save structured and well-defined information.
"""
# Remove upstream field because we're using our own ExpiringModel uuid = models.UUIDField(default=uuid4, primary_key=True)
expire_date = None
session_data = models.BinaryField(_("session data"))
# Keep in sync with Session.Keys session_key = models.CharField(max_length=40)
last_ip = models.GenericIPAddressField() user = models.ForeignKey(User, on_delete=models.CASCADE)
last_ip = models.TextField()
last_user_agent = models.TextField(blank=True) last_user_agent = models.TextField(blank=True)
last_used = models.DateTimeField(auto_now=True) last_used = models.DateTimeField(auto_now=True)
class Meta:
verbose_name = _("Session")
verbose_name_plural = _("Sessions")
indexes = ExpiringModel.Meta.indexes + [
models.Index(fields=["expires", "session_key"]),
]
default_permissions = []
def __str__(self):
return self.session_key
class Keys(StrEnum):
"""
Keys to be set with the session interface for the fields above to be updated.
If a field is added here that needs to be initialized when the session is initialized,
it must also be reflected in authentik.root.middleware.SessionMiddleware.process_request
and in authentik.core.sessions.SessionStore.__init__
"""
LAST_IP = "last_ip"
LAST_USER_AGENT = "last_user_agent"
LAST_USED = "last_used"
@classmethod
def get_session_store_class(cls):
from authentik.core.sessions import SessionStore
return SessionStore
def get_decoded(self):
raise NotImplementedError
class AuthenticatedSession(SerializerModel):
session = models.OneToOneField(Session, on_delete=models.CASCADE, primary_key=True)
# We use the session as primary key, but we need the API to be able to reference
# this object uniquely without exposing the session key
uuid = models.UUIDField(default=uuid4, unique=True)
user = models.ForeignKey(User, on_delete=models.CASCADE)
class Meta: class Meta:
verbose_name = _("Authenticated Session") verbose_name = _("Authenticated Session")
verbose_name_plural = _("Authenticated Sessions") verbose_name_plural = _("Authenticated Sessions")
indexes = ExpiringModel.Meta.indexes + [
models.Index(fields=["session_key"]),
]
def __str__(self) -> str: def __str__(self) -> str:
return f"Authenticated Session {str(self.pk)[:10]}" return f"Authenticated Session {self.session_key[:10]}"
@staticmethod @staticmethod
def from_request(request: HttpRequest, user: User) -> Optional["AuthenticatedSession"]: def from_request(request: HttpRequest, user: User) -> Optional["AuthenticatedSession"]:
"""Create a new session from a http request""" """Create a new session from a http request"""
if not hasattr(request, "session") or not request.session.exists( from authentik.root.middleware import ClientIPMiddleware
request.session.session_key
): if not hasattr(request, "session") or not request.session.session_key:
return None return None
return AuthenticatedSession( return AuthenticatedSession(
session=Session.objects.filter(session_key=request.session.session_key).first(), session_key=request.session.session_key,
user=user, user=user,
last_ip=ClientIPMiddleware.get_client_ip(request),
last_user_agent=request.META.get("HTTP_USER_AGENT", ""),
expires=request.session.get_expiry_date(),
) )
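Small query sketch (helper name invented) against the database-backed schema in the left-hand column, where AuthenticatedSession hangs off Session via a one-to-one link:

from authentik.core.models import Session, User


def sessions_for(user: User):
    """All Session rows currently tied to a user."""
    return Session.objects.filter(authenticatedsession__user=user)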

View File

@ -1,168 +0,0 @@
"""authentik sessions engine"""
import pickle # nosec
from django.contrib.auth import BACKEND_SESSION_KEY, HASH_SESSION_KEY, SESSION_KEY
from django.contrib.sessions.backends.db import SessionStore as SessionBase
from django.core.exceptions import SuspiciousOperation
from django.utils import timezone
from django.utils.functional import cached_property
from structlog.stdlib import get_logger
from authentik.root.middleware import ClientIPMiddleware
LOGGER = get_logger()
class SessionStore(SessionBase):
def __init__(self, session_key=None, last_ip=None, last_user_agent=""):
super().__init__(session_key)
self._create_kwargs = {
"last_ip": last_ip or ClientIPMiddleware.default_ip,
"last_user_agent": last_user_agent,
}
@classmethod
def get_model_class(cls):
from authentik.core.models import Session
return Session
@cached_property
def model_fields(self):
return [k.value for k in self.model.Keys]
def _get_session_from_db(self):
try:
return (
self.model.objects.select_related(
"authenticatedsession",
"authenticatedsession__user",
)
.prefetch_related(
"authenticatedsession__user__groups",
"authenticatedsession__user__user_permissions",
)
.get(
session_key=self.session_key,
expires__gt=timezone.now(),
)
)
except (self.model.DoesNotExist, SuspiciousOperation) as exc:
if isinstance(exc, SuspiciousOperation):
LOGGER.warning(str(exc))
self._session_key = None
async def _aget_session_from_db(self):
try:
return (
await self.model.objects.select_related(
"authenticatedsession",
"authenticatedsession__user",
)
.prefetch_related(
"authenticatedsession__user__groups",
"authenticatedsession__user__user_permissions",
)
.aget(
session_key=self.session_key,
expires__gt=timezone.now(),
)
)
except (self.model.DoesNotExist, SuspiciousOperation) as exc:
if isinstance(exc, SuspiciousOperation):
LOGGER.warning(str(exc))
self._session_key = None
def encode(self, session_dict):
return pickle.dumps(session_dict, protocol=pickle.HIGHEST_PROTOCOL)
def decode(self, session_data):
try:
return pickle.loads(session_data) # nosec
except pickle.PickleError:
# ValueError, unpickling exceptions. If any of these happen, just return an empty
# dictionary (an empty session)
pass
return {}
def load(self):
s = self._get_session_from_db()
if s:
return {
"authenticatedsession": getattr(s, "authenticatedsession", None),
**{k: getattr(s, k) for k in self.model_fields},
**self.decode(s.session_data),
}
else:
return {}
async def aload(self):
s = await self._aget_session_from_db()
if s:
return {
"authenticatedsession": getattr(s, "authenticatedsession", None),
**{k: getattr(s, k) for k in self.model_fields},
**self.decode(s.session_data),
}
else:
return {}
def create_model_instance(self, data):
args = {
"session_key": self._get_or_create_session_key(),
"expires": self.get_expiry_date(),
"session_data": {},
**self._create_kwargs,
}
for k, v in data.items():
# Don't save:
# - unused auth data
# - related models
if k in [SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY, "authenticatedsession"]:
pass
elif k in self.model_fields:
args[k] = v
else:
args["session_data"][k] = v
args["session_data"] = self.encode(args["session_data"])
return self.model(**args)
async def acreate_model_instance(self, data):
args = {
"session_key": await self._aget_or_create_session_key(),
"expires": await self.aget_expiry_date(),
"session_data": {},
**self._create_kwargs,
}
for k, v in data.items():
# Don't save:
# - unused auth data
# - related models
if k in [SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY, "authenticatedsession"]:
pass
elif k in self.model_fields:
args[k] = v
else:
args["session_data"][k] = v
args["session_data"] = self.encode(args["session_data"])
return self.model(**args)
@classmethod
def clear_expired(cls):
cls.get_model_class().objects.filter(expires__lt=timezone.now()).delete()
@classmethod
async def aclear_expired(cls):
await cls.get_model_class().objects.filter(expires__lt=timezone.now()).adelete()
def cycle_key(self):
data = self._session
key = self.session_key
self.create()
self._session_cache = data
if key:
self.delete(key)
if (authenticated_session := data.get("authenticatedsession")) is not None:
authenticated_session.session_id = self.session_key
authenticated_session.save(force_insert=True)
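Rough usage sketch for the store above, assuming SESSION_ENGINE = "authentik.core.sessions" and a configured database; the example values are made up.

from authentik.core.sessions import SessionStore

store = SessionStore(last_ip="10.0.0.1", last_user_agent="curl/8.0")
store["theme"] = "dark"  # arbitrary keys are pickled into Session.session_data
store.save()

restored = SessionStore(session_key=store.session_key)
assert restored.load().get("theme") == "dark"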

View File

@ -1,10 +1,11 @@
"""authentik core signals""" """authentik core signals"""
from django.contrib.auth.signals import user_logged_in from django.contrib.auth.signals import user_logged_in, user_logged_out
from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.core.cache import cache from django.core.cache import cache
from django.core.signals import Signal from django.core.signals import Signal
from django.db.models import Model from django.db.models import Model
from django.db.models.signals import post_delete, post_save, pre_save from django.db.models.signals import post_save, pre_delete, pre_save
from django.dispatch import receiver from django.dispatch import receiver
from django.http.request import HttpRequest from django.http.request import HttpRequest
from structlog.stdlib import get_logger from structlog.stdlib import get_logger
@ -14,7 +15,6 @@ from authentik.core.models import (
AuthenticatedSession, AuthenticatedSession,
BackchannelProvider, BackchannelProvider,
ExpiringModel, ExpiringModel,
Session,
User, User,
default_token_duration, default_token_duration,
) )
@ -49,10 +49,19 @@ def user_logged_in_session(sender, request: HttpRequest, user: User, **_):
session.save() session.save()
@receiver(post_delete, sender=AuthenticatedSession) @receiver(user_logged_out)
def user_logged_out_session(sender, request: HttpRequest, user: User, **_):
"""Delete AuthenticatedSession if it exists"""
if not request.session or not request.session.session_key:
return
AuthenticatedSession.objects.filter(session_key=request.session.session_key).delete()
@receiver(pre_delete, sender=AuthenticatedSession)
def authenticated_session_delete(sender: type[Model], instance: "AuthenticatedSession", **_): def authenticated_session_delete(sender: type[Model], instance: "AuthenticatedSession", **_):
"""Delete session when authenticated session is deleted""" """Delete session when authenticated session is deleted"""
Session.objects.filter(session_key=instance.pk).delete() cache_key = f"{KEY_PREFIX}{instance.session_key}"
cache.delete(cache_key)
@receiver(pre_save) @receiver(pre_save)

View File

@ -48,7 +48,6 @@ LOGGER = get_logger()
PLAN_CONTEXT_SOURCE_GROUPS = "source_groups" PLAN_CONTEXT_SOURCE_GROUPS = "source_groups"
SESSION_KEY_SOURCE_FLOW_STAGES = "authentik/flows/source_flow_stages" SESSION_KEY_SOURCE_FLOW_STAGES = "authentik/flows/source_flow_stages"
SESSION_KEY_SOURCE_FLOW_CONTEXT = "authentik/flows/source_flow_context"
SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token" # nosec SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token" # nosec
@ -262,7 +261,6 @@ class SourceFlowManager:
plan.append_stage(stage) plan.append_stage(stage)
for stage in self.request.session.get(SESSION_KEY_SOURCE_FLOW_STAGES, []): for stage in self.request.session.get(SESSION_KEY_SOURCE_FLOW_STAGES, []):
plan.append_stage(stage) plan.append_stage(stage)
plan.context.update(self.request.session.get(SESSION_KEY_SOURCE_FLOW_CONTEXT, {}))
return plan.to_redirect(self.request, flow) return plan.to_redirect(self.request, flow)
def handle_auth( def handle_auth(

View File

@ -2,16 +2,22 @@
from datetime import datetime, timedelta from datetime import datetime, timedelta
from django.conf import ImproperlyConfigured
from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.contrib.sessions.backends.db import SessionStore as DBSessionStore
from django.core.cache import cache
from django.utils.timezone import now from django.utils.timezone import now
from structlog.stdlib import get_logger from structlog.stdlib import get_logger
from authentik.core.models import ( from authentik.core.models import (
USER_ATTRIBUTE_EXPIRES, USER_ATTRIBUTE_EXPIRES,
USER_ATTRIBUTE_GENERATED, USER_ATTRIBUTE_GENERATED,
AuthenticatedSession,
ExpiringModel, ExpiringModel,
User, User,
) )
from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
from authentik.lib.config import CONFIG
from authentik.root.celery import CELERY_APP from authentik.root.celery import CELERY_APP
LOGGER = get_logger() LOGGER = get_logger()
@ -32,6 +38,40 @@ def clean_expired_models(self: SystemTask):
obj.expire_action() obj.expire_action()
LOGGER.debug("Expired models", model=cls, amount=amount) LOGGER.debug("Expired models", model=cls, amount=amount)
messages.append(f"Expired {amount} {cls._meta.verbose_name_plural}") messages.append(f"Expired {amount} {cls._meta.verbose_name_plural}")
# Special case
amount = 0
for session in AuthenticatedSession.objects.all():
match CONFIG.get("session_storage", "cache"):
case "cache":
cache_key = f"{KEY_PREFIX}{session.session_key}"
value = None
try:
value = cache.get(cache_key)
except Exception as exc:
LOGGER.debug("Failed to get session from cache", exc=exc)
if not value:
session.delete()
amount += 1
case "db":
if not (
DBSessionStore.get_model_class()
.objects.filter(session_key=session.session_key, expire_date__gt=now())
.exists()
):
session.delete()
amount += 1
case _:
# Should never happen, as we check for other values in authentik/root/settings.py
raise ImproperlyConfigured(
"Invalid session_storage setting, allowed values are db and cache"
)
if CONFIG.get("session_storage", "cache") == "db":
DBSessionStore.clear_expired()
LOGGER.debug("Expired sessions", model=AuthenticatedSession, amount=amount)
messages.append(f"Expired {amount} {AuthenticatedSession._meta.verbose_name_plural}")
self.set_status(TaskStatus.SUCCESSFUL, *messages) self.set_status(TaskStatus.SUCCESSFUL, *messages)
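Sketch of the cache-backed liveness check from the right-hand column (helper name invented): a session counts as alive while its cache entry still exists.

from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.core.cache import cache


def is_session_alive(session_key: str) -> bool:
    return cache.get(f"{KEY_PREFIX}{session_key}") is not None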

View File

@ -16,7 +16,7 @@
{% block head_before %} {% block head_before %}
{% endblock %} {% endblock %}
<link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}"> <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}">
<style>{{ brand.branding_custom_css }}</style> <link rel="stylesheet" type="text/css" href="{% static 'dist/custom.css' %}" data-inject>
<script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script> <script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script>
<script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script> <script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script>
{% block head %} {% block head %}

View File

@ -4,7 +4,7 @@
{% load i18n %} {% load i18n %}
{% block head_before %} {% block head_before %}
<link rel="prefetch" href="{{ request.brand.branding_default_flow_background_url }}" /> <link rel="prefetch" href="{% static 'dist/assets/images/flow_background.jpg' %}" />
<link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}"> <link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}">
<link rel="stylesheet" type="text/css" href="{% static 'dist/theme-dark.css' %}" media="(prefers-color-scheme: dark)"> <link rel="stylesheet" type="text/css" href="{% static 'dist/theme-dark.css' %}" media="(prefers-color-scheme: dark)">
{% include "base/header_js.html" %} {% include "base/header_js.html" %}
@ -13,7 +13,7 @@
{% block head %} {% block head %}
<style> <style>
:root { :root {
--ak-flow-background: url("{{ request.brand.branding_default_flow_background_url }}"); --ak-flow-background: url("{% static 'dist/assets/images/flow_background.jpg' %}");
--pf-c-background-image--BackgroundImage: var(--ak-flow-background); --pf-c-background-image--BackgroundImage: var(--ak-flow-background);
--pf-c-background-image--BackgroundImage-2x: var(--ak-flow-background); --pf-c-background-image--BackgroundImage-2x: var(--ak-flow-background);
--pf-c-background-image--BackgroundImage--sm: var(--ak-flow-background); --pf-c-background-image--BackgroundImage--sm: var(--ak-flow-background);

View File

@ -5,7 +5,7 @@ from json import loads
from django.urls.base import reverse from django.urls.base import reverse
from rest_framework.test import APITestCase from rest_framework.test import APITestCase
from authentik.core.models import AuthenticatedSession, Session, User from authentik.core.models import User
from authentik.core.tests.utils import create_test_admin_user from authentik.core.tests.utils import create_test_admin_user
@ -30,18 +30,3 @@ class TestAuthenticatedSessionsAPI(APITestCase):
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
body = loads(response.content.decode()) body = loads(response.content.decode())
self.assertEqual(body["pagination"]["count"], 1) self.assertEqual(body["pagination"]["count"], 1)
def test_delete(self):
"""Test deletion"""
self.client.force_login(self.user)
self.assertEqual(AuthenticatedSession.objects.all().count(), 1)
self.assertEqual(Session.objects.all().count(), 1)
response = self.client.delete(
reverse(
"authentik_api:authenticatedsession-detail",
kwargs={"uuid": AuthenticatedSession.objects.first().uuid},
)
)
self.assertEqual(response.status_code, 204)
self.assertEqual(AuthenticatedSession.objects.all().count(), 0)
self.assertEqual(Session.objects.all().count(), 0)

View File

@ -1,19 +0,0 @@
from django.apps import apps
from django.urls import reverse
from rest_framework.test import APITestCase
from authentik.core.tests.utils import create_test_admin_user
class TestSourceAPI(APITestCase):
def setUp(self) -> None:
self.user = create_test_admin_user()
self.client.force_login(self.user)
def test_builtin_source_used_by(self):
"""Test Providers's types endpoint"""
apps.get_app_config("authentik_core").source_inbuilt()
response = self.client.get(
reverse("authentik_api:source-used-by", kwargs={"slug": "authentik-built-in"}),
)
self.assertEqual(response.status_code, 200)

View File

@ -1,8 +1,9 @@
"""Test Users API""" """Test Users API"""
from datetime import datetime from datetime import datetime
from json import loads
from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.core.cache import cache
from django.urls.base import reverse from django.urls.base import reverse
from rest_framework.test import APITestCase from rest_framework.test import APITestCase
@ -10,17 +11,11 @@ from authentik.brands.models import Brand
from authentik.core.models import ( from authentik.core.models import (
USER_ATTRIBUTE_TOKEN_EXPIRING, USER_ATTRIBUTE_TOKEN_EXPIRING,
AuthenticatedSession, AuthenticatedSession,
Session,
Token, Token,
User, User,
UserTypes, UserTypes,
) )
from authentik.core.tests.utils import ( from authentik.core.tests.utils import create_test_admin_user, create_test_brand, create_test_flow
create_test_admin_user,
create_test_brand,
create_test_flow,
create_test_user,
)
from authentik.flows.models import FlowDesignation from authentik.flows.models import FlowDesignation
from authentik.lib.generators import generate_id, generate_key from authentik.lib.generators import generate_id, generate_key
from authentik.stages.email.models import EmailStage from authentik.stages.email.models import EmailStage
@ -31,7 +26,7 @@ class TestUsersAPI(APITestCase):
def setUp(self) -> None: def setUp(self) -> None:
self.admin = create_test_admin_user() self.admin = create_test_admin_user()
self.user = create_test_user() self.user = User.objects.create(username="test-user")
def test_filter_type(self): def test_filter_type(self):
"""Test API filtering by type""" """Test API filtering by type"""
@ -46,35 +41,6 @@ class TestUsersAPI(APITestCase):
) )
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
def test_filter_is_superuser(self):
"""Test API filtering by superuser status"""
User.objects.all().delete()
admin = create_test_admin_user()
self.client.force_login(admin)
# Test superuser
response = self.client.get(
reverse("authentik_api:user-list"),
data={
"is_superuser": True,
},
)
self.assertEqual(response.status_code, 200)
body = loads(response.content)
self.assertEqual(len(body["results"]), 1)
self.assertEqual(body["results"][0]["username"], admin.username)
# Test non-superuser
user = create_test_user()
response = self.client.get(
reverse("authentik_api:user-list"),
data={
"is_superuser": False,
},
)
self.assertEqual(response.status_code, 200)
body = loads(response.content)
self.assertEqual(len(body["results"]), 1, body)
self.assertEqual(body["results"][0]["username"], user.username)
def test_list_with_groups(self): def test_list_with_groups(self):
"""Test listing with groups""" """Test listing with groups"""
self.client.force_login(self.admin) self.client.force_login(self.admin)
@ -133,8 +99,6 @@ class TestUsersAPI(APITestCase):
def test_recovery_email_no_flow(self): def test_recovery_email_no_flow(self):
"""Test user recovery link (no recovery flow set)""" """Test user recovery link (no recovery flow set)"""
self.client.force_login(self.admin) self.client.force_login(self.admin)
self.user.email = ""
self.user.save()
response = self.client.post( response = self.client.post(
reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk}) reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk})
) )
@ -380,15 +344,12 @@ class TestUsersAPI(APITestCase):
"""Ensure sessions are deleted when a user is deactivated""" """Ensure sessions are deleted when a user is deactivated"""
user = create_test_admin_user() user = create_test_admin_user()
session_id = generate_id() session_id = generate_id()
session = Session.objects.create(
session_key=session_id,
last_ip="255.255.255.255",
last_user_agent="",
)
AuthenticatedSession.objects.create( AuthenticatedSession.objects.create(
session=session,
user=user, user=user,
session_key=session_id,
last_ip="",
) )
cache.set(KEY_PREFIX + session_id, "foo")
self.client.force_login(self.admin) self.client.force_login(self.admin)
response = self.client.patch( response = self.client.patch(
@ -399,7 +360,5 @@ class TestUsersAPI(APITestCase):
) )
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
self.assertFalse(Session.objects.filter(session_key=session_id).exists()) self.assertIsNone(cache.get(KEY_PREFIX + session_id))
self.assertFalse( self.assertFalse(AuthenticatedSession.objects.filter(session_key=session_id).exists())
AuthenticatedSession.objects.filter(session__session_key=session_id).exists()
)

View File

@ -1,5 +1,7 @@
"""authentik URL Configuration""" """authentik URL Configuration"""
from channels.auth import AuthMiddleware
from channels.sessions import CookieMiddleware
from django.conf import settings from django.conf import settings
from django.contrib.auth.decorators import login_required from django.contrib.auth.decorators import login_required
from django.urls import path from django.urls import path
@ -11,11 +13,7 @@ from authentik.core.api.devices import AdminDeviceViewSet, DeviceViewSet
from authentik.core.api.groups import GroupViewSet from authentik.core.api.groups import GroupViewSet
from authentik.core.api.property_mappings import PropertyMappingViewSet from authentik.core.api.property_mappings import PropertyMappingViewSet
from authentik.core.api.providers import ProviderViewSet from authentik.core.api.providers import ProviderViewSet
from authentik.core.api.sources import ( from authentik.core.api.sources import SourceViewSet, UserSourceConnectionViewSet
GroupSourceConnectionViewSet,
SourceViewSet,
UserSourceConnectionViewSet,
)
from authentik.core.api.tokens import TokenViewSet from authentik.core.api.tokens import TokenViewSet
from authentik.core.api.transactional_applications import TransactionalApplicationView from authentik.core.api.transactional_applications import TransactionalApplicationView
from authentik.core.api.users import UserViewSet from authentik.core.api.users import UserViewSet
@ -27,7 +25,7 @@ from authentik.core.views.interface import (
RootRedirectView, RootRedirectView,
) )
from authentik.flows.views.interface import FlowInterfaceView from authentik.flows.views.interface import FlowInterfaceView
from authentik.root.asgi_middleware import AuthMiddlewareStack from authentik.root.asgi_middleware import SessionMiddleware
from authentik.root.messages.consumer import MessageConsumer from authentik.root.messages.consumer import MessageConsumer
from authentik.root.middleware import ChannelsLoggingMiddleware from authentik.root.middleware import ChannelsLoggingMiddleware
@ -83,7 +81,6 @@ api_urlpatterns = [
("core/tokens", TokenViewSet), ("core/tokens", TokenViewSet),
("sources/all", SourceViewSet), ("sources/all", SourceViewSet),
("sources/user_connections/all", UserSourceConnectionViewSet), ("sources/user_connections/all", UserSourceConnectionViewSet),
("sources/group_connections/all", GroupSourceConnectionViewSet),
("providers/all", ProviderViewSet), ("providers/all", ProviderViewSet),
("propertymappings/all", PropertyMappingViewSet), ("propertymappings/all", PropertyMappingViewSet),
("authenticators/all", DeviceViewSet, "device"), ("authenticators/all", DeviceViewSet, "device"),
@ -97,7 +94,9 @@ api_urlpatterns = [
websocket_urlpatterns = [ websocket_urlpatterns = [
path( path(
"ws/client/", "ws/client/",
ChannelsLoggingMiddleware(AuthMiddlewareStack(MessageConsumer.as_asgi())), ChannelsLoggingMiddleware(
CookieMiddleware(SessionMiddleware(AuthMiddleware(MessageConsumer.as_asgi())))
),
), ),
] ]

View File

@ -55,7 +55,7 @@ class RedirectToAppLaunch(View):
) )
except FlowNonApplicableException: except FlowNonApplicableException:
raise Http404 from None raise Http404 from None
plan.append_stage(in_memory_stage(RedirectToAppStage)) plan.insert_stage(in_memory_stage(RedirectToAppStage))
return plan.to_redirect(request, flow) return plan.to_redirect(request, flow)

View File

@ -37,7 +37,6 @@ class GoogleWorkspaceProviderSerializer(EnterpriseRequiredMixin, ProviderSeriali
"user_delete_action", "user_delete_action",
"group_delete_action", "group_delete_action",
"default_group_email_domain", "default_group_email_domain",
"dry_run",
] ]
extra_kwargs = {} extra_kwargs = {}

View File

@ -8,10 +8,9 @@ from httplib2 import HttpLib2Error, HttpLib2ErrorWithResponse
from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider
from authentik.lib.sync.outgoing import HTTP_CONFLICT from authentik.lib.sync.outgoing import HTTP_CONFLICT
from authentik.lib.sync.outgoing.base import SAFE_METHODS, BaseOutgoingSyncClient from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
from authentik.lib.sync.outgoing.exceptions import ( from authentik.lib.sync.outgoing.exceptions import (
BadRequestSyncException, BadRequestSyncException,
DryRunRejected,
NotFoundSyncException, NotFoundSyncException,
ObjectExistsSyncException, ObjectExistsSyncException,
StopSync, StopSync,
@ -44,8 +43,6 @@ class GoogleWorkspaceSyncClient[TModel: Model, TConnection: Model, TSchema: dict
self.domains.append(domain_name) self.domains.append(domain_name)
def _request(self, request: HttpRequest): def _request(self, request: HttpRequest):
if self.provider.dry_run and request.method.upper() not in SAFE_METHODS:
raise DryRunRejected(request.uri, request.method, request.body)
try: try:
response = request.execute() response = request.execute()
except GoogleAuthError as exc: except GoogleAuthError as exc:

View File

@ -1,24 +0,0 @@
# Generated by Django 5.0.12 on 2025-02-24 19:43
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"authentik_providers_google_workspace",
"0003_googleworkspaceprovidergroup_attributes_and_more",
),
]
operations = [
migrations.AddField(
model_name="googleworkspaceprovider",
name="dry_run",
field=models.BooleanField(
default=False,
help_text="When enabled, provider will not modify or create objects in the remote system.",
),
),
]

View File

@ -36,7 +36,6 @@ class MicrosoftEntraProviderSerializer(EnterpriseRequiredMixin, ProviderSerializ
"filter_group", "filter_group",
"user_delete_action", "user_delete_action",
"group_delete_action", "group_delete_action",
"dry_run",
] ]
extra_kwargs = {} extra_kwargs = {}

View File

@ -3,7 +3,6 @@ from collections.abc import Coroutine
from dataclasses import asdict from dataclasses import asdict
from typing import Any from typing import Any
import httpx
from azure.core.exceptions import ( from azure.core.exceptions import (
ClientAuthenticationError, ClientAuthenticationError,
ServiceRequestError, ServiceRequestError,
@ -13,7 +12,6 @@ from azure.identity.aio import ClientSecretCredential
from django.db.models import Model from django.db.models import Model
from django.http import HttpResponseBadRequest, HttpResponseNotFound from django.http import HttpResponseBadRequest, HttpResponseNotFound
from kiota_abstractions.api_error import APIError from kiota_abstractions.api_error import APIError
from kiota_abstractions.request_information import RequestInformation
from kiota_authentication_azure.azure_identity_authentication_provider import ( from kiota_authentication_azure.azure_identity_authentication_provider import (
AzureIdentityAuthenticationProvider, AzureIdentityAuthenticationProvider,
) )
@ -23,15 +21,13 @@ from msgraph.generated.models.o_data_errors.o_data_error import ODataError
from msgraph.graph_request_adapter import GraphRequestAdapter, options from msgraph.graph_request_adapter import GraphRequestAdapter, options
from msgraph.graph_service_client import GraphServiceClient from msgraph.graph_service_client import GraphServiceClient
from msgraph_core import GraphClientFactory from msgraph_core import GraphClientFactory
from opentelemetry import trace
from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider
from authentik.events.utils import sanitize_item from authentik.events.utils import sanitize_item
from authentik.lib.sync.outgoing import HTTP_CONFLICT from authentik.lib.sync.outgoing import HTTP_CONFLICT
from authentik.lib.sync.outgoing.base import SAFE_METHODS, BaseOutgoingSyncClient from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
from authentik.lib.sync.outgoing.exceptions import ( from authentik.lib.sync.outgoing.exceptions import (
BadRequestSyncException, BadRequestSyncException,
DryRunRejected,
NotFoundSyncException, NotFoundSyncException,
ObjectExistsSyncException, ObjectExistsSyncException,
StopSync, StopSync,
@ -39,24 +35,20 @@ from authentik.lib.sync.outgoing.exceptions import (
) )
class AuthentikRequestAdapter(GraphRequestAdapter): def get_request_adapter(
def __init__(self, auth_provider, provider: MicrosoftEntraProvider, client=None): credentials: ClientSecretCredential, scopes: list[str] | None = None
super().__init__(auth_provider, client) ) -> GraphRequestAdapter:
self._provider = provider if scopes:
auth_provider = AzureIdentityAuthenticationProvider(credentials=credentials, scopes=scopes)
else:
auth_provider = AzureIdentityAuthenticationProvider(credentials=credentials)
async def get_http_response_message( return GraphRequestAdapter(
self, auth_provider=auth_provider,
request_info: RequestInformation, client=GraphClientFactory.create_with_default_middleware(
parent_span: trace.Span, options=options, client=KiotaClientFactory.get_default_client()
claims: str = "", ),
) -> httpx.Response: )
if self._provider.dry_run and request_info.http_method.value.upper() not in SAFE_METHODS:
raise DryRunRejected(
url=request_info.url,
method=request_info.http_method.value,
body=request_info.content.decode("utf-8"),
)
return await super().get_http_response_message(request_info, parent_span, claims=claims)
class MicrosoftEntraSyncClient[TModel: Model, TConnection: Model, TSchema: dict]( class MicrosoftEntraSyncClient[TModel: Model, TConnection: Model, TSchema: dict](
@ -71,27 +63,9 @@ class MicrosoftEntraSyncClient[TModel: Model, TConnection: Model, TSchema: dict]
self.credentials = provider.microsoft_credentials() self.credentials = provider.microsoft_credentials()
self.__prefetch_domains() self.__prefetch_domains()
def get_request_adapter(
self, credentials: ClientSecretCredential, scopes: list[str] | None = None
) -> AuthentikRequestAdapter:
if scopes:
auth_provider = AzureIdentityAuthenticationProvider(
credentials=credentials, scopes=scopes
)
else:
auth_provider = AzureIdentityAuthenticationProvider(credentials=credentials)
return AuthentikRequestAdapter(
auth_provider=auth_provider,
provider=self.provider,
client=GraphClientFactory.create_with_default_middleware(
options=options, client=KiotaClientFactory.get_default_client()
),
)
@property @property
def client(self): def client(self):
return GraphServiceClient(request_adapter=self.get_request_adapter(**self.credentials)) return GraphServiceClient(request_adapter=get_request_adapter(**self.credentials))
def _request[T](self, request: Coroutine[Any, Any, T]) -> T: def _request[T](self, request: Coroutine[Any, Any, T]) -> T:
try: try:

View File

@ -1,24 +0,0 @@
# Generated by Django 5.0.12 on 2025-02-24 19:43
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"authentik_providers_microsoft_entra",
"0002_microsoftentraprovidergroup_attributes_and_more",
),
]
operations = [
migrations.AddField(
model_name="microsoftentraprovider",
name="dry_run",
field=models.BooleanField(
default=False,
help_text="When enabled, provider will not modify or create objects in the remote system.",
),
),
]

View File

@ -32,6 +32,7 @@ class MicrosoftEntraUserTests(APITestCase):
@apply_blueprint("system/providers-microsoft-entra.yaml") @apply_blueprint("system/providers-microsoft-entra.yaml")
def setUp(self) -> None: def setUp(self) -> None:
# Delete all users and groups as the mocked HTTP responses only return one ID # Delete all users and groups as the mocked HTTP responses only return one ID
# which will cause errors with multiple users # which will cause errors with multiple users
Tenant.objects.update(avatars="none") Tenant.objects.update(avatars="none")
@ -96,38 +97,6 @@ class MicrosoftEntraUserTests(APITestCase):
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists()) self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
user_create.assert_called_once() user_create.assert_called_once()
def test_user_create_dry_run(self):
"""Test user creation (dry run)"""
self.provider.dry_run = True
self.provider.save()
uid = generate_id()
with (
patch(
"authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
MagicMock(return_value={"credentials": self.creds}),
),
patch(
"msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
AsyncMock(
return_value=OrganizationCollectionResponse(
value=[
Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
]
)
),
),
):
user = User.objects.create(
username=uid,
name=f"{uid} {uid}",
email=f"{uid}@goauthentik.io",
)
microsoft_user = MicrosoftEntraProviderUser.objects.filter(
provider=self.provider, user=user
).first()
self.assertIsNone(microsoft_user)
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
def test_user_not_created(self): def test_user_not_created(self):
"""Test without property mappings, no group is created""" """Test without property mappings, no group is created"""
self.provider.property_mappings.clear() self.provider.property_mappings.clear()

View File

@ -102,7 +102,7 @@ def ssf_user_session_delete_session_revoked(sender, instance: AuthenticatedSessi
"format": "complex", "format": "complex",
"session": { "session": {
"format": "opaque", "format": "opaque",
"id": sha256(instance.session.session_key.encode("ascii")).hexdigest(), "id": sha256(instance.session_key.encode("ascii")).hexdigest(),
}, },
"user": { "user": {
"format": "email", "format": "email",

View File

@@ -11,14 +11,13 @@ from guardian.shortcuts import get_anonymous_user
from authentik.core.models import Source, User
from authentik.core.sources.flow_manager import (
    SESSION_KEY_OVERRIDE_FLOW_TOKEN,
-    SESSION_KEY_SOURCE_FLOW_CONTEXT,
    SESSION_KEY_SOURCE_FLOW_STAGES,
)
from authentik.core.types import UILoginButton
from authentik.enterprise.stages.source.models import SourceStage
from authentik.flows.challenge import Challenge, ChallengeResponse
from authentik.flows.models import FlowToken, in_memory_stage
-from authentik.flows.planner import PLAN_CONTEXT_IS_REDIRECTED, PLAN_CONTEXT_IS_RESTORED
+from authentik.flows.planner import PLAN_CONTEXT_IS_RESTORED
from authentik.flows.stage import ChallengeStageView, StageView
from authentik.lib.utils.time import timedelta_from_string
@@ -54,9 +53,6 @@ class SourceStageView(ChallengeStageView):
        resume_token = self.create_flow_token()
        self.request.session[SESSION_KEY_OVERRIDE_FLOW_TOKEN] = resume_token
        self.request.session[SESSION_KEY_SOURCE_FLOW_STAGES] = [in_memory_stage(SourceStageFinal)]
-        self.request.session[SESSION_KEY_SOURCE_FLOW_CONTEXT] = {
-            PLAN_CONTEXT_IS_REDIRECTED: self.executor.flow,
-        }
        return self.login_button.challenge

    def create_flow_token(self) -> FlowToken:
@@ -93,9 +89,9 @@ class SourceStageFinal(StageView):
    This stage uses the override flow token to resume execution of the initial flow the
    source stage is bound to."""

-    def dispatch(self, *args, **kwargs):
+    def dispatch(self):
        token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN)
-        self.logger.info("Replacing source flow with overridden flow", flow=token.flow.slug)
+        self._logger.info("Replacing source flow with overridden flow", flow=token.flow.slug)
        plan = token.plan
        plan.context[PLAN_CONTEXT_IS_RESTORED] = token
        response = plan.to_redirect(self.request, token.flow)

View File

@@ -4,8 +4,7 @@ from django.urls import reverse
from authentik.core.tests.utils import create_test_flow, create_test_user
from authentik.enterprise.stages.source.models import SourceStage
-from authentik.enterprise.stages.source.stage import SourceStageFinal
-from authentik.flows.models import FlowDesignation, FlowStageBinding, FlowToken, in_memory_stage
+from authentik.flows.models import FlowDesignation, FlowStageBinding, FlowToken
from authentik.flows.planner import PLAN_CONTEXT_IS_RESTORED, FlowPlan
from authentik.flows.tests import FlowTestCase
from authentik.flows.views.executor import SESSION_KEY_PLAN
@@ -88,7 +87,6 @@ class TestSourceStage(FlowTestCase):
        self.assertIsNotNone(flow_token)
        session = self.client.session
        plan: FlowPlan = session[SESSION_KEY_PLAN]
-        plan.insert_stage(in_memory_stage(SourceStageFinal), index=0)
        plan.context[PLAN_CONTEXT_IS_RESTORED] = flow_token
        session[SESSION_KEY_PLAN] = plan
        session.save()
@@ -98,6 +96,4 @@ class TestSourceStage(FlowTestCase):
            reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}), follow=True
        )
        self.assertEqual(response.status_code, 200)
-        self.assertStageRedirects(
-            response, reverse("authentik_core:if-flow", kwargs={"flow_slug": flow.slug})
-        )
+        self.assertStageRedirects(response, reverse("authentik_core:root-redirect"))

View File

@@ -50,8 +50,7 @@ class NotificationTransportSerializer(ModelSerializer):
            "mode",
            "mode_verbose",
            "webhook_url",
-            "webhook_mapping_body",
-            "webhook_mapping_headers",
+            "webhook_mapping",
            "send_once",
        ]

View File

@ -1,43 +0,0 @@
# Generated by Django 5.0.13 on 2025-03-20 19:54
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_events", "0008_event_authentik_e_expires_8c73a8_idx_and_more"),
]
operations = [
migrations.RenameField(
model_name="notificationtransport",
old_name="webhook_mapping",
new_name="webhook_mapping_body",
),
migrations.AlterField(
model_name="notificationtransport",
name="webhook_mapping_body",
field=models.ForeignKey(
default=None,
help_text="Customize the body of the request. Mapping should return data that is JSON-serializable.",
null=True,
on_delete=django.db.models.deletion.SET_DEFAULT,
related_name="+",
to="authentik_events.notificationwebhookmapping",
),
),
migrations.AddField(
model_name="notificationtransport",
name="webhook_mapping_headers",
field=models.ForeignKey(
default=None,
help_text="Configure additional headers to be sent. Mapping should return a dictionary of key-value pairs",
null=True,
on_delete=django.db.models.deletion.SET_DEFAULT,
related_name="+",
to="authentik_events.notificationwebhookmapping",
),
),
]

View File

@@ -336,27 +336,8 @@ class NotificationTransport(SerializerModel):
    mode = models.TextField(choices=TransportMode.choices, default=TransportMode.LOCAL)
    webhook_url = models.TextField(blank=True, validators=[DomainlessURLValidator()])
-    webhook_mapping_body = models.ForeignKey(
-        "NotificationWebhookMapping",
-        on_delete=models.SET_DEFAULT,
-        null=True,
-        default=None,
-        related_name="+",
-        help_text=_(
-            "Customize the body of the request. "
-            "Mapping should return data that is JSON-serializable."
-        ),
-    )
-    webhook_mapping_headers = models.ForeignKey(
-        "NotificationWebhookMapping",
-        on_delete=models.SET_DEFAULT,
-        null=True,
-        default=None,
-        related_name="+",
-        help_text=_(
-            "Configure additional headers to be sent. "
-            "Mapping should return a dictionary of key-value pairs"
-        ),
-    )
+    webhook_mapping = models.ForeignKey(
+        "NotificationWebhookMapping", on_delete=models.SET_DEFAULT, null=True, default=None
+    )
    send_once = models.BooleanField(
        default=False,
@@ -379,8 +360,8 @@
    def send_local(self, notification: "Notification") -> list[str]:
        """Local notification delivery"""
-        if self.webhook_mapping_body:
-            self.webhook_mapping_body.evaluate(
+        if self.webhook_mapping:
+            self.webhook_mapping.evaluate(
                user=notification.user,
                request=None,
                notification=notification,
@@ -399,18 +380,9 @@
        if notification.event and notification.event.user:
            default_body["event_user_email"] = notification.event.user.get("email", None)
            default_body["event_user_username"] = notification.event.user.get("username", None)
-        headers = {}
-        if self.webhook_mapping_body:
+        if self.webhook_mapping:
            default_body = sanitize_item(
-                self.webhook_mapping_body.evaluate(
-                    user=notification.user,
-                    request=None,
-                    notification=notification,
-                )
-            )
-        if self.webhook_mapping_headers:
-            headers = sanitize_item(
-                self.webhook_mapping_headers.evaluate(
+                self.webhook_mapping.evaluate(
                    user=notification.user,
                    request=None,
                    notification=notification,
@@ -420,7 +392,6 @@
            response = get_http_session().post(
                self.webhook_url,
                json=default_body,
-                headers=headers,
            )
            response.raise_for_status()
        except RequestException as exc:

View File

@@ -59,7 +59,7 @@ def get_login_event(request_or_session: HttpRequest | AuthenticatedSession | Non
        session = request_or_session.session
    if isinstance(request_or_session, AuthenticatedSession):
        SessionStore = _session_engine.SessionStore
-        session = SessionStore(request_or_session.session.session_key)
+        session = SessionStore(request_or_session.session_key)
    return session.get(SESSION_LOGIN_EVENT, None)

View File

@@ -120,7 +120,7 @@ class TestEventsNotifications(APITestCase):
        )
        transport = NotificationTransport.objects.create(
-            name=generate_id(), webhook_mapping_body=mapping, mode=TransportMode.LOCAL
+            name=generate_id(), webhook_mapping=mapping, mode=TransportMode.LOCAL
        )
        NotificationRule.objects.filter(name__startswith="default").delete()
        trigger = NotificationRule.objects.create(name=generate_id(), group=self.group)

View File

@@ -60,25 +60,20 @@ class TestEventTransports(TestCase):
    def test_transport_webhook_mapping(self):
        """Test webhook transport with custom mapping"""
-        mapping_body = NotificationWebhookMapping.objects.create(
+        mapping = NotificationWebhookMapping.objects.create(
            name=generate_id(), expression="return request.user"
        )
-        mapping_headers = NotificationWebhookMapping.objects.create(
-            name=generate_id(), expression="""return {"foo": "bar"}"""
-        )
        transport: NotificationTransport = NotificationTransport.objects.create(
            name=generate_id(),
            mode=TransportMode.WEBHOOK,
            webhook_url="http://localhost:1234/test",
-            webhook_mapping_body=mapping_body,
-            webhook_mapping_headers=mapping_headers,
+            webhook_mapping=mapping,
        )
        with Mocker() as mocker:
            mocker.post("http://localhost:1234/test")
            transport.send(self.notification)
            self.assertEqual(mocker.call_count, 1)
            self.assertEqual(mocker.request_history[0].method, "POST")
-            self.assertEqual(mocker.request_history[0].headers["foo"], "bar")
            self.assertJSONEqual(
                mocker.request_history[0].body.decode(),
                {"email": self.user.email, "pk": self.user.pk, "username": self.user.username},

View File

@@ -8,7 +8,13 @@ from uuid import UUID
from django.core.serializers.json import DjangoJSONEncoder
from django.db import models
from django.http import JsonResponse
-from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField
+from rest_framework.fields import (
+    BooleanField,
+    CharField,
+    ChoiceField,
+    DictField,
+    ListField,
+)
from rest_framework.request import Request

from authentik.core.api.utils import PassiveSerializer
@@ -39,6 +45,12 @@ class ErrorDetailSerializer(PassiveSerializer):
    code = CharField()


+class MessageSerializer(PassiveSerializer):
+    message = CharField()
+    level = CharField()
+    tags = ListField(child=CharField())


class ContextualFlowInfo(PassiveSerializer):
    """Contextual flow information for a challenge"""
@@ -55,6 +67,7 @@ class Challenge(PassiveSerializer):
    flow_info = ContextualFlowInfo(required=False)
    component = CharField(default="")
+    messages = ListField(child=MessageSerializer(), allow_empty=True, required=False)
    response_errors = DictField(
        child=ErrorDetailSerializer(many=True), allow_empty=True, required=False
    )
@@ -170,7 +183,6 @@ class FrameChallenge(Challenge):


class FrameChallengeResponse(ChallengeResponse):
    component = CharField(default="xak-flow-frame")
View File

@@ -6,7 +6,6 @@ from typing import TYPE_CHECKING
from uuid import uuid4

from django.db import models
-from django.http import HttpRequest
from django.utils.translation import gettext_lazy as _
from model_utils.managers import InheritanceManager
from rest_framework.serializers import BaseSerializer
@@ -179,12 +178,11 @@ class Flow(SerializerModel, PolicyBindingModel):
        help_text=_("Required level of authentication and authorization to access a flow."),
    )

-    def background_url(self, request: HttpRequest | None = None) -> str:
+    @property
+    def background_url(self) -> str:
        """Get the URL to the background image. If the name is /static or starts with http
        it is returned as-is"""
        if not self.background:
-            if request:
-                return request.brand.branding_default_flow_background_url()
            return (
                CONFIG.get("web.path", "/")[:-1] + "/static/dist/assets/images/flow_background.jpg"
            )

View File

@@ -76,10 +76,10 @@ class FlowPlan:
        self.bindings.append(binding)
        self.markers.append(marker or StageMarker())

-    def insert_stage(self, stage: Stage, marker: StageMarker | None = None, index=1):
+    def insert_stage(self, stage: Stage, marker: StageMarker | None = None):
        """Insert stage into plan, as immediate next stage"""
-        self.bindings.insert(index, FlowStageBinding(stage=stage, order=0))
+        self.bindings.insert(1, FlowStageBinding(stage=stage, order=0))
-        self.markers.insert(index, marker or StageMarker())
+        self.markers.insert(1, marker or StageMarker())

    def redirect(self, destination: str):
        """Insert a redirect stage as next stage"""

View File

@@ -4,6 +4,7 @@ from typing import TYPE_CHECKING
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
+from django.contrib.messages import get_messages
from django.http import HttpRequest
from django.http.request import QueryDict
from django.http.response import HttpResponse
@@ -21,6 +22,7 @@ from authentik.flows.challenge import (
    ChallengeResponse,
    ContextualFlowInfo,
    HttpChallengeResponse,
+    MessageSerializer,
    RedirectChallenge,
    SessionEndChallenge,
    WithUserInfoChallenge,
@@ -184,13 +186,29 @@ class ChallengeStageView(StageView):
            flow_info = ContextualFlowInfo(
                data={
                    "title": self.format_title(),
-                    "background": self.executor.flow.background_url(self.request),
+                    "background": self.executor.flow.background_url,
                    "cancel_url": reverse("authentik_flows:cancel"),
                    "layout": self.executor.flow.layout,
                }
            )
            flow_info.is_valid()
            challenge.initial_data["flow_info"] = flow_info.data
+            if "messages" not in challenge.initial_data and not isinstance(
+                challenge, RedirectStage
+            ):
+                messages = MessageSerializer(
+                    data=[
+                        {
+                            "message": message.message,
+                            "level": message.level_tag,
+                            "tags": message.tags,
+                        }
+                        for message in get_messages(self.request)
+                    ],
+                    many=True,
+                )
+                messages.is_valid()
+                challenge.initial_data["messages"] = messages.data
        if isinstance(challenge, WithUserInfoChallenge):
            # If there's a pending user, update the `username` field
            # this field is only used by password managers.

View File

@@ -27,6 +27,7 @@ class FlowTestCase(APITestCase):
        self.assertIsNotNone(raw_response["component"])
        if flow:
            self.assertIn("flow_info", raw_response)
+            self.assertEqual(raw_response["flow_info"]["background"], flow.background_url)
            self.assertEqual(
                raw_response["flow_info"]["cancel_url"], reverse("authentik_flows:cancel")
            )

View File

@@ -1,11 +1,9 @@
"""API flow tests"""

-from json import loads
-
from django.urls import reverse
from rest_framework.test import APITestCase

-from authentik.core.tests.utils import create_test_admin_user, create_test_flow
+from authentik.core.tests.utils import create_test_admin_user
from authentik.flows.api.stages import StageSerializer, StageViewSet
from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding, Stage
from authentik.lib.generators import generate_id
@@ -79,22 +77,6 @@
        self.assertEqual(response.status_code, 200)
        self.assertJSONEqual(response.content, {"diagram": DIAGRAM_EXPECTED})

-    def test_api_background(self):
-        """Test custom background"""
-        user = create_test_admin_user()
-        self.client.force_login(user)
-
-        flow = create_test_flow()
-        response = self.client.get(reverse("authentik_api:flow-detail", kwargs={"slug": flow.slug}))
-        body = loads(response.content.decode())
-        self.assertEqual(body["background"], "/static/dist/assets/images/flow_background.jpg")
-
-        flow.background = "https://goauthentik.io/img/icon.png"
-        flow.save()
-        response = self.client.get(reverse("authentik_api:flow-detail", kwargs={"slug": flow.slug}))
-        body = loads(response.content.decode())
-        self.assertEqual(body["background"], "https://goauthentik.io/img/icon.png")
-
    def test_api_diagram_no_stages(self):
        """Test flow diagram with no stages."""
        user = create_test_admin_user()

View File

@@ -49,12 +49,13 @@ class TestFlowInspector(APITestCase):
                "captcha_stage": None,
                "component": "ak-stage-identification",
                "flow_info": {
-                    "background": "/static/dist/assets/images/flow_background.jpg",
+                    "background": flow.background_url,
                    "cancel_url": reverse("authentik_flows:cancel"),
                    "title": flow.title,
                    "layout": "stacked",
                },
                "flow_designation": "authentication",
+                "messages": [],
                "password_fields": False,
                "primary_action": "Log in",
                "sources": [],

View File

@@ -69,7 +69,6 @@ SESSION_KEY_APPLICATION_PRE = "authentik/flows/application_pre"
SESSION_KEY_GET = "authentik/flows/get"
SESSION_KEY_POST = "authentik/flows/post"
SESSION_KEY_HISTORY = "authentik/flows/history"
-SESSION_KEY_AUTH_STARTED = "authentik/flows/auth_started"
QS_KEY_TOKEN = "flow_token"  # nosec
QS_QUERY = "query"
@@ -454,7 +453,6 @@ class FlowExecutorView(APIView):
            SESSION_KEY_APPLICATION_PRE,
            SESSION_KEY_PLAN,
            SESSION_KEY_GET,
-            SESSION_KEY_AUTH_STARTED,
            # We might need the initial POST payloads for later requests
            # SESSION_KEY_POST,
            # We don't delete the history on purpose, as a user might

View File

@@ -6,22 +6,14 @@ from django.shortcuts import get_object_or_404
from ua_parser.user_agent_parser import Parse

from authentik.core.views.interface import InterfaceView
-from authentik.flows.models import Flow, FlowDesignation
-from authentik.flows.views.executor import SESSION_KEY_AUTH_STARTED
+from authentik.flows.models import Flow


class FlowInterfaceView(InterfaceView):
    """Flow interface"""

    def get_context_data(self, **kwargs: Any) -> dict[str, Any]:
-        flow = get_object_or_404(Flow, slug=self.kwargs.get("flow_slug"))
-        kwargs["flow"] = flow
-        if (
-            not self.request.user.is_authenticated
-            and flow.designation == FlowDesignation.AUTHENTICATION
-        ):
-            self.request.session[SESSION_KEY_AUTH_STARTED] = True
-            self.request.session.save()
+        kwargs["flow"] = get_object_or_404(Flow, slug=self.kwargs.get("flow_slug"))
        kwargs["inspector"] = "inspector" in self.request.GET
        return super().get_context_data(**kwargs)

View File

@@ -282,14 +282,16 @@ class ConfigLoader:
    def get_optional_int(self, path: str, default=None) -> int | None:
        """Wrapper for get that converts value into int or None if set"""
-        value = self.get(path, UNSET)
+        value = self.get(path, default)
        if value is UNSET:
            return default
        try:
            return int(value)
        except (ValueError, TypeError) as exc:
            if value is None or (isinstance(value, str) and value.lower() == "null"):
-                return None
+                return default
+            if value is UNSET:
+                return default
            self.log("warning", "Failed to parse config as int", path=path, exc=str(exc))
            return default
@@ -370,9 +372,9 @@ def django_db_config(config: ConfigLoader | None = None) -> dict:
                "sslcert": config.get("postgresql.sslcert"),
                "sslkey": config.get("postgresql.sslkey"),
            },
-            "CONN_MAX_AGE": config.get_optional_int("postgresql.conn_max_age", 0),
-            "CONN_HEALTH_CHECKS": config.get_bool("postgresql.conn_health_checks", False),
-            "DISABLE_SERVER_SIDE_CURSORS": config.get_bool(
+            "CONN_MAX_AGE": CONFIG.get_optional_int("postgresql.conn_max_age", 0),
+            "CONN_HEALTH_CHECKS": CONFIG.get_bool("postgresql.conn_health_checks", False),
+            "DISABLE_SERVER_SIDE_CURSORS": CONFIG.get_bool(
                "postgresql.disable_server_side_cursors", False
            ),
            "TEST": {
@@ -381,8 +383,8 @@
        }
    }

-    conn_max_age = config.get_optional_int("postgresql.conn_max_age", UNSET)
-    disable_server_side_cursors = config.get_bool("postgresql.disable_server_side_cursors", UNSET)
+    conn_max_age = CONFIG.get_optional_int("postgresql.conn_max_age", UNSET)
+    disable_server_side_cursors = CONFIG.get_bool("postgresql.disable_server_side_cursors", UNSET)
    if config.get_bool("postgresql.use_pgpool", False):
        db["default"]["DISABLE_SERVER_SIDE_CURSORS"] = True
        if disable_server_side_cursors is not UNSET:

View File

@@ -1,20 +1,5 @@
-# authentik configuration
-#
-# https://docs.goauthentik.io/docs/install-config/configuration/
-#
-# To override the settings in this file, run the following command from the repository root:
-#
-# ```shell
-# make gen-dev-config
-# ```
-#
-# You may edit the generated file to override the configuration below.
-#
-# When making modifying the default configuration file,
-# ensure that the corresponding documentation is updated to match.
-#
-# @see {@link ../../website/docs/install-config/configuration/configuration.mdx Configuration documentation} for more information.
+# update website/docs/install-config/configuration/configuration.mdx
+# This is the default configuration file
postgresql:
  host: localhost
  name: authentik
@@ -60,8 +45,6 @@ redis:
  # url: ""
  # transport_options: ""

-http_timeout: 30
-
cache:
  # url: ""
  timeout: 300

View File

@@ -18,7 +18,7 @@ from sentry_sdk import start_span
from sentry_sdk.tracing import Span
from structlog.stdlib import get_logger

-from authentik.core.models import User
+from authentik.core.models import AuthenticatedSession, User
from authentik.events.models import Event
from authentik.lib.expression.exceptions import ControlFlowException
from authentik.lib.utils.http import get_http_session
@@ -203,7 +203,9 @@ class BaseEvaluator:
            provider = OAuth2Provider.objects.get(name=provider)
        session = None
        if hasattr(request, "session") and request.session.session_key:
-            session = request.session["authenticatedsession"]
+            session = AuthenticatedSession.objects.filter(
+                session_key=request.session.session_key
+            ).first()
        access_token = AccessToken(
            provider=provider,
            user=user,

View File

@@ -18,15 +18,6 @@ class SerializerModel(models.Model):
    @property
    def serializer(self) -> type[BaseSerializer]:
        """Get serializer for this model"""
-        # Special handling for built-in source
-        if (
-            hasattr(self, "managed")
-            and hasattr(self, "MANAGED_INBUILT")
-            and self.managed == self.MANAGED_INBUILT
-        ):
-            from authentik.core.api.sources import SourceSerializer
-
-            return SourceSerializer
        raise NotImplementedError

View File

@@ -33,7 +33,6 @@ class SyncObjectSerializer(PassiveSerializer):
        )
    )
    sync_object_id = CharField()
-    override_dry_run = BooleanField(default=False)


class SyncObjectResultSerializer(PassiveSerializer):
@@ -99,7 +98,6 @@ class OutgoingSyncProviderStatusMixin:
            page=1,
            provider_pk=provider.pk,
            pk=params.validated_data["sync_object_id"],
-            override_dry_run=params.validated_data["override_dry_run"],
        ).get()
        return Response(SyncObjectResultSerializer(instance={"messages": res}).data)

View File

@@ -28,14 +28,6 @@ class Direction(StrEnum):
    remove = "remove"


-SAFE_METHODS = [
-    "GET",
-    "HEAD",
-    "OPTIONS",
-    "TRACE",
-]
-
-
class BaseOutgoingSyncClient[
    TModel: "Model", TConnection: "Model", TSchema: dict, TProvider: "OutgoingSyncProvider"
]:

View File

@@ -21,22 +21,6 @@ class BadRequestSyncException(BaseSyncException):
    """Exception when invalid data was sent to the remote system"""


-class DryRunRejected(BaseSyncException):
-    """When dry_run is enabled and a provider dropped a mutating request"""
-
-    def __init__(self, url: str, method: str, body: dict):
-        super().__init__()
-        self.url = url
-        self.method = method
-        self.body = body
-
-    def __repr__(self):
-        return self.__str__()
-
-    def __str__(self):
-        return f"Dry-run rejected request: {self.method} {self.url}"
-
-
class StopSync(BaseSyncException):
    """Exception raised when a configuration error should stop the sync process"""

View File

@@ -1,9 +1,8 @@
from typing import Any, Self

import pglock
-from django.db import connection, models
+from django.db import connection
from django.db.models import Model, QuerySet, TextChoices
-from django.utils.translation import gettext_lazy as _

from authentik.core.models import Group, User
from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
@@ -19,14 +18,6 @@ class OutgoingSyncDeleteAction(TextChoices):
class OutgoingSyncProvider(Model):
-    """Base abstract models for providers implementing outgoing sync"""
-
-    dry_run = models.BooleanField(
-        default=False,
-        help_text=_(
-            "When enabled, provider will not modify or create objects in the remote system."
-        ),
-    )

    class Meta:
        abstract = True
@@ -41,7 +32,7 @@
    @property
    def sync_lock(self) -> pglock.advisory:
-        """Postgres lock for syncing to prevent multiple parallel syncs happening"""
+        """Postgres lock for syncing SCIM to prevent multiple parallel syncs happening"""
        return pglock.advisory(
            lock_id=f"goauthentik.io/{connection.schema_name}/providers/outgoing-sync/{str(self.pk)}",
            timeout=0,

View File

@@ -20,7 +20,6 @@ from authentik.lib.sync.outgoing import PAGE_SIZE, PAGE_TIMEOUT
from authentik.lib.sync.outgoing.base import Direction
from authentik.lib.sync.outgoing.exceptions import (
    BadRequestSyncException,
-    DryRunRejected,
    StopSync,
    TransientSyncException,
)
@@ -106,9 +105,7 @@ class SyncTasks:
                return
        task.set_status(TaskStatus.SUCCESSFUL, *messages)

-    def sync_objects(
-        self, object_type: str, page: int, provider_pk: int, override_dry_run=False, **filter
-    ):
+    def sync_objects(self, object_type: str, page: int, provider_pk: int, **filter):
        _object_type = path_to_class(object_type)
        self.logger = get_logger().bind(
            provider_type=class_to_path(self._provider_model),
@@ -119,10 +116,6 @@
        provider = self._provider_model.objects.filter(pk=provider_pk).first()
        if not provider:
            return messages
-        # Override dry run mode if requested, however don't save the provider
-        # so that scheduled sync tasks still run in dry_run mode
-        if override_dry_run:
-            provider.dry_run = False
        try:
            client = provider.client_for_model(_object_type)
        except TransientSyncException:
@@ -139,22 +132,6 @@
            except SkipObjectException:
                self.logger.debug("skipping object due to SkipObject", obj=obj)
                continue
-            except DryRunRejected as exc:
-                messages.append(
-                    asdict(
-                        LogEvent(
-                            _("Dropping mutating request due to dry run"),
-                            log_level="info",
-                            logger=f"{provider._meta.verbose_name}@{object_type}",
-                            attributes={
-                                "obj": sanitize_item(obj),
-                                "method": exc.method,
-                                "url": exc.url,
-                                "body": exc.body,
-                            },
-                        )
-                    )
-                )
            except BadRequestSyncException as exc:
                self.logger.warning("failed to sync object", exc=exc, obj=obj)
                messages.append(
@@ -254,10 +231,8 @@
            raise Retry() from exc
        except SkipObjectException:
            continue
-        except DryRunRejected as exc:
-            self.logger.info("Rejected dry-run event", exc=exc)
        except StopSync as exc:
-            self.logger.warning("Stopping sync", exc=exc, provider_pk=provider.pk)
+            self.logger.warning(exc, provider_pk=provider.pk)

    def sync_signal_m2m(self, group_pk: str, action: str, pk_set: list[int]):
        self.logger = get_logger().bind(
@@ -288,7 +263,5 @@
            raise Retry() from exc
        except SkipObjectException:
            continue
-        except DryRunRejected as exc:
-            self.logger.info("Rejected dry-run event", exc=exc)
        except StopSync as exc:
-            self.logger.warning("Stopping sync", exc=exc, provider_pk=provider.pk)
+            self.logger.warning(exc, provider_pk=provider.pk)

View File

@@ -158,18 +158,6 @@ class TestConfig(TestCase):
        test_obj = Test()
        dumps(test_obj, indent=4, cls=AttrEncoder)

-    def test_get_optional_int(self):
-        config = ConfigLoader()
-        self.assertEqual(config.get_optional_int("foo", 21), 21)
-        self.assertEqual(config.get_optional_int("foo"), None)
-        config.set("foo", "21")
-        self.assertEqual(config.get_optional_int("foo"), 21)
-        self.assertEqual(config.get_optional_int("foo", 0), 21)
-        self.assertEqual(config.get_optional_int("foo", "null"), 21)
-        config.set("foo", "null")
-        self.assertEqual(config.get_optional_int("foo"), None)
-        self.assertEqual(config.get_optional_int("foo", 21), None)
-
    @mock.patch.dict(environ, check_deprecations_env_vars)
    def test_check_deprecations(self):
        """Test config key re-write for deprecated env vars"""
@@ -233,16 +221,6 @@
            },
        )

-    def test_db_conn_max_age(self):
-        """Test DB conn_max_age Config"""
-        config = ConfigLoader()
-        config.set("postgresql.conn_max_age", "null")
-        conf = django_db_config(config)
-        self.assertEqual(
-            conf["default"]["CONN_MAX_AGE"],
-            None,
-        )
-
    def test_db_read_replicas(self):
        """Test read replicas"""
        config = ConfigLoader()

View File

@@ -16,40 +16,7 @@ def authentik_user_agent() -> str:
    return f"authentik@{get_full_version()}"


-class TimeoutSession(Session):
-    """Always set a default HTTP request timeout"""
-
-    def __init__(self, default_timeout=None):
-        super().__init__()
-        self.timeout = default_timeout
-
-    def send(
-        self,
-        request,
-        *,
-        stream=...,
-        verify=...,
-        proxies=...,
-        cert=...,
-        timeout=...,
-        allow_redirects=...,
-        **kwargs,
-    ):
-        if not timeout and self.timeout:
-            timeout = self.timeout
-        return super().send(
-            request,
-            stream=stream,
-            verify=verify,
-            proxies=proxies,
-            cert=cert,
-            timeout=timeout,
-            allow_redirects=allow_redirects,
-            **kwargs,
-        )
-
-
-class DebugSession(TimeoutSession):
+class DebugSession(Session):
    """requests session which logs http requests and responses"""

    def send(self, req: PreparedRequest, *args, **kwargs):
@@ -75,9 +42,8 @@ class DebugSession(TimeoutSession):
def get_http_session() -> Session:
    """Get a requests session with common headers"""
-    session = TimeoutSession()
+    session = Session()
    if CONFIG.get_bool("debug") or CONFIG.get("log_level") == "trace":
        session = DebugSession()
    session.headers["User-Agent"] = authentik_user_agent()
-    session.timeout = CONFIG.get_optional_int("http_timeout")
    return session

View File

@@ -13,7 +13,6 @@ from paramiko.ssh_exception import SSHException
from structlog.stdlib import get_logger
from yaml import safe_dump

-from authentik import __version__
from authentik.outposts.apps import MANAGED_OUTPOST
from authentik.outposts.controllers.base import BaseClient, BaseController, ControllerException
from authentik.outposts.docker_ssh import DockerInlineSSH, SSHManagedExternallyException
@@ -185,7 +184,7 @@ class DockerController(BaseController):
        try:
            self.client.images.pull(image)
        except DockerException:  # pragma: no cover
-            image = f"ghcr.io/goauthentik/{self.outpost.type}:{__version__}"
+            image = f"ghcr.io/goauthentik/{self.outpost.type}:latest"
            self.client.images.pull(image)
        return image

View File

@@ -1,6 +1,5 @@
"""Base Kubernetes Reconciler"""

-import re
from dataclasses import asdict
from json import dumps
from typing import TYPE_CHECKING, Generic, TypeVar
@@ -68,8 +67,7 @@ class KubernetesObjectReconciler(Generic[T]):
    @property
    def name(self) -> str:
        """Get the name of the object this reconciler manages"""
-        base_name = (
+        return (
            self.controller.outpost.config.object_naming_template
            % {
                "name": slugify(self.controller.outpost.name),
@@ -77,16 +75,6 @@ class KubernetesObjectReconciler(Generic[T]):
            }
        ).lower()

-        formatted = slugify(base_name)
-        formatted = re.sub(r"[^a-z0-9-]", "-", formatted)
-        formatted = re.sub(r"-+", "-", formatted)
-        formatted = formatted[:63]
-        if not formatted:
-            formatted = f"outpost-{self.controller.outpost.uuid.hex}"[:63]
-        return formatted
-
    def get_patched_reference_object(self) -> T:
        """Get patched reference object"""
        reference = self.get_reference_object()
@@ -124,6 +112,7 @@ class KubernetesObjectReconciler(Generic[T]):
        try:
            current = self.retrieve()
        except (OpenApiException, HTTPError) as exc:
            if isinstance(exc, ApiException) and exc.status == HttpResponseNotFound.status_code:
                self.logger.debug("Failed to get current, triggering recreate")
                raise NeedsRecreate from exc
@@ -167,6 +156,7 @@ class KubernetesObjectReconciler(Generic[T]):
            self.delete(current)
            self.logger.debug("Removing")
        except (OpenApiException, HTTPError) as exc:
            if isinstance(exc, ApiException) and exc.status == HttpResponseNotFound.status_code:
                self.logger.debug("Failed to get current, assuming non-existent")
                return

View File

@@ -61,14 +61,9 @@ class KubernetesController(BaseController):
    client: KubernetesClient
    connection: KubernetesServiceConnection

-    def __init__(
-        self,
-        outpost: Outpost,
-        connection: KubernetesServiceConnection,
-        client: KubernetesClient | None = None,
-    ) -> None:
+    def __init__(self, outpost: Outpost, connection: KubernetesServiceConnection) -> None:
        super().__init__(outpost, connection)
-        self.client = client if client else KubernetesClient(connection)
+        self.client = KubernetesClient(connection)
        self.reconcilers = {
            SecretReconciler.reconciler_name(): SecretReconciler,
            DeploymentReconciler.reconciler_name(): DeploymentReconciler,

View File

@ -1,44 +0,0 @@
"""Kubernetes controller tests"""
from django.test import TestCase
from authentik.blueprints.tests import reconcile_app
from authentik.lib.generators import generate_id
from authentik.outposts.apps import MANAGED_OUTPOST
from authentik.outposts.controllers.k8s.deployment import DeploymentReconciler
from authentik.outposts.controllers.kubernetes import KubernetesController
from authentik.outposts.models import KubernetesServiceConnection, Outpost, OutpostType
class KubernetesControllerTests(TestCase):
"""Kubernetes controller tests"""
@reconcile_app("authentik_outposts")
def setUp(self) -> None:
self.outpost = Outpost.objects.create(
name="test",
type=OutpostType.PROXY,
)
self.integration = KubernetesServiceConnection(name="test")
def test_gen_name(self):
"""Ensure the generated name is valid"""
controller = KubernetesController(
Outpost.objects.filter(managed=MANAGED_OUTPOST).first(),
self.integration,
# Pass something not-none as client so we don't
# attempt to connect to K8s as that's not needed
client=self,
)
rec = DeploymentReconciler(controller)
self.assertEqual(rec.name, "ak-outpost-authentik-embedded-outpost")
controller.outpost.name = generate_id()
self.assertLess(len(rec.name), 64)
# Test custom naming template
_cfg = controller.outpost.config
_cfg.object_naming_template = ""
controller.outpost.config = _cfg
self.assertEqual(rec.name, f"outpost-{controller.outpost.uuid.hex}")
self.assertLess(len(rec.name), 64)

Some files were not shown because too many files have changed in this diff.