Compare commits
Comparing version/20... with web/design (32 commits)
Commits (SHA1):
  977e73b9d8
  2ddb7e1e97
  246423b2be
  457b61c5b4
  25eefb7d55
  50d2f69332
  7d972ec711
  854427e463
  be349e2e14
  bd0e81b8ad
  f6afb59515
  dddde09be5
  6d7fc94698
  1dcf9108ad
  7bb6a3dfe6
  9cc440eee1
  fe9e4526ac
  20b66f850c
  67b327414b
  5b8d86b5a9
  67aed3e318
  9809b94030
  e7527c551b
  36b10b434a
  831797b871
  5cc2c0f45f
  32442766f4
  75790909a8
  e0d5df89ca
  f25a9c624e
  914993a788
  89dad07a66
@@ -1,24 +1,22 @@
 [bumpversion]
-current_version = 2025.4.0-rc1
+current_version = 2024.12.2
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?
 serialize =
 	{major}.{minor}.{patch}-{rc_t}{rc_n}
 	{major}.{minor}.{patch}
 message = release: {new_version}
 tag_name = version/{new_version}
 
 [bumpversion:part:rc_t]
 values =
 	rc
 	final
 optional_value = final
 
 [bumpversion:file:pyproject.toml]
 
-[bumpversion:file:uv.lock]
-
 [bumpversion:file:package.json]
 
 [bumpversion:file:docker-compose.yml]
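Editor's note: the rc_t/rc_n groups deleted above are what let bumpversion round-trip release-candidate versions. A quick sanity check of the parse pattern (a sketch; the regex is copied verbatim from the hunk, the surrounding script is illustrative only):

```python
import re

# Parse pattern from the [bumpversion] section above.
PARSE = r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?"

m = re.fullmatch(PARSE, "2025.4.0-rc1")
assert m and m.group("rc_t") == "rc" and m.group("rc_n") == "1"

# Final releases leave the optional rc groups unset, which is why
# serialize lists both the "-{rc_t}{rc_n}" form and the plain form.
m = re.fullmatch(PARSE, "2024.12.2")
assert m and m.group("rc_t") is None
```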
.github/ISSUE_TEMPLATE/bug_report.md (vendored, 6 changes)

@@ -28,11 +28,7 @@ Output of docker-compose logs or kubectl logs respectively
 
 **Version and Deployment (please complete the following information):**
 
-<!--
-Notice: authentik supports installation via Docker, Kubernetes, and AWS CloudFormation only. Support is not available for other methods. For detailed installation and configuration instructions, please refer to the official documentation at https://docs.goauthentik.io/docs/install-config/.
--->
-
-- authentik version: [e.g. 2025.2.0]
+- authentik version: [e.g. 2021.8.5]
 - Deployment: [e.g. docker-compose, helm]
 
 **Additional context**
.github/ISSUE_TEMPLATE/docs_issue.md (vendored, 22 changes)

@@ -1,22 +0,0 @@
----
-name: Documentation issue
-about: Suggest an improvement or report a problem
-title: ""
-labels: documentation
-assignees: ""
----
-
-**Do you see an area that can be clarified or expanded, a technical inaccuracy, or a broken link? Please describe.**
-A clear and concise description of what the problem is, or where the document can be improved. Ex. I believe we need more details about [...]
-
-**Provide the URL or link to the exact page in the documentation to which you are referring.**
-If there are multiple pages, list them all, and be sure to state the header or section where the content is.
-
-**Describe the solution you'd like**
-A clear and concise description of what you want to happen.
-
-**Additional context**
-Add any other context or screenshots about the documentation issue here.
-
-**Consider opening a PR!**
-If the issue is one that you can fix, or even make a good pass at, we'd appreciate a PR. For more information about making a contribution to the docs, and using our Style Guide and our templates, refer to ["Writing documentation"](https://docs.goauthentik.io/docs/developer-docs/docs/writing-documentation).
.github/ISSUE_TEMPLATE/question.md (vendored, 7 changes)

@@ -20,12 +20,7 @@ Output of docker-compose logs or kubectl logs respectively
 
 **Version and Deployment (please complete the following information):**
 
-<!--
-Notice: authentik supports installation via Docker, Kubernetes, and AWS CloudFormation only. Support is not available for other methods. For detailed installation and configuration instructions, please refer to the official documentation at https://docs.goauthentik.io/docs/install-config/.
--->
-
-
-- authentik version: [e.g. 2025.2.0]
+- authentik version: [e.g. 2021.8.5]
 - Deployment: [e.g. docker-compose, helm]
 
 **Additional context**
@@ -44,7 +44,6 @@ if is_release:
             ]
-            if not prerelease:
             image_tags += [
                 f"{name}:latest",
                 f"{name}:{version_family}",
             ]
     else:
.github/actions/setup/action.yml (vendored, 20 changes)

@@ -9,22 +9,17 @@ inputs:
 runs:
   using: "composite"
   steps:
-    - name: Install apt deps
+    - name: Install poetry & deps
       shell: bash
       run: |
+        pipx install poetry || true
         sudo apt-get update
         sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext libkrb5-dev krb5-kdc krb5-user krb5-admin-server
-    - name: Install uv
-      uses: astral-sh/setup-uv@v5
-      with:
-        enable-cache: true
-    - name: Setup python
+    - name: Setup python and restore poetry
      uses: actions/setup-python@v5
      with:
        python-version-file: "pyproject.toml"
-    - name: Install Python deps
-      shell: bash
-      run: uv sync --all-extras --dev --frozen
+        cache: "poetry"
    - name: Setup node
      uses: actions/setup-node@v4
      with:
@@ -35,18 +30,15 @@ runs:
       uses: actions/setup-go@v5
       with:
         go-version-file: "go.mod"
-    - name: Setup docker cache
-      uses: ScribeMD/docker-cache@0.5.0
-      with:
-        key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }}
     - name: Setup dependencies
       shell: bash
       run: |
         export PSQL_TAG=${{ inputs.postgresql_version }}
         docker compose -f .github/actions/setup/docker-compose.yml up -d
+        poetry install --sync
         cd web && npm ci
     - name: Generate config
-      shell: uv run python {0}
+      shell: poetry run python {0}
      run: |
        from authentik.lib.generators import generate_id
        from yaml import safe_dump
.github/actions/setup/docker-compose.yml (vendored, 2 changes)

@@ -11,7 +11,7 @@ services:
       - 5432:5432
     restart: always
   redis:
-    image: docker.io/library/redis:7
+    image: docker.io/library/redis
    ports:
      - 6379:6379
    restart: always
.github/codespell-words.txt (vendored, 33 changes)

@@ -1,32 +1,7 @@
-akadmin
-asgi
-assertIn
-authentik
-authn
-crate
-docstrings
-entra
-goauthentik
-gunicorn
-hass
-jwe
-jwks
-keypair
-keypairs
-kubernetes
-oidc
-ontext
-openid
-passwordless
-plex
-saml
-scim
-singed
-slo
-sso
-totp
-traefik
 # https://github.com/codespell-project/codespell/issues/1224
-upToDate
+hass
 warmup
 webauthn
+ontext
+singed
+assertIn
.github/dependabot.yml (vendored, 8 changes)

@@ -82,12 +82,6 @@ updates:
         docusaurus:
           patterns:
             - "@docusaurus/*"
-        build:
-          patterns:
-            - "@swc/*"
-            - "swc-*"
-            - "lightningcss*"
-            - "@rspack/binding*"
   - package-ecosystem: npm
     directory: "/lifecycle/aws"
     schedule:
@@ -98,7 +92,7 @@ updates:
       prefix: "lifecycle/aws:"
     labels:
       - dependencies
-  - package-ecosystem: uv
+  - package-ecosystem: pip
    directory: "/"
    schedule:
      interval: daily
@@ -40,7 +40,7 @@ jobs:
       attestations: write
     steps:
       - uses: actions/checkout@v4
-      - uses: docker/setup-qemu-action@v3.6.0
+      - uses: docker/setup-qemu-action@v3.3.0
      - uses: docker/setup-buildx-action@v3
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
@@ -77,7 +77,7 @@ jobs:
         id: push
         with:
           context: .
-          push: ${{ steps.ev.outputs.shouldPush == 'true' }}
+          push: true
          secrets: |
            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
@@ -89,7 +89,6 @@ jobs:
           cache-to: ${{ steps.ev.outputs.cacheTo }}
       - uses: actions/attest-build-provenance@v2
         id: attest
-        if: ${{ steps.ev.outputs.shouldPush == 'true' }}
        with:
          subject-name: ${{ steps.ev.outputs.attestImageNames }}
          subject-digest: ${{ steps.push.outputs.digest }}
@@ -46,7 +46,6 @@ jobs:
       - build-server-arm64
     outputs:
       tags: ${{ steps.ev.outputs.imageTagsJSON }}
-      shouldPush: ${{ steps.ev.outputs.shouldPush }}
    steps:
      - uses: actions/checkout@v4
      - name: prepare variables
@@ -58,7 +57,6 @@ jobs:
       image-name: ${{ inputs.image_name }}
   merge-server:
     runs-on: ubuntu-latest
-    if: ${{ needs.get-tags.outputs.shouldPush == 'true' }}
    needs:
      - get-tags
      - build-server-amd64
.github/workflows/api-py-publish.yml (vendored, 1 change)

@@ -30,6 +30,7 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version-file: "pyproject.toml"
+          cache: "poetry"
      - name: Generate API Client
        run: make gen-client-py
      - name: Publish package
.github/workflows/ci-aws-cfn.yml (vendored, 2 changes)

@@ -33,7 +33,7 @@ jobs:
           npm ci
       - name: Check changes have been applied
         run: |
-          uv run make aws-cfn
+          poetry run make aws-cfn
          git diff --exit-code
  ci-aws-cfn-mark:
    if: always()
.github/workflows/ci-main-daily.yml (vendored, 28 changes)

@@ -1,28 +0,0 @@
----
-name: authentik-ci-main-daily
-
-on:
-  workflow_dispatch:
-  schedule:
-    # Every night at 3am
-    - cron: "0 3 * * *"
-
-jobs:
-  test-container:
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        version:
-          - docs
-          - version-2025-2
-          - version-2024-12
-    steps:
-      - uses: actions/checkout@v4
-      - run: |
-          current="$(pwd)"
-          dir="/tmp/authentik/${{ matrix.version }}"
-          mkdir -p $dir
-          cd $dir
-          wget https://${{ matrix.version }}.goauthentik.io/docker-compose.yml
-          ${current}/scripts/test_docker.sh
.github/workflows/ci-main.yml (vendored, 59 changes)

@@ -34,7 +34,7 @@ jobs:
       - name: Setup authentik env
         uses: ./.github/actions/setup
       - name: run job
-        run: uv run make ci-${{ matrix.job }}
+        run: poetry run make ci-${{ matrix.job }}
   test-migrations:
     runs-on: ubuntu-latest
     steps:
@@ -42,48 +42,35 @@ jobs:
       - name: Setup authentik env
         uses: ./.github/actions/setup
       - name: run migrations
-        run: uv run python -m lifecycle.migrate
-  test-make-seed:
-    runs-on: ubuntu-latest
-    steps:
-      - id: seed
-        run: |
-          echo "seed=$(printf "%d\n" "0x$(openssl rand -hex 4)")" >> "$GITHUB_OUTPUT"
-    outputs:
-      seed: ${{ steps.seed.outputs.seed }}
+        run: poetry run python -m lifecycle.migrate
   test-migrations-from-stable:
-    name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
+    name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }}
     runs-on: ubuntu-latest
-    timeout-minutes: 20
-    needs: test-make-seed
     strategy:
       fail-fast: false
       matrix:
         psql:
           - 15-alpine
           - 16-alpine
-        run_id: [1, 2, 3, 4, 5]
     steps:
       - uses: actions/checkout@v4
         with:
           fetch-depth: 0
       - name: checkout stable
         run: |
+          # Delete all poetry envs
+          rm -rf /home/runner/.cache/pypoetry
           # Copy current, latest config to local
-          # Temporarly comment the .github backup while migrating to uv
           cp authentik/lib/default.yml local.env.yml
-          # cp -R .github ..
+          cp -R .github ..
           cp -R scripts ..
           git checkout $(git tag --sort=version:refname | grep '^version/' | grep -vE -- '-rc[0-9]+$' | tail -n1)
-          # rm -rf .github/ scripts/
-          # mv ../.github ../scripts .
-          rm -rf scripts/
-          mv ../scripts .
+          rm -rf .github/ scripts/
+          mv ../.github ../scripts .
       - name: Setup authentik env (stable)
         uses: ./.github/actions/setup
         with:
           postgresql_version: ${{ matrix.psql }}
-        continue-on-error: true
       - name: run migrations to stable
         run: poetry run python -m lifecycle.migrate
       - name: checkout current code
@@ -93,34 +80,31 @@ jobs:
           git reset --hard HEAD
           git clean -d -fx .
           git checkout $GITHUB_SHA
+          # Delete previous poetry env
+          rm -rf /home/runner/.cache/pypoetry/virtualenvs/*
       - name: Setup authentik env (ensure latest deps are installed)
         uses: ./.github/actions/setup
         with:
           postgresql_version: ${{ matrix.psql }}
       - name: migrate to latest
         run: |
-          uv run python -m lifecycle.migrate
+          poetry run python -m lifecycle.migrate
       - name: run tests
         env:
           # Test in the main database that we just migrated from the previous stable version
           AUTHENTIK_POSTGRESQL__TEST__NAME: authentik
-          CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
-          CI_RUN_ID: ${{ matrix.run_id }}
-          CI_TOTAL_RUNS: "5"
         run: |
-          uv run make ci-test
+          poetry run make test
   test-unittest:
-    name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
+    name: test-unittest - PostgreSQL ${{ matrix.psql }}
     runs-on: ubuntu-latest
-    timeout-minutes: 20
-    needs: test-make-seed
+    timeout-minutes: 30
     strategy:
       fail-fast: false
       matrix:
         psql:
           - 15-alpine
           - 16-alpine
-        run_id: [1, 2, 3, 4, 5]
     steps:
       - uses: actions/checkout@v4
       - name: Setup authentik env
@@ -128,12 +112,9 @@ jobs:
         with:
           postgresql_version: ${{ matrix.psql }}
       - name: run unittest
-        env:
-          CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
-          CI_RUN_ID: ${{ matrix.run_id }}
-          CI_TOTAL_RUNS: "5"
         run: |
-          uv run make ci-test
+          poetry run make test
+          poetry run coverage xml
       - if: ${{ always() }}
         uses: codecov/codecov-action@v5
         with:
@@ -156,8 +137,8 @@ jobs:
         uses: helm/kind-action@v1.12.0
       - name: run integration
         run: |
-          uv run coverage run manage.py test tests/integration
-          uv run coverage xml
+          poetry run coverage run manage.py test tests/integration
+          poetry run coverage xml
       - if: ${{ always() }}
         uses: codecov/codecov-action@v5
         with:
@@ -214,8 +195,8 @@ jobs:
           npm run build
       - name: run e2e
         run: |
-          uv run coverage run manage.py test ${{ matrix.job.glob }}
-          uv run coverage xml
+          poetry run coverage run manage.py test ${{ matrix.job.glob }}
+          poetry run coverage xml
      - if: ${{ always() }}
        uses: codecov/codecov-action@v5
        with:
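Editor's note: the removed test-make-seed job derives one random integer per workflow run so that all five sharded unittest jobs shuffle tests in the same order. A sketch of the same derivation in Python (the shell pipeline above is the authoritative version; `secrets` here is simply an equivalent source of four random bytes):

```python
import secrets

# Equivalent of: printf "%d\n" "0x$(openssl rand -hex 4)"
# Four random bytes, rendered as hex, reinterpreted as a decimal seed.
seed = int(secrets.token_hex(4), 16)
print(seed)  # e.g. 2885679806; passed to manage.py test via --randomly-seed
```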
.github/workflows/ci-outpost.yml (vendored, 4 changes)

@@ -29,7 +29,7 @@ jobs:
       - name: Generate API
         run: make gen-client-go
       - name: golangci-lint
-        uses: golangci/golangci-lint-action@v7
+        uses: golangci/golangci-lint-action@v6
        with:
          version: latest
          args: --timeout 5000s --verbose
@@ -82,7 +82,7 @@ jobs:
       with:
         ref: ${{ github.event.pull_request.head.sha }}
     - name: Set up QEMU
-      uses: docker/setup-qemu-action@v3.6.0
+      uses: docker/setup-qemu-action@v3.3.0
    - name: Set up Docker Buildx
      uses: docker/setup-buildx-action@v3
    - name: prepare variables
@@ -2,7 +2,7 @@ name: authentik-gen-update-webauthn-mds
 on:
   workflow_dispatch:
   schedule:
-    - cron: "30 1 1,15 * *"
+    - cron: '30 1 1,15 * *'
 
 env:
   POSTGRES_DB: authentik
@@ -24,7 +24,7 @@ jobs:
         token: ${{ steps.generate_token.outputs.token }}
     - name: Setup authentik env
       uses: ./.github/actions/setup
-    - run: uv run ak update_webauthn_mds
+    - run: poetry run ak update_webauthn_mds
    - uses: peter-evans/create-pull-request@v7
      id: cpr
      with:
.github/workflows/packages-npm-publish.yml (vendored, 45 changes)

@@ -1,45 +0,0 @@
-name: authentik-packages-npm-publish
-on:
-  push:
-    branches: [main]
-    paths:
-      - packages/docusaurus-config/**
-      - packages/eslint-config/**
-      - packages/prettier-config/**
-      - packages/tsconfig/**
-  workflow_dispatch:
-jobs:
-  publish:
-    if: ${{ github.repository != 'goauthentik/authentik-internal' }}
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        package:
-          - docusaurus-config
-          - eslint-config
-          - prettier-config
-          - tsconfig
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 2
-      - uses: actions/setup-node@v4
-        with:
-          node-version-file: packages/${{ matrix.package }}/package.json
-          registry-url: "https://registry.npmjs.org"
-      - name: Get changed files
-        id: changed-files
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c
-        with:
-          files: |
-            packages/${{ matrix.package }}/package.json
-      - name: Publish package
-        if: steps.changed-files.outputs.any_changed == 'true'
-        working-directory: packages/${{ matrix.package}}
-        run: |
-          npm ci
-          npm run build
-          npm publish
-        env:
-          NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }}
.github/workflows/publish-source-docs.yml (vendored, 4 changes)

@@ -21,8 +21,8 @@ jobs:
         uses: ./.github/actions/setup
       - name: generate docs
         run: |
-          uv run make migrate
-          uv run ak build_source_docs
+          poetry run make migrate
+          poetry run ak build_source_docs
      - name: Publish
        uses: netlify/actions/cli@master
        with:
.github/workflows/release-publish.yml (vendored, 12 changes)

@@ -9,17 +9,9 @@ jobs:
   build-server:
     uses: ./.github/workflows/_reusable-docker-build.yaml
     secrets: inherit
-    permissions:
-      # Needed to upload container images to ghcr.io
-      packages: write
-      # Needed for attestation
-      id-token: write
-      attestations: write
     with:
       image_name: ghcr.io/goauthentik/server,beryju/authentik
       release: true
+      registry_dockerhub: true
+      registry_ghcr: true
   build-outpost:
     runs-on: ubuntu-latest
     permissions:
@@ -42,7 +34,7 @@ jobs:
     with:
       go-version-file: "go.mod"
     - name: Set up QEMU
-      uses: docker/setup-qemu-action@v3.6.0
+      uses: docker/setup-qemu-action@v3.3.0
    - name: Set up Docker Buildx
      uses: docker/setup-buildx-action@v3
    - name: prepare variables
@@ -186,7 +178,7 @@ jobs:
           container=$(docker container create ${{ steps.ev.outputs.imageMainName }})
           docker cp ${container}:web/ .
       - name: Create a Sentry.io release
-        uses: getsentry/action-release@v3
+        uses: getsentry/action-release@v1
        continue-on-error: true
        env:
          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
.github/workflows/release-tag.yml (vendored, 11 changes)

@@ -14,7 +14,16 @@ jobs:
       - uses: actions/checkout@v4
       - name: Pre-release test
         run: |
-          make test-docker
+          echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env
+          echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env
+          docker buildx install
+          mkdir -p ./gen-ts-api
+          docker build -t testing:latest .
+          echo "AUTHENTIK_IMAGE=testing" >> .env
+          echo "AUTHENTIK_TAG=latest" >> .env
+          docker compose up --no-start
+          docker compose start postgresql redis
+          docker compose run -u root server test-all
      - id: generate_token
        uses: tibdex/github-app-token@v2
        with:
.github/workflows/repo-stale.yml (vendored, 6 changes)

@@ -1,8 +1,8 @@
-name: "authentik-repo-stale"
+name: 'authentik-repo-stale'
 
 on:
   schedule:
-    - cron: "30 1 * * *"
+    - cron: '30 1 * * *'
  workflow_dispatch:
 
 permissions:
@@ -25,7 +25,7 @@ jobs:
       days-before-stale: 60
       days-before-close: 7
       exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing
-      stale-issue-label: status/stale
+      stale-issue-label: wontfix
      stale-issue-message: >
        This issue has been automatically marked as stale because it has not had
        recent activity. It will be closed if no further activity occurs. Thank you
.github/workflows/semgrep.yml (vendored, 27 changes)

@@ -1,27 +0,0 @@
-name: authentik-semgrep
-on:
-  workflow_dispatch: {}
-  pull_request: {}
-  push:
-    branches:
-      - main
-      - master
-    paths:
-      - .github/workflows/semgrep.yml
-  schedule:
-    # random HH:MM to avoid a load spike on GitHub Actions at 00:00
-    - cron: '12 15 * * *'
-jobs:
-  semgrep:
-    name: semgrep/ci
-    runs-on: ubuntu-latest
-    permissions:
-      contents: read
-    env:
-      SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }}
-    container:
-      image: semgrep/semgrep
-    if: (github.actor != 'dependabot[bot]')
-    steps:
-      - uses: actions/checkout@v4
-      - run: semgrep ci
@@ -1,13 +1,9 @@
 ---
-name: authentik-translate-extract-compile
+name: authentik-backend-translate-extract-compile
 on:
   schedule:
     - cron: "0 0 * * *" # every day at midnight
   workflow_dispatch:
-  pull_request:
-    branches:
-      - main
-      - version-*
 
 env:
   POSTGRES_DB: authentik
@@ -19,30 +15,23 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - id: generate_token
-        if: ${{ github.event_name != 'pull_request' }}
         uses: tibdex/github-app-token@v2
         with:
           app_id: ${{ secrets.GH_APP_ID }}
           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
       - uses: actions/checkout@v4
-        if: ${{ github.event_name != 'pull_request' }}
         with:
           token: ${{ steps.generate_token.outputs.token }}
-      - uses: actions/checkout@v4
-        if: ${{ github.event_name == 'pull_request' }}
       - name: Setup authentik env
         uses: ./.github/actions/setup
       - name: Generate API
         run: make gen-client-ts
       - name: run extract
         run: |
-          uv run make i18n-extract
+          poetry run make i18n-extract
       - name: run compile
         run: |
-          uv run ak compilemessages
+          poetry run ak compilemessages
           make web-check-compile
       - name: Create Pull Request
-        if: ${{ github.event_name != 'pull_request' }}
        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ steps.generate_token.outputs.token }}
.gitignore (vendored, 8 changes)

@@ -11,10 +11,6 @@ local_settings.py
 db.sqlite3
 media
 
-# Node
-
-node_modules
-
 # If your build process includes running collectstatic, then you probably don't need or want to include staticfiles/
 # in your Git repository. Update and uncomment the following line accordingly.
 # <django-project-name>/staticfiles/
@@ -37,7 +33,6 @@ eggs/
 lib64/
 parts/
 dist/
-out/
 sdist/
 var/
 wheels/
@@ -214,6 +209,3 @@ source_docs/
 
 ### Golang ###
 /vendor/
-
-### Docker ###
-docker-compose.override.yml
@@ -1,47 +0,0 @@
-# Prettier Ignorefile
-
-## Static Files
-**/LICENSE
-
-authentik/stages/**/*
-
-## Build asset directories
-coverage
-dist
-out
-.docusaurus
-website/docs/developer-docs/api/**/*
-
-## Environment
-*.env
-
-## Secrets
-*.secrets
-
-## Yarn
-.yarn/**/*
-
-## Node
-node_modules
-coverage
-
-## Configs
-*.log
-*.yaml
-*.yml
-
-# Templates
-# TODO: Rename affected files to *.template.* or similar.
-*.html
-*.mdx
-*.md
-
-## Import order matters
-poly.ts
-src/locale-codes.ts
-src/locales/
-
-# Storybook
-storybook-static/
-.storybook/css-import-maps*
.vscode/extensions.json (vendored, 7 changes)

@@ -2,7 +2,6 @@
     "recommendations": [
         "bashmish.es6-string-css",
         "bpruitt-goddard.mermaid-markdown-syntax-highlighting",
-        "charliermarsh.ruff",
        "dbaeumer.vscode-eslint",
        "EditorConfig.EditorConfig",
        "esbenp.prettier-vscode",
@@ -11,12 +10,12 @@
         "Gruntfuggly.todo-tree",
         "mechatroner.rainbow-csv",
-        "ms-python.black-formatter",
+        "ms-python.black-formatter",
         "ms-python.debugpy",
+        "charliermarsh.ruff",
         "ms-python.python",
         "ms-python.vscode-pylance",
+        "ms-python.black-formatter",
         "redhat.vscode-yaml",
         "Tobermory.es6-string-html",
-        "unifiedjs.vscode-mdx",
+        "unifiedjs.vscode-mdx"
     ]
 }
.vscode/launch.json (vendored, 66 changes)

@@ -2,76 +2,26 @@
     "version": "0.2.0",
     "configurations": [
         {
-            "name": "Debug: Attach Server Core",
-            "type": "debugpy",
+            "name": "Python: PDB attach Server",
+            "type": "python",
             "request": "attach",
             "connect": {
                 "host": "localhost",
-                "port": 9901
+                "port": 6800
             },
             "pathMappings": [
                 {
                     "localRoot": "${workspaceFolder}",
                     "remoteRoot": "."
                 }
             ],
             "justMyCode": true,
             "django": true
         },
         {
-            "name": "Debug: Attach Worker",
-            "type": "debugpy",
+            "name": "Python: PDB attach Worker",
+            "type": "python",
             "request": "attach",
             "connect": {
                 "host": "localhost",
-                "port": 9901
+                "port": 6900
             },
             "pathMappings": [
                 {
                     "localRoot": "${workspaceFolder}",
                     "remoteRoot": "."
                 }
             ],
             "justMyCode": true,
             "django": true
         },
-        {
-            "name": "Debug: Start Server Router",
-            "type": "go",
-            "request": "launch",
-            "mode": "auto",
-            "program": "${workspaceFolder}/cmd/server",
-            "cwd": "${workspaceFolder}"
-        },
-        {
-            "name": "Debug: Start LDAP Outpost",
-            "type": "go",
-            "request": "launch",
-            "mode": "auto",
-            "program": "${workspaceFolder}/cmd/ldap",
-            "cwd": "${workspaceFolder}"
-        },
-        {
-            "name": "Debug: Start Proxy Outpost",
-            "type": "go",
-            "request": "launch",
-            "mode": "auto",
-            "program": "${workspaceFolder}/cmd/proxy",
-            "cwd": "${workspaceFolder}"
-        },
-        {
-            "name": "Debug: Start RAC Outpost",
-            "type": "go",
-            "request": "launch",
-            "mode": "auto",
-            "program": "${workspaceFolder}/cmd/rac",
-            "cwd": "${workspaceFolder}"
-        },
-        {
-            "name": "Debug: Start Radius Outpost",
-            "type": "go",
-            "request": "launch",
-            "mode": "auto",
-            "program": "${workspaceFolder}/cmd/radius",
-            "cwd": "${workspaceFolder}"
-        }
     ]
 }
.vscode/settings.json (vendored, 22 changes)

@@ -1,4 +1,26 @@
 {
+    "cSpell.words": [
+        "akadmin",
+        "asgi",
+        "authentik",
+        "authn",
+        "entra",
+        "goauthentik",
+        "jwe",
+        "jwks",
+        "kubernetes",
+        "oidc",
+        "openid",
+        "passwordless",
+        "plex",
+        "saml",
+        "scim",
+        "slo",
+        "sso",
+        "totp",
+        "traefik",
+        "webauthn"
+    ],
    "todo-tree.tree.showCountsInTree": true,
    "todo-tree.tree.showBadges": true,
    "yaml.customTags": [
.vscode/tasks.json (vendored, 46 changes)

@@ -3,13 +3,8 @@
     "tasks": [
         {
             "label": "authentik/core: make",
-            "command": "uv",
-            "args": [
-                "run",
-                "make",
-                "lint-fix",
-                "lint"
-            ],
+            "command": "poetry",
+            "args": ["run", "make", "lint-fix", "lint"],
             "presentation": {
                 "panel": "new"
             },
@@ -17,12 +12,8 @@
         },
         {
             "label": "authentik/core: run",
-            "command": "uv",
-            "args": [
-                "run",
-                "ak",
-                "server"
-            ],
+            "command": "poetry",
+            "args": ["run", "ak", "server"],
             "group": "build",
             "presentation": {
                 "panel": "dedicated",
@@ -32,17 +23,13 @@
         {
             "label": "authentik/web: make",
             "command": "make",
-            "args": [
-                "web"
-            ],
+            "args": ["web"],
             "group": "build"
         },
         {
             "label": "authentik/web: watch",
             "command": "make",
-            "args": [
-                "web-watch"
-            ],
+            "args": ["web-watch"],
             "group": "build",
             "presentation": {
                 "panel": "dedicated",
@@ -52,26 +39,19 @@
         {
             "label": "authentik: install",
             "command": "make",
-            "args": [
-                "install",
-                "-j4"
-            ],
+            "args": ["install", "-j4"],
             "group": "build"
         },
         {
             "label": "authentik/website: make",
             "command": "make",
-            "args": [
-                "website"
-            ],
+            "args": ["website"],
             "group": "build"
         },
         {
             "label": "authentik/website: watch",
             "command": "make",
-            "args": [
-                "website-watch"
-            ],
+            "args": ["website-watch"],
             "group": "build",
             "presentation": {
                 "panel": "dedicated",
@@ -80,12 +60,8 @@
         },
         {
             "label": "authentik/api: generate",
-            "command": "uv",
-            "args": [
-                "run",
-                "make",
-                "gen"
-            ],
+            "command": "poetry",
+            "args": ["run", "make", "gen"],
            "group": "build"
        }
    ]
@@ -10,7 +10,7 @@ schemas/ @goauthentik/backend
 scripts/ @goauthentik/backend
 tests/ @goauthentik/backend
 pyproject.toml @goauthentik/backend
-uv.lock @goauthentik/backend
+poetry.lock @goauthentik/backend
 go.mod @goauthentik/backend
 go.sum @goauthentik/backend
 # Infrastructure
@@ -23,8 +23,6 @@ docker-compose.yml @goauthentik/infrastructure
 Makefile @goauthentik/infrastructure
 .editorconfig @goauthentik/infrastructure
 CODEOWNERS @goauthentik/infrastructure
-# Web packages
-packages/ @goauthentik/frontend
 # Web
 web/ @goauthentik/frontend
 tests/wdio/ @goauthentik/frontend
@@ -5,7 +5,7 @@
 We as members, contributors, and leaders pledge to make participation in our
 community a harassment-free experience for everyone, regardless of age, body
 size, visible or invisible disability, ethnicity, sex characteristics, gender
-identity and expression, level of experience, education, socioeconomic status,
+identity and expression, level of experience, education, socio-economic status,
 nationality, personal appearance, race, religion, or sexual identity
 and orientation.
Dockerfile (89 changes)

@@ -43,7 +43,7 @@ COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api
 RUN npm run build
 
 # Stage 3: Build go proxy
-FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.24-bookworm AS go-builder
+FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/oss/go/microsoft/golang:1.23-fips-bookworm AS go-builder
 
 ARG TARGETOS
 ARG TARGETARCH
@@ -76,7 +76,7 @@ COPY ./go.sum /go/src/goauthentik.io/go.sum
 RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
     --mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \
     if [ "$TARGETARCH" = "arm64" ]; then export CC=aarch64-linux-gnu-gcc && export CC_FOR_TARGET=gcc-aarch64-linux-gnu; fi && \
-    CGO_ENABLED=1 GOFIPS140=latest GOARM="${TARGETVARIANT#v}" \
+    CGO_ENABLED=1 GOEXPERIMENT="systemcrypto" GOFLAGS="-tags=requirefips" GOARM="${TARGETVARIANT#v}" \
     go build -o /go/authentik ./cmd/server
 
 # Stage 4: MaxMind GeoIP
@@ -93,59 +93,53 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
     mkdir -p /usr/share/GeoIP && \
     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
 
-# Stage 5: Download uv
-FROM ghcr.io/astral-sh/uv:0.6.16 AS uv
-# Stage 6: Base python image
-FROM ghcr.io/goauthentik/fips-python:3.12.10-slim-bookworm-fips AS python-base
-
-ENV VENV_PATH="/ak-root/.venv" \
-    PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \
-    UV_COMPILE_BYTECODE=1 \
-    UV_LINK_MODE=copy \
-    UV_NATIVE_TLS=1 \
-    UV_PYTHON_DOWNLOADS=0
-
-WORKDIR /ak-root/
-
-COPY --from=uv /uv /uvx /bin/
-
-# Stage 7: Python dependencies
-FROM python-base AS python-deps
+# Stage 5: Python dependencies
+FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips AS python-deps
 
 ARG TARGETARCH
 ARG TARGETVARIANT
 
-RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
+WORKDIR /ak-root/poetry
 
-ENV PATH="/root/.cargo/bin:$PATH"
+ENV VENV_PATH="/ak-root/venv" \
+    POETRY_VIRTUALENVS_CREATE=false \
+    PATH="/ak-root/venv/bin:$PATH"
 
-RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
-    apt-get update && \
-    # Required for installing pip packages
-    apt-get install -y --no-install-recommends \
-    # Build essentials
-    build-essential pkg-config libffi-dev git \
-    # cryptography
-    curl \
-    # libxml
-    libxslt-dev zlib1g-dev \
-    # postgresql
-    libpq-dev \
-    # python-kadmin-rs
-    clang libkrb5-dev sccache \
-    # xmlsec
-    libltdl-dev && \
-    curl https://sh.rustup.rs -sSf | sh -s -- -y
-
-ENV UV_NO_BINARY_PACKAGE="cryptography lxml python-kadmin-rs xmlsec"
-
-RUN --mount=type=bind,target=pyproject.toml,src=pyproject.toml \
-    --mount=type=bind,target=uv.lock,src=uv.lock \
-    --mount=type=cache,target=/root/.cache/uv \
-    uv sync --frozen --no-install-project --no-dev
+RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
+
+RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
+    --mount=type=bind,target=./poetry.lock,src=./poetry.lock \
+    --mount=type=cache,target=/root/.cache/pip \
+    --mount=type=cache,target=/root/.cache/pypoetry \
+    pip install --no-cache cffi && \
+    apt-get update && \
+    apt-get install -y --no-install-recommends \
+    build-essential libffi-dev \
+    # Required for cryptography
+    curl pkg-config \
+    # Required for lxml
+    libxslt-dev zlib1g-dev \
+    # Required for xmlsec
+    libltdl-dev \
+    # Required for kadmin
+    sccache clang && \
+    curl https://sh.rustup.rs -sSf | sh -s -- -y && \
+    . "$HOME/.cargo/env" && \
+    python -m venv /ak-root/venv/ && \
+    bash -c "source ${VENV_PATH}/bin/activate && \
+    pip3 install --upgrade pip poetry && \
+    poetry config --local installer.no-binary cryptography,xmlsec,lxml,python-kadmin-rs && \
+    poetry install --only=main --no-ansi --no-interaction --no-root && \
+    pip uninstall cryptography -y && \
+    poetry install --only=main --no-ansi --no-interaction --no-root"
 
-# Stage 8: Run
-FROM python-base AS final-image
+# Stage 6: Run
+FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips AS final-image
 
 ARG VERSION
 ARG GIT_BUILD_HASH
@@ -177,7 +171,7 @@ RUN apt-get update && \
 
 COPY ./authentik/ /authentik
 COPY ./pyproject.toml /
-COPY ./uv.lock /
+COPY ./poetry.lock /
 COPY ./schemas /schemas
 COPY ./locale /locale
 COPY ./tests /tests
@@ -186,7 +180,7 @@ COPY ./blueprints /blueprints
 COPY ./lifecycle/ /lifecycle
 COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf
 COPY --from=go-builder /go/authentik /bin/authentik
-COPY --from=python-deps /ak-root/.venv /ak-root/.venv
+COPY --from=python-deps /ak-root/venv /ak-root/venv
 COPY --from=web-builder /work/web/dist/ /web/dist/
 COPY --from=web-builder /work/web/authentik/ /web/authentik/
 COPY --from=website-builder /work/website/build/ /website/help/
@@ -197,6 +191,9 @@ USER 1000
 ENV TMPDIR=/dev/shm/ \
     PYTHONDONTWRITEBYTECODE=1 \
     PYTHONUNBUFFERED=1 \
+    PATH="/ak-root/venv/bin:/lifecycle:$PATH" \
+    VENV_PATH="/ak-root/venv" \
+    POETRY_VIRTUALENVS_CREATE=false \
     GOFIPS=1
 
 HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]
Makefile (80 changes)

@@ -4,7 +4,7 @@
 PWD = $(shell pwd)
 UID = $(shell id -u)
 GID = $(shell id -g)
-NPM_VERSION = $(shell python -m scripts.generate_semver)
+NPM_VERSION = $(shell python -m scripts.npm_version)
 PY_SOURCES = authentik tests scripts lifecycle .github
 DOCKER_IMAGE ?= "authentik:test"
 
@@ -12,9 +12,23 @@ GEN_API_TS = "gen-ts-api"
 GEN_API_PY = "gen-py-api"
 GEN_API_GO = "gen-go-api"
 
-pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null)
-pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null)
-pg_name := $(shell uv run python -m authentik.lib.config postgresql.name 2>/dev/null)
+pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null)
+pg_host := $(shell python -m authentik.lib.config postgresql.host 2>/dev/null)
+pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)
+
+CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
+		-I .github/codespell-words.txt \
+		-S 'web/src/locales/**' \
+		-S 'website/docs/developer-docs/api/reference/**' \
+		authentik \
+		internal \
+		cmd \
+		web/src \
+		website/src \
+		website/blog \
+		website/docs \
+		website/integrations \
+		website/src
 
 all: lint-fix lint test gen web ## Lint, build, and test everything
 
@@ -31,38 +45,44 @@ help: ## Show this help
 go-test:
 	go test -timeout 0 -v -race -cover ./...
 
+test-docker: ## Run all tests in a docker-compose
+	echo "PG_PASS=$(shell openssl rand 32 | base64 -w 0)" >> .env
+	echo "AUTHENTIK_SECRET_KEY=$(shell openssl rand 32 | base64 -w 0)" >> .env
+	docker compose pull -q
+	docker compose up --no-start
+	docker compose start postgresql redis
+	docker compose run -u root server test-all
+	rm -f .env
+
 test: ## Run the server tests and produce a coverage report (locally)
-	uv run coverage run manage.py test --keepdb authentik
-	uv run coverage html
-	uv run coverage report
+	coverage run manage.py test --keepdb authentik
+	coverage html
+	coverage report
 
 lint-fix: lint-codespell ## Lint and automatically fix errors in the python source code. Reports spelling errors.
-	uv run black $(PY_SOURCES)
-	uv run ruff check --fix $(PY_SOURCES)
+	black $(PY_SOURCES)
+	ruff check --fix $(PY_SOURCES)
 
 lint-codespell: ## Reports spelling errors.
-	uv run codespell -w
+	codespell -w $(CODESPELL_ARGS)
 
 lint: ## Lint the python and golang sources
-	uv run bandit -c pyproject.toml -r $(PY_SOURCES)
+	bandit -r $(PY_SOURCES) -x web/node_modules -x tests/wdio/node_modules -x website/node_modules
 	golangci-lint run -v
 
 core-install:
-	uv sync --frozen
+	poetry install
 
 migrate: ## Run the Authentik Django server's migrations
-	uv run python -m lifecycle.migrate
+	python -m lifecycle.migrate
 
 i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation into files to send to a translation service
 
 aws-cfn:
 	cd lifecycle/aws && npm run aws-cfn
 
-run: ## Run the main authentik server process
-	uv run ak server
-
 core-i18n-extract:
-	uv run ak makemessages \
+	ak makemessages \
		--add-location file \
		--no-obsolete \
		--ignore web \
@@ -93,11 +113,11 @@ gen-build: ## Extract the schema from the database
 	AUTHENTIK_DEBUG=true \
 	AUTHENTIK_TENANTS__ENABLED=true \
 	AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
-	uv run ak make_blueprint_schema > blueprints/schema.json
+	ak make_blueprint_schema > blueprints/schema.json
 	AUTHENTIK_DEBUG=true \
 	AUTHENTIK_TENANTS__ENABLED=true \
 	AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
-	uv run ak spectacular --file schema.yml
+	ak spectacular --file schema.yml
 
 gen-changelog: ## (Release) generate the changelog based from the commits since the last tag
 	git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
@@ -148,7 +168,7 @@ gen-client-py: gen-clean-py ## Build and install the authentik API for Python
 	docker run \
 		--rm -v ${PWD}:/local \
 		--user ${UID}:${GID} \
-		docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
+		docker.io/openapitools/openapi-generator-cli:v7.4.0 generate \
		-i /local/schema.yml \
		-g python \
		-o /local/${GEN_API_PY} \
@@ -176,7 +196,7 @@ gen-client-go: gen-clean-go ## Build and install the authentik API for Golang
 	rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/
 
 gen-dev-config: ## Generate a local development config file
-	uv run scripts/generate_config.py
+	python -m scripts.generate_config
 
 gen: gen-build gen-client-ts
 
@@ -243,9 +263,6 @@ docker: ## Build a docker image of the current source tree
 	mkdir -p ${GEN_API_TS}
 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}
 
-test-docker:
-	BUILD=true ./scripts/test_docker.sh
-
 #########################
 ## CI
 #########################
@@ -257,21 +274,16 @@ ci--meta-debug:
 	node --version
 
 ci-black: ci--meta-debug
-	uv run black --check $(PY_SOURCES)
+	black --check $(PY_SOURCES)
 
 ci-ruff: ci--meta-debug
-	uv run ruff check $(PY_SOURCES)
+	ruff check $(PY_SOURCES)
 
 ci-codespell: ci--meta-debug
-	uv run codespell -s
+	codespell $(CODESPELL_ARGS) -s
 
 ci-bandit: ci--meta-debug
-	uv run bandit -r $(PY_SOURCES)
+	bandit -r $(PY_SOURCES)
 
 ci-pending-migrations: ci--meta-debug
-	uv run ak makemigrations --check
-
-ci-test: ci--meta-debug
-	uv run coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik
-	uv run coverage report
-	uv run coverage xml
+	ak makemigrations --check
@@ -2,7 +2,7 @@ authentik takes security very seriously. We follow the rules of [responsible disclosure].
 
 ## Independent audits and pentests
 
-We are committed to engaging in regular pentesting and security audits of authentik. Defining and adhering to a cadence of external testing ensures a stronger probability that our code base, our features, and our architecture is as secure and non-exploitable as possible. For more details about specific audits and pentests, refer to "Audits and Certificates" in our [Security documentation](https://docs.goauthentik.io/docs/security).
+We are committed to engaging in regular pentesting and security audits of authentik. Defining and adhering to a cadence of external testing ensures a stronger probability that our code base, our features, and our architecture is as secure and non-exploitable as possible. For more details about specfic audits and pentests, refer to "Audits and Certificates" in our [Security documentation](https://docs.goauthentik.io/docs/security).
 
 ## What authentik classifies as a CVE
 
@@ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardening
 
 | Version   | Supported |
 | --------- | --------- |
+| 2024.10.x | ✅        |
 | 2024.12.x | ✅        |
-| 2025.2.x  | ✅        |
 
 ## Reporting a Vulnerability
@@ -2,7 +2,7 @@
 
 from os import environ
 
-__version__ = "2025.4.0"
+__version__ = "2024.12.2"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
@@ -59,7 +59,7 @@ class SystemInfoSerializer(PassiveSerializer):
             if not isinstance(value, str):
                 continue
             actual_value = value
-            if raw_session is not None and raw_session in actual_value:
+            if raw_session in actual_value:
                actual_value = actual_value.replace(
                    raw_session, SafeExceptionReporterFilter.cleansed_substitute
                )
@@ -7,7 +7,7 @@ from rest_framework.exceptions import ValidationError
 from rest_framework.fields import CharField, DateTimeField
 from rest_framework.request import Request
 from rest_framework.response import Response
-from rest_framework.serializers import ListSerializer
+from rest_framework.serializers import ListSerializer, ModelSerializer
 from rest_framework.viewsets import ModelViewSet
 
 from authentik.blueprints.models import BlueprintInstance
@@ -15,7 +15,7 @@ from authentik.blueprints.v1.importer import Importer
 from authentik.blueprints.v1.oci import OCI_PREFIX
 from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
 from authentik.core.api.used_by import UsedByMixin
-from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer
+from authentik.core.api.utils import JSONDictField, PassiveSerializer
 from authentik.rbac.decorators import permission_required
@@ -36,7 +36,6 @@ from authentik.core.models import (
     GroupSourceConnection,
     PropertyMapping,
     Provider,
-    Session,
     Source,
     User,
     UserSourceConnection,
@@ -51,7 +50,7 @@ from authentik.enterprise.providers.microsoft_entra.models import (
     MicrosoftEntraProviderGroup,
     MicrosoftEntraProviderUser,
 )
-from authentik.enterprise.providers.ssf.models import StreamEvent
+from authentik.enterprise.providers.rac.models import ConnectionToken
 from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import (
     EndpointDevice,
     EndpointDeviceConnection,
@@ -72,7 +71,6 @@ from authentik.providers.oauth2.models import (
     DeviceToken,
     RefreshToken,
 )
-from authentik.providers.rac.models import ConnectionToken
 from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser
 from authentik.rbac.models import Role
 from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser
@@ -109,7 +107,6 @@ def excluded_models() -> list[type[Model]]:
         Policy,
         PolicyBindingModel,
         # Classes that have other dependencies
-        Session,
         AuthenticatedSession,
         # Classes which are only internally managed
         # FIXME: these shouldn't need to be explicitly listed, but rather based off of a mixin
@@ -134,7 +131,6 @@ def excluded_models() -> list[type[Model]]:
         EndpointDevice,
         EndpointDeviceConnection,
         DeviceToken,
-        StreamEvent,
    )
@@ -49,8 +49,6 @@ class BrandSerializer(ModelSerializer):
             "branding_title",
             "branding_logo",
             "branding_favicon",
-            "branding_custom_css",
-            "branding_default_flow_background",
             "flow_authentication",
             "flow_invalidation",
             "flow_recovery",
@@ -88,7 +86,6 @@ class CurrentBrandSerializer(PassiveSerializer):
     branding_title = CharField()
     branding_logo = CharField(source="branding_logo_url")
     branding_favicon = CharField(source="branding_favicon_url")
-    branding_custom_css = CharField()
     ui_footer_links = ListField(
         child=FooterLinkSerializer(),
         read_only=True,
@@ -128,7 +125,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet):
         "branding_title",
         "branding_logo",
         "branding_favicon",
-        "branding_default_flow_background",
        "flow_authentication",
        "flow_invalidation",
        "flow_recovery",
@@ -1,35 +0,0 @@
-# Generated by Django 5.0.12 on 2025-02-22 01:51
-
-from pathlib import Path
-
-from django.db import migrations, models
-from django.apps.registry import Apps
-
-from django.db.backends.base.schema import BaseDatabaseSchemaEditor
-
-
-def migrate_custom_css(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
-    Brand = apps.get_model("authentik_brands", "brand")
-
-    db_alias = schema_editor.connection.alias
-
-    path = Path("/web/dist/custom.css")
-    if not path.exists():
-        return
-    css = path.read_text()
-    Brand.objects.using(db_alias).update(branding_custom_css=css)
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("authentik_brands", "0007_brand_default_application"),
-    ]
-
-    operations = [
-        migrations.AddField(
-            model_name="brand",
-            name="branding_custom_css",
-            field=models.TextField(blank=True, default=""),
-        ),
-        migrations.RunPython(migrate_custom_css),
-    ]
@@ -1,18 +0,0 @@
-# Generated by Django 5.0.13 on 2025-03-19 22:54
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("authentik_brands", "0008_brand_branding_custom_css"),
-    ]
-
-    operations = [
-        migrations.AddField(
-            model_name="brand",
-            name="branding_default_flow_background",
-            field=models.TextField(default="/static/dist/assets/images/flow_background.jpg"),
-        ),
-    ]
@@ -33,10 +33,6 @@ class Brand(SerializerModel):
 
     branding_logo = models.TextField(default="/static/dist/assets/icons/icon_left_brand.svg")
     branding_favicon = models.TextField(default="/static/dist/assets/icons/icon.png")
-    branding_custom_css = models.TextField(default="", blank=True)
-    branding_default_flow_background = models.TextField(
-        default="/static/dist/assets/images/flow_background.jpg"
-    )
 
     flow_authentication = models.ForeignKey(
         Flow, null=True, on_delete=models.SET_NULL, related_name="brand_authentication"
@@ -88,12 +84,6 @@ class Brand(SerializerModel):
             return CONFIG.get("web.path", "/")[:-1] + self.branding_favicon
         return self.branding_favicon
 
-    def branding_default_flow_background_url(self) -> str:
-        """Get branding_default_flow_background with the correct prefix"""
-        if self.branding_default_flow_background.startswith("/static"):
-            return CONFIG.get("web.path", "/")[:-1] + self.branding_default_flow_background
-        return self.branding_default_flow_background
-
    @property
    def serializer(self) -> Serializer:
        from authentik.brands.api import BrandSerializer
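Editor's note: the deleted `branding_default_flow_background_url` helper applies the same prefixing rule as the favicon accessor above it. A sketch of that rule in isolation, assuming a hypothetical subpath deployment where `web.path` is configured as "/auth/":

```python
def prefixed(web_path: str, asset: str) -> str:
    # Mirrors CONFIG.get("web.path", "/")[:-1] + asset for /static assets;
    # absolute URLs pass through untouched.
    if asset.startswith("/static"):
        return web_path[:-1] + asset
    return asset

assert prefixed("/auth/", "/static/dist/assets/images/flow_background.jpg") == (
    "/auth/static/dist/assets/images/flow_background.jpg"
)
# With the default web.path of "/", the prefix is empty and paths are unchanged.
assert prefixed("/", "/static/dist/assets/icons/icon.png") == "/static/dist/assets/icons/icon.png"
assert prefixed("/auth/", "https://goauthentik.io/img/icon.png") == "https://goauthentik.io/img/icon.png"
```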
@@ -24,7 +24,6 @@ class TestBrands(APITestCase):
                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
                 "branding_favicon": "/static/dist/assets/icons/icon.png",
                 "branding_title": "authentik",
-                "branding_custom_css": "",
                 "matched_domain": brand.domain,
                 "ui_footer_links": [],
                 "ui_theme": Themes.AUTOMATIC,
@@ -44,7 +43,6 @@ class TestBrands(APITestCase):
                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
                 "branding_favicon": "/static/dist/assets/icons/icon.png",
                 "branding_title": "custom",
-                "branding_custom_css": "",
                 "matched_domain": "bar.baz",
                 "ui_footer_links": [],
                 "ui_theme": Themes.AUTOMATIC,
@@ -61,7 +59,6 @@ class TestBrands(APITestCase):
                 "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
                 "branding_favicon": "/static/dist/assets/icons/icon.png",
                 "branding_title": "authentik",
-                "branding_custom_css": "",
                 "matched_domain": "fallback",
                 "ui_footer_links": [],
                 "ui_theme": Themes.AUTOMATIC,
@@ -124,27 +121,3 @@ class TestBrands(APITestCase):
                 "subject": None,
             },
         )
-
-    def test_branding_url(self):
-        """Test branding attributes return correct values"""
-        brand = create_test_brand()
-        brand.branding_default_flow_background = "https://goauthentik.io/img/icon.png"
-        brand.branding_favicon = "https://goauthentik.io/img/icon.png"
-        brand.branding_logo = "https://goauthentik.io/img/icon.png"
-        brand.save()
-        self.assertEqual(
-            brand.branding_default_flow_background_url(), "https://goauthentik.io/img/icon.png"
-        )
-        self.assertJSONEqual(
-            self.client.get(reverse("authentik_api:brand-current")).content.decode(),
-            {
-                "branding_logo": "https://goauthentik.io/img/icon.png",
-                "branding_favicon": "https://goauthentik.io/img/icon.png",
-                "branding_title": "authentik",
-                "branding_custom_css": "",
-                "matched_domain": brand.domain,
-                "ui_footer_links": [],
-                "ui_theme": Themes.AUTOMATIC,
-                "default_locale": "",
-            },
-        )
@@ -46,7 +46,7 @@ LOGGER = get_logger()
 
 def user_app_cache_key(user_pk: str, page_number: int | None = None) -> str:
     """Cache key where application list for user is saved"""
-    key = f"{CACHE_PREFIX}app_access/{user_pk}"
+    key = f"{CACHE_PREFIX}/app_access/{user_pk}"
    if page_number:
        key += f"/{page_number}"
    return key
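Editor's note: the one-character difference here is the slash between the prefix and `app_access`. Whether the head side produces a doubled separator depends on whether `CACHE_PREFIX` itself ends with one; a quick illustration (the prefix value below is hypothetical, chosen only to show the effect):

```python
CACHE_PREFIX = "goauthentik.io/"  # hypothetical; only the trailing "/" matters here

def key_base(user_pk: str) -> str:  # base side of the hunk
    return f"{CACHE_PREFIX}app_access/{user_pk}"

def key_head(user_pk: str) -> str:  # head side of the hunk
    return f"{CACHE_PREFIX}/app_access/{user_pk}"

print(key_base("42"))  # goauthentik.io/app_access/42
print(key_head("42"))  # goauthentik.io//app_access/42  (doubled separator)
```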
@@ -5,7 +5,6 @@ from typing import TypedDict
from rest_framework import mixins
from rest_framework.fields import SerializerMethodField
from rest_framework.request import Request
from rest_framework.serializers import CharField, DateTimeField, IPAddressField
from rest_framework.viewsets import GenericViewSet
from ua_parser import user_agent_parser

@@ -55,11 +54,6 @@ class UserAgentDict(TypedDict):
class AuthenticatedSessionSerializer(ModelSerializer):
    """AuthenticatedSession Serializer"""

    expires = DateTimeField(source="session.expires", read_only=True)
    last_ip = IPAddressField(source="session.last_ip", read_only=True)
    last_user_agent = CharField(source="session.last_user_agent", read_only=True)
    last_used = DateTimeField(source="session.last_used", read_only=True)

    current = SerializerMethodField()
    user_agent = SerializerMethodField()
    geo_ip = SerializerMethodField()
@@ -68,19 +62,19 @@ class AuthenticatedSessionSerializer(ModelSerializer):
    def get_current(self, instance: AuthenticatedSession) -> bool:
        """Check if session is currently active session"""
        request: Request = self.context["request"]
        return request._request.session.session_key == instance.session.session_key
        return request._request.session.session_key == instance.session_key

    def get_user_agent(self, instance: AuthenticatedSession) -> UserAgentDict:
        """Get parsed user agent"""
        return user_agent_parser.Parse(instance.session.last_user_agent)
        return user_agent_parser.Parse(instance.last_user_agent)

    def get_geo_ip(self, instance: AuthenticatedSession) -> GeoIPDict | None:  # pragma: no cover
        """Get GeoIP Data"""
        return GEOIP_CONTEXT_PROCESSOR.city_dict(instance.session.last_ip)
        return GEOIP_CONTEXT_PROCESSOR.city_dict(instance.last_ip)

    def get_asn(self, instance: AuthenticatedSession) -> ASNDict | None:  # pragma: no cover
        """Get ASN Data"""
        return ASN_CONTEXT_PROCESSOR.asn_dict(instance.session.last_ip)
        return ASN_CONTEXT_PROCESSOR.asn_dict(instance.last_ip)

    class Meta:
        model = AuthenticatedSession
@@ -96,7 +90,6 @@ class AuthenticatedSessionSerializer(ModelSerializer):
            "last_used",
            "expires",
        ]
        extra_args = {"uuid": {"read_only": True}}


class AuthenticatedSessionViewSet(
@@ -108,10 +101,9 @@ class AuthenticatedSessionViewSet(
):
    """AuthenticatedSession Viewset"""

    lookup_field = "uuid"
    queryset = AuthenticatedSession.objects.select_related("session").all()
    queryset = AuthenticatedSession.objects.all()
    serializer_class = AuthenticatedSessionSerializer
    search_fields = ["user__username", "session__last_ip", "session__last_user_agent"]
    filterset_fields = ["user__username", "session__last_ip", "session__last_user_agent"]
    search_fields = ["user__username", "last_ip", "last_user_agent"]
    filterset_fields = ["user__username", "last_ip", "last_user_agent"]
    ordering = ["user__username"]
    owner_field = "user"
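Note: both sides of this hunk expose the same field names; they differ only in whether DRF's source= argument has to traverse the related session object. A minimal pure-Python sketch of the dotted-path lookup that source="session.last_ip" performs:

    from functools import reduce

    def resolve_source(instance, source: str):
        # DRF splits "session.last_ip" on "." and follows attributes left to right
        return reduce(getattr, source.split("."), instance)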
@@ -3,7 +3,6 @@
from django.utils.translation import gettext_lazy as _
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, extend_schema
from guardian.shortcuts import get_objects_for_user
from rest_framework.fields import (
    BooleanField,
    CharField,
@@ -17,6 +16,7 @@ from rest_framework.viewsets import ViewSet

from authentik.core.api.utils import MetaNameSerializer
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice
from authentik.rbac.decorators import permission_required
from authentik.stages.authenticator import device_classes, devices_for_user
from authentik.stages.authenticator.models import Device
from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
@@ -73,9 +73,7 @@ class AdminDeviceViewSet(ViewSet):
    def get_devices(self, **kwargs):
        """Get all devices in all child classes"""
        for model in device_classes():
            device_set = get_objects_for_user(
                self.request.user, f"{model._meta.app_label}.view_{model._meta.model_name}", model
            ).filter(**kwargs)
            device_set = model.objects.filter(**kwargs)
            yield from device_set

    @extend_schema(
@@ -88,6 +86,10 @@ class AdminDeviceViewSet(ViewSet):
        ],
        responses={200: DeviceSerializer(many=True)},
    )
    @permission_required(
        None,
        [f"{model._meta.app_label}.view_{model._meta.model_name}" for model in device_classes()],
    )
    def list(self, request: Request) -> Response:
        """Get all devices for current user"""
        kwargs = {}
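Note: one side of this hunk filters each device queryset through django-guardian's get_objects_for_user, the other returns every matching row and relies on a viewset-level @permission_required check instead. A hedged sketch of the difference (model and request are placeholders; codenames follow Django's "<app_label>.view_<model_name>" convention):

    from guardian.shortcuts import get_objects_for_user

    def visible_devices(request, model, **kwargs):
        # Object-level filtering: only rows the user holds view permission on
        perm = f"{model._meta.app_label}.view_{model._meta.model_name}"
        return get_objects_for_user(request.user, perm, model).filter(**kwargs)

    def all_devices(model, **kwargs):
        # No per-object check: every row matching the filter is returned
        return model.objects.filter(**kwargs)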
@@ -4,7 +4,6 @@ from json import loads

from django.db.models import Prefetch
from django.http import Http404
from django.utils.translation import gettext as _
from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
from django_filters.filterset import FilterSet
from drf_spectacular.utils import (
@@ -82,37 +81,9 @@ class GroupSerializer(ModelSerializer):
        if not self.instance or not parent:
            return parent
        if str(parent.group_uuid) == str(self.instance.group_uuid):
            raise ValidationError(_("Cannot set group as parent of itself."))
            raise ValidationError("Cannot set group as parent of itself.")
        return parent

    def validate_is_superuser(self, superuser: bool):
        """Ensure that the user creating this group has permissions to set the superuser flag"""
        request: Request = self.context.get("request", None)
        if not request:
            return superuser
        # If we're updating an instance, and the state hasn't changed, we don't need to check perms
        if self.instance and superuser == self.instance.is_superuser:
            return superuser
        user: User = request.user
        perm = (
            "authentik_core.enable_group_superuser"
            if superuser
            else "authentik_core.disable_group_superuser"
        )
        has_perm = user.has_perm(perm)
        if self.instance and not has_perm:
            has_perm = user.has_perm(perm, self.instance)
        if not has_perm:
            raise ValidationError(
                _(
                    (
                        "User does not have permission to set "
                        "superuser status to {superuser_status}."
                    ).format_map({"superuser_status": superuser})
                )
            )
        return superuser

    class Meta:
        model = Group
        fields = [
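Note: validate_is_superuser above checks the global permission first and only falls back to an object-level check when updating an existing group. A minimal sketch of that fallback (names are placeholders):

    def has_perm_with_object_fallback(user, perm: str, instance=None) -> bool:
        # Global permission first (no object argument)
        if user.has_perm(perm):
            return True
        # Object-level fallback, e.g. via django-guardian, only for existing objects
        return instance is not None and user.has_perm(perm, instance)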
@@ -5,7 +5,6 @@ from collections.abc import Iterable
from drf_spectacular.utils import OpenApiResponse, extend_schema
from rest_framework import mixins
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField, ReadOnlyField, SerializerMethodField
from rest_framework.parsers import MultiPartParser
from rest_framework.request import Request
@@ -86,7 +85,7 @@ class SourceViewSet(
    serializer_class = SourceSerializer
    lookup_field = "slug"
    search_fields = ["slug", "name"]
    filterset_fields = ["slug", "name", "managed", "pbm_uuid"]
    filterset_fields = ["slug", "name", "managed"]

    def get_queryset(self):  # pragma: no cover
        return Source.objects.select_subclasses()
@@ -155,17 +154,6 @@ class SourceViewSet(
            matching_sources.append(source_settings.validated_data)
        return Response(matching_sources)

    def destroy(self, request: Request, *args, **kwargs):
        """Prevent deletion of built-in sources"""
        instance: Source = self.get_object()

        if instance.managed == Source.MANAGED_INBUILT:
            raise ValidationError(
                {"detail": "Built-in sources cannot be deleted"}, code="protected"
            )

        return super().destroy(request, *args, **kwargs)


class UserSourceConnectionSerializer(SourceSerializer):
    """User source connection"""
@@ -179,13 +167,10 @@ class UserSourceConnectionSerializer(SourceSerializer):
            "user",
            "source",
            "source_obj",
            "identifier",
            "created",
            "last_updated",
        ]
        extra_kwargs = {
            "created": {"read_only": True},
            "last_updated": {"read_only": True},
        }


@@ -202,7 +187,7 @@ class UserSourceConnectionViewSet(
    queryset = UserSourceConnection.objects.all()
    serializer_class = UserSourceConnectionSerializer
    filterset_fields = ["user", "source__slug"]
    search_fields = ["user__username", "source__slug", "identifier"]
    search_fields = ["source__slug"]
    ordering = ["source__slug", "pk"]
    owner_field = "user"

@@ -221,11 +206,9 @@ class GroupSourceConnectionSerializer(SourceSerializer):
            "source_obj",
            "identifier",
            "created",
            "last_updated",
        ]
        extra_kwargs = {
            "created": {"read_only": True},
            "last_updated": {"read_only": True},
        }


@@ -242,5 +225,6 @@ class GroupSourceConnectionViewSet(
    queryset = GroupSourceConnection.objects.all()
    serializer_class = GroupSourceConnectionSerializer
    filterset_fields = ["group", "source__slug"]
    search_fields = ["group__name", "source__slug", "identifier"]
    search_fields = ["source__slug"]
    ordering = ["source__slug", "pk"]
    owner_field = "user"
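Note: the destroy override in this hunk is a standard DRF guard for managed objects. A minimal sketch of the pattern (the constant value is taken from the models hunk further down):

    from rest_framework.exceptions import ValidationError

    MANAGED_INBUILT = "goauthentik.io/sources/inbuilt"

    def guard_builtin_delete(instance):
        # Refuse to delete objects authentik itself manages
        if instance.managed == MANAGED_INBUILT:
            raise ValidationError({"detail": "Built-in sources cannot be deleted"}, code="protected")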
@@ -6,6 +6,8 @@ from typing import Any

from django.contrib.auth import update_session_auth_hash
from django.contrib.auth.models import Permission
from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.core.cache import cache
from django.db.models.functions import ExtractHour
from django.db.transaction import atomic
from django.db.utils import IntegrityError
@@ -69,8 +71,8 @@ from authentik.core.middleware import (
from authentik.core.models import (
    USER_ATTRIBUTE_TOKEN_EXPIRING,
    USER_PATH_SERVICE_ACCOUNT,
    AuthenticatedSession,
    Group,
    Session,
    Token,
    TokenIntents,
    User,
@@ -224,7 +226,6 @@ class UserSerializer(ModelSerializer):
            "name",
            "is_active",
            "last_login",
            "date_joined",
            "is_superuser",
            "groups",
            "groups_obj",
@@ -235,12 +236,9 @@ class UserSerializer(ModelSerializer):
            "path",
            "type",
            "uuid",
            "password_change_date",
        ]
        extra_kwargs = {
            "name": {"allow_blank": True},
            "date_joined": {"read_only": True},
            "password_change_date": {"read_only": True},
        }


@@ -373,7 +371,7 @@ class UsersFilter(FilterSet):
        method="filter_attributes",
    )

    is_superuser = BooleanFilter(field_name="ak_groups", method="filter_is_superuser")
    is_superuser = BooleanFilter(field_name="ak_groups", lookup_expr="is_superuser")
    uuid = UUIDFilter(field_name="uuid")

    path = CharFilter(field_name="path")
@@ -391,11 +389,6 @@ class UsersFilter(FilterSet):
        queryset=Group.objects.all().order_by("name"),
    )

    def filter_is_superuser(self, queryset, name, value):
        if value:
            return queryset.filter(ak_groups__is_superuser=True).distinct()
        return queryset.exclude(ak_groups__is_superuser=True).distinct()

    def filter_attributes(self, queryset, name, value):
        """Filter attributes by query args"""
        try:
@@ -772,6 +765,9 @@ class UserViewSet(UsedByMixin, ModelViewSet):
        response = super().partial_update(request, *args, **kwargs)
        instance: User = self.get_object()
        if not instance.is_active:
            Session.objects.filter(authenticatedsession__user=instance).delete()
            sessions = AuthenticatedSession.objects.filter(user=instance)
            session_ids = sessions.values_list("session_key", flat=True)
            cache.delete_many(f"{KEY_PREFIX}{session}" for session in session_ids)
            sessions.delete()
            LOGGER.debug("Deleted user's sessions", user=instance.username)
        return response
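Note: when a user is deactivated, one side of this hunk deletes the new Session rows through the reverse relation, while the other evicts cache-backed sessions and then removes the bookkeeping rows. A sketch of the cache-eviction variant (the queryset argument is illustrative):

    from django.contrib.sessions.backends.cache import KEY_PREFIX
    from django.core.cache import cache

    def purge_cached_sessions(sessions):
        session_ids = sessions.values_list("session_key", flat=True)
        # Evict the cache entries first, then drop the rows that reference them
        cache.delete_many(f"{KEY_PREFIX}{session_id}" for session_id in session_ids)
        sessions.delete()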
@@ -20,8 +20,6 @@ from rest_framework.serializers import (
    raise_errors_on_nested_writes,
)

from authentik.rbac.permissions import assign_initial_permissions


def is_dict(value: Any):
    """Ensure a value is a dictionary, useful for JSONFields"""
@@ -31,14 +29,6 @@ def is_dict(value: Any):


class ModelSerializer(BaseModelSerializer):
    def create(self, validated_data):
        instance = super().create(validated_data)

        request = self.context.get("request")
        if request and hasattr(request, "user") and not request.user.is_anonymous:
            assign_initial_permissions(request.user, instance)

        return instance

    def update(self, instance: Model, validated_data):
        raise_errors_on_nested_writes("update", self, validated_data)
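Note: the create override in this hunk grants the requesting user permissions on every object created through the API. A hedged sketch of what such a helper might do (the body of assign_initial_permissions is assumed here, not taken from the diff):

    from guardian.shortcuts import assign_perm

    def assign_initial_permissions_sketch(user, instance):
        # Hypothetical: grant the creator object-level CRUD on the new instance
        opts = instance._meta
        for action in ("view", "change", "delete"):
            assign_perm(f"{opts.app_label}.{action}_{opts.model_name}", user, instance)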
@@ -32,5 +32,5 @@ class AuthentikCoreConfig(ManagedAppConfig):
                "name": "authentik Built-in",
                "slug": "authentik-built-in",
            },
            managed=Source.MANAGED_INBUILT,
            managed="goauthentik.io/sources/inbuilt",
        )
@@ -24,15 +24,6 @@ class InbuiltBackend(ModelBackend):
        self.set_method("password", request)
        return user

    async def aauthenticate(
        self, request: HttpRequest, username: str | None, password: str | None, **kwargs: Any
    ) -> User | None:
        user = await super().aauthenticate(request, username=username, password=password, **kwargs)
        if not user:
            return None
        self.set_method("password", request)
        return user

    def set_method(self, method: str, request: HttpRequest | None, **kwargs):
        """Set method data on current flow, if possible"""
        if not request:
@@ -1,15 +0,0 @@
"""Delete all sessions"""

from importlib import import_module

from django.conf import settings

from authentik.tenants.management import TenantCommand


class Command(TenantCommand):
    """Delete all sessions"""

    def handle_per_tenant(self, **options):
        engine = import_module(settings.SESSION_ENGINE)
        engine.SessionStore.clear_expired()
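Note: the deleted management command resolves whichever session engine is configured and asks it to clear expired sessions, which works for any backend that implements clear_expired(). A minimal sketch of the same pattern in a plain Django project:

    from importlib import import_module

    from django.conf import settings

    def clear_expired_sessions():
        # SESSION_ENGINE is a dotted module path, e.g. "django.contrib.sessions.backends.db"
        engine = import_module(settings.SESSION_ENGINE)
        engine.SessionStore.clear_expired()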
@@ -5,7 +5,6 @@ from typing import TextIO
from daphne.management.commands.runserver import Command as RunServer
from daphne.server import Server

from authentik.lib.debug import start_debug_server
from authentik.root.signals import post_startup, pre_startup, startup


@@ -14,7 +13,6 @@ class SignalServer(Server):

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        start_debug_server()

        def ready_callable():
            pre_startup.send(sender=self)
@@ -9,7 +9,6 @@ from django.db import close_old_connections
from structlog.stdlib import get_logger

from authentik.lib.config import CONFIG
from authentik.lib.debug import start_debug_server
from authentik.root.celery import CELERY_APP

LOGGER = get_logger()
@@ -29,7 +28,10 @@ class Command(BaseCommand):
    def handle(self, **options):
        LOGGER.debug("Celery options", **options)
        close_old_connections()
        start_debug_server()
        if CONFIG.get_bool("remote_debug"):
            import debugpy

            debugpy.listen(("0.0.0.0", 6900))  # nosec
        worker: Worker = CELERY_APP.Worker(
            no_color=False,
            quiet=True,
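Note: the older side of this hunk gates a debugpy listener on a config flag; the newer side moves that behind a shared start_debug_server() helper. A sketch of the flag-gated variant (port number taken from the hunk):

    def maybe_start_remote_debug(remote_debug: bool, port: int = 6900):
        if remote_debug:
            import debugpy  # imported lazily so production paths never load it

            debugpy.listen(("0.0.0.0", port))  # nosec - intentionally binds all interfaces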
@@ -2,14 +2,9 @@

from collections.abc import Callable
from contextvars import ContextVar
from functools import partial
from uuid import uuid4

from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpRequest, HttpResponse
from django.utils.deprecation import MiddlewareMixin
from django.utils.functional import SimpleLazyObject
from django.utils.translation import override
from sentry_sdk.api import set_tag
from structlog.contextvars import STRUCTLOG_KEY_PREFIX
@@ -25,40 +20,6 @@ CTX_HOST = ContextVar[str | None](STRUCTLOG_KEY_PREFIX + "host", default=None)
CTX_AUTH_VIA = ContextVar[str | None](STRUCTLOG_KEY_PREFIX + KEY_AUTH_VIA, default=None)


def get_user(request):
    if not hasattr(request, "_cached_user"):
        user = None
        if (authenticated_session := request.session.get("authenticatedsession", None)) is not None:
            user = authenticated_session.user
        request._cached_user = user or AnonymousUser()
    return request._cached_user


async def aget_user(request):
    if not hasattr(request, "_cached_user"):
        user = None
        if (
            authenticated_session := await request.session.aget("authenticatedsession", None)
        ) is not None:
            user = authenticated_session.user
        request._cached_user = user or AnonymousUser()
    return request._cached_user


class AuthenticationMiddleware(MiddlewareMixin):
    def process_request(self, request):
        if not hasattr(request, "session"):
            raise ImproperlyConfigured(
                "The Django authentication middleware requires session "
                "middleware to be installed. Edit your MIDDLEWARE setting to "
                "insert "
                "'authentik.root.middleware.SessionMiddleware' before "
                "'authentik.core.middleware.AuthenticationMiddleware'."
            )
        request.user = SimpleLazyObject(lambda: get_user(request))
        request.auser = partial(aget_user, request)


class ImpersonateMiddleware:
    """Middleware to impersonate users"""
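Note: the removed middleware wraps user resolution in SimpleLazyObject, so the session lookup only happens when request.user is first touched. A minimal sketch of that deferral:

    from django.utils.functional import SimpleLazyObject

    def attach_lazy_user(request, get_user):
        # get_user(request) does not run here; it runs on first attribute
        # access of request.user, so cheap requests never pay for the lookup
        request.user = SimpleLazyObject(lambda: get_user(request))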
@@ -1,26 +0,0 @@
# Generated by Django 5.0.11 on 2025-01-30 23:55

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0042_authenticatedsession_authentik_c_expires_08251d_idx_and_more"),
    ]

    operations = [
        migrations.AlterModelOptions(
            name="group",
            options={
                "permissions": [
                    ("add_user_to_group", "Add user to group"),
                    ("remove_user_from_group", "Remove user from group"),
                    ("enable_group_superuser", "Enable superuser status"),
                    ("disable_group_superuser", "Disable superuser status"),
                ],
                "verbose_name": "Group",
                "verbose_name_plural": "Groups",
            },
        ),
    ]
@@ -1,19 +0,0 @@
# Generated by Django 5.0.13 on 2025-04-07 14:04

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0043_alter_group_options"),
    ]

    operations = [
        migrations.AddField(
            model_name="usersourceconnection",
            name="new_identifier",
            field=models.TextField(default=""),
            preserve_default=False,
        ),
    ]
@@ -1,30 +0,0 @@
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0044_usersourceconnection_new_identifier"),
        ("authentik_sources_kerberos", "0003_migrate_userkerberossourceconnection_identifier"),
        ("authentik_sources_oauth", "0009_migrate_useroauthsourceconnection_identifier"),
        ("authentik_sources_plex", "0005_migrate_userplexsourceconnection_identifier"),
        ("authentik_sources_saml", "0019_migrate_usersamlsourceconnection_identifier"),
    ]

    operations = [
        migrations.RenameField(
            model_name="usersourceconnection",
            old_name="new_identifier",
            new_name="identifier",
        ),
        migrations.AddIndex(
            model_name="usersourceconnection",
            index=models.Index(fields=["identifier"], name="authentik_c_identif_59226f_idx"),
        ),
        migrations.AddIndex(
            model_name="usersourceconnection",
            index=models.Index(
                fields=["source", "identifier"], name="authentik_c_source__649e04_idx"
            ),
        ),
    ]
@@ -1,238 +0,0 @@
# Generated by Django 5.0.11 on 2025-01-27 12:58

import uuid
import pickle  # nosec
from django.core import signing
from django.contrib.auth import BACKEND_SESSION_KEY, HASH_SESSION_KEY, SESSION_KEY
from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings
from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.utils.timezone import now, timedelta
from authentik.lib.migrations import progress_bar
from authentik.root.middleware import ClientIPMiddleware


SESSION_CACHE_ALIAS = "default"


class PickleSerializer:
    """
    Simple wrapper around pickle to be used in signing.dumps()/loads() and
    cache backends.
    """

    def __init__(self, protocol=None):
        self.protocol = pickle.HIGHEST_PROTOCOL if protocol is None else protocol

    def dumps(self, obj):
        """Pickle data to be stored in redis"""
        return pickle.dumps(obj, self.protocol)

    def loads(self, data):
        """Unpickle data to be loaded from redis"""
        return pickle.loads(data)  # nosec


def _migrate_session(
    apps,
    db_alias,
    session_key,
    session_data,
    expires,
):
    Session = apps.get_model("authentik_core", "Session")
    OldAuthenticatedSession = apps.get_model("authentik_core", "OldAuthenticatedSession")
    AuthenticatedSession = apps.get_model("authentik_core", "AuthenticatedSession")

    old_auth_session = (
        OldAuthenticatedSession.objects.using(db_alias).filter(session_key=session_key).first()
    )

    args = {
        "session_key": session_key,
        "expires": expires,
        "last_ip": ClientIPMiddleware.default_ip,
        "last_user_agent": "",
        "session_data": {},
    }
    for k, v in session_data.items():
        if k == "authentik/stages/user_login/last_ip":
            args["last_ip"] = v
        elif k in ["last_user_agent", "last_used"]:
            args[k] = v
        elif k in [SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY]:
            pass
        else:
            args["session_data"][k] = v
    if old_auth_session:
        args["last_user_agent"] = old_auth_session.last_user_agent
        args["last_used"] = old_auth_session.last_used

    args["session_data"] = pickle.dumps(args["session_data"])
    session = Session.objects.using(db_alias).create(**args)

    if old_auth_session:
        AuthenticatedSession.objects.using(db_alias).create(
            session=session,
            user=old_auth_session.user,
        )


def migrate_redis_sessions(apps, schema_editor):
    from django.core.cache import caches

    db_alias = schema_editor.connection.alias
    cache = caches[SESSION_CACHE_ALIAS]

    # Not a redis cache, skipping
    if not hasattr(cache, "keys"):
        return

    print("\nMigrating Redis sessions to database, this might take a couple of minutes...")
    for key, session_data in progress_bar(cache.get_many(cache.keys(f"{KEY_PREFIX}*")).items()):
        _migrate_session(
            apps=apps,
            db_alias=db_alias,
            session_key=key.removeprefix(KEY_PREFIX),
            session_data=session_data,
            expires=now() + timedelta(seconds=cache.ttl(key)),
        )


def migrate_database_sessions(apps, schema_editor):
    DjangoSession = apps.get_model("sessions", "Session")
    db_alias = schema_editor.connection.alias

    print("\nMigrating database sessions, this might take a couple of minutes...")
    for django_session in progress_bar(DjangoSession.objects.using(db_alias).all()):
        session_data = signing.loads(
            django_session.session_data,
            salt="django.contrib.sessions.SessionStore",
            serializer=PickleSerializer,
        )
        _migrate_session(
            apps=apps,
            db_alias=db_alias,
            session_key=django_session.session_key,
            session_data=session_data,
            expires=django_session.expire_date,
        )


class Migration(migrations.Migration):

    dependencies = [
        ("sessions", "0001_initial"),
        ("authentik_core", "0045_rename_new_identifier_usersourceconnection_identifier_and_more"),
        ("authentik_providers_oauth2", "0027_accesstoken_authentik_p_expires_9f24a5_idx_and_more"),
        ("authentik_providers_rac", "0006_connectiontoken_authentik_p_expires_91f148_idx_and_more"),
    ]

    operations = [
        # Rename AuthenticatedSession to OldAuthenticatedSession
        migrations.RenameModel(
            old_name="AuthenticatedSession",
            new_name="OldAuthenticatedSession",
        ),
        migrations.RenameIndex(
            model_name="oldauthenticatedsession",
            new_name="authentik_c_expires_cf4f72_idx",
            old_name="authentik_c_expires_08251d_idx",
        ),
        migrations.RenameIndex(
            model_name="oldauthenticatedsession",
            new_name="authentik_c_expirin_c1f17f_idx",
            old_name="authentik_c_expirin_9cd839_idx",
        ),
        migrations.RenameIndex(
            model_name="oldauthenticatedsession",
            new_name="authentik_c_expirin_e04f5d_idx",
            old_name="authentik_c_expirin_195a84_idx",
        ),
        migrations.RenameIndex(
            model_name="oldauthenticatedsession",
            new_name="authentik_c_session_a44819_idx",
            old_name="authentik_c_session_d0f005_idx",
        ),
        migrations.RunSQL(
            sql="ALTER INDEX authentik_core_authenticatedsession_user_id_5055b6cf RENAME TO authentik_core_oldauthenticatedsession_user_id_5055b6cf",
            reverse_sql="ALTER INDEX authentik_core_oldauthenticatedsession_user_id_5055b6cf RENAME TO authentik_core_authenticatedsession_user_id_5055b6cf",
        ),
        # Create new Session and AuthenticatedSession models
        migrations.CreateModel(
            name="Session",
            fields=[
                (
                    "session_key",
                    models.CharField(
                        max_length=40, primary_key=True, serialize=False, verbose_name="session key"
                    ),
                ),
                ("expires", models.DateTimeField(default=None, null=True)),
                ("expiring", models.BooleanField(default=True)),
                ("session_data", models.BinaryField(verbose_name="session data")),
                ("last_ip", models.GenericIPAddressField()),
                ("last_user_agent", models.TextField(blank=True)),
                ("last_used", models.DateTimeField(auto_now=True)),
            ],
            options={
                "default_permissions": [],
                "verbose_name": "Session",
                "verbose_name_plural": "Sessions",
            },
        ),
        migrations.AddIndex(
            model_name="session",
            index=models.Index(fields=["expires"], name="authentik_c_expires_d2f607_idx"),
        ),
        migrations.AddIndex(
            model_name="session",
            index=models.Index(fields=["expiring"], name="authentik_c_expirin_7c2cfb_idx"),
        ),
        migrations.AddIndex(
            model_name="session",
            index=models.Index(
                fields=["expiring", "expires"], name="authentik_c_expirin_1ab2e4_idx"
            ),
        ),
        migrations.AddIndex(
            model_name="session",
            index=models.Index(
                fields=["expires", "session_key"], name="authentik_c_expires_c49143_idx"
            ),
        ),
        migrations.CreateModel(
            name="AuthenticatedSession",
            fields=[
                (
                    "session",
                    models.OneToOneField(
                        on_delete=django.db.models.deletion.CASCADE,
                        primary_key=True,
                        serialize=False,
                        to="authentik_core.session",
                    ),
                ),
                ("uuid", models.UUIDField(default=uuid.uuid4, unique=True)),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
            ],
            options={
                "verbose_name": "Authenticated Session",
                "verbose_name_plural": "Authenticated Sessions",
            },
        ),
        migrations.RunPython(
            code=migrate_redis_sessions,
            reverse_code=migrations.RunPython.noop,
        ),
        migrations.RunPython(
            code=migrate_database_sessions,
            reverse_code=migrations.RunPython.noop,
        ),
    ]
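Note: this data migration follows Django's standard RunPython rules: it fetches historical models through apps.get_model rather than importing them directly, and uses the connection alias from the schema editor. A minimal sketch of that skeleton:

    def forwards(apps, schema_editor):
        # Historical model matching this migration's schema state,
        # never a direct import of the live model
        Session = apps.get_model("authentik_core", "Session")
        db_alias = schema_editor.connection.alias
        for session in Session.objects.using(db_alias).iterator():
            ...  # transform each row here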
@@ -1,18 +0,0 @@
# Generated by Django 5.0.11 on 2025-01-27 13:02

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0046_session_and_more"),
        ("authentik_providers_rac", "0007_migrate_session"),
        ("authentik_providers_oauth2", "0028_migrate_session"),
    ]

    operations = [
        migrations.DeleteModel(
            name="OldAuthenticatedSession",
        ),
    ]
@@ -1,7 +1,6 @@
"""authentik core models"""

from datetime import datetime
from enum import StrEnum
from hashlib import sha256
from typing import Any, Optional, Self
from uuid import uuid4
@@ -10,7 +9,6 @@ from deepmerge import always_merger
from django.contrib.auth.hashers import check_password
from django.contrib.auth.models import AbstractUser
from django.contrib.auth.models import UserManager as DjangoUserManager
from django.contrib.sessions.base_session import AbstractBaseSession
from django.db import models
from django.db.models import Q, QuerySet, options
from django.db.models.constants import LOOKUP_SEP
@@ -206,8 +204,6 @@ class Group(SerializerModel, AttributesMixin):
        permissions = [
            ("add_user_to_group", _("Add user to group")),
            ("remove_user_from_group", _("Remove user from group")),
            ("enable_group_superuser", _("Enable superuser status")),
            ("disable_group_superuser", _("Disable superuser status")),
        ]

    def __str__(self):
@@ -603,14 +599,6 @@ class Application(SerializerModel, PolicyBindingModel):
            return None
        return candidates[-1]

    def backchannel_provider_for[T: Provider](self, provider_type: type[T], **kwargs) -> T | None:
        """Get Backchannel provider for a specific type"""
        providers = self.backchannel_providers.filter(
            **{f"{provider_type._meta.model_name}__isnull": False},
            **kwargs,
        )
        return getattr(providers.first(), provider_type._meta.model_name)

    def __str__(self):
        return str(self.name)

@@ -648,30 +636,19 @@ class SourceUserMatchingModes(models.TextChoices):
    """Different modes a source can handle new/returning users"""

    IDENTIFIER = "identifier", _("Use the source-specific identifier")
    EMAIL_LINK = (
        "email_link",
        _(
            "Link to a user with identical email address. Can have security implications "
            "when a source doesn't validate email addresses."
        ),
    EMAIL_LINK = "email_link", _(
        "Link to a user with identical email address. Can have security implications "
        "when a source doesn't validate email addresses."
    )
    EMAIL_DENY = (
        "email_deny",
        _(
            "Use the user's email address, but deny enrollment when the email address already "
            "exists."
        ),
    EMAIL_DENY = "email_deny", _(
        "Use the user's email address, but deny enrollment when the email address already exists."
    )
    USERNAME_LINK = (
        "username_link",
        _(
            "Link to a user with identical username. Can have security implications "
            "when a username is used with another source."
        ),
    USERNAME_LINK = "username_link", _(
        "Link to a user with identical username. Can have security implications "
        "when a username is used with another source."
    )
    USERNAME_DENY = (
        "username_deny",
        _("Use the user's username, but deny enrollment when the username already exists."),
    USERNAME_DENY = "username_deny", _(
        "Use the user's username, but deny enrollment when the username already exists."
    )


@@ -679,24 +656,18 @@ class SourceGroupMatchingModes(models.TextChoices):
    """Different modes a source can handle new/returning groups"""

    IDENTIFIER = "identifier", _("Use the source-specific identifier")
    NAME_LINK = (
        "name_link",
        _(
            "Link to a group with identical name. Can have security implications "
            "when a group name is used with another source."
        ),
    NAME_LINK = "name_link", _(
        "Link to a group with identical name. Can have security implications "
        "when a group name is used with another source."
    )
    NAME_DENY = (
        "name_deny",
        _("Use the group name, but deny enrollment when the name already exists."),
    NAME_DENY = "name_deny", _(
        "Use the group name, but deny enrollment when the name already exists."
    )


class Source(ManagedModel, SerializerModel, PolicyBindingModel):
    """Base Authentication source, i.e. an OAuth Provider, SAML Remote or LDAP Server"""

    MANAGED_INBUILT = "goauthentik.io/sources/inbuilt"

    name = models.TextField(help_text=_("Source's display Name."))
    slug = models.SlugField(help_text=_("Internal source name, used in URLs."), unique=True)

@@ -747,7 +718,8 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
        choices=SourceGroupMatchingModes.choices,
        default=SourceGroupMatchingModes.IDENTIFIER,
        help_text=_(
            "How the source determines if an existing group should be used or a new group created."
            "How the source determines if an existing group should be used or "
            "a new group created."
        ),
    )

@@ -777,17 +749,11 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
    @property
    def component(self) -> str:
        """Return component used to edit this object"""
        if self.managed == self.MANAGED_INBUILT:
            return ""
        raise NotImplementedError

    @property
    def property_mapping_type(self) -> "type[PropertyMapping]":
        """Return property mapping type used by this object"""
        if self.managed == self.MANAGED_INBUILT:
            from authentik.core.models import PropertyMapping

            return PropertyMapping
        raise NotImplementedError

    def ui_login_button(self, request: HttpRequest) -> UILoginButton | None:
@@ -802,14 +768,10 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):

    def get_base_user_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]:
        """Get base properties for a user to build final properties upon."""
        if self.managed == self.MANAGED_INBUILT:
            return {}
        raise NotImplementedError

    def get_base_group_properties(self, **kwargs) -> dict[str, Any | dict[str, Any]]:
        """Get base properties for a group to build final properties upon."""
        if self.managed == self.MANAGED_INBUILT:
            return {}
        raise NotImplementedError

    def __str__(self):
@@ -840,7 +802,6 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel):

    user = models.ForeignKey(User, on_delete=models.CASCADE)
    source = models.ForeignKey(Source, on_delete=models.CASCADE)
    identifier = models.TextField()

    objects = InheritanceManager()

@@ -854,10 +815,6 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel):

    class Meta:
        unique_together = (("user", "source"),)
        indexes = (
            models.Index(fields=("identifier",)),
            models.Index(fields=("source", "identifier")),
        )


class GroupSourceConnection(SerializerModel, CreatedUpdatedModel):
@@ -1028,75 +985,45 @@ class PropertyMapping(SerializerModel, ManagedModel):
        verbose_name_plural = _("Property Mappings")


class Session(ExpiringModel, AbstractBaseSession):
    """User session with extra fields for fast access"""
class AuthenticatedSession(ExpiringModel):
    """Additional session class for authenticated users. Augments the standard django session
    to achieve the following:
        - Make it queryable by user
        - Have a direct connection to user objects
        - Allow users to view their own sessions and terminate them
        - Save structured and well-defined information.
    """

    # Remove upstream field because we're using our own ExpiringModel
    expire_date = None
    session_data = models.BinaryField(_("session data"))
    uuid = models.UUIDField(default=uuid4, primary_key=True)

    # Keep in sync with Session.Keys
    last_ip = models.GenericIPAddressField()
    session_key = models.CharField(max_length=40)
    user = models.ForeignKey(User, on_delete=models.CASCADE)

    last_ip = models.TextField()
    last_user_agent = models.TextField(blank=True)
    last_used = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = _("Session")
        verbose_name_plural = _("Sessions")
        indexes = ExpiringModel.Meta.indexes + [
            models.Index(fields=["expires", "session_key"]),
        ]
        default_permissions = []

    def __str__(self):
        return self.session_key

    class Keys(StrEnum):
        """
        Keys to be set with the session interface for the fields above to be updated.

        If a field is added here that needs to be initialized when the session is initialized,
        it must also be reflected in authentik.root.middleware.SessionMiddleware.process_request
        and in authentik.core.sessions.SessionStore.__init__
        """

        LAST_IP = "last_ip"
        LAST_USER_AGENT = "last_user_agent"
        LAST_USED = "last_used"

    @classmethod
    def get_session_store_class(cls):
        from authentik.core.sessions import SessionStore

        return SessionStore

    def get_decoded(self):
        raise NotImplementedError


class AuthenticatedSession(SerializerModel):
    session = models.OneToOneField(Session, on_delete=models.CASCADE, primary_key=True)
    # We use the session as primary key, but we need the API to be able to reference
    # this object uniquely without exposing the session key
    uuid = models.UUIDField(default=uuid4, unique=True)

    user = models.ForeignKey(User, on_delete=models.CASCADE)

    class Meta:
        verbose_name = _("Authenticated Session")
        verbose_name_plural = _("Authenticated Sessions")
        indexes = ExpiringModel.Meta.indexes + [
            models.Index(fields=["session_key"]),
        ]

    def __str__(self) -> str:
        return f"Authenticated Session {str(self.pk)[:10]}"
        return f"Authenticated Session {self.session_key[:10]}"

    @staticmethod
    def from_request(request: HttpRequest, user: User) -> Optional["AuthenticatedSession"]:
        """Create a new session from a http request"""
        if not hasattr(request, "session") or not request.session.exists(
            request.session.session_key
        ):
        from authentik.root.middleware import ClientIPMiddleware

        if not hasattr(request, "session") or not request.session.session_key:
            return None
        return AuthenticatedSession(
            session=Session.objects.filter(session_key=request.session.session_key).first(),
            session_key=request.session.session_key,
            user=user,
            last_ip=ClientIPMiddleware.get_client_ip(request),
            last_user_agent=request.META.get("HTTP_USER_AGENT", ""),
            expires=request.session.get_expiry_date(),
        )
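Note: the two model layouts interleaved above differ in where session state lives. In the newer layout, AuthenticatedSession uses the Session row itself as primary key and keeps a separate uuid purely for API references, so the secret session key is never exposed. A sketch of that composition (only meaningful inside a configured Django project; fields trimmed, names hypothetical):

    import uuid
    from django.db import models

    class SessionSketch(models.Model):
        session_key = models.CharField(max_length=40, primary_key=True)

        class Meta:
            app_label = "sketch"  # assumed app, for illustration

    class AuthenticatedSessionSketch(models.Model):
        # Shares its primary key with the session row; cascades on delete
        session = models.OneToOneField(SessionSketch, on_delete=models.CASCADE, primary_key=True)
        # Stable public handle for the API, distinct from the secret session key
        uuid = models.UUIDField(default=uuid.uuid4, unique=True)

        class Meta:
            app_label = "sketch"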
@@ -1,168 +0,0 @@
"""authentik sessions engine"""

import pickle  # nosec

from django.contrib.auth import BACKEND_SESSION_KEY, HASH_SESSION_KEY, SESSION_KEY
from django.contrib.sessions.backends.db import SessionStore as SessionBase
from django.core.exceptions import SuspiciousOperation
from django.utils import timezone
from django.utils.functional import cached_property
from structlog.stdlib import get_logger

from authentik.root.middleware import ClientIPMiddleware

LOGGER = get_logger()


class SessionStore(SessionBase):
    def __init__(self, session_key=None, last_ip=None, last_user_agent=""):
        super().__init__(session_key)
        self._create_kwargs = {
            "last_ip": last_ip or ClientIPMiddleware.default_ip,
            "last_user_agent": last_user_agent,
        }

    @classmethod
    def get_model_class(cls):
        from authentik.core.models import Session

        return Session

    @cached_property
    def model_fields(self):
        return [k.value for k in self.model.Keys]

    def _get_session_from_db(self):
        try:
            return (
                self.model.objects.select_related(
                    "authenticatedsession",
                    "authenticatedsession__user",
                )
                .prefetch_related(
                    "authenticatedsession__user__groups",
                    "authenticatedsession__user__user_permissions",
                )
                .get(
                    session_key=self.session_key,
                    expires__gt=timezone.now(),
                )
            )
        except (self.model.DoesNotExist, SuspiciousOperation) as exc:
            if isinstance(exc, SuspiciousOperation):
                LOGGER.warning(str(exc))
            self._session_key = None

    async def _aget_session_from_db(self):
        try:
            return (
                await self.model.objects.select_related(
                    "authenticatedsession",
                    "authenticatedsession__user",
                )
                .prefetch_related(
                    "authenticatedsession__user__groups",
                    "authenticatedsession__user__user_permissions",
                )
                .aget(
                    session_key=self.session_key,
                    expires__gt=timezone.now(),
                )
            )
        except (self.model.DoesNotExist, SuspiciousOperation) as exc:
            if isinstance(exc, SuspiciousOperation):
                LOGGER.warning(str(exc))
            self._session_key = None

    def encode(self, session_dict):
        return pickle.dumps(session_dict, protocol=pickle.HIGHEST_PROTOCOL)

    def decode(self, session_data):
        try:
            return pickle.loads(session_data)  # nosec
        except pickle.PickleError:
            # ValueError, unpickling exceptions. If any of these happen, just return an empty
            # dictionary (an empty session)
            pass
        return {}

    def load(self):
        s = self._get_session_from_db()
        if s:
            return {
                "authenticatedsession": getattr(s, "authenticatedsession", None),
                **{k: getattr(s, k) for k in self.model_fields},
                **self.decode(s.session_data),
            }
        else:
            return {}

    async def aload(self):
        s = await self._aget_session_from_db()
        if s:
            return {
                "authenticatedsession": getattr(s, "authenticatedsession", None),
                **{k: getattr(s, k) for k in self.model_fields},
                **self.decode(s.session_data),
            }
        else:
            return {}

    def create_model_instance(self, data):
        args = {
            "session_key": self._get_or_create_session_key(),
            "expires": self.get_expiry_date(),
            "session_data": {},
            **self._create_kwargs,
        }
        for k, v in data.items():
            # Don't save:
            # - unused auth data
            # - related models
            if k in [SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY, "authenticatedsession"]:
                pass
            elif k in self.model_fields:
                args[k] = v
            else:
                args["session_data"][k] = v
        args["session_data"] = self.encode(args["session_data"])
        return self.model(**args)

    async def acreate_model_instance(self, data):
        args = {
            "session_key": await self._aget_or_create_session_key(),
            "expires": await self.aget_expiry_date(),
            "session_data": {},
            **self._create_kwargs,
        }
        for k, v in data.items():
            # Don't save:
            # - unused auth data
            # - related models
            if k in [SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY, "authenticatedsession"]:
                pass
            elif k in self.model_fields:
                args[k] = v
            else:
                args["session_data"][k] = v
        args["session_data"] = self.encode(args["session_data"])
        return self.model(**args)

    @classmethod
    def clear_expired(cls):
        cls.get_model_class().objects.filter(expires__lt=timezone.now()).delete()

    @classmethod
    async def aclear_expired(cls):
        await cls.get_model_class().objects.filter(expires__lt=timezone.now()).adelete()

    def cycle_key(self):
        data = self._session
        key = self.session_key
        self.create()
        self._session_cache = data
        if key:
            self.delete(key)
        if (authenticated_session := data.get("authenticatedsession")) is not None:
            authenticated_session.session_id = self.session_key
            authenticated_session.save(force_insert=True)
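Note: cycle_key above recreates the session row under a fresh key and then re-points the dependent AuthenticatedSession. Because the old row (and, via cascade, the dependent row) is deleted, the dependent row has to be force-inserted under the new key. A sketch of that final step (names reused from the hunk; not a drop-in):

    def repoint_authenticated_session(data: dict, new_session_key: str):
        authenticated_session = data.get("authenticatedsession")
        if authenticated_session is not None:
            # Assign the FK column directly, then re-insert the row
            authenticated_session.session_id = new_session_key
            authenticated_session.save(force_insert=True)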
@@ -1,10 +1,11 @@
"""authentik core signals"""

from django.contrib.auth.signals import user_logged_in
from django.contrib.auth.signals import user_logged_in, user_logged_out
from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.core.cache import cache
from django.core.signals import Signal
from django.db.models import Model
from django.db.models.signals import post_delete, post_save, pre_save
from django.db.models.signals import post_save, pre_delete, pre_save
from django.dispatch import receiver
from django.http.request import HttpRequest
from structlog.stdlib import get_logger
@@ -14,7 +15,6 @@ from authentik.core.models import (
    AuthenticatedSession,
    BackchannelProvider,
    ExpiringModel,
    Session,
    User,
    default_token_duration,
)
@@ -49,10 +49,19 @@ def user_logged_in_session(sender, request: HttpRequest, user: User, **_):
    session.save()


@receiver(post_delete, sender=AuthenticatedSession)
@receiver(user_logged_out)
def user_logged_out_session(sender, request: HttpRequest, user: User, **_):
    """Delete AuthenticatedSession if it exists"""
    if not request.session or not request.session.session_key:
        return
    AuthenticatedSession.objects.filter(session_key=request.session.session_key).delete()


@receiver(pre_delete, sender=AuthenticatedSession)
def authenticated_session_delete(sender: type[Model], instance: "AuthenticatedSession", **_):
    """Delete session when authenticated session is deleted"""
    Session.objects.filter(session_key=instance.pk).delete()
    cache_key = f"{KEY_PREFIX}{instance.session_key}"
    cache.delete(cache_key)


@receiver(pre_save)
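Note: the two receivers above keep the bookkeeping model and the underlying session store in sync from opposite directions: one reacts to the user_logged_out auth signal, the other to deletion of the bookkeeping row itself. A sketch of the logout-side wiring:

    from django.contrib.auth.signals import user_logged_out
    from django.dispatch import receiver

    @receiver(user_logged_out)
    def on_logout(sender, request, user, **_):
        # Anonymous or keyless requests have nothing to clean up
        if not request.session or not request.session.session_key:
            return
        session_key = request.session.session_key
        # ...delete whatever row tracks session_key here (model omitted)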
@@ -35,7 +35,8 @@ from authentik.flows.planner import (
    FlowPlanner,
)
from authentik.flows.stage import StageView
from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_GET
from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_GET, SESSION_KEY_PLAN
from authentik.lib.utils.urls import redirect_with_qs
from authentik.lib.views import bad_request_message
from authentik.policies.denied import AccessDeniedResponse
from authentik.policies.utils import delete_none_values
@@ -46,10 +47,8 @@ from authentik.stages.user_write.stage import PLAN_CONTEXT_USER_PATH

LOGGER = get_logger()

PLAN_CONTEXT_SOURCE_GROUPS = "source_groups"
SESSION_KEY_SOURCE_FLOW_STAGES = "authentik/flows/source_flow_stages"
SESSION_KEY_SOURCE_FLOW_CONTEXT = "authentik/flows/source_flow_context"
SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token"  # nosec
PLAN_CONTEXT_SOURCE_GROUPS = "source_groups"


class MessageStage(StageView):
@@ -220,28 +219,28 @@ class SourceFlowManager:
            }
        )
        flow_context.update(self.policy_context)
        if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session:
            token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN)
            self._logger.info("Replacing source flow with overridden flow", flow=token.flow.slug)
            plan = token.plan
            plan.context[PLAN_CONTEXT_IS_RESTORED] = token
            plan.context.update(flow_context)
            for stage in self.get_stages_to_append(flow):
                plan.append_stage(stage)
            if stages:
                for stage in stages:
                    plan.append_stage(stage)
            self.request.session[SESSION_KEY_PLAN] = plan
            flow_slug = token.flow.slug
            token.delete()
            return redirect_with_qs(
                "authentik_core:if-flow",
                self.request.GET,
                flow_slug=flow_slug,
            )
        flow_context.setdefault(PLAN_CONTEXT_REDIRECT, final_redirect)

        if not flow:
            # We only check for the flow token here if we don't have a flow, otherwise we rely on
            # SESSION_KEY_SOURCE_FLOW_STAGES to delegate the usage of this token and dynamically add
            # stages that deal with this token to return to another flow
            if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session:
                token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN)
                self._logger.info(
                    "Replacing source flow with overridden flow", flow=token.flow.slug
                )
                plan = token.plan
                plan.context[PLAN_CONTEXT_IS_RESTORED] = token
                plan.context.update(flow_context)
                for stage in self.get_stages_to_append(flow):
                    plan.append_stage(stage)
                if stages:
                    for stage in stages:
                        plan.append_stage(stage)
                redirect = plan.to_redirect(self.request, token.flow)
                token.delete()
                return redirect
            return bad_request_message(
                self.request,
                _("Configured flow does not exist."),
@@ -260,9 +259,6 @@ class SourceFlowManager:
        if stages:
            for stage in stages:
                plan.append_stage(stage)
        for stage in self.request.session.get(SESSION_KEY_SOURCE_FLOW_STAGES, []):
            plan.append_stage(stage)
        plan.context.update(self.request.session.get(SESSION_KEY_SOURCE_FLOW_CONTEXT, {}))
        return plan.to_redirect(self.request, flow)

    def handle_auth(
@@ -299,8 +295,6 @@ class SourceFlowManager:
        # When request isn't authenticated we jump straight to auth
        if not self.request.user.is_authenticated:
            return self.handle_auth(connection)
        # When an override flow token exists we actually still use a flow for link
        # to continue the existing flow we came from
        if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session:
            return self._prepare_flow(None, connection)
        connection.save()
@@ -2,16 +2,22 @@

from datetime import datetime, timedelta

from django.conf import ImproperlyConfigured
from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.contrib.sessions.backends.db import SessionStore as DBSessionStore
from django.core.cache import cache
from django.utils.timezone import now
from structlog.stdlib import get_logger

from authentik.core.models import (
    USER_ATTRIBUTE_EXPIRES,
    USER_ATTRIBUTE_GENERATED,
    AuthenticatedSession,
    ExpiringModel,
    User,
)
from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
from authentik.lib.config import CONFIG
from authentik.root.celery import CELERY_APP

LOGGER = get_logger()
@@ -32,6 +38,38 @@ def clean_expired_models(self: SystemTask):
            obj.expire_action()
        LOGGER.debug("Expired models", model=cls, amount=amount)
        messages.append(f"Expired {amount} {cls._meta.verbose_name_plural}")
    # Special case
    amount = 0

    for session in AuthenticatedSession.objects.all():
        match CONFIG.get("session_storage", "cache"):
            case "cache":
                cache_key = f"{KEY_PREFIX}{session.session_key}"
                value = None
                try:
                    value = cache.get(cache_key)

                except Exception as exc:
                    LOGGER.debug("Failed to get session from cache", exc=exc)
                if not value:
                    session.delete()
                    amount += 1
            case "db":
                if not (
                    DBSessionStore.get_model_class()
                    .objects.filter(session_key=session.session_key, expire_date__gt=now())
                    .exists()
                ):
                    session.delete()
                    amount += 1
            case _:
                # Should never happen, as we check for other values in authentik/root/settings.py
                raise ImproperlyConfigured(
                    "Invalid session_storage setting, allowed values are db and cache"
                )
    LOGGER.debug("Expired sessions", model=AuthenticatedSession, amount=amount)

    messages.append(f"Expired {amount} {AuthenticatedSession._meta.verbose_name_plural}")
    self.set_status(TaskStatus.SUCCESSFUL, *messages)
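Note: the cleanup loop above dispatches on the configured session storage with structural pattern matching, including a defensive default arm for values that should already be rejected at settings load time. A stripped-down sketch of that dispatch (a plain dict stands in for authentik's CONFIG accessor):

    def session_liveness_check(config: dict) -> str:
        match config.get("session_storage", "cache"):
            case "cache":
                return "look the key up in the cache"
            case "db":
                return "look the row up in django_session"
            case _:
                raise ValueError("Invalid session_storage setting, allowed values are db and cache")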
@@ -11,7 +11,6 @@
        build: "{{ build }}",
        api: {
            base: "{{ base_url }}",
            relBase: "{{ base_url_rel }}",
        },
    };
    window.addEventListener("DOMContentLoaded", function () {
@@ -8,15 +8,13 @@
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
    {# Darkreader breaks the site regardless of theme, as it's not compatible with webcomponents, and we default to a dark theme based on preferred colour-scheme #}
    <meta name="darkreader-lock">
    <title>{% block title %}{% trans title|default:brand.branding_title %}{% endblock %}</title>
    <link rel="icon" href="{{ brand.branding_favicon_url }}">
    <link rel="shortcut icon" href="{{ brand.branding_favicon_url }}">
    {% block head_before %}
    {% endblock %}
    <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}">
    <style>{{ brand.branding_custom_css }}</style>
    <link rel="stylesheet" type="text/css" href="{% static 'dist/custom.css' %}" data-inject>
    <script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script>
    <script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script>
    {% block head %}
@@ -4,7 +4,7 @@
{% load i18n %}

{% block head_before %}
<link rel="prefetch" href="{{ request.brand.branding_default_flow_background_url }}" />
<link rel="prefetch" href="{% static 'dist/assets/images/flow_background.jpg' %}" />
<link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}">
<link rel="stylesheet" type="text/css" href="{% static 'dist/theme-dark.css' %}" media="(prefers-color-scheme: dark)">
{% include "base/header_js.html" %}
@@ -13,7 +13,7 @@
{% block head %}
<style>
    :root {
        --ak-flow-background: url("{{ request.brand.branding_default_flow_background_url }}");
        --ak-flow-background: url("{% static 'dist/assets/images/flow_background.jpg' %}");
        --pf-c-background-image--BackgroundImage: var(--ak-flow-background);
        --pf-c-background-image--BackgroundImage-2x: var(--ak-flow-background);
        --pf-c-background-image--BackgroundImage--sm: var(--ak-flow-background);
@ -1,17 +1,9 @@
|
||||
"""Test API Utils"""
|
||||
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.serializers import (
|
||||
HyperlinkedModelSerializer,
|
||||
)
|
||||
from rest_framework.serializers import (
|
||||
ModelSerializer as BaseModelSerializer,
|
||||
)
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.api.utils import ModelSerializer as CustomModelSerializer
|
||||
from authentik.core.api.utils import is_dict
|
||||
from authentik.lib.utils.reflection import all_subclasses
|
||||
|
||||
|
||||
class TestAPIUtils(APITestCase):
|
||||
@ -22,14 +14,3 @@ class TestAPIUtils(APITestCase):
|
||||
self.assertIsNone(is_dict({}))
|
||||
with self.assertRaises(ValidationError):
|
||||
is_dict("foo")
|
||||
|
||||
def test_all_serializers_descend_from_custom(self):
|
||||
"""Test that every serializer we define descends from our own ModelSerializer"""
|
||||
# Weirdly, there's only one serializer in `rest_framework` which descends from
|
||||
# ModelSerializer: HyperlinkedModelSerializer
|
||||
expected = {CustomModelSerializer, HyperlinkedModelSerializer}
|
||||
actual = set(all_subclasses(BaseModelSerializer)) - set(
|
||||
all_subclasses(CustomModelSerializer)
|
||||
)
|
||||
|
||||
self.assertEqual(expected, actual)
|
||||
|
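Note: the removed test relies on `all_subclasses` from `authentik.lib.utils.reflection` to walk the whole serializer class tree. A minimal sketch of such a recursive subclass walk, for readers unfamiliar with the helper (the real implementation may differ in signature and return type):

```python
# Illustrative sketch only; the real helper lives in
# authentik.lib.utils.reflection and may not match this exactly.
def all_subclasses_sketch(cls: type) -> set[type]:
    subclasses = set(cls.__subclasses__())
    for sub in cls.__subclasses__():
        # Recurse so indirect subclasses are collected as well
        subclasses |= all_subclasses_sketch(sub)
    return subclasses
```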
@@ -5,7 +5,7 @@ from json import loads
from django.urls.base import reverse
from rest_framework.test import APITestCase

from authentik.core.models import AuthenticatedSession, Session, User
from authentik.core.models import User
from authentik.core.tests.utils import create_test_admin_user


@@ -30,18 +30,3 @@ class TestAuthenticatedSessionsAPI(APITestCase):
        self.assertEqual(response.status_code, 200)
        body = loads(response.content.decode())
        self.assertEqual(body["pagination"]["count"], 1)

    def test_delete(self):
        """Test deletion"""
        self.client.force_login(self.user)
        self.assertEqual(AuthenticatedSession.objects.all().count(), 1)
        self.assertEqual(Session.objects.all().count(), 1)
        response = self.client.delete(
            reverse(
                "authentik_api:authenticatedsession-detail",
                kwargs={"uuid": AuthenticatedSession.objects.first().uuid},
            )
        )
        self.assertEqual(response.status_code, 204)
        self.assertEqual(AuthenticatedSession.objects.all().count(), 0)
        self.assertEqual(Session.objects.all().count(), 0)
@@ -4,7 +4,7 @@ from django.urls.base import reverse
from guardian.shortcuts import assign_perm
from rest_framework.test import APITestCase

from authentik.core.models import Group
from authentik.core.models import Group, User
from authentik.core.tests.utils import create_test_admin_user, create_test_user
from authentik.lib.generators import generate_id

@@ -14,7 +14,7 @@ class TestGroupsAPI(APITestCase):

    def setUp(self) -> None:
        self.login_user = create_test_user()
        self.user = create_test_user()
        self.user = User.objects.create(username="test-user")

    def test_list_with_users(self):
        """Test listing with users"""
@@ -109,57 +109,3 @@ class TestGroupsAPI(APITestCase):
            },
        )
        self.assertEqual(res.status_code, 400)

    def test_superuser_no_perm(self):
        """Test creating a superuser group without permission"""
        assign_perm("authentik_core.add_group", self.login_user)
        self.client.force_login(self.login_user)
        res = self.client.post(
            reverse("authentik_api:group-list"),
            data={"name": generate_id(), "is_superuser": True},
        )
        self.assertEqual(res.status_code, 400)
        self.assertJSONEqual(
            res.content,
            {"is_superuser": ["User does not have permission to set superuser status to True."]},
        )

    def test_superuser_update_no_perm(self):
        """Test updating a superuser group without permission"""
        group = Group.objects.create(name=generate_id(), is_superuser=True)
        assign_perm("view_group", self.login_user, group)
        assign_perm("change_group", self.login_user, group)
        self.client.force_login(self.login_user)
        res = self.client.patch(
            reverse("authentik_api:group-detail", kwargs={"pk": group.pk}),
            data={"is_superuser": False},
        )
        self.assertEqual(res.status_code, 400)
        self.assertJSONEqual(
            res.content,
            {"is_superuser": ["User does not have permission to set superuser status to False."]},
        )

    def test_superuser_update_no_change(self):
        """Test updating a superuser group without permission
        and without changing the superuser status"""
        group = Group.objects.create(name=generate_id(), is_superuser=True)
        assign_perm("view_group", self.login_user, group)
        assign_perm("change_group", self.login_user, group)
        self.client.force_login(self.login_user)
        res = self.client.patch(
            reverse("authentik_api:group-detail", kwargs={"pk": group.pk}),
            data={"name": generate_id(), "is_superuser": True},
        )
        self.assertEqual(res.status_code, 200)

    def test_superuser_create(self):
        """Test creating a superuser group with permission"""
        assign_perm("authentik_core.add_group", self.login_user)
        assign_perm("authentik_core.enable_group_superuser", self.login_user)
        self.client.force_login(self.login_user)
        res = self.client.post(
            reverse("authentik_api:group-list"),
            data={"name": generate_id(), "is_superuser": True},
        )
        self.assertEqual(res.status_code, 201)
@@ -1,19 +0,0 @@
from django.apps import apps
from django.urls import reverse
from rest_framework.test import APITestCase

from authentik.core.tests.utils import create_test_admin_user


class TestSourceAPI(APITestCase):
    def setUp(self) -> None:
        self.user = create_test_admin_user()
        self.client.force_login(self.user)

    def test_builtin_source_used_by(self):
        """Test Providers's types endpoint"""
        apps.get_app_config("authentik_core").source_inbuilt()
        response = self.client.get(
            reverse("authentik_api:source-used-by", kwargs={"slug": "authentik-built-in"}),
        )
        self.assertEqual(response.status_code, 200)
@@ -13,10 +13,7 @@ from authentik.core.models import (
    TokenIntents,
    User,
)
from authentik.core.tasks import (
    clean_expired_models,
    clean_temporary_users,
)
from authentik.core.tasks import clean_expired_models, clean_temporary_users
from authentik.core.tests.utils import create_test_admin_user
from authentik.lib.generators import generate_id


@@ -1,8 +1,9 @@
"""Test Users API"""

from datetime import datetime
from json import loads

from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.core.cache import cache
from django.urls.base import reverse
from rest_framework.test import APITestCase

@@ -10,17 +11,11 @@ from authentik.brands.models import Brand
from authentik.core.models import (
    USER_ATTRIBUTE_TOKEN_EXPIRING,
    AuthenticatedSession,
    Session,
    Token,
    User,
    UserTypes,
)
from authentik.core.tests.utils import (
    create_test_admin_user,
    create_test_brand,
    create_test_flow,
    create_test_user,
)
from authentik.core.tests.utils import create_test_admin_user, create_test_brand, create_test_flow
from authentik.flows.models import FlowDesignation
from authentik.lib.generators import generate_id, generate_key
from authentik.stages.email.models import EmailStage
@@ -31,7 +26,7 @@ class TestUsersAPI(APITestCase):

    def setUp(self) -> None:
        self.admin = create_test_admin_user()
        self.user = create_test_user()
        self.user = User.objects.create(username="test-user")

    def test_filter_type(self):
        """Test API filtering by type"""
@@ -46,35 +41,6 @@
        )
        self.assertEqual(response.status_code, 200)

    def test_filter_is_superuser(self):
        """Test API filtering by superuser status"""
        User.objects.all().delete()
        admin = create_test_admin_user()
        self.client.force_login(admin)
        # Test superuser
        response = self.client.get(
            reverse("authentik_api:user-list"),
            data={
                "is_superuser": True,
            },
        )
        self.assertEqual(response.status_code, 200)
        body = loads(response.content)
        self.assertEqual(len(body["results"]), 1)
        self.assertEqual(body["results"][0]["username"], admin.username)
        # Test non-superuser
        user = create_test_user()
        response = self.client.get(
            reverse("authentik_api:user-list"),
            data={
                "is_superuser": False,
            },
        )
        self.assertEqual(response.status_code, 200)
        body = loads(response.content)
        self.assertEqual(len(body["results"]), 1, body)
        self.assertEqual(body["results"][0]["username"], user.username)

    def test_list_with_groups(self):
        """Test listing with groups"""
        self.client.force_login(self.admin)
@@ -133,8 +99,6 @@
    def test_recovery_email_no_flow(self):
        """Test user recovery link (no recovery flow set)"""
        self.client.force_login(self.admin)
        self.user.email = ""
        self.user.save()
        response = self.client.post(
            reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk})
        )
@@ -380,15 +344,12 @@
        """Ensure sessions are deleted when a user is deactivated"""
        user = create_test_admin_user()
        session_id = generate_id()
        session = Session.objects.create(
            session_key=session_id,
            last_ip="255.255.255.255",
            last_user_agent="",
        )
        AuthenticatedSession.objects.create(
            session=session,
            user=user,
            session_key=session_id,
            last_ip="",
        )
        cache.set(KEY_PREFIX + session_id, "foo")

        self.client.force_login(self.admin)
        response = self.client.patch(
@@ -399,7 +360,5 @@
        )
        self.assertEqual(response.status_code, 200)

        self.assertFalse(Session.objects.filter(session_key=session_id).exists())
        self.assertFalse(
            AuthenticatedSession.objects.filter(session__session_key=session_id).exists()
        )
        self.assertIsNone(cache.get(KEY_PREFIX + session_id))
        self.assertFalse(AuthenticatedSession.objects.filter(session_key=session_id).exists())
@@ -1,5 +1,7 @@
"""authentik URL Configuration"""

from channels.auth import AuthMiddleware
from channels.sessions import CookieMiddleware
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.urls import path
@@ -11,11 +13,7 @@ from authentik.core.api.devices import AdminDeviceViewSet, DeviceViewSet
from authentik.core.api.groups import GroupViewSet
from authentik.core.api.property_mappings import PropertyMappingViewSet
from authentik.core.api.providers import ProviderViewSet
from authentik.core.api.sources import (
    GroupSourceConnectionViewSet,
    SourceViewSet,
    UserSourceConnectionViewSet,
)
from authentik.core.api.sources import SourceViewSet, UserSourceConnectionViewSet
from authentik.core.api.tokens import TokenViewSet
from authentik.core.api.transactional_applications import TransactionalApplicationView
from authentik.core.api.users import UserViewSet
@@ -27,7 +25,7 @@ from authentik.core.views.interface import (
    RootRedirectView,
)
from authentik.flows.views.interface import FlowInterfaceView
from authentik.root.asgi_middleware import AuthMiddlewareStack
from authentik.root.asgi_middleware import SessionMiddleware
from authentik.root.messages.consumer import MessageConsumer
from authentik.root.middleware import ChannelsLoggingMiddleware

@@ -83,7 +81,6 @@ api_urlpatterns = [
    ("core/tokens", TokenViewSet),
    ("sources/all", SourceViewSet),
    ("sources/user_connections/all", UserSourceConnectionViewSet),
    ("sources/group_connections/all", GroupSourceConnectionViewSet),
    ("providers/all", ProviderViewSet),
    ("propertymappings/all", PropertyMappingViewSet),
    ("authenticators/all", DeviceViewSet, "device"),
@@ -97,7 +94,9 @@
websocket_urlpatterns = [
    path(
        "ws/client/",
        ChannelsLoggingMiddleware(AuthMiddlewareStack(MessageConsumer.as_asgi())),
        ChannelsLoggingMiddleware(
            CookieMiddleware(SessionMiddleware(AuthMiddleware(MessageConsumer.as_asgi())))
        ),
    ),
]
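Note: the websocket route change above inlines the composition that the removed `AuthMiddlewareStack` helper used to provide. A sketch of the equivalent helper, using only the imports visible in this diff (Channels' stock `AuthMiddlewareStack` is composed the same way, just with Channels' own session middleware rather than authentik's ASGI `SessionMiddleware`):

```python
# Sketch: cookie parsing -> session resolution -> user resolution,
# outermost first. Equivalent to what the route now spells out inline.
from channels.auth import AuthMiddleware
from channels.sessions import CookieMiddleware

from authentik.root.asgi_middleware import SessionMiddleware


def auth_middleware_stack(inner):
    return CookieMiddleware(SessionMiddleware(AuthMiddleware(inner)))
```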
@@ -55,7 +55,7 @@ class RedirectToAppLaunch(View):
            )
        except FlowNonApplicableException:
            raise Http404 from None
        plan.append_stage(in_memory_stage(RedirectToAppStage))
        plan.insert_stage(in_memory_stage(RedirectToAppStage))
        return plan.to_redirect(request, flow)
@@ -53,7 +53,6 @@ class InterfaceView(TemplateView):
        kwargs["build"] = get_build_hash()
        kwargs["url_kwargs"] = self.kwargs
        kwargs["base_url"] = self.request.build_absolute_uri(CONFIG.get("web.path", "/"))
        kwargs["base_url_rel"] = CONFIG.get("web.path", "/")
        return super().get_context_data(**kwargs)
@@ -97,8 +97,6 @@ class EnterpriseAuditMiddleware(AuditMiddleware):
        thread_kwargs: dict | None = None,
        **_,
    ):
        if not self.enabled:
            return super().post_save_handler(request, sender, instance, created, thread_kwargs, **_)
        if not should_log_model(instance):
            return None
        thread_kwargs = {}
@@ -124,8 +122,6 @@ class EnterpriseAuditMiddleware(AuditMiddleware):
    ):
        thread_kwargs = {}
        m2m_field = None
        if not self.enabled:
            return super().m2m_changed_handler(request, sender, instance, action, thread_kwargs)
        # For the audit log we don't care about `pre_` or `post_` so we trim that part off
        _, _, action_direction = action.partition("_")
        # resolve the "through" model to an actual field
@@ -1,27 +0,0 @@
from rest_framework.viewsets import ModelViewSet

from authentik.core.api.used_by import UsedByMixin
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.policies.unique_password.models import UniquePasswordPolicy
from authentik.policies.api.policies import PolicySerializer


class UniquePasswordPolicySerializer(EnterpriseRequiredMixin, PolicySerializer):
    """Password Uniqueness Policy Serializer"""

    class Meta:
        model = UniquePasswordPolicy
        fields = PolicySerializer.Meta.fields + [
            "password_field",
            "num_historical_passwords",
        ]


class UniquePasswordPolicyViewSet(UsedByMixin, ModelViewSet):
    """Password Uniqueness Policy Viewset"""

    queryset = UniquePasswordPolicy.objects.all()
    serializer_class = UniquePasswordPolicySerializer
    filterset_fields = "__all__"
    ordering = ["name"]
    search_fields = ["name"]
@@ -1,10 +0,0 @@
"""authentik Unique Password policy app config"""

from authentik.enterprise.apps import EnterpriseConfig


class AuthentikEnterprisePoliciesUniquePasswordConfig(EnterpriseConfig):
    name = "authentik.enterprise.policies.unique_password"
    label = "authentik_policies_unique_password"
    verbose_name = "authentik Enterprise.Policies.Unique Password"
    default = True
@@ -1,81 +0,0 @@
# Generated by Django 5.0.13 on 2025-03-26 23:02

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ("authentik_policies", "0011_policybinding_failure_result_and_more"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name="UniquePasswordPolicy",
            fields=[
                (
                    "policy_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="authentik_policies.policy",
                    ),
                ),
                (
                    "password_field",
                    models.TextField(
                        default="password",
                        help_text="Field key to check, field keys defined in Prompt stages are available.",
                    ),
                ),
                (
                    "num_historical_passwords",
                    models.PositiveIntegerField(
                        default=1, help_text="Number of passwords to check against."
                    ),
                ),
            ],
            options={
                "verbose_name": "Password Uniqueness Policy",
                "verbose_name_plural": "Password Uniqueness Policies",
                "indexes": [
                    models.Index(fields=["policy_ptr_id"], name="authentik_p_policy__f559aa_idx")
                ],
            },
            bases=("authentik_policies.policy",),
        ),
        migrations.CreateModel(
            name="UserPasswordHistory",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("old_password", models.CharField(max_length=128)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("hibp_prefix_sha1", models.CharField(max_length=5)),
                ("hibp_pw_hash", models.TextField()),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="old_passwords",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "verbose_name": "User Password History",
            },
        ),
    ]
@@ -1,151 +0,0 @@
from hashlib import sha1

from django.contrib.auth.hashers import identify_hasher, make_password
from django.db import models
from django.utils.translation import gettext as _
from rest_framework.serializers import BaseSerializer
from structlog.stdlib import get_logger

from authentik.core.models import User
from authentik.policies.models import Policy
from authentik.policies.types import PolicyRequest, PolicyResult
from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT

LOGGER = get_logger()


class UniquePasswordPolicy(Policy):
    """This policy prevents users from reusing old passwords."""

    password_field = models.TextField(
        default="password",
        help_text=_("Field key to check, field keys defined in Prompt stages are available."),
    )

    # Limit on the number of previous passwords the policy evaluates
    # Also controls number of old passwords the system stores.
    num_historical_passwords = models.PositiveIntegerField(
        default=1,
        help_text=_("Number of passwords to check against."),
    )

    @property
    def serializer(self) -> type[BaseSerializer]:
        from authentik.enterprise.policies.unique_password.api import UniquePasswordPolicySerializer

        return UniquePasswordPolicySerializer

    @property
    def component(self) -> str:
        return "ak-policy-password-uniqueness-form"

    def passes(self, request: PolicyRequest) -> PolicyResult:
        from authentik.enterprise.policies.unique_password.models import UserPasswordHistory

        password = request.context.get(PLAN_CONTEXT_PROMPT, {}).get(
            self.password_field, request.context.get(self.password_field)
        )
        if not password:
            LOGGER.warning(
                "Password field not found in request when checking UniquePasswordPolicy",
                field=self.password_field,
                fields=request.context.keys(),
            )
            return PolicyResult(False, _("Password not set in context"))
        password = str(password)

        if not self.num_historical_passwords:
            # Policy not configured to check against any passwords
            return PolicyResult(True)

        num_to_check = self.num_historical_passwords
        password_history = UserPasswordHistory.objects.filter(user=request.user).order_by(
            "-created_at"
        )[:num_to_check]

        if not password_history:
            return PolicyResult(True)

        for record in password_history:
            if not record.old_password:
                continue

            if self._passwords_match(new_password=password, old_password=record.old_password):
                # Return on first match. Authentik does not consider timing attacks
                # on old passwords to be an attack surface.
                return PolicyResult(
                    False,
                    _("This password has been used previously. Please choose a different one."),
                )

        return PolicyResult(True)
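Note: a short usage sketch of the `passes` flow above, mirroring the unit tests further down in this diff. `user` and `policy` are placeholders for any `User` row and a saved `UniquePasswordPolicy` respectively:

```python
# Hypothetical usage; `user` and `policy` are placeholders, not names
# defined in this diff.
from authentik.policies.types import PolicyRequest
from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT

request = PolicyRequest(user)
request.context = {PLAN_CONTEXT_PROMPT: {"password": "candidate-password"}}

result = policy.passes(request)
if not result.passing:
    print(result.messages[0])  # "This password has been used previously. ..."
```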
    def _passwords_match(self, *, new_password: str, old_password: str) -> bool:
        try:
            hasher = identify_hasher(old_password)
        except ValueError:
            LOGGER.warning(
                "Skipping password; could not load hash algorithm",
            )
            return False

        return hasher.verify(new_password, old_password)

    @classmethod
    def is_in_use(cls):
        """Check if any UniquePasswordPolicy is in use, either through policy bindings
        or direct attachment to a PromptStage.

        Returns:
            bool: True if any policy is in use, False otherwise
        """
        from authentik.policies.models import PolicyBinding

        # Check if any policy is in use through bindings
        if PolicyBinding.in_use.for_policy(cls).exists():
            return True

        # Check if any policy is attached to a PromptStage
        if cls.objects.filter(promptstage__isnull=False).exists():
            return True

        return False

    class Meta(Policy.PolicyMeta):
        verbose_name = _("Password Uniqueness Policy")
        verbose_name_plural = _("Password Uniqueness Policies")


class UserPasswordHistory(models.Model):
    user = models.ForeignKey(User, on_delete=models.CASCADE, related_name="old_passwords")
    # Mimics the column type of AbstractBaseUser.password
    old_password = models.CharField(max_length=128)
    created_at = models.DateTimeField(auto_now_add=True)

    hibp_prefix_sha1 = models.CharField(max_length=5)
    hibp_pw_hash = models.TextField()

    class Meta:
        verbose_name = _("User Password History")

    def __str__(self) -> str:
        timestamp = f"{self.created_at:%Y/%m/%d %X}" if self.created_at else "N/A"
        return f"Previous Password (user: {self.user_id}, recorded: {timestamp})"

    @classmethod
    def create_for_user(cls, user: User, password: str):
        # To check users' passwords against Have I been Pwned, we need the first 5 chars
        # of the password hashed with SHA1 without a salt...
        pw_hash_sha1 = sha1(password.encode("utf-8")).hexdigest()  # nosec
        # ...however that'll give us a list of hashes from HIBP, and to compare that we still
        # need a full unsalted SHA1 of the password. We don't want to save that directly in
        # the database, so we hash that SHA1 again with a modern hashing alg,
        # and then when we check users' passwords against HIBP we can use `check_password`
        # which will take care of this.
        hibp_hash_hash = make_password(pw_hash_sha1)
        return cls.objects.create(
            user=user,
            old_password=password,
            hibp_prefix_sha1=pw_hash_sha1[:5],
            hibp_pw_hash=hibp_hash_hash,
        )
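Note: the comments in `create_for_user` describe how the two `hibp_*` columns are meant to be consumed, but that consumption is not shown in this diff. A sketch under those assumptions, against the real HIBP range API (`password_seen_in_breach` and the `requests` dependency are illustrative, not part of the change):

```python
# Illustrative only: how hibp_prefix_sha1/hibp_pw_hash could drive a
# k-anonymity range check. check_password and the HIBP endpoint are real;
# this function is a hypothetical consumer.
import requests
from django.contrib.auth.hashers import check_password


def password_seen_in_breach(record) -> bool:
    """record: a UserPasswordHistory row."""
    url = f"https://api.pwnedpasswords.com/range/{record.hibp_prefix_sha1}"
    response = requests.get(url, timeout=10)
    response.raise_for_status()
    for line in response.text.splitlines():
        suffix, _, _count = line.partition(":")
        # Rebuild the full unsalted SHA1; hibp_pw_hash stores
        # make_password(sha1(password)), so check_password re-hashes
        # the candidate and compares without storing the raw SHA1.
        candidate = (record.hibp_prefix_sha1 + suffix).lower()
        if check_password(candidate, record.hibp_pw_hash):
            return True
    return False
```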
@@ -1,20 +0,0 @@
"""Unique Password Policy settings"""

from celery.schedules import crontab

from authentik.lib.utils.time import fqdn_rand

CELERY_BEAT_SCHEDULE = {
    "policies_unique_password_trim_history": {
        "task": "authentik.enterprise.policies.unique_password.tasks.trim_password_histories",
        "schedule": crontab(minute=fqdn_rand("policies_unique_password_trim"), hour="*/12"),
        "options": {"queue": "authentik_scheduled"},
    },
    "policies_unique_password_check_purge": {
        "task": (
            "authentik.enterprise.policies.unique_password.tasks.check_and_purge_password_history"
        ),
        "schedule": crontab(minute=fqdn_rand("policies_unique_password_purge"), hour="*/24"),
        "options": {"queue": "authentik_scheduled"},
    },
}
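Note: both beat entries derive their minute from `fqdn_rand`, which presumably combines the host's FQDN with the seed string so every deployment fires at a stable but staggered minute instead of all hosts hitting the same minute. An illustrative sketch; the real helper lives in `authentik.lib.utils.time` and may be implemented differently:

```python
# Assumption-labeled sketch of a deterministic per-host minute in [0, stop).
from hashlib import sha256
from socket import getfqdn


def fqdn_rand_sketch(seed: str, stop: int = 60) -> int:
    digest = sha256(f"{getfqdn()}/{seed}".encode()).hexdigest()
    return int(digest, 16) % stop
```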
@@ -1,23 +0,0 @@
"""authentik policy signals"""

from django.dispatch import receiver

from authentik.core.models import User
from authentik.core.signals import password_changed
from authentik.enterprise.policies.unique_password.models import (
    UniquePasswordPolicy,
    UserPasswordHistory,
)


@receiver(password_changed)
def copy_password_to_password_history(sender, user: User, *args, **kwargs):
    """Preserve the user's old password if UniquePasswordPolicy is enabled anywhere"""
    # Check if any UniquePasswordPolicy is in use
    unique_pwd_policy_in_use = UniquePasswordPolicy.is_in_use()

    if unique_pwd_policy_in_use:
        """NOTE: Because we run this in a signal after saving the user,
        we are not atomically guaranteed to save password history.
        """
        UserPasswordHistory.create_for_user(user, user.password)
@@ -1,66 +0,0 @@
from django.db.models.aggregates import Count
from structlog import get_logger

from authentik.enterprise.policies.unique_password.models import (
    UniquePasswordPolicy,
    UserPasswordHistory,
)
from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
from authentik.root.celery import CELERY_APP

LOGGER = get_logger()


@CELERY_APP.task(bind=True, base=SystemTask)
@prefill_task
def check_and_purge_password_history(self: SystemTask):
    """Check if any UniquePasswordPolicy exists, and if not, purge the password history table.
    This is run on a schedule instead of being triggered by policy binding deletion.
    """
    if not UniquePasswordPolicy.objects.exists():
        UserPasswordHistory.objects.all().delete()
        LOGGER.debug("Purged UserPasswordHistory table as no policies are in use")
        self.set_status(TaskStatus.SUCCESSFUL, "Successfully purged UserPasswordHistory")
        return

    self.set_status(
        TaskStatus.SUCCESSFUL, "Not purging password histories, a unique password policy exists"
    )


@CELERY_APP.task(bind=True, base=SystemTask)
def trim_password_histories(self: SystemTask):
    """Removes rows from UserPasswordHistory older than
    the `n` most recent entries.

    The `n` is defined by the largest configured value for all bound
    UniquePasswordPolicy policies.
    """

    # No policy, we'll let the cleanup above do its thing
    if not UniquePasswordPolicy.objects.exists():
        return

    num_rows_to_preserve = 0
    for policy in UniquePasswordPolicy.objects.all():
        num_rows_to_preserve = max(num_rows_to_preserve, policy.num_historical_passwords)

    all_pks_to_keep = []

    # Get all users who have password history entries
    users_with_history = (
        UserPasswordHistory.objects.values("user")
        .annotate(count=Count("user"))
        .filter(count__gt=0)
        .values_list("user", flat=True)
    )
    for user_pk in users_with_history:
        entries = UserPasswordHistory.objects.filter(user__pk=user_pk)
        pks_to_keep = entries.order_by("-created_at")[:num_rows_to_preserve].values_list(
            "pk", flat=True
        )
        all_pks_to_keep.extend(pks_to_keep)

    num_deleted, _ = UserPasswordHistory.objects.exclude(pk__in=all_pks_to_keep).delete()
    LOGGER.debug("Deleted stale password history records", count=num_deleted)
    self.set_status(TaskStatus.SUCCESSFUL, f"Delete {num_deleted} stale password history records")
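Note: both are plain Celery tasks, so they can be queued or called synchronously; the tests later in this diff exercise them exactly this way:

```python
# As used by the tests further down (executed eagerly under test settings):
trim_password_histories.delay()       # enqueue via Celery
check_and_purge_password_history()    # or invoke synchronously
```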
@@ -1,108 +0,0 @@
"""Unique Password Policy flow tests"""

from django.contrib.auth.hashers import make_password
from django.urls.base import reverse

from authentik.core.tests.utils import create_test_flow, create_test_user
from authentik.enterprise.policies.unique_password.models import (
    UniquePasswordPolicy,
    UserPasswordHistory,
)
from authentik.flows.models import FlowDesignation, FlowStageBinding
from authentik.flows.tests import FlowTestCase
from authentik.lib.generators import generate_id
from authentik.stages.prompt.models import FieldTypes, Prompt, PromptStage


class TestUniquePasswordPolicyFlow(FlowTestCase):
    """Test Unique Password Policy in a flow"""

    REUSED_PASSWORD = "hunter1"  # nosec B105

    def setUp(self) -> None:
        self.user = create_test_user()
        self.flow = create_test_flow(FlowDesignation.AUTHENTICATION)

        password_prompt = Prompt.objects.create(
            name=generate_id(),
            field_key="password",
            label="PASSWORD_LABEL",
            type=FieldTypes.PASSWORD,
            required=True,
            placeholder="PASSWORD_PLACEHOLDER",
        )

        self.policy = UniquePasswordPolicy.objects.create(
            name="password_must_unique",
            password_field=password_prompt.field_key,
            num_historical_passwords=1,
        )
        stage = PromptStage.objects.create(name="prompt-stage")
        stage.validation_policies.set([self.policy])
        stage.fields.set(
            [
                password_prompt,
            ]
        )
        FlowStageBinding.objects.create(target=self.flow, stage=stage, order=2)

        # Seed the user's password history
        UserPasswordHistory.create_for_user(self.user, make_password(self.REUSED_PASSWORD))

    def test_prompt_data(self):
        """Test policy attached to a prompt stage"""
        # Test the policy directly
        from authentik.policies.types import PolicyRequest
        from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT

        # Create a policy request with the reused password
        request = PolicyRequest(user=self.user)
        request.context[PLAN_CONTEXT_PROMPT] = {"password": self.REUSED_PASSWORD}

        # Test the policy directly
        result = self.policy.passes(request)

        # Verify that the policy fails (returns False) with the expected error message
        self.assertFalse(result.passing, "Policy should fail for reused password")
        self.assertEqual(
            result.messages[0],
            "This password has been used previously. Please choose a different one.",
            "Incorrect error message",
        )

        # API-based testing approach:

        self.client.force_login(self.user)

        # Send a POST request to the flow executor with the reused password
        response = self.client.post(
            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
            {"password": self.REUSED_PASSWORD},
        )
        self.assertStageResponse(
            response,
            self.flow,
            component="ak-stage-prompt",
            fields=[
                {
                    "choices": None,
                    "field_key": "password",
                    "label": "PASSWORD_LABEL",
                    "order": 0,
                    "placeholder": "PASSWORD_PLACEHOLDER",
                    "initial_value": "",
                    "required": True,
                    "type": "password",
                    "sub_text": "",
                }
            ],
            response_errors={
                "non_field_errors": [
                    {
                        "code": "invalid",
                        "string": "This password has been used previously. "
                        "Please choose a different one.",
                    }
                ]
            },
        )
@@ -1,77 +0,0 @@
"""Unique Password Policy tests"""

from django.contrib.auth.hashers import make_password
from django.test import TestCase
from guardian.shortcuts import get_anonymous_user

from authentik.core.models import User
from authentik.enterprise.policies.unique_password.models import (
    UniquePasswordPolicy,
    UserPasswordHistory,
)
from authentik.policies.types import PolicyRequest, PolicyResult
from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT


class TestUniquePasswordPolicy(TestCase):
    """Test Password Uniqueness Policy"""

    def setUp(self) -> None:
        self.policy = UniquePasswordPolicy.objects.create(
            name="test_unique_password", num_historical_passwords=1
        )
        self.user = User.objects.create(username="test-user")

    def test_invalid(self):
        """Test without password present in request"""
        request = PolicyRequest(get_anonymous_user())
        result: PolicyResult = self.policy.passes(request)
        self.assertFalse(result.passing)
        self.assertEqual(result.messages[0], "Password not set in context")

    def test_passes_no_previous_passwords(self):
        request = PolicyRequest(get_anonymous_user())
        request.context = {PLAN_CONTEXT_PROMPT: {"password": "hunter2"}}
        result: PolicyResult = self.policy.passes(request)
        self.assertTrue(result.passing)

    def test_passes_passwords_are_different(self):
        # Seed database with an old password
        UserPasswordHistory.create_for_user(self.user, make_password("hunter1"))

        request = PolicyRequest(self.user)
        request.context = {PLAN_CONTEXT_PROMPT: {"password": "hunter2"}}
        result: PolicyResult = self.policy.passes(request)
        self.assertTrue(result.passing)

    def test_passes_multiple_old_passwords(self):
        # Seed with multiple old passwords
        UserPasswordHistory.objects.bulk_create(
            [
                UserPasswordHistory(user=self.user, old_password=make_password("hunter1")),
                UserPasswordHistory(user=self.user, old_password=make_password("hunter2")),
            ]
        )
        request = PolicyRequest(self.user)
        request.context = {PLAN_CONTEXT_PROMPT: {"password": "hunter3"}}
        result: PolicyResult = self.policy.passes(request)
        self.assertTrue(result.passing)

    def test_fails_password_matches_old_password(self):
        # Seed database with an old password

        UserPasswordHistory.create_for_user(self.user, make_password("hunter1"))

        request = PolicyRequest(self.user)
        request.context = {PLAN_CONTEXT_PROMPT: {"password": "hunter1"}}
        result: PolicyResult = self.policy.passes(request)
        self.assertFalse(result.passing)

    def test_fails_if_identical_password_with_different_hash_algos(self):
        UserPasswordHistory.create_for_user(
            self.user, make_password("hunter2", "somesalt", "scrypt")
        )
        request = PolicyRequest(self.user)
        request.context = {PLAN_CONTEXT_PROMPT: {"password": "hunter2"}}
        result: PolicyResult = self.policy.passes(request)
        self.assertFalse(result.passing)
@@ -1,90 +0,0 @@
from django.urls import reverse

from authentik.core.models import Group, Source, User
from authentik.core.tests.utils import create_test_flow, create_test_user
from authentik.enterprise.policies.unique_password.models import (
    UniquePasswordPolicy,
    UserPasswordHistory,
)
from authentik.flows.markers import StageMarker
from authentik.flows.models import FlowStageBinding
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlan
from authentik.flows.tests import FlowTestCase
from authentik.flows.views.executor import SESSION_KEY_PLAN
from authentik.lib.generators import generate_key
from authentik.policies.models import PolicyBinding, PolicyBindingModel
from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT
from authentik.stages.user_write.models import UserWriteStage


class TestUserWriteStage(FlowTestCase):
    """Write tests"""

    def setUp(self):
        super().setUp()
        self.flow = create_test_flow()
        self.group = Group.objects.create(name="test-group")
        self.other_group = Group.objects.create(name="other-group")
        self.stage: UserWriteStage = UserWriteStage.objects.create(
            name="write", create_users_as_inactive=True, create_users_group=self.group
        )
        self.binding = FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=2)
        self.source = Source.objects.create(name="fake_source")

    def test_save_password_history_if_policy_binding_enforced(self):
        """Test user's new password is recorded when ANY enabled UniquePasswordPolicy exists"""
        unique_password_policy = UniquePasswordPolicy.objects.create(num_historical_passwords=5)
        pbm = PolicyBindingModel.objects.create()
        PolicyBinding.objects.create(
            target=pbm, policy=unique_password_policy, order=0, enabled=True
        )

        test_user = create_test_user()
        # Store original password for verification
        original_password = test_user.password

        # We're changing our own password
        self.client.force_login(test_user)

        new_password = generate_key()
        plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
        plan.context[PLAN_CONTEXT_PENDING_USER] = test_user
        plan.context[PLAN_CONTEXT_PROMPT] = {
            "username": test_user.username,
            "password": new_password,
        }
        session = self.client.session
        session[SESSION_KEY_PLAN] = plan
        session.save()
        # Password history should be recorded
        user_password_history_qs = UserPasswordHistory.objects.filter(user=test_user)
        self.assertTrue(user_password_history_qs.exists(), "Password history should be recorded")
        self.assertEqual(len(user_password_history_qs), 1, "expected 1 recorded password")

        # Create a password history entry manually to simulate the signal behavior
        # This is what would happen if the signal worked correctly
        UserPasswordHistory.objects.create(user=test_user, old_password=original_password)
        user_password_history_qs = UserPasswordHistory.objects.filter(user=test_user)
        self.assertTrue(user_password_history_qs.exists(), "Password history should be recorded")
        self.assertEqual(len(user_password_history_qs), 2, "expected 2 recorded passwords")

        # Execute the flow by sending a POST request to the flow executor endpoint
        response = self.client.post(
            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug})
        )

        # Verify that the request was successful
        self.assertEqual(response.status_code, 200)
        user_qs = User.objects.filter(username=plan.context[PLAN_CONTEXT_PROMPT]["username"])
        self.assertTrue(user_qs.exists())

        # Verify the password history entry exists
        user_password_history_qs = UserPasswordHistory.objects.filter(user=test_user)
        self.assertTrue(user_password_history_qs.exists(), "Password history should be recorded")

        self.assertEqual(len(user_password_history_qs), 3, "expected 3 recorded passwords")
        # Verify that one of the entries contains the original password
        self.assertTrue(
            any(entry.old_password == original_password for entry in user_password_history_qs),
            "original password should be in password history table",
        )
@@ -1,178 +0,0 @@
from datetime import datetime, timedelta

from django.test import TestCase

from authentik.core.tests.utils import create_test_user
from authentik.enterprise.policies.unique_password.models import (
    UniquePasswordPolicy,
    UserPasswordHistory,
)
from authentik.enterprise.policies.unique_password.tasks import (
    check_and_purge_password_history,
    trim_password_histories,
)
from authentik.policies.models import PolicyBinding, PolicyBindingModel


class TestUniquePasswordPolicyModel(TestCase):
    """Test the UniquePasswordPolicy model methods"""

    def test_is_in_use_with_binding(self):
        """Test is_in_use returns True when a policy binding exists"""
        # Create a UniquePasswordPolicy and a PolicyBinding for it
        policy = UniquePasswordPolicy.objects.create(num_historical_passwords=5)
        pbm = PolicyBindingModel.objects.create()
        PolicyBinding.objects.create(target=pbm, policy=policy, order=0, enabled=True)

        # Verify is_in_use returns True
        self.assertTrue(UniquePasswordPolicy.is_in_use())

    def test_is_in_use_with_promptstage(self):
        """Test is_in_use returns True when attached to a PromptStage"""
        from authentik.stages.prompt.models import PromptStage

        # Create a UniquePasswordPolicy and attach it to a PromptStage
        policy = UniquePasswordPolicy.objects.create(num_historical_passwords=5)
        prompt_stage = PromptStage.objects.create(
            name="Test Prompt Stage",
        )
        # Use the set() method for many-to-many relationships
        prompt_stage.validation_policies.set([policy])

        # Verify is_in_use returns True
        self.assertTrue(UniquePasswordPolicy.is_in_use())


class TestTrimAllPasswordHistories(TestCase):
    """Test the task that trims password history for all users"""

    def setUp(self):
        self.user1 = create_test_user("test-user1")
        self.user2 = create_test_user("test-user2")
        self.pbm = PolicyBindingModel.objects.create()
        # Create a policy with a limit of 1 password
        self.policy = UniquePasswordPolicy.objects.create(num_historical_passwords=1)
        PolicyBinding.objects.create(
            target=self.pbm,
            policy=self.policy,
            enabled=True,
            order=0,
        )


class TestCheckAndPurgePasswordHistory(TestCase):
    """Test the scheduled task that checks if any policy is in use and purges if not"""

    def setUp(self):
        self.user = create_test_user("test-user")
        self.pbm = PolicyBindingModel.objects.create()

    def test_purge_when_no_policy_in_use(self):
        """Test that the task purges the table when no policy is in use"""
        # Create some password history entries
        UserPasswordHistory.create_for_user(self.user, "hunter2")

        # Verify we have entries
        self.assertTrue(UserPasswordHistory.objects.exists())

        # Run the task - should purge since no policy is in use
        check_and_purge_password_history()

        # Verify the table is empty
        self.assertFalse(UserPasswordHistory.objects.exists())

    def test_no_purge_when_policy_in_use(self):
        """Test that the task doesn't purge when a policy is in use"""
        # Create a policy and binding
        policy = UniquePasswordPolicy.objects.create(num_historical_passwords=5)
        PolicyBinding.objects.create(
            target=self.pbm,
            policy=policy,
            enabled=True,
            order=0,
        )

        # Create some password history entries
        UserPasswordHistory.create_for_user(self.user, "hunter2")

        # Verify we have entries
        self.assertTrue(UserPasswordHistory.objects.exists())

        # Run the task - should NOT purge since a policy is in use
        check_and_purge_password_history()

        # Verify the entries still exist
        self.assertTrue(UserPasswordHistory.objects.exists())


class TestTrimPasswordHistory(TestCase):
    """Test password history cleanup task"""

    def setUp(self):
        self.user = create_test_user("test-user")
        self.pbm = PolicyBindingModel.objects.create()

    def test_trim_password_history_ok(self):
        """Test passwords over the defined limit are deleted"""
        _now = datetime.now()
        UserPasswordHistory.objects.bulk_create(
            [
                UserPasswordHistory(
                    user=self.user,
                    old_password="hunter1",  # nosec B106
                    created_at=_now - timedelta(days=3),
                ),
                UserPasswordHistory(
                    user=self.user,
                    old_password="hunter2",  # nosec B106
                    created_at=_now - timedelta(days=2),
                ),
                UserPasswordHistory(
                    user=self.user,
                    old_password="hunter3",  # nosec B106
                    created_at=_now,
                ),
            ]
        )

        policy = UniquePasswordPolicy.objects.create(num_historical_passwords=1)
        PolicyBinding.objects.create(
            target=self.pbm,
            policy=policy,
            enabled=True,
            order=0,
        )
        trim_password_histories.delay()
        user_pwd_history_qs = UserPasswordHistory.objects.filter(user=self.user)
        self.assertEqual(len(user_pwd_history_qs), 1)

    def test_trim_password_history_policy_disabled_no_op(self):
        """Test no passwords removed if policy binding is disabled"""

        # Insert a record to ensure it's not deleted after executing task
        UserPasswordHistory.create_for_user(self.user, "hunter2")

        policy = UniquePasswordPolicy.objects.create(num_historical_passwords=1)
        PolicyBinding.objects.create(
            target=self.pbm,
            policy=policy,
            enabled=False,
            order=0,
        )
        trim_password_histories.delay()
        self.assertTrue(UserPasswordHistory.objects.filter(user=self.user).exists())

    def test_trim_password_history_fewer_records_than_maximum_is_no_op(self):
        """Test no passwords deleted if fewer passwords exist than limit"""

        UserPasswordHistory.create_for_user(self.user, "hunter2")

        policy = UniquePasswordPolicy.objects.create(num_historical_passwords=2)
        PolicyBinding.objects.create(
            target=self.pbm,
            policy=policy,
            enabled=True,
            order=0,
        )
        trim_password_histories.delay()
        self.assertTrue(UserPasswordHistory.objects.filter(user=self.user).exists())
@@ -1,7 +0,0 @@
"""API URLs"""

from authentik.enterprise.policies.unique_password.api import UniquePasswordPolicyViewSet

api_urlpatterns = [
    ("policies/unique_password", UniquePasswordPolicyViewSet),
]
@@ -37,7 +37,6 @@ class GoogleWorkspaceProviderSerializer(EnterpriseRequiredMixin, ProviderSeriali
            "user_delete_action",
            "group_delete_action",
            "default_group_email_domain",
            "dry_run",
        ]
        extra_kwargs = {}
@@ -8,10 +8,9 @@ from httplib2 import HttpLib2Error, HttpLib2ErrorWithResponse

from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider
from authentik.lib.sync.outgoing import HTTP_CONFLICT
from authentik.lib.sync.outgoing.base import SAFE_METHODS, BaseOutgoingSyncClient
from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
from authentik.lib.sync.outgoing.exceptions import (
    BadRequestSyncException,
    DryRunRejected,
    NotFoundSyncException,
    ObjectExistsSyncException,
    StopSync,
@@ -44,8 +43,6 @@ class GoogleWorkspaceSyncClient[TModel: Model, TConnection: Model, TSchema: dict
        self.domains.append(domain_name)

    def _request(self, request: HttpRequest):
        if self.provider.dry_run and request.method.upper() not in SAFE_METHODS:
            raise DryRunRejected(request.uri, request.method, request.body)
        try:
            response = request.execute()
        except GoogleAuthError as exc:
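Note: the dry-run gate removed above (and its Microsoft Entra counterpart below) only lets read-only HTTP verbs through. `SAFE_METHODS` comes from `authentik.lib.sync.outgoing.base`; its exact value is not shown in this diff, but presumably matches the conventional read-only set. A sketch under that assumption:

```python
# Presumed shape of the dry-run gate; SAFE_METHODS' real definition is
# not visible in this diff.
SAFE_METHODS = ("GET", "HEAD", "OPTIONS", "TRACE")


def dry_run_rejects(method: str, dry_run: bool) -> bool:
    # In dry-run mode, any request that could mutate the remote system
    # is rejected before it is sent.
    return dry_run and method.upper() not in SAFE_METHODS
```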
@@ -1,24 +0,0 @@
# Generated by Django 5.0.12 on 2025-02-24 19:43

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        (
            "authentik_providers_google_workspace",
            "0003_googleworkspaceprovidergroup_attributes_and_more",
        ),
    ]

    operations = [
        migrations.AddField(
            model_name="googleworkspaceprovider",
            name="dry_run",
            field=models.BooleanField(
                default=False,
                help_text="When enabled, provider will not modify or create objects in the remote system.",
            ),
        ),
    ]
@@ -36,7 +36,6 @@ class MicrosoftEntraProviderSerializer(EnterpriseRequiredMixin, ProviderSerializ
            "filter_group",
            "user_delete_action",
            "group_delete_action",
            "dry_run",
        ]
        extra_kwargs = {}
@@ -3,7 +3,6 @@ from collections.abc import Coroutine
from dataclasses import asdict
from typing import Any

import httpx
from azure.core.exceptions import (
    ClientAuthenticationError,
    ServiceRequestError,
@@ -13,7 +12,6 @@ from azure.identity.aio import ClientSecretCredential
from django.db.models import Model
from django.http import HttpResponseBadRequest, HttpResponseNotFound
from kiota_abstractions.api_error import APIError
from kiota_abstractions.request_information import RequestInformation
from kiota_authentication_azure.azure_identity_authentication_provider import (
    AzureIdentityAuthenticationProvider,
)
@@ -23,15 +21,13 @@ from msgraph.generated.models.o_data_errors.o_data_error import ODataError
from msgraph.graph_request_adapter import GraphRequestAdapter, options
from msgraph.graph_service_client import GraphServiceClient
from msgraph_core import GraphClientFactory
from opentelemetry import trace

from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider
from authentik.events.utils import sanitize_item
from authentik.lib.sync.outgoing import HTTP_CONFLICT
from authentik.lib.sync.outgoing.base import SAFE_METHODS, BaseOutgoingSyncClient
from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
from authentik.lib.sync.outgoing.exceptions import (
    BadRequestSyncException,
    DryRunRejected,
    NotFoundSyncException,
    ObjectExistsSyncException,
    StopSync,
@@ -39,24 +35,20 @@ from authentik.lib.sync.outgoing.exceptions import (
)


class AuthentikRequestAdapter(GraphRequestAdapter):
    def __init__(self, auth_provider, provider: MicrosoftEntraProvider, client=None):
        super().__init__(auth_provider, client)
        self._provider = provider
def get_request_adapter(
    credentials: ClientSecretCredential, scopes: list[str] | None = None
) -> GraphRequestAdapter:
    if scopes:
        auth_provider = AzureIdentityAuthenticationProvider(credentials=credentials, scopes=scopes)
    else:
        auth_provider = AzureIdentityAuthenticationProvider(credentials=credentials)

    async def get_http_response_message(
        self,
        request_info: RequestInformation,
        parent_span: trace.Span,
        claims: str = "",
    ) -> httpx.Response:
        if self._provider.dry_run and request_info.http_method.value.upper() not in SAFE_METHODS:
            raise DryRunRejected(
                url=request_info.url,
                method=request_info.http_method.value,
                body=request_info.content.decode("utf-8"),
            )
        return await super().get_http_response_message(request_info, parent_span, claims=claims)
    return GraphRequestAdapter(
        auth_provider=auth_provider,
        client=GraphClientFactory.create_with_default_middleware(
            options=options, client=KiotaClientFactory.get_default_client()
        ),
    )


class MicrosoftEntraSyncClient[TModel: Model, TConnection: Model, TSchema: dict](
@@ -71,27 +63,9 @@ class MicrosoftEntraSyncClient[TModel: Model, TConnection: Model, TSchema: dict]
        self.credentials = provider.microsoft_credentials()
        self.__prefetch_domains()

    def get_request_adapter(
        self, credentials: ClientSecretCredential, scopes: list[str] | None = None
    ) -> AuthentikRequestAdapter:
        if scopes:
            auth_provider = AzureIdentityAuthenticationProvider(
                credentials=credentials, scopes=scopes
            )
        else:
            auth_provider = AzureIdentityAuthenticationProvider(credentials=credentials)

        return AuthentikRequestAdapter(
            auth_provider=auth_provider,
            provider=self.provider,
            client=GraphClientFactory.create_with_default_middleware(
                options=options, client=KiotaClientFactory.get_default_client()
            ),
        )

    @property
    def client(self):
        return GraphServiceClient(request_adapter=self.get_request_adapter(**self.credentials))
        return GraphServiceClient(request_adapter=get_request_adapter(**self.credentials))

    def _request[T](self, request: Coroutine[Any, Any, T]) -> T:
        try:
@@ -1,24 +0,0 @@
# Generated by Django 5.0.12 on 2025-02-24 19:43

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        (
            "authentik_providers_microsoft_entra",
            "0002_microsoftentraprovidergroup_attributes_and_more",
        ),
    ]

    operations = [
        migrations.AddField(
            model_name="microsoftentraprovider",
            name="dry_run",
            field=models.BooleanField(
                default=False,
                help_text="When enabled, provider will not modify or create objects in the remote system.",
            ),
        ),
    ]
@@ -32,6 +32,7 @@ class MicrosoftEntraUserTests(APITestCase):

    @apply_blueprint("system/providers-microsoft-entra.yaml")
    def setUp(self) -> None:

        # Delete all users and groups as the mocked HTTP responses only return one ID
        # which will cause errors with multiple users
        Tenant.objects.update(avatars="none")
@@ -96,38 +97,6 @@ class MicrosoftEntraUserTests(APITestCase):
        self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
        user_create.assert_called_once()

    def test_user_create_dry_run(self):
        """Test user creation (dry run)"""
        self.provider.dry_run = True
        self.provider.save()
        uid = generate_id()
        with (
            patch(
                "authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
                MagicMock(return_value={"credentials": self.creds}),
            ),
            patch(
                "msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
                AsyncMock(
                    return_value=OrganizationCollectionResponse(
                        value=[
                            Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
                        ]
                    )
                ),
            ),
        ):
            user = User.objects.create(
                username=uid,
                name=f"{uid} {uid}",
                email=f"{uid}@goauthentik.io",
            )
        microsoft_user = MicrosoftEntraProviderUser.objects.filter(
            provider=self.provider, user=user
        ).first()
        self.assertIsNone(microsoft_user)
        self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())

    def test_user_not_created(self):
        """Test without property mappings, no group is created"""
        self.provider.property_mappings.clear()
Some files were not shown because too many files have changed in this diff.