Merge branch 'main' into web/legibility/dual-select-events
@@ -1,16 +1,16 @@
 [bumpversion]
-current_version = 2024.12.2
+current_version = 2025.2.0
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
 serialize =
 	{major}.{minor}.{patch}-{rc_t}{rc_n}
 	{major}.{minor}.{patch}
 message = release: {new_version}
 tag_name = version/{new_version}

 [bumpversion:part:rc_t]
 values =
 	rc
 	final
 optional_value = final
@@ -31,4 +31,4 @@ optional_value = final

 [bumpversion:file:web/src/common/constants.ts]

-[bumpversion:file:website/docs/install-config/install/aws/template.yaml]
+[bumpversion:file:lifecycle/aws/template.yaml]
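As a quick illustration of how the `parse` and `serialize` settings above fit together, this minimal sketch applies the same pattern to the two version shapes the config serializes (the version strings are examples, not taken from the repository):

```python
import re

# The parse pattern from the config above; rc_t/rc_n capture an optional
# release-candidate suffix such as "-rc1".
PARSE = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?"
)

for version in ("2025.2.0", "2025.2.0-rc1"):
    parts = PARSE.match(version).groupdict()
    print(version, "->", parts)
# 2025.2.0     -> rc_t/rc_n are None, serialized as {major}.{minor}.{patch}
# 2025.2.0-rc1 -> rc_t="rc", rc_n="1", serialized with the -{rc_t}{rc_n} suffix
```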
6  .github/ISSUE_TEMPLATE/bug_report.md  (vendored)
@@ -28,7 +28,11 @@ Output of docker-compose logs or kubectl logs respectively

 **Version and Deployment (please complete the following information):**

-- authentik version: [e.g. 2021.8.5]
+<!--
+Notice: authentik supports installation via Docker, Kubernetes, and AWS CloudFormation only. Support is not available for other methods. For detailed installation and configuration instructions, please refer to the official documentation at https://docs.goauthentik.io/docs/install-config/.
+-->
+- authentik version: [e.g. 2025.2.0]
 - Deployment: [e.g. docker-compose, helm]

 **Additional context**
7  .github/ISSUE_TEMPLATE/question.md  (vendored)
@@ -20,7 +20,12 @@ Output of docker-compose logs or kubectl logs respectively

 **Version and Deployment (please complete the following information):**

-- authentik version: [e.g. 2021.8.5]
+<!--
+Notice: authentik supports installation via Docker, Kubernetes, and AWS CloudFormation only. Support is not available for other methods. For detailed installation and configuration instructions, please refer to the official documentation at https://docs.goauthentik.io/docs/install-config/.
+-->
+
+- authentik version: [e.g. 2025.2.0]
 - Deployment: [e.g. docker-compose, helm]

 **Additional context**
@@ -35,14 +35,6 @@ runs:
       AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
       ```
-
-      For arm64, use these values:
-
-      ```shell
-      AUTHENTIK_IMAGE=ghcr.io/goauthentik/dev-server
-      AUTHENTIK_TAG=${{ inputs.tag }}-arm64
-      AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
-      ```

       Afterwards, run the upgrade commands from the latest release notes.
       </details>
       <details>
@@ -60,18 +52,6 @@ runs:
          tag: ${{ inputs.tag }}
       ```
-
-      For arm64, use these values:
-
-      ```yaml
-      authentik:
-        outposts:
-          container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
-      global:
-        image:
-          repository: ghcr.io/goauthentik/dev-server
-          tag: ${{ inputs.tag }}-arm64
-      ```

       Afterwards, run the upgrade commands from the latest release notes.
       </details>
     edit-mode: replace
14  .github/actions/docker-push-variables/action.yml  (vendored)
@@ -9,6 +9,9 @@ inputs:
   image-arch:
     required: false
     description: "Docker image arch"
+  release:
+    required: true
+    description: "True if this is a release build, false if this is a dev/PR build"

 outputs:
   shouldPush:
@@ -29,15 +32,24 @@ outputs:
   imageTags:
     description: "Docker image tags"
     value: ${{ steps.ev.outputs.imageTags }}
+  imageTagsJSON:
+    description: "Docker image tags, as a JSON array"
+    value: ${{ steps.ev.outputs.imageTagsJSON }}
   attestImageNames:
     description: "Docker image names used for attestation"
     value: ${{ steps.ev.outputs.attestImageNames }}
+  cacheTo:
+    description: "cache-to value for the docker build step"
+    value: ${{ steps.ev.outputs.cacheTo }}
   imageMainTag:
     description: "Docker image main tag"
     value: ${{ steps.ev.outputs.imageMainTag }}
   imageMainName:
     description: "Docker image main name"
     value: ${{ steps.ev.outputs.imageMainName }}
+  imageBuildArgs:
+    description: "Docker image build args"
+    value: ${{ steps.ev.outputs.imageBuildArgs }}

 runs:
   using: "composite"
@@ -48,6 +60,8 @@ runs:
       env:
         IMAGE_NAME: ${{ inputs.image-name }}
         IMAGE_ARCH: ${{ inputs.image-arch }}
+        RELEASE: ${{ inputs.release }}
         PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
+        REF: ${{ github.ref }}
       run: |
         python3 ${{ github.action_path }}/push_vars.py
@@ -2,6 +2,7 @@

 import configparser
 import os
+from json import dumps
 from time import time

 parser = configparser.ConfigParser()
@@ -48,7 +49,7 @@ if is_release:
     ]
 else:
     suffix = ""
-    if image_arch and image_arch != "amd64":
+    if image_arch:
         suffix = f"-{image_arch}"
 for name in image_names:
     image_tags += [
@@ -70,12 +71,31 @@ def get_attest_image_names(image_with_tags: list[str]):
     return ",".join(set(image_tags))


+# Generate `cache-to` param
+cache_to = ""
+if should_push:
+    _cache_tag = "buildcache"
+    if image_arch:
+        _cache_tag += f"-{image_arch}"
+    cache_to = f"type=registry,ref={get_attest_image_names(image_tags)}:{_cache_tag},mode=max"
+
+
+image_build_args = []
+if os.getenv("RELEASE", "false").lower() == "true":
+    image_build_args = [f"VERSION={os.getenv('REF')}"]
+else:
+    image_build_args = [f"GIT_BUILD_HASH={sha}"]
+image_build_args = "\n".join(image_build_args)
+
 with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
     print(f"shouldPush={str(should_push).lower()}", file=_output)
     print(f"sha={sha}", file=_output)
     print(f"version={version}", file=_output)
     print(f"prerelease={prerelease}", file=_output)
     print(f"imageTags={','.join(image_tags)}", file=_output)
+    print(f"imageTagsJSON={dumps(image_tags)}", file=_output)
     print(f"attestImageNames={get_attest_image_names(image_tags)}", file=_output)
     print(f"imageMainTag={image_main_tag}", file=_output)
     print(f"imageMainName={image_tags[0]}", file=_output)
+    print(f"cacheTo={cache_to}", file=_output)
+    print(f"imageBuildArgs={image_build_args}", file=_output)
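For a sense of what the new cache logic emits, here is a small standalone sketch of the `cacheTo` value, with hypothetical image names and a simplified stand-in for the helper above:

```python
# Hypothetical inputs: one image tag, an arm64 build, pushing enabled.
image_tags = ["ghcr.io/goauthentik/dev-server:gh-abc123-arm64"]
image_arch = "arm64"
should_push = True

def get_attest_image_names(image_with_tags: list[str]) -> str:
    # Simplified stand-in: strip the :tag, deduplicate, join.
    return ",".join({name.split(":")[0] for name in image_with_tags})

cache_to = ""
if should_push:
    _cache_tag = "buildcache"
    if image_arch:
        _cache_tag += f"-{image_arch}"
    cache_to = f"type=registry,ref={get_attest_image_names(image_tags)}:{_cache_tag},mode=max"

print(cache_to)
# type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache-arm64,mode=max
```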
11  .github/actions/docker-push-variables/test.sh  (vendored)
@@ -1,7 +1,18 @@
 #!/bin/bash -x
 SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
+# Non-pushing PR
 GITHUB_OUTPUT=/dev/stdout \
 GITHUB_REF=ref \
 GITHUB_SHA=sha \
 IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
+GITHUB_REPOSITORY=goauthentik/authentik \
 python $SCRIPT_DIR/push_vars.py
+
+# Pushing PR/main
+GITHUB_OUTPUT=/dev/stdout \
+GITHUB_REF=ref \
+GITHUB_SHA=sha \
+IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
+GITHUB_REPOSITORY=goauthentik/authentik \
+DOCKER_USERNAME=foo \
+python $SCRIPT_DIR/push_vars.py
2  .github/actions/setup/action.yml  (vendored)
@@ -35,7 +35,7 @@ runs:
       run: |
         export PSQL_TAG=${{ inputs.postgresql_version }}
         docker compose -f .github/actions/setup/docker-compose.yml up -d
-        poetry install --sync
+        poetry sync
         cd web && npm ci
     - name: Generate config
       shell: poetry run python {0}
10  .github/dependabot.yml  (vendored)
@@ -82,6 +82,16 @@ updates:
         docusaurus:
           patterns:
             - "@docusaurus/*"
+  - package-ecosystem: npm
+    directory: "/lifecycle/aws"
+    schedule:
+      interval: daily
+      time: "04:00"
+    open-pull-requests-limit: 10
+    commit-message:
+      prefix: "lifecycle/aws:"
+    labels:
+      - dependencies
   - package-ecosystem: pip
     directory: "/"
     schedule:
96  .github/workflows/_reusable-docker-build-single.yaml  (vendored, new file)
@@ -0,0 +1,96 @@
# Re-usable workflow for a single-architecture build
name: Single-arch Container build

on:
  workflow_call:
    inputs:
      image_name:
        required: true
        type: string
      image_arch:
        required: true
        type: string
      runs-on:
        required: true
        type: string
      registry_dockerhub:
        default: false
        type: boolean
      registry_ghcr:
        default: false
        type: boolean
      release:
        default: false
        type: boolean
    outputs:
      image-digest:
        value: ${{ jobs.build.outputs.image-digest }}

jobs:
  build:
    name: Build ${{ inputs.image_arch }}
    runs-on: ${{ inputs.runs-on }}
    outputs:
      image-digest: ${{ steps.push.outputs.digest }}
    permissions:
      # Needed to upload container images to ghcr.io
      packages: write
      # Needed for attestation
      id-token: write
      attestations: write
    steps:
      - uses: actions/checkout@v4
      - uses: docker/setup-qemu-action@v3.4.0
      - uses: docker/setup-buildx-action@v3
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
        with:
          image-name: ${{ inputs.image_name }}
          image-arch: ${{ inputs.image_arch }}
          release: ${{ inputs.release }}
      - name: Login to Docker Hub
        if: ${{ inputs.registry_dockerhub }}
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Login to GitHub Container Registry
        if: ${{ inputs.registry_ghcr }}
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: make empty clients
        if: ${{ inputs.release }}
        run: |
          mkdir -p ./gen-ts-api
          mkdir -p ./gen-go-api
      - name: generate ts client
        if: ${{ !inputs.release }}
        run: make gen-client-ts
      - name: Build Docker Image
        uses: docker/build-push-action@v6
        id: push
        with:
          context: .
          push: ${{ steps.ev.outputs.shouldPush == 'true' }}
          secrets: |
            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
          build-args: |
            ${{ steps.ev.outputs.imageBuildArgs }}
          tags: ${{ steps.ev.outputs.imageTags }}
          platforms: linux/${{ inputs.image_arch }}
          cache-from: type=registry,ref=${{ steps.ev.outputs.attestImageNames }}:buildcache-${{ inputs.image_arch }}
          cache-to: ${{ steps.ev.outputs.cacheTo }}
      - uses: actions/attest-build-provenance@v2
        id: attest
        if: ${{ steps.ev.outputs.shouldPush == 'true' }}
        with:
          subject-name: ${{ steps.ev.outputs.attestImageNames }}
          subject-digest: ${{ steps.push.outputs.digest }}
          push-to-registry: true
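Because this is a `workflow_call` workflow, other workflows invoke it as a job rather than as a step. A minimal hypothetical caller looks like this (the multi-arch wrapper below and the updated ci-main workflow later in this diff call it the same way):

```yaml
# Hypothetical caller: build a single arm64 image via the reusable workflow.
jobs:
  build-arm64:
    uses: ./.github/workflows/_reusable-docker-build-single.yaml
    secrets: inherit
    with:
      image_name: ghcr.io/goauthentik/dev-server
      image_arch: arm64
      runs-on: ubuntu-22.04-arm
      registry_ghcr: true
      release: false
```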
104  .github/workflows/_reusable-docker-build.yaml  (vendored, new file)
@@ -0,0 +1,104 @@
# Re-usable workflow for a multi-architecture build
name: Multi-arch container build

on:
  workflow_call:
    inputs:
      image_name:
        required: true
        type: string
      registry_dockerhub:
        default: false
        type: boolean
      registry_ghcr:
        default: true
        type: boolean
      release:
        default: false
        type: boolean
    outputs: {}

jobs:
  build-server-amd64:
    uses: ./.github/workflows/_reusable-docker-build-single.yaml
    secrets: inherit
    with:
      image_name: ${{ inputs.image_name }}
      image_arch: amd64
      runs-on: ubuntu-latest
      registry_dockerhub: ${{ inputs.registry_dockerhub }}
      registry_ghcr: ${{ inputs.registry_ghcr }}
      release: ${{ inputs.release }}
  build-server-arm64:
    uses: ./.github/workflows/_reusable-docker-build-single.yaml
    secrets: inherit
    with:
      image_name: ${{ inputs.image_name }}
      image_arch: arm64
      runs-on: ubuntu-22.04-arm
      registry_dockerhub: ${{ inputs.registry_dockerhub }}
      registry_ghcr: ${{ inputs.registry_ghcr }}
      release: ${{ inputs.release }}
  get-tags:
    runs-on: ubuntu-latest
    needs:
      - build-server-amd64
      - build-server-arm64
    outputs:
      tags: ${{ steps.ev.outputs.imageTagsJSON }}
      shouldPush: ${{ steps.ev.outputs.shouldPush }}
    steps:
      - uses: actions/checkout@v4
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
        with:
          image-name: ${{ inputs.image_name }}
  merge-server:
    runs-on: ubuntu-latest
    if: ${{ needs.get-tags.outputs.shouldPush == 'true' }}
    needs:
      - get-tags
      - build-server-amd64
      - build-server-arm64
    strategy:
      fail-fast: false
      matrix:
        tag: ${{ fromJson(needs.get-tags.outputs.tags) }}
    steps:
      - uses: actions/checkout@v4
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
        with:
          image-name: ${{ inputs.image_name }}
      - name: Login to Docker Hub
        if: ${{ inputs.registry_dockerhub }}
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Login to GitHub Container Registry
        if: ${{ inputs.registry_ghcr }}
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - uses: int128/docker-manifest-create-action@v2
        id: build
        with:
          tags: ${{ matrix.tag }}
          sources: |
            ${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-amd64.outputs.image-digest }}
            ${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-arm64.outputs.image-digest }}
      - uses: actions/attest-build-provenance@v2
        id: attest
        with:
          subject-name: ${{ steps.ev.outputs.attestImageNames }}
          subject-digest: ${{ steps.build.outputs.digest }}
          push-to-registry: true
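The merge job stitches the two per-arch images into one multi-arch tag by digest. Outside of CI, the equivalent can be done by hand with `docker buildx imagetools`; this sketch uses hypothetical image names and digests:

```shell
# Create a multi-arch manifest list from two per-arch digests (values hypothetical).
docker buildx imagetools create \
  --tag ghcr.io/goauthentik/dev-server:gh-main \
  ghcr.io/goauthentik/dev-server@sha256:aaaa... \
  ghcr.io/goauthentik/dev-server@sha256:bbbb...
```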
6  .github/workflows/ci-aws-cfn.yml  (vendored)
@@ -25,10 +25,10 @@ jobs:
         uses: ./.github/actions/setup
       - uses: actions/setup-node@v4
         with:
-          node-version-file: website/package.json
+          node-version-file: lifecycle/aws/package.json
           cache: "npm"
-          cache-dependency-path: website/package-lock.json
-      - working-directory: website/
+          cache-dependency-path: lifecycle/aws/package-lock.json
+      - working-directory: lifecycle/aws/
        run: |
          npm ci
      - name: Check changes have been applied
28  .github/workflows/ci-main-daily.yml  (vendored, new file)
@@ -0,0 +1,28 @@
---
name: authentik-ci-main-daily

on:
  workflow_dispatch:
  schedule:
    # Every night at 3am
    - cron: "0 3 * * *"

jobs:
  test-container:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        version:
          - docs
          - version-2024-12
          - version-2024-10
    steps:
      - uses: actions/checkout@v4
      - run: |
          current="$(pwd)"
          dir="/tmp/authentik/${{ matrix.version }}"
          mkdir -p $dir
          cd $dir
          wget https://${{ matrix.version }}.goauthentik.io/docker-compose.yml
          ${current}/scripts/test_docker.sh
97  .github/workflows/ci-main.yml  (vendored)
@@ -43,15 +43,26 @@ jobs:
         uses: ./.github/actions/setup
       - name: run migrations
         run: poetry run python -m lifecycle.migrate
-  test-migrations-from-stable:
-    name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }}
+  test-make-seed:
+    runs-on: ubuntu-latest
+    steps:
+      - id: seed
+        run: |
+          echo "seed=$(printf "%d\n" "0x$(openssl rand -hex 4)")" >> "$GITHUB_OUTPUT"
+    outputs:
+      seed: ${{ steps.seed.outputs.seed }}
+  test-migrations-from-stable:
+    name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
     runs-on: ubuntu-latest
+    timeout-minutes: 20
+    needs: test-make-seed
     strategy:
       fail-fast: false
       matrix:
         psql:
           - 15-alpine
           - 16-alpine
+        run_id: [1, 2, 3, 4, 5]
     steps:
       - uses: actions/checkout@v4
         with:
@@ -93,18 +104,23 @@ jobs:
         env:
           # Test in the main database that we just migrated from the previous stable version
           AUTHENTIK_POSTGRESQL__TEST__NAME: authentik
+          CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
+          CI_RUN_ID: ${{ matrix.run_id }}
+          CI_TOTAL_RUNS: "5"
         run: |
-          poetry run make test
+          poetry run make ci-test
   test-unittest:
-    name: test-unittest - PostgreSQL ${{ matrix.psql }}
+    name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
     runs-on: ubuntu-latest
-    timeout-minutes: 30
+    timeout-minutes: 20
+    needs: test-make-seed
     strategy:
       fail-fast: false
       matrix:
         psql:
           - 15-alpine
           - 16-alpine
+        run_id: [1, 2, 3, 4, 5]
     steps:
       - uses: actions/checkout@v4
       - name: Setup authentik env
@@ -112,9 +128,12 @@ jobs:
         with:
           postgresql_version: ${{ matrix.psql }}
       - name: run unittest
+        env:
+          CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
+          CI_RUN_ID: ${{ matrix.run_id }}
+          CI_TOTAL_RUNS: "5"
         run: |
-          poetry run make test
-          poetry run coverage xml
+          poetry run make ci-test
       - if: ${{ always() }}
         uses: codecov/codecov-action@v5
         with:
@@ -223,68 +242,18 @@ jobs:
         with:
           jobs: ${{ toJSON(needs) }}
   build:
-    strategy:
-      fail-fast: false
-      matrix:
-        arch:
-          - amd64
-          - arm64
-    needs: ci-core-mark
-    runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload contianer images to ghcr.io
-      packages: write
-      # Needed for attestation
-      id-token: write
-      attestations: write
-    timeout-minutes: 120
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          ref: ${{ github.event.pull_request.head.sha }}
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.3.0
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-      - name: prepare variables
-        uses: ./.github/actions/docker-push-variables
-        id: ev
-        env:
-          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
-        with:
-          image-name: ghcr.io/goauthentik/dev-server
-          image-arch: ${{ matrix.arch }}
-      - name: Login to Container Registry
-        if: ${{ steps.ev.outputs.shouldPush == 'true' }}
-        uses: docker/login-action@v3
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: generate ts client
-        run: make gen-client-ts
-      - name: Build Docker Image
-        uses: docker/build-push-action@v6
-        id: push
-        with:
-          context: .
-          secrets: |
-            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
-            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
-          tags: ${{ steps.ev.outputs.imageTags }}
-          push: ${{ steps.ev.outputs.shouldPush == 'true' }}
-          build-args: |
-            GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
-          cache-from: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache
-          cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache,mode=max' || '' }}
-          platforms: linux/${{ matrix.arch }}
-      - uses: actions/attest-build-provenance@v2
-        id: attest
-        if: ${{ steps.ev.outputs.shouldPush == 'true' }}
-        with:
-          subject-name: ${{ steps.ev.outputs.attestImageNames }}
-          subject-digest: ${{ steps.push.outputs.digest }}
-          push-to-registry: true
+    needs: ci-core-mark
+    uses: ./.github/workflows/_reusable-docker-build.yaml
+    secrets: inherit
+    with:
+      image_name: ghcr.io/goauthentik/dev-server
+      release: false
   pr-comment:
     needs:
       - build
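The `test-make-seed` job above derives one random seed per workflow run so every matrix shard orders tests identically. The same derivation in isolation (the output value is illustrative):

```shell
# 4 random bytes as hex, converted to a decimal seed shared by all shards.
seed="$(printf "%d\n" "0x$(openssl rand -hex 4)")"
echo "$seed"   # e.g. 2887034801
```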
4  .github/workflows/ci-outpost.yml  (vendored)
@@ -72,7 +72,7 @@ jobs:
           - rac
     runs-on: ubuntu-latest
     permissions:
-      # Needed to upload contianer images to ghcr.io
+      # Needed to upload container images to ghcr.io
       packages: write
       # Needed for attestation
       id-token: write
@@ -82,7 +82,7 @@ jobs:
         with:
           ref: ${{ github.event.pull_request.head.sha }}
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.3.0
+        uses: docker/setup-qemu-action@v3.4.0
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v3
       - name: prepare variables
65  .github/workflows/release-publish.yml  (vendored)
@@ -7,64 +7,23 @@ on:

 jobs:
   build-server:
-    runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload contianer images to ghcr.io
-      packages: write
-      # Needed for attestation
-      id-token: write
-      attestations: write
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.3.0
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-      - name: prepare variables
-        uses: ./.github/actions/docker-push-variables
-        id: ev
-        env:
-          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
-        with:
-          image-name: ghcr.io/goauthentik/server,beryju/authentik
-      - name: Docker Login Registry
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
-      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: make empty clients
-        run: |
-          mkdir -p ./gen-ts-api
-          mkdir -p ./gen-go-api
-      - name: Build Docker Image
-        uses: docker/build-push-action@v6
-        id: push
-        with:
-          context: .
-          push: true
-          secrets: |
-            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
-            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
-          build-args: |
-            VERSION=${{ github.ref }}
-          tags: ${{ steps.ev.outputs.imageTags }}
-          platforms: linux/amd64,linux/arm64
-      - uses: actions/attest-build-provenance@v2
-        id: attest
-        with:
-          subject-name: ${{ steps.ev.outputs.attestImageNames }}
-          subject-digest: ${{ steps.push.outputs.digest }}
-          push-to-registry: true
+    uses: ./.github/workflows/_reusable-docker-build.yaml
+    secrets: inherit
+    with:
+      image_name: ghcr.io/goauthentik/server,beryju/authentik
+      release: true
+      registry_dockerhub: true
+      registry_ghcr: true
   build-outpost:
     runs-on: ubuntu-latest
     permissions:
-      # Needed to upload contianer images to ghcr.io
+      # Needed to upload container images to ghcr.io
       packages: write
       # Needed for attestation
       id-token: write
@@ -83,7 +42,7 @@ jobs:
         with:
           go-version-file: "go.mod"
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.3.0
+        uses: docker/setup-qemu-action@v3.4.0
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v3
       - name: prepare variables
@@ -188,8 +147,8 @@ jobs:
           aws-region: ${{ env.AWS_REGION }}
       - name: Upload template
         run: |
-          aws s3 cp --acl=public-read website/docs/install-config/install/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.${{ github.ref }}.yaml
-          aws s3 cp --acl=public-read website/docs/install-config/install/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.latest.yaml
+          aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.${{ github.ref }}.yaml
+          aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.latest.yaml
   test-release:
     needs:
       - build-server
11  .github/workflows/release-tag.yml  (vendored)
@@ -14,16 +14,7 @@ jobs:
       - uses: actions/checkout@v4
       - name: Pre-release test
         run: |
-          echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env
-          echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env
-          docker buildx install
-          mkdir -p ./gen-ts-api
-          docker build -t testing:latest .
-          echo "AUTHENTIK_IMAGE=testing" >> .env
-          echo "AUTHENTIK_TAG=latest" >> .env
-          docker compose up --no-start
-          docker compose start postgresql redis
-          docker compose run -u root server test-all
+          make test-docker
       - id: generate_token
         uses: tibdex/github-app-token@v2
         with:
6  .github/workflows/repo-stale.yml  (vendored)
@@ -1,8 +1,8 @@
-name: 'authentik-repo-stale'
+name: "authentik-repo-stale"

 on:
   schedule:
-    - cron: '30 1 * * *'
+    - cron: "30 1 * * *"
   workflow_dispatch:

 permissions:
@@ -25,7 +25,7 @@ jobs:
         days-before-stale: 60
         days-before-close: 7
         exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing
-        stale-issue-label: wontfix
+        stale-issue-label: status/stale
         stale-issue-message: >
           This issue has been automatically marked as stale because it has not had
           recent activity. It will be closed if no further activity occurs. Thank you
@@ -1,9 +1,13 @@
 ---
-name: authentik-backend-translate-extract-compile
+name: authentik-translate-extract-compile
 on:
   schedule:
     - cron: "0 0 * * *" # every day at midnight
   workflow_dispatch:
+  pull_request:
+    branches:
+      - main
+      - version-*

 env:
   POSTGRES_DB: authentik
@@ -32,6 +36,7 @@ jobs:
           poetry run ak compilemessages
           make web-check-compile
       - name: Create Pull Request
+        if: ${{ github.event_name != 'pull_request' }}
         uses: peter-evans/create-pull-request@v7
         with:
           token: ${{ steps.generate_token.outputs.token }}
3  .gitignore  (vendored)
@@ -209,3 +209,6 @@ source_docs/

 ### Golang ###
 /vendor/
+
+### Docker ###
+docker-compose.override.yml
7  .vscode/extensions.json  (vendored)
@@ -2,6 +2,7 @@
     "recommendations": [
         "bashmish.es6-string-css",
         "bpruitt-goddard.mermaid-markdown-syntax-highlighting",
+        "charliermarsh.ruff",
         "dbaeumer.vscode-eslint",
         "EditorConfig.EditorConfig",
         "esbenp.prettier-vscode",
@@ -10,12 +11,12 @@
         "Gruntfuggly.todo-tree",
         "mechatroner.rainbow-csv",
-        "ms-python.black-formatter",
-        "charliermarsh.ruff",
+        "ms-python.black-formatter",
         "ms-python.debugpy",
         "ms-python.python",
         "ms-python.vscode-pylance",
-        "ms-python.black-formatter",
         "redhat.vscode-yaml",
         "Tobermory.es6-string-html",
-        "unifiedjs.vscode-mdx"
+        "unifiedjs.vscode-mdx",
     ]
 }
66  .vscode/launch.json  (vendored)
@@ -2,26 +2,76 @@
     "version": "0.2.0",
     "configurations": [
         {
-            "name": "Python: PDB attach Server",
-            "type": "python",
+            "name": "Debug: Attach Server Core",
+            "type": "debugpy",
             "request": "attach",
             "connect": {
                 "host": "localhost",
-                "port": 6800
+                "port": 9901
             },
             "justMyCode": true,
+            "pathMappings": [
+                {
+                    "localRoot": "${workspaceFolder}",
+                    "remoteRoot": "."
+                }
+            ],
             "django": true
         },
         {
-            "name": "Python: PDB attach Worker",
-            "type": "python",
+            "name": "Debug: Attach Worker",
+            "type": "debugpy",
             "request": "attach",
             "connect": {
                 "host": "localhost",
-                "port": 6900
+                "port": 9901
             },
             "justMyCode": true,
+            "pathMappings": [
+                {
+                    "localRoot": "${workspaceFolder}",
+                    "remoteRoot": "."
+                }
+            ],
             "django": true
         },
+        {
+            "name": "Debug: Start Server Router",
+            "type": "go",
+            "request": "launch",
+            "mode": "auto",
+            "program": "${workspaceFolder}/cmd/server",
+            "cwd": "${workspaceFolder}"
+        },
+        {
+            "name": "Debug: Start LDAP Outpost",
+            "type": "go",
+            "request": "launch",
+            "mode": "auto",
+            "program": "${workspaceFolder}/cmd/ldap",
+            "cwd": "${workspaceFolder}"
+        },
+        {
+            "name": "Debug: Start Proxy Outpost",
+            "type": "go",
+            "request": "launch",
+            "mode": "auto",
+            "program": "${workspaceFolder}/cmd/proxy",
+            "cwd": "${workspaceFolder}"
+        },
+        {
+            "name": "Debug: Start RAC Outpost",
+            "type": "go",
+            "request": "launch",
+            "mode": "auto",
+            "program": "${workspaceFolder}/cmd/rac",
+            "cwd": "${workspaceFolder}"
+        },
+        {
+            "name": "Debug: Start Radius Outpost",
+            "type": "go",
+            "request": "launch",
+            "mode": "auto",
+            "program": "${workspaceFolder}/cmd/radius",
+            "cwd": "${workspaceFolder}"
+        }
     ]
 }
@@ -15,6 +15,7 @@ go.mod @goauthentik/backend
 go.sum @goauthentik/backend
 # Infrastructure
 .github/ @goauthentik/infrastructure
-Dockerfile @goauthentik/infrastructure
+lifecycle/aws/ @goauthentik/infrastructure
+*Dockerfile @goauthentik/infrastructure
 .dockerignore @goauthentik/infrastructure
34  Dockerfile
@@ -94,7 +94,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"

 # Stage 5: Python dependencies
-FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips-full AS python-deps
+FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS python-deps

 ARG TARGETARCH
 ARG TARGETVARIANT
@@ -116,15 +116,30 @@ RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
     --mount=type=bind,target=./poetry.lock,src=./poetry.lock \
     --mount=type=cache,target=/root/.cache/pip \
     --mount=type=cache,target=/root/.cache/pypoetry \
-    pip install --no-cache cffi && \
+    apt-get update && \
+    apt-get install -y --no-install-recommends \
+        build-essential libffi-dev \
+        # Required for cryptography
+        curl pkg-config \
+        # Required for lxml
+        libxslt-dev zlib1g-dev \
+        # Required for xmlsec
+        libltdl-dev \
+        # Required for kadmin
+        sccache clang && \
+    curl https://sh.rustup.rs -sSf | sh -s -- -y && \
+    . "$HOME/.cargo/env" && \
     python -m venv /ak-root/venv/ && \
     bash -c "source ${VENV_PATH}/bin/activate && \
-    pip3 install --upgrade pip && \
-    pip3 install poetry && \
-    poetry install --only=main --no-ansi --no-interaction --no-root && \
-    pip install --force-reinstall /wheels/*"
+    pip3 install --upgrade pip poetry && \
+    poetry config --local installer.no-binary cryptography,xmlsec,lxml,python-kadmin-rs && \
+    pip uninstall cryptography -y && \
+    poetry install --only=main --no-ansi --no-interaction --no-root"

 # Stage 6: Run
-FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips-full AS final-image
+FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS final-image

 ARG VERSION
 ARG GIT_BUILD_HASH
@@ -140,10 +155,12 @@ WORKDIR /

 # We cannot cache this layer otherwise we'll end up with a bigger image
 RUN apt-get update && \
     apt-get upgrade -y && \
     # Required for runtime
-    apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates libkrb5-3 libkadm5clnt-mit12 libkdb5-10 && \
+    apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates libkrb5-3 libkadm5clnt-mit12 libkdb5-10 libltdl7 libxslt1.1 && \
     # Required for bootstrap & healtcheck
     apt-get install -y --no-install-recommends runit && \
+    pip3 install --no-cache-dir --upgrade pip && \
     apt-get clean && \
     rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
     adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
@@ -176,9 +193,8 @@ ENV TMPDIR=/dev/shm/ \
     PYTHONUNBUFFERED=1 \
     PATH="/ak-root/venv/bin:/lifecycle:$PATH" \
     VENV_PATH="/ak-root/venv" \
-    POETRY_VIRTUALENVS_CREATE=false
-
-ENV GOFIPS=1
+    POETRY_VIRTUALENVS_CREATE=false \
+    GOFIPS=1

 HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]
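To exercise this Dockerfile locally, the repository's `docker` Make target (in the Makefile section below) is the simplest entry point; the direct invocation here is an equivalent sketch, with the tag name and hash argument mirroring what the CI workflows pass:

```shell
# Build the server image from the current tree (tag name is hypothetical).
mkdir -p gen-ts-api gen-go-api
DOCKER_BUILDKIT=1 docker build . \
  --build-arg GIT_BUILD_HASH="$(git rev-parse HEAD)" \
  --tag authentik:test
```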
38  Makefile
@@ -5,7 +5,9 @@ PWD = $(shell pwd)
 UID = $(shell id -u)
 GID = $(shell id -g)
 NPM_VERSION = $(shell python -m scripts.npm_version)
-PY_SOURCES = authentik tests scripts lifecycle .github website/docs/install-config/install/aws
+PY_SOURCES = authentik tests scripts lifecycle .github
+GO_SOURCES = cmd internal
+WEB_SOURCES = web/src web/packages
 DOCKER_IMAGE ?= "authentik:test"

 GEN_API_TS = "gen-ts-api"
@@ -20,10 +22,11 @@ CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
 	-I .github/codespell-words.txt \
 	-S 'web/src/locales/**' \
 	-S 'website/docs/developer-docs/api/reference/**' \
-	authentik \
-	internal \
-	cmd \
-	web/src \
+	-S '**/node_modules/**' \
+	-S '**/dist/**' \
+	$(PY_SOURCES) \
+	$(GO_SOURCES) \
+	$(WEB_SOURCES) \
 	website/src \
 	website/blog \
 	website/docs \
@@ -45,15 +48,6 @@ help: ## Show this help
 go-test:
 	go test -timeout 0 -v -race -cover ./...

-test-docker: ## Run all tests in a docker-compose
-	echo "PG_PASS=$(shell openssl rand 32 | base64 -w 0)" >> .env
-	echo "AUTHENTIK_SECRET_KEY=$(shell openssl rand 32 | base64 -w 0)" >> .env
-	docker compose pull -q
-	docker compose up --no-start
-	docker compose start postgresql redis
-	docker compose run -u root server test-all
-	rm -f .env
-
 test: ## Run the server tests and produce a coverage report (locally)
 	coverage run manage.py test --keepdb authentik
 	coverage html
@@ -78,6 +72,9 @@ migrate: ## Run the Authentik Django server's migrations

 i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation into files to send to a translation service

+aws-cfn:
+	cd lifecycle/aws && npm run aws-cfn
+
 core-i18n-extract:
 	ak makemessages \
 		--add-location file \
@@ -149,7 +146,7 @@ gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescri
 	docker run \
 		--rm -v ${PWD}:/local \
 		--user ${UID}:${GID} \
-		docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
+		docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
 		-i /local/schema.yml \
 		-g typescript-fetch \
 		-o /local/${GEN_API_TS} \
@@ -252,9 +249,6 @@ website-build:
 website-watch: ## Build and watch the documentation website, updating automatically
 	cd website && npm run watch

-aws-cfn:
-	cd website && npm run aws-cfn
-
 #########################
 ## Docker
 #########################
@@ -263,6 +257,9 @@ docker: ## Build a docker image of the current source tree
 	mkdir -p ${GEN_API_TS}
 	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}

+test-docker:
+	BUILD=true ./scripts/test_docker.sh
+
 #########################
 ## CI
 #########################
@@ -287,3 +284,8 @@ ci-bandit: ci--meta-debug

 ci-pending-migrations: ci--meta-debug
 	ak makemigrations --check
+
+ci-test: ci--meta-debug
+	coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik
+	coverage report
+	coverage xml
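The new `ci-test` target is what the updated CI jobs invoke; it expects the seed and shard variables exported by the workflow. A local sketch of the same invocation, with hypothetical values:

```shell
# Reproduce one CI shard locally (seed/shard values are hypothetical).
CI_TEST_SEED=2887034801 CI_RUN_ID=1 CI_TOTAL_RUNS=5 poetry run make ci-test
```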
@@ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardening

 | Version   | Supported |
 | --------- | --------- |
-| 2024.10.x | ✅        |
 | 2024.12.x | ✅        |
+| 2025.2.x  | ✅        |

 ## Reporting a Vulnerability
@@ -2,7 +2,7 @@

 from os import environ

-__version__ = "2024.12.2"
+__version__ = "2025.2.0"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
@@ -50,7 +50,7 @@ from authentik.enterprise.providers.microsoft_entra.models import (
     MicrosoftEntraProviderGroup,
     MicrosoftEntraProviderUser,
 )
-from authentik.enterprise.providers.rac.models import ConnectionToken
+from authentik.enterprise.providers.ssf.models import StreamEvent
 from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import (
     EndpointDevice,
     EndpointDeviceConnection,
@@ -71,6 +71,7 @@ from authentik.providers.oauth2.models import (
     DeviceToken,
     RefreshToken,
 )
+from authentik.providers.rac.models import ConnectionToken
 from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser
 from authentik.rbac.models import Role
 from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser
@@ -131,6 +132,7 @@ def excluded_models() -> list[type[Model]]:
         EndpointDevice,
         EndpointDeviceConnection,
         DeviceToken,
+        StreamEvent,
     )
@@ -1,15 +1,16 @@
 """Application Roles API Viewset"""

+from django.http import HttpRequest
 from django.utils.translation import gettext_lazy as _
 from rest_framework.exceptions import ValidationError
 from rest_framework.viewsets import ModelViewSet

+from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
 from authentik.core.api.used_by import UsedByMixin
 from authentik.core.api.utils import ModelSerializer
 from authentik.core.models import (
     Application,
     ApplicationEntitlement,
     User,
 )
@@ -18,7 +19,10 @@ class ApplicationEntitlementSerializer(ModelSerializer):

     def validate_app(self, app: Application) -> Application:
         """Ensure user has permission to view"""
-        user: User = self._context["request"].user
+        request: HttpRequest = self.context.get("request")
+        if not request and SERIALIZER_CONTEXT_BLUEPRINT in self.context:
+            return app
+        user = request.user
         if user.has_perm("view_application", app) or user.has_perm(
             "authentik_core.view_application"
         ):
@@ -3,6 +3,7 @@
 from django.utils.translation import gettext_lazy as _
 from drf_spectacular.types import OpenApiTypes
 from drf_spectacular.utils import OpenApiParameter, extend_schema
+from guardian.shortcuts import get_objects_for_user
 from rest_framework.fields import (
     BooleanField,
     CharField,
@@ -16,7 +17,6 @@ from rest_framework.viewsets import ViewSet

 from authentik.core.api.utils import MetaNameSerializer
 from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice
-from authentik.rbac.decorators import permission_required
 from authentik.stages.authenticator import device_classes, devices_for_user
 from authentik.stages.authenticator.models import Device
 from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
@@ -73,7 +73,9 @@ class AdminDeviceViewSet(ViewSet):
     def get_devices(self, **kwargs):
         """Get all devices in all child classes"""
         for model in device_classes():
-            device_set = model.objects.filter(**kwargs)
+            device_set = get_objects_for_user(
+                self.request.user, f"{model._meta.app_label}.view_{model._meta.model_name}", model
+            ).filter(**kwargs)
             yield from device_set

     @extend_schema(
@@ -86,10 +88,6 @@ class AdminDeviceViewSet(ViewSet):
         ],
         responses={200: DeviceSerializer(many=True)},
     )
-    @permission_required(
-        None,
-        [f"{model._meta.app_label}.view_{model._meta.model_name}" for model in device_classes()],
-    )
     def list(self, request: Request) -> Response:
         """Get all devices for current user"""
         kwargs = {}
@@ -4,6 +4,7 @@ from json import loads

 from django.db.models import Prefetch
 from django.http import Http404
+from django.utils.translation import gettext as _
 from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
 from django_filters.filterset import FilterSet
 from drf_spectacular.utils import (
@@ -81,9 +82,37 @@ class GroupSerializer(ModelSerializer):
         if not self.instance or not parent:
             return parent
         if str(parent.group_uuid) == str(self.instance.group_uuid):
-            raise ValidationError("Cannot set group as parent of itself.")
+            raise ValidationError(_("Cannot set group as parent of itself."))
         return parent

+    def validate_is_superuser(self, superuser: bool):
+        """Ensure that the user creating this group has permissions to set the superuser flag"""
+        request: Request = self.context.get("request", None)
+        if not request:
+            return superuser
+        # If we're updating an instance, and the state hasn't changed, we don't need to check perms
+        if self.instance and superuser == self.instance.is_superuser:
+            return superuser
+        user: User = request.user
+        perm = (
+            "authentik_core.enable_group_superuser"
+            if superuser
+            else "authentik_core.disable_group_superuser"
+        )
+        has_perm = user.has_perm(perm)
+        if self.instance and not has_perm:
+            has_perm = user.has_perm(perm, self.instance)
+        if not has_perm:
+            raise ValidationError(
+                _(
+                    (
+                        "User does not have permission to set "
+                        "superuser status to {superuser_status}."
+                    ).format_map({"superuser_status": superuser})
+                )
+            )
+        return superuser
+
     class Meta:
         model = Group
         fields = [
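The new validator falls back from a global permission check to an object-level one on the existing group. A small sketch of that django-guardian pattern for a Django shell session, with hypothetical objects:

```python
# Sketch for a Django/authentik shell session (user and group are hypothetical).
from authentik.core.models import Group, User

user = User.objects.get(username="jane")   # hypothetical user
group = Group.objects.get(name="admins")   # hypothetical group
perm = "authentik_core.enable_group_superuser"

# Global permission first, then the object-level fallback used above.
has_perm = user.has_perm(perm)
if not has_perm:
    has_perm = user.has_perm(perm, group)
print(has_perm)
```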
@@ -85,7 +85,7 @@ class SourceViewSet(
     serializer_class = SourceSerializer
     lookup_field = "slug"
     search_fields = ["slug", "name"]
-    filterset_fields = ["slug", "name", "managed"]
+    filterset_fields = ["slug", "name", "managed", "pbm_uuid"]

     def get_queryset(self):  # pragma: no cover
         return Source.objects.select_subclasses()
@@ -236,9 +236,11 @@ class UserSerializer(ModelSerializer):
             "path",
             "type",
             "uuid",
+            "password_change_date",
         ]
         extra_kwargs = {
             "name": {"allow_blank": True},
+            "password_change_date": {"read_only": True},
         }

@@ -427,7 +429,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
     queryset = User.objects.none()
     ordering = ["username"]
     serializer_class = UserSerializer
-    search_fields = ["username", "name", "is_active", "email", "uuid"]
+    search_fields = ["username", "name", "is_active", "email", "uuid", "attributes"]
     filterset_class = UsersFilter

     def get_queryset(self):
@@ -585,7 +587,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
         """Set password for user"""
         user: User = self.get_object()
         try:
-            user.set_password(request.data.get("password"))
+            user.set_password(request.data.get("password"), request=request)
             user.save()
         except (ValidationError, IntegrityError) as exc:
             LOGGER.debug("Failed to set password", exc=exc)
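With `attributes` added to `search_fields`, free-text search also matches user attribute contents. A hypothetical API call against the users endpoint (host and token are placeholders):

```shell
# Search users whose fields or attributes contain "phone" (values hypothetical).
curl -H "Authorization: Bearer $AUTHENTIK_TOKEN" \
  "https://authentik.example.com/api/v3/core/users/?search=phone"
```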
@@ -44,13 +44,12 @@ class TokenBackend(InbuiltBackend):
         self, request: HttpRequest, username: str | None, password: str | None, **kwargs: Any
     ) -> User | None:
         try:
-
             user = User._default_manager.get_by_natural_key(username)
-
         except User.DoesNotExist:
             # Run the default password hasher once to reduce the timing
             # difference between an existing and a nonexistent user (#20760).
-            User().set_password(password)
+            User().set_password(password, request=request)
             return None

         tokens = Token.filter_not_expired(
@@ -5,6 +5,7 @@ from typing import TextIO
 from daphne.management.commands.runserver import Command as RunServer
 from daphne.server import Server

+from authentik.lib.debug import start_debug_server
 from authentik.root.signals import post_startup, pre_startup, startup


@@ -13,6 +14,7 @@ class SignalServer(Server):

     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
+        start_debug_server()

         def ready_callable():
             pre_startup.send(sender=self)
@@ -9,6 +9,7 @@ from django.db import close_old_connections
 from structlog.stdlib import get_logger

 from authentik.lib.config import CONFIG
+from authentik.lib.debug import start_debug_server
 from authentik.root.celery import CELERY_APP

 LOGGER = get_logger()
@@ -28,10 +29,7 @@ class Command(BaseCommand):
     def handle(self, **options):
         LOGGER.debug("Celery options", **options)
         close_old_connections()
-        if CONFIG.get_bool("remote_debug"):
-            import debugpy
-
-            debugpy.listen(("0.0.0.0", 6900))  # nosec
+        start_debug_server()
         worker: Worker = CELERY_APP.Worker(
             no_color=False,
             quiet=True,
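`start_debug_server()` centralizes the debugpy setup that was previously inlined here; its actual implementation in `authentik.lib.debug` is not part of this diff. A minimal sketch of what such a helper might look like — the port and config key are assumptions based on the old inline code and the launch.json entries above:

```python
# Hypothetical sketch; authentik's real helper in authentik.lib.debug may differ.
from authentik.lib.config import CONFIG

def start_debug_server(port: int = 9901) -> None:
    """Open a debugpy listener when remote debugging is enabled."""
    if not CONFIG.get_bool("remote_debug"):  # config key is an assumption
        return
    import debugpy  # imported lazily so production never pays the cost

    debugpy.listen(("0.0.0.0", port))  # nosec
```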
@@ -0,0 +1,45 @@
# Generated by Django 5.0.10 on 2025-01-13 18:05

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0041_applicationentitlement"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="authenticatedsession",
            index=models.Index(fields=["expires"], name="authentik_c_expires_08251d_idx"),
        ),
        migrations.AddIndex(
            model_name="authenticatedsession",
            index=models.Index(fields=["expiring"], name="authentik_c_expirin_9cd839_idx"),
        ),
        migrations.AddIndex(
            model_name="authenticatedsession",
            index=models.Index(
                fields=["expiring", "expires"], name="authentik_c_expirin_195a84_idx"
            ),
        ),
        migrations.AddIndex(
            model_name="authenticatedsession",
            index=models.Index(fields=["session_key"], name="authentik_c_session_d0f005_idx"),
        ),
        migrations.AddIndex(
            model_name="token",
            index=models.Index(fields=["expires"], name="authentik_c_expires_a62b4b_idx"),
        ),
        migrations.AddIndex(
            model_name="token",
            index=models.Index(fields=["expiring"], name="authentik_c_expirin_a1b838_idx"),
        ),
        migrations.AddIndex(
            model_name="token",
            index=models.Index(
                fields=["expiring", "expires"], name="authentik_c_expirin_ba04d9_idx"
            ),
        ),
    ]
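To inspect the SQL these `AddIndex` operations produce, Django's standard `sqlmigrate` command can render the migration; the output shown is an abbreviated illustration under default table naming, not captured output:

```shell
# Render the SQL for the new indexes (output abbreviated and illustrative).
poetry run ak sqlmigrate authentik_core 0042
# CREATE INDEX "authentik_c_expires_08251d_idx"
#   ON "authentik_core_authenticatedsession" ("expires");
# ...
```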
26  authentik/core/migrations/0043_alter_group_options.py  (new file)
@@ -0,0 +1,26 @@
# Generated by Django 5.0.11 on 2025-01-30 23:55

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0042_authenticatedsession_authentik_c_expires_08251d_idx_and_more"),
    ]

    operations = [
        migrations.AlterModelOptions(
            name="group",
            options={
                "permissions": [
                    ("add_user_to_group", "Add user to group"),
                    ("remove_user_from_group", "Remove user from group"),
                    ("enable_group_superuser", "Enable superuser status"),
                    ("disable_group_superuser", "Disable superuser status"),
                ],
                "verbose_name": "Group",
                "verbose_name_plural": "Groups",
            },
        ),
    ]
@@ -204,6 +204,8 @@ class Group(SerializerModel, AttributesMixin):
         permissions = [
             ("add_user_to_group", _("Add user to group")),
             ("remove_user_from_group", _("Remove user from group")),
+            ("enable_group_superuser", _("Enable superuser status")),
+            ("disable_group_superuser", _("Disable superuser status")),
         ]

     def __str__(self):
@@ -356,13 +358,13 @@ class User(SerializerModel, GuardianUserMixin, AttributesMixin, AbstractUser):
         """superuser == staff user"""
         return self.is_superuser  # type: ignore

-    def set_password(self, raw_password, signal=True, sender=None):
+    def set_password(self, raw_password, signal=True, sender=None, request=None):
         if self.pk and signal:
             from authentik.core.signals import password_changed

             if not sender:
                 sender = self
-            password_changed.send(sender=sender, user=self, password=raw_password)
+            password_changed.send(sender=sender, user=self, password=raw_password, request=request)
         self.password_change_date = now()
         return super().set_password(raw_password)

@@ -599,6 +601,14 @@ class Application(SerializerModel, PolicyBindingModel):
             return None
         return candidates[-1]

+    def backchannel_provider_for[T: Provider](self, provider_type: type[T], **kwargs) -> T | None:
+        """Get Backchannel provider for a specific type"""
+        providers = self.backchannel_providers.filter(
+            **{f"{provider_type._meta.model_name}__isnull": False},
+            **kwargs,
+        )
+        return getattr(providers.first(), provider_type._meta.model_name)
+
     def __str__(self):
         return str(self.name)

@@ -846,6 +856,11 @@ class ExpiringModel(models.Model):

     class Meta:
         abstract = True
+        indexes = [
+            models.Index(fields=["expires"]),
+            models.Index(fields=["expiring"]),
+            models.Index(fields=["expiring", "expires"]),
+        ]

     def expire_action(self, *args, **kwargs):
         """Handler which is called when this object is expired. By
@@ -901,7 +916,7 @@ class Token(SerializerModel, ManagedModel, ExpiringModel):
     class Meta:
         verbose_name = _("Token")
         verbose_name_plural = _("Tokens")
-        indexes = [
+        indexes = ExpiringModel.Meta.indexes + [
             models.Index(fields=["identifier"]),
             models.Index(fields=["key"]),
         ]
@@ -1001,6 +1016,9 @@ class AuthenticatedSession(ExpiringModel):
     class Meta:
         verbose_name = _("Authenticated Session")
         verbose_name_plural = _("Authenticated Sessions")
+        indexes = ExpiringModel.Meta.indexes + [
+            models.Index(fields=["session_key"]),
+        ]

     def __str__(self) -> str:
         return f"Authenticated Session {self.session_key[:10]}"
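The pattern in these hunks is worth noting: an abstract base model declares shared indexes once, and concrete models extend the list instead of redefining it. A standalone sketch with hypothetical model names:

```python
from django.db import models

class ExpiringBase(models.Model):
    expires = models.DateTimeField()
    expiring = models.BooleanField(default=True)

    class Meta:
        abstract = True
        indexes = [
            models.Index(fields=["expires"]),
            models.Index(fields=["expiring"]),
        ]

class ApiToken(ExpiringBase):  # hypothetical concrete model
    identifier = models.CharField(max_length=255)

    class Meta:
        # Concatenate rather than redefine, so the base indexes still apply.
        indexes = ExpiringBase.Meta.indexes + [
            models.Index(fields=["identifier"]),
        ]
```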
@@ -35,8 +35,7 @@ from authentik.flows.planner import (
     FlowPlanner,
 )
 from authentik.flows.stage import StageView
-from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_GET, SESSION_KEY_PLAN
-from authentik.lib.utils.urls import redirect_with_qs
+from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_GET
 from authentik.lib.views import bad_request_message
 from authentik.policies.denied import AccessDeniedResponse
 from authentik.policies.utils import delete_none_values
@@ -47,8 +46,9 @@ from authentik.stages.user_write.stage import PLAN_CONTEXT_USER_PATH
 
 LOGGER = get_logger()
 
-SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token"  # nosec
 PLAN_CONTEXT_SOURCE_GROUPS = "source_groups"
+SESSION_KEY_SOURCE_FLOW_STAGES = "authentik/flows/source_flow_stages"
+SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token"  # nosec
 
 
 class MessageStage(StageView):
@@ -219,28 +219,28 @@ class SourceFlowManager:
             }
         )
         flow_context.update(self.policy_context)
-        if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session:
-            token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN)
-            self._logger.info("Replacing source flow with overridden flow", flow=token.flow.slug)
-            plan = token.plan
-            plan.context[PLAN_CONTEXT_IS_RESTORED] = token
-            plan.context.update(flow_context)
-            for stage in self.get_stages_to_append(flow):
-                plan.append_stage(stage)
-            if stages:
-                for stage in stages:
-                    plan.append_stage(stage)
-            self.request.session[SESSION_KEY_PLAN] = plan
-            flow_slug = token.flow.slug
-            token.delete()
-            return redirect_with_qs(
-                "authentik_core:if-flow",
-                self.request.GET,
-                flow_slug=flow_slug,
-            )
         flow_context.setdefault(PLAN_CONTEXT_REDIRECT, final_redirect)
 
         if not flow:
+            # We only check for the flow token here if we don't have a flow, otherwise we rely on
+            # SESSION_KEY_SOURCE_FLOW_STAGES to delegate the usage of this token and dynamically add
+            # stages that deal with this token to return to another flow
+            if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session:
+                token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN)
+                self._logger.info(
+                    "Replacing source flow with overridden flow", flow=token.flow.slug
+                )
+                plan = token.plan
+                plan.context[PLAN_CONTEXT_IS_RESTORED] = token
+                plan.context.update(flow_context)
+                for stage in self.get_stages_to_append(flow):
+                    plan.append_stage(stage)
+                if stages:
+                    for stage in stages:
+                        plan.append_stage(stage)
+                redirect = plan.to_redirect(self.request, token.flow)
+                token.delete()
+                return redirect
             return bad_request_message(
                 self.request,
                 _("Configured flow does not exist."),
@@ -259,6 +259,8 @@ class SourceFlowManager:
         if stages:
             for stage in stages:
                 plan.append_stage(stage)
+        for stage in self.request.session.get(SESSION_KEY_SOURCE_FLOW_STAGES, []):
+            plan.append_stage(stage)
         return plan.to_redirect(self.request, flow)
 
     def handle_auth(
@@ -295,6 +297,8 @@ class SourceFlowManager:
         # When request isn't authenticated we jump straight to auth
         if not self.request.user.is_authenticated:
             return self.handle_auth(connection)
+        # When an override flow token exists we actually still use a flow for link
+        # to continue the existing flow we came from
+        if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session:
+            return self._prepare_flow(None, connection)
         connection.save()
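Not part of the diff, but useful context for the hunks above: a minimal sketch of how the override token is expected to reach the session. The helper name and the import path of the session key are assumptions for illustration.

```python
# Hypothetical sketch: a stage that sends the user to an external source can
# stash the current flow's token; SourceFlowManager then resumes that flow
# instead of starting the source's own enrollment/authentication flow.
from authentik.core.sources.flow_manager import (  # assumed module path
    SESSION_KEY_OVERRIDE_FLOW_TOKEN,
)
from authentik.flows.models import FlowToken


def stash_return_token(request, flow_token: FlowToken) -> None:
    # _prepare_flow() checks this key when no flow is configured, restores the
    # stashed plan, and redirects back to it (see the @@ -219 hunk above).
    request.session[SESSION_KEY_OVERRIDE_FLOW_TOKEN] = flow_token
```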
@@ -67,6 +67,8 @@ def clean_expired_models(self: SystemTask):
             raise ImproperlyConfigured(
                 "Invalid session_storage setting, allowed values are db and cache"
             )
+    if CONFIG.get("session_storage", "cache") == "db":
+        DBSessionStore.clear_expired()
     LOGGER.debug("Expired sessions", model=AuthenticatedSession, amount=amount)
 
     messages.append(f"Expired {amount} {AuthenticatedSession._meta.verbose_name_plural}")
@@ -8,6 +8,8 @@
 <head>
     <meta charset="UTF-8">
     <meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
+    {# Darkreader breaks the site regardless of theme as it's not compatible with webcomponents, and we default to a dark theme based on preferred colour-scheme #}
+    <meta name="darkreader-lock">
     <title>{% block title %}{% trans title|default:brand.branding_title %}{% endblock %}</title>
     <link rel="icon" href="{{ brand.branding_favicon_url }}">
     <link rel="shortcut icon" href="{{ brand.branding_favicon_url }}">
@@ -4,7 +4,7 @@ from django.urls.base import reverse
 from guardian.shortcuts import assign_perm
 from rest_framework.test import APITestCase
 
-from authentik.core.models import Group, User
+from authentik.core.models import Group
 from authentik.core.tests.utils import create_test_admin_user, create_test_user
 from authentik.lib.generators import generate_id
 
@@ -14,7 +14,7 @@ class TestGroupsAPI(APITestCase):
 
     def setUp(self) -> None:
         self.login_user = create_test_user()
-        self.user = User.objects.create(username="test-user")
+        self.user = create_test_user()
 
     def test_list_with_users(self):
         """Test listing with users"""
@@ -109,3 +109,57 @@ class TestGroupsAPI(APITestCase):
             },
         )
         self.assertEqual(res.status_code, 400)
+
+    def test_superuser_no_perm(self):
+        """Test creating a superuser group without permission"""
+        assign_perm("authentik_core.add_group", self.login_user)
+        self.client.force_login(self.login_user)
+        res = self.client.post(
+            reverse("authentik_api:group-list"),
+            data={"name": generate_id(), "is_superuser": True},
+        )
+        self.assertEqual(res.status_code, 400)
+        self.assertJSONEqual(
+            res.content,
+            {"is_superuser": ["User does not have permission to set superuser status to True."]},
+        )
+
+    def test_superuser_update_no_perm(self):
+        """Test updating a superuser group without permission"""
+        group = Group.objects.create(name=generate_id(), is_superuser=True)
+        assign_perm("view_group", self.login_user, group)
+        assign_perm("change_group", self.login_user, group)
+        self.client.force_login(self.login_user)
+        res = self.client.patch(
+            reverse("authentik_api:group-detail", kwargs={"pk": group.pk}),
+            data={"is_superuser": False},
+        )
+        self.assertEqual(res.status_code, 400)
+        self.assertJSONEqual(
+            res.content,
+            {"is_superuser": ["User does not have permission to set superuser status to False."]},
+        )
+
+    def test_superuser_update_no_change(self):
+        """Test updating a superuser group without permission
+        and without changing the superuser status"""
+        group = Group.objects.create(name=generate_id(), is_superuser=True)
+        assign_perm("view_group", self.login_user, group)
+        assign_perm("change_group", self.login_user, group)
+        self.client.force_login(self.login_user)
+        res = self.client.patch(
+            reverse("authentik_api:group-detail", kwargs={"pk": group.pk}),
+            data={"name": generate_id(), "is_superuser": True},
+        )
+        self.assertEqual(res.status_code, 200)
+
+    def test_superuser_create(self):
+        """Test creating a superuser group with permission"""
+        assign_perm("authentik_core.add_group", self.login_user)
+        assign_perm("authentik_core.enable_group_superuser", self.login_user)
+        self.client.force_login(self.login_user)
+        res = self.client.post(
+            reverse("authentik_api:group-list"),
+            data={"name": generate_id(), "is_superuser": True},
+        )
+        self.assertEqual(res.status_code, 201)
@@ -97,6 +97,8 @@ class EnterpriseAuditMiddleware(AuditMiddleware):
         thread_kwargs: dict | None = None,
         **_,
     ):
+        if not self.enabled:
+            return super().post_save_handler(request, sender, instance, created, thread_kwargs, **_)
         if not should_log_model(instance):
             return None
         thread_kwargs = {}
@@ -122,6 +124,8 @@ class EnterpriseAuditMiddleware(AuditMiddleware):
     ):
         thread_kwargs = {}
         m2m_field = None
+        if not self.enabled:
+            return super().m2m_changed_handler(request, sender, instance, action, thread_kwargs)
         # For the audit log we don't care about `pre_` or `post_` so we trim that part off
         _, _, action_direction = action.partition("_")
         # resolve the "through" model to an actual field
@@ -0,0 +1,27 @@
# Generated by Django 5.0.10 on 2025-01-13 18:05

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_enterprise", "0003_remove_licenseusage_within_limits_and_more"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="licenseusage",
            index=models.Index(fields=["expires"], name="authentik_e_expires_3f2956_idx"),
        ),
        migrations.AddIndex(
            model_name="licenseusage",
            index=models.Index(fields=["expiring"], name="authentik_e_expirin_11d3d7_idx"),
        ),
        migrations.AddIndex(
            model_name="licenseusage",
            index=models.Index(
                fields=["expiring", "expires"], name="authentik_e_expirin_4d558f_idx"
            ),
        ),
    ]
@@ -93,3 +93,4 @@ class LicenseUsage(ExpiringModel):
     class Meta:
         verbose_name = _("License Usage")
         verbose_name_plural = _("License Usage Records")
+        indexes = ExpiringModel.Meta.indexes
@@ -1,14 +0,0 @@
"""RAC app config"""

from authentik.enterprise.apps import EnterpriseConfig


class AuthentikEnterpriseProviderRAC(EnterpriseConfig):
    """authentik enterprise rac app config"""

    name = "authentik.enterprise.providers.rac"
    label = "authentik_providers_rac"
    verbose_name = "authentik Enterprise.Providers.RAC"
    default = True
    mountpoint = ""
    ws_mountpoint = "authentik.enterprise.providers.rac.urls"
64  authentik/enterprise/providers/ssf/api/providers.py  Normal file
@@ -0,0 +1,64 @@
"""SSF Provider API Views"""

from django.urls import reverse
from rest_framework.fields import SerializerMethodField
from rest_framework.request import Request
from rest_framework.viewsets import ModelViewSet

from authentik.core.api.providers import ProviderSerializer
from authentik.core.api.tokens import TokenSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.providers.ssf.models import SSFProvider


class SSFProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
    """SSFProvider Serializer"""

    ssf_url = SerializerMethodField()
    token_obj = TokenSerializer(source="token", required=False, read_only=True)

    def get_ssf_url(self, instance: SSFProvider) -> str | None:
        request: Request = self._context.get("request")
        if not request:
            return None
        if not instance.backchannel_application:
            return None
        return request.build_absolute_uri(
            reverse(
                "authentik_providers_ssf:configuration",
                kwargs={
                    "application_slug": instance.backchannel_application.slug,
                },
            )
        )

    class Meta:
        model = SSFProvider
        fields = [
            "pk",
            "name",
            "component",
            "verbose_name",
            "verbose_name_plural",
            "meta_model_name",
            "signing_key",
            "token_obj",
            "oidc_auth_providers",
            "ssf_url",
            "event_retention",
        ]
        extra_kwargs = {}


class SSFProviderViewSet(UsedByMixin, ModelViewSet):
    """SSFProvider Viewset"""

    queryset = SSFProvider.objects.all()
    serializer_class = SSFProviderSerializer
    filterset_fields = {
        "application": ["isnull"],
        "name": ["iexact"],
    }
    search_fields = ["name"]
    ordering = ["name"]
37  authentik/enterprise/providers/ssf/api/streams.py  Normal file
@@ -0,0 +1,37 @@
"""SSF Stream API Views"""

from rest_framework.viewsets import ReadOnlyModelViewSet

from authentik.core.api.utils import ModelSerializer
from authentik.enterprise.providers.ssf.api.providers import SSFProviderSerializer
from authentik.enterprise.providers.ssf.models import Stream


class SSFStreamSerializer(ModelSerializer):
    """SSFStream Serializer"""

    provider_obj = SSFProviderSerializer(source="provider", read_only=True)

    class Meta:
        model = Stream
        fields = [
            "pk",
            "provider",
            "provider_obj",
            "delivery_method",
            "endpoint_url",
            "events_requested",
            "format",
            "aud",
            "iss",
        ]


class SSFStreamViewSet(ReadOnlyModelViewSet):
    """SSFStream Viewset"""

    queryset = Stream.objects.all()
    serializer_class = SSFStreamSerializer
    filterset_fields = ["provider", "endpoint_url", "delivery_method"]
    search_fields = ["provider__name", "endpoint_url"]
    ordering = ["provider", "uuid"]
13  authentik/enterprise/providers/ssf/apps.py  Normal file
@@ -0,0 +1,13 @@
"""SSF app config"""

from authentik.enterprise.apps import EnterpriseConfig


class AuthentikEnterpriseProviderSSF(EnterpriseConfig):
    """authentik enterprise ssf app config"""

    name = "authentik.enterprise.providers.ssf"
    label = "authentik_providers_ssf"
    verbose_name = "authentik Enterprise.Providers.SSF"
    default = True
    mountpoint = ""
201  authentik/enterprise/providers/ssf/migrations/0001_initial.py  Normal file
@@ -0,0 +1,201 @@
# Generated by Django 5.0.11 on 2025-02-05 16:20

import authentik.lib.utils.time
import django.contrib.postgres.fields
import django.db.models.deletion
import uuid
from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ("authentik_core", "0042_authenticatedsession_authentik_c_expires_08251d_idx_and_more"),
        ("authentik_crypto", "0004_alter_certificatekeypair_name"),
        ("authentik_providers_oauth2", "0027_accesstoken_authentik_p_expires_9f24a5_idx_and_more"),
    ]

    operations = [
        migrations.CreateModel(
            name="SSFProvider",
            fields=[
                (
                    "provider_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="authentik_core.provider",
                    ),
                ),
                (
                    "event_retention",
                    models.TextField(
                        default="days=30",
                        validators=[authentik.lib.utils.time.timedelta_string_validator],
                    ),
                ),
                (
                    "oidc_auth_providers",
                    models.ManyToManyField(
                        blank=True, default=None, to="authentik_providers_oauth2.oauth2provider"
                    ),
                ),
                (
                    "signing_key",
                    models.ForeignKey(
                        help_text="Key used to sign the SSF Events.",
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_crypto.certificatekeypair",
                        verbose_name="Signing Key",
                    ),
                ),
                (
                    "token",
                    models.ForeignKey(
                        default=None,
                        null=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_core.token",
                    ),
                ),
            ],
            options={
                "verbose_name": "Shared Signals Framework Provider",
                "verbose_name_plural": "Shared Signals Framework Providers",
                "permissions": [("add_stream", "Add stream to SSF provider")],
            },
            bases=("authentik_core.provider",),
        ),
        migrations.CreateModel(
            name="Stream",
            fields=[
                (
                    "uuid",
                    models.UUIDField(
                        default=uuid.uuid4, editable=False, primary_key=True, serialize=False
                    ),
                ),
                (
                    "delivery_method",
                    models.TextField(
                        choices=[
                            (
                                "https://schemas.openid.net/secevent/risc/delivery-method/push",
                                "Risc Push",
                            ),
                            (
                                "https://schemas.openid.net/secevent/risc/delivery-method/poll",
                                "Risc Poll",
                            ),
                        ]
                    ),
                ),
                ("endpoint_url", models.TextField(null=True)),
                (
                    "events_requested",
                    django.contrib.postgres.fields.ArrayField(
                        base_field=models.TextField(
                            choices=[
                                (
                                    "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                                    "Caep Session Revoked",
                                ),
                                (
                                    "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                                    "Caep Credential Change",
                                ),
                                (
                                    "https://schemas.openid.net/secevent/ssf/event-type/verification",
                                    "Set Verification",
                                ),
                            ]
                        ),
                        default=list,
                        size=None,
                    ),
                ),
                ("format", models.TextField()),
                (
                    "aud",
                    django.contrib.postgres.fields.ArrayField(
                        base_field=models.TextField(), default=list, size=None
                    ),
                ),
                ("iss", models.TextField()),
                (
                    "provider",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_providers_ssf.ssfprovider",
                    ),
                ),
            ],
            options={
                "verbose_name": "SSF Stream",
                "verbose_name_plural": "SSF Streams",
                "default_permissions": ["change", "delete", "view"],
            },
        ),
        migrations.CreateModel(
            name="StreamEvent",
            fields=[
                ("created", models.DateTimeField(auto_now_add=True)),
                ("last_updated", models.DateTimeField(auto_now=True)),
                ("expires", models.DateTimeField(default=None, null=True)),
                ("expiring", models.BooleanField(default=True)),
                (
                    "uuid",
                    models.UUIDField(
                        default=uuid.uuid4, editable=False, primary_key=True, serialize=False
                    ),
                ),
                (
                    "status",
                    models.TextField(
                        choices=[
                            ("pending_new", "Pending New"),
                            ("pending_failed", "Pending Failed"),
                            ("sent", "Sent"),
                        ]
                    ),
                ),
                (
                    "type",
                    models.TextField(
                        choices=[
                            (
                                "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                                "Caep Session Revoked",
                            ),
                            (
                                "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                                "Caep Credential Change",
                            ),
                            (
                                "https://schemas.openid.net/secevent/ssf/event-type/verification",
                                "Set Verification",
                            ),
                        ]
                    ),
                ),
                ("payload", models.JSONField(default=dict)),
                (
                    "stream",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_providers_ssf.stream",
                    ),
                ),
            ],
            options={
                "verbose_name": "SSF Stream Event",
                "verbose_name_plural": "SSF Stream Events",
                "ordering": ("-created",),
            },
        ),
    ]
178  authentik/enterprise/providers/ssf/models.py  Normal file
@@ -0,0 +1,178 @@
from datetime import datetime
from functools import cached_property
from uuid import uuid4

from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.templatetags.static import static
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
from jwt import encode

from authentik.core.models import BackchannelProvider, ExpiringModel, Token
from authentik.crypto.models import CertificateKeyPair
from authentik.lib.models import CreatedUpdatedModel
from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator
from authentik.providers.oauth2.models import JWTAlgorithms, OAuth2Provider


class EventTypes(models.TextChoices):
    """SSF Event types supported by authentik"""

    CAEP_SESSION_REVOKED = "https://schemas.openid.net/secevent/caep/event-type/session-revoked"
    CAEP_CREDENTIAL_CHANGE = "https://schemas.openid.net/secevent/caep/event-type/credential-change"
    SET_VERIFICATION = "https://schemas.openid.net/secevent/ssf/event-type/verification"


class DeliveryMethods(models.TextChoices):
    """SSF Delivery methods"""

    RISC_PUSH = "https://schemas.openid.net/secevent/risc/delivery-method/push"
    RISC_POLL = "https://schemas.openid.net/secevent/risc/delivery-method/poll"


class SSFEventStatus(models.TextChoices):
    """SSF Event status"""

    PENDING_NEW = "pending_new"
    PENDING_FAILED = "pending_failed"
    SENT = "sent"


class SSFProvider(BackchannelProvider):
    """Shared Signals Framework provider to allow applications to
    receive user events from authentik."""

    signing_key = models.ForeignKey(
        CertificateKeyPair,
        verbose_name=_("Signing Key"),
        on_delete=models.CASCADE,
        help_text=_("Key used to sign the SSF Events."),
    )

    oidc_auth_providers = models.ManyToManyField(OAuth2Provider, blank=True, default=None)

    token = models.ForeignKey(Token, on_delete=models.CASCADE, null=True, default=None)

    event_retention = models.TextField(
        default="days=30",
        validators=[timedelta_string_validator],
    )

    @cached_property
    def jwt_key(self) -> tuple[PrivateKeyTypes, str]:
        """Get either the configured certificate or the client secret"""
        key: CertificateKeyPair = self.signing_key
        private_key = key.private_key
        if isinstance(private_key, RSAPrivateKey):
            return private_key, JWTAlgorithms.RS256
        if isinstance(private_key, EllipticCurvePrivateKey):
            return private_key, JWTAlgorithms.ES256
        raise ValueError(f"Invalid private key type: {type(private_key)}")

    @property
    def service_account_identifier(self) -> str:
        return f"ak-providers-ssf-{self.pk}"

    @property
    def serializer(self):
        from authentik.enterprise.providers.ssf.api.providers import SSFProviderSerializer

        return SSFProviderSerializer

    @property
    def icon_url(self) -> str | None:
        return static("authentik/sources/ssf.svg")

    @property
    def component(self) -> str:
        return "ak-provider-ssf-form"

    class Meta:
        verbose_name = _("Shared Signals Framework Provider")
        verbose_name_plural = _("Shared Signals Framework Providers")
        permissions = [
            # This overrides the default "add_stream" permission of the Stream object,
            # as the user requesting to add a stream must have the permission on the provider
            ("add_stream", _("Add stream to SSF provider")),
        ]


class Stream(models.Model):
    """SSF Stream"""

    uuid = models.UUIDField(default=uuid4, primary_key=True, editable=False)
    provider = models.ForeignKey(SSFProvider, on_delete=models.CASCADE)

    delivery_method = models.TextField(choices=DeliveryMethods.choices)
    endpoint_url = models.TextField(null=True)

    events_requested = ArrayField(models.TextField(choices=EventTypes.choices), default=list)
    format = models.TextField()
    aud = ArrayField(models.TextField(), default=list)

    iss = models.TextField()

    class Meta:
        verbose_name = _("SSF Stream")
        verbose_name_plural = _("SSF Streams")
        default_permissions = ["change", "delete", "view"]

    def __str__(self) -> str:
        return "SSF Stream"

    def prepare_event_payload(self, type: EventTypes, event_data: dict, **kwargs) -> dict:
        jti = uuid4()
        _now = now()
        return {
            "uuid": jti,
            "stream_id": str(self.pk),
            "type": type,
            "expiring": True,
            "status": SSFEventStatus.PENDING_NEW,
            "expires": _now + timedelta_from_string(self.provider.event_retention),
            "payload": {
                "jti": jti.hex,
                "aud": self.aud,
                "iat": int(datetime.now().timestamp()),
                "iss": self.iss,
                "events": {type: event_data},
                **kwargs,
            },
        }

    def encode(self, data: dict) -> str:
        headers = {}
        if self.provider.signing_key:
            headers["kid"] = self.provider.signing_key.kid
        key, alg = self.provider.jwt_key
        return encode(data, key, algorithm=alg, headers=headers)


class StreamEvent(CreatedUpdatedModel, ExpiringModel):
    """Single stream event to be sent"""

    uuid = models.UUIDField(default=uuid4, primary_key=True, editable=False)

    stream = models.ForeignKey(Stream, on_delete=models.CASCADE)
    status = models.TextField(choices=SSFEventStatus.choices)

    type = models.TextField(choices=EventTypes.choices)
    payload = models.JSONField(default=dict)

    def expire_action(self, *args, **kwargs):
        """Only allow automatic cleanup of successfully sent event"""
        if self.status != SSFEventStatus.SENT:
            return
        return super().expire_action(*args, **kwargs)

    def __str__(self):
        return f"Stream event {self.type}"

    class Meta:
        verbose_name = _("SSF Stream Event")
        verbose_name_plural = _("SSF Stream Events")
        ordering = ("-created",)
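A hypothetical usage sketch (not part of the diff): how the model methods above turn a credential-change event into a signed Security Event Token (SET). The existing stream and example email are assumptions for illustration.

```python
from authentik.enterprise.providers.ssf.models import EventTypes, Stream

stream = Stream.objects.first()  # assumes a stream already exists
set_data = stream.prepare_event_payload(
    EventTypes.CAEP_CREDENTIAL_CHANGE,
    {"credential_type": "password", "change_type": "update"},
    sub_id={"format": "email", "email": "user@example.com"},  # extra kwargs land in the SET
)
# The outer keys (uuid/stream_id/type/status/expires/...) mirror StreamEvent
# fields, so the dict can be persisted directly; "payload" carries the actual
# SET claims (jti, aud, iat, iss, events).
signed = stream.encode(set_data["payload"])  # compact JWS, signed with the provider key
```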
193  authentik/enterprise/providers/ssf/signals.py  Normal file
@@ -0,0 +1,193 @@
from hashlib import sha256

from django.contrib.auth.signals import user_logged_out
from django.db.models import Model
from django.db.models.signals import post_delete, post_save, pre_delete
from django.dispatch import receiver
from django.http.request import HttpRequest
from guardian.shortcuts import assign_perm

from authentik.core.models import (
    USER_PATH_SYSTEM_PREFIX,
    AuthenticatedSession,
    Token,
    TokenIntents,
    User,
    UserTypes,
)
from authentik.core.signals import password_changed
from authentik.enterprise.providers.ssf.models import (
    EventTypes,
    SSFProvider,
)
from authentik.enterprise.providers.ssf.tasks import send_ssf_event
from authentik.events.middleware import audit_ignore
from authentik.stages.authenticator.models import Device
from authentik.stages.authenticator_duo.models import DuoDevice
from authentik.stages.authenticator_static.models import StaticDevice
from authentik.stages.authenticator_totp.models import TOTPDevice
from authentik.stages.authenticator_webauthn.models import (
    UNKNOWN_DEVICE_TYPE_AAGUID,
    WebAuthnDevice,
)

USER_PATH_PROVIDERS_SSF = USER_PATH_SYSTEM_PREFIX + "/providers/ssf"


@receiver(post_save, sender=SSFProvider)
def ssf_providers_post_save(sender: type[Model], instance: SSFProvider, created: bool, **_):
    """Create service account when the provider is saved"""
    identifier = instance.service_account_identifier
    user, _ = User.objects.update_or_create(
        username=identifier,
        defaults={
            "name": f"SSF Provider {instance.name} Service-Account",
            "type": UserTypes.INTERNAL_SERVICE_ACCOUNT,
            "path": USER_PATH_PROVIDERS_SSF,
        },
    )
    assign_perm("add_stream", user, instance)
    token, token_created = Token.objects.update_or_create(
        identifier=identifier,
        defaults={
            "user": user,
            "intent": TokenIntents.INTENT_API,
            "expiring": False,
            "managed": f"goauthentik.io/providers/ssf/{instance.pk}",
        },
    )
    if created or token_created:
        with audit_ignore():
            instance.token = token
            instance.save()


@receiver(user_logged_out)
def ssf_user_logged_out_session_revoked(sender, request: HttpRequest, user: User, **_):
    """Session revoked trigger (user logged out)"""
    if not request.session or not request.session.session_key or not user:
        return
    send_ssf_event(
        EventTypes.CAEP_SESSION_REVOKED,
        {
            "initiating_entity": "user",
        },
        sub_id={
            "format": "complex",
            "session": {
                "format": "opaque",
                "id": sha256(request.session.session_key.encode("ascii")).hexdigest(),
            },
            "user": {
                "format": "email",
                "email": user.email,
            },
        },
        request=request,
    )


@receiver(pre_delete, sender=AuthenticatedSession)
def ssf_user_session_delete_session_revoked(sender, instance: AuthenticatedSession, **_):
    """Session revoked trigger (users' session has been deleted)

    As this signal is also triggered with a regular logout, we can't be sure
    if the session has been deleted by an admin or by the user themselves."""
    send_ssf_event(
        EventTypes.CAEP_SESSION_REVOKED,
        {
            "initiating_entity": "user",
        },
        sub_id={
            "format": "complex",
            "session": {
                "format": "opaque",
                "id": sha256(instance.session_key.encode("ascii")).hexdigest(),
            },
            "user": {
                "format": "email",
                "email": instance.user.email,
            },
        },
    )


@receiver(password_changed)
def ssf_password_changed_cred_change(sender, user: User, password: str | None, **_):
    """Credential change trigger (password changed)"""
    send_ssf_event(
        EventTypes.CAEP_CREDENTIAL_CHANGE,
        {
            "credential_type": "password",
            "change_type": "revoke" if password is None else "update",
        },
        sub_id={
            "format": "complex",
            "user": {
                "format": "email",
                "email": user.email,
            },
        },
    )


device_type_map = {
    StaticDevice: "pin",
    TOTPDevice: "pin",
    WebAuthnDevice: "fido-u2f",
    DuoDevice: "app",
}


@receiver(post_save)
def ssf_device_post_save(sender: type[Model], instance: Device, created: bool, **_):
    if not isinstance(instance, Device):
        return
    if not instance.confirmed:
        return
    device_type = device_type_map.get(instance.__class__)
    data = {
        "credential_type": device_type,
        "change_type": "create" if created else "update",
        "friendly_name": instance.name,
    }
    if isinstance(instance, WebAuthnDevice) and instance.aaguid != UNKNOWN_DEVICE_TYPE_AAGUID:
        data["fido2_aaguid"] = instance.aaguid
    send_ssf_event(
        EventTypes.CAEP_CREDENTIAL_CHANGE,
        data,
        sub_id={
            "format": "complex",
            "user": {
                "format": "email",
                "email": instance.user.email,
            },
        },
    )


@receiver(post_delete)
def ssf_device_post_delete(sender: type[Model], instance: Device, **_):
    if not isinstance(instance, Device):
        return
    if not instance.confirmed:
        return
    device_type = device_type_map.get(instance.__class__)
    data = {
        "credential_type": device_type,
        "change_type": "delete",
        "friendly_name": instance.name,
    }
    if isinstance(instance, WebAuthnDevice) and instance.aaguid != UNKNOWN_DEVICE_TYPE_AAGUID:
        data["fido2_aaguid"] = instance.aaguid
    send_ssf_event(
        EventTypes.CAEP_CREDENTIAL_CHANGE,
        data,
        sub_id={
            "format": "complex",
            "user": {
                "format": "email",
                "email": instance.user.email,
            },
        },
    )
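A sketch of the subject structure the signals above hand to `send_ssf_event` (not part of the diff; the session key and email are placeholders). A "complex" subject scopes one event to both a session and a user; note that only the SHA-256 of the Django session key is sent, never the key itself.

```python
from hashlib import sha256

sub_id = {
    "format": "complex",
    "session": {
        "format": "opaque",
        # opaque identifier derived from the session key, as in the receivers above
        "id": sha256(b"<session-key>").hexdigest(),
    },
    "user": {"format": "email", "email": "user@example.com"},
}
```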
136  authentik/enterprise/providers/ssf/tasks.py  Normal file
@@ -0,0 +1,136 @@
from celery import group
from django.http import HttpRequest
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
from requests.exceptions import RequestException
from structlog.stdlib import get_logger

from authentik.core.models import User
from authentik.enterprise.providers.ssf.models import (
    DeliveryMethods,
    EventTypes,
    SSFEventStatus,
    Stream,
    StreamEvent,
)
from authentik.events.logs import LogEvent
from authentik.events.models import TaskStatus
from authentik.events.system_tasks import SystemTask
from authentik.lib.utils.http import get_http_session
from authentik.lib.utils.time import timedelta_from_string
from authentik.policies.engine import PolicyEngine
from authentik.root.celery import CELERY_APP

session = get_http_session()
LOGGER = get_logger()


def send_ssf_event(
    event_type: EventTypes,
    data: dict,
    stream_filter: dict | None = None,
    request: HttpRequest | None = None,
    **extra_data,
):
    """Wrapper to send an SSF event to multiple streams"""
    payload = []
    if not stream_filter:
        stream_filter = {}
    stream_filter["events_requested__contains"] = [event_type]
    if request and hasattr(request, "request_id"):
        extra_data.setdefault("txn", request.request_id)
    for stream in Stream.objects.filter(**stream_filter):
        event_data = stream.prepare_event_payload(event_type, data, **extra_data)
        payload.append((str(stream.uuid), event_data))
    return _send_ssf_event.delay(payload)


def _check_app_access(stream_uuid: str, event_data: dict) -> bool:
    """Check if event is related to user and if so, check
    if the user has access to the application"""
    stream = Stream.objects.filter(pk=stream_uuid).first()
    if not stream:
        return False
    # `event_data` is a dict version of a StreamEvent
    sub_id = event_data.get("payload", {}).get("sub_id", {})
    email = sub_id.get("user", {}).get("email", None)
    if not email:
        return True
    user = User.objects.filter(email=email).first()
    if not user:
        return True
    engine = PolicyEngine(stream.provider.backchannel_application, user)
    engine.use_cache = False
    engine.build()
    return engine.passing


@CELERY_APP.task()
def _send_ssf_event(event_data: list[tuple[str, dict]]):
    tasks = []
    for stream, data in event_data:
        if not _check_app_access(stream, data):
            continue
        event = StreamEvent.objects.create(**data)
        tasks.extend(send_single_ssf_event(stream, str(event.uuid)))
    main_task = group(*tasks)
    main_task()


def send_single_ssf_event(stream_id: str, evt_id: str):
    stream = Stream.objects.filter(pk=stream_id).first()
    if not stream:
        # Return an empty list so callers can always extend() the result
        return []
    event = StreamEvent.objects.filter(pk=evt_id).first()
    if not event:
        return []
    if event.status == SSFEventStatus.SENT:
        return []
    if stream.delivery_method == DeliveryMethods.RISC_PUSH:
        return [ssf_push_event.si(str(event.pk))]
    return []


@CELERY_APP.task(bind=True, base=SystemTask)
def ssf_push_event(self: SystemTask, event_id: str):
    self.save_on_success = False
    event = StreamEvent.objects.filter(pk=event_id).first()
    if not event:
        return
    self.set_uid(event_id)
    if event.status == SSFEventStatus.SENT:
        self.set_status(TaskStatus.SUCCESSFUL)
        return
    try:
        response = session.post(
            event.stream.endpoint_url,
            data=event.stream.encode(event.payload),
            headers={"Content-Type": "application/secevent+jwt", "Accept": "application/json"},
        )
        response.raise_for_status()
        event.status = SSFEventStatus.SENT
        event.save()
        self.set_status(TaskStatus.SUCCESSFUL)
        return
    except RequestException as exc:
        LOGGER.warning("Failed to send SSF event", exc=exc)
        self.set_status(TaskStatus.ERROR)
        attrs = {}
        if exc.response:
            attrs["response"] = {
                "content": exc.response.text,
                "status": exc.response.status_code,
            }
        self.set_error(
            exc,
            LogEvent(
                _("Failed to send request"),
                log_level="warning",
                logger=self.__name__,
                attributes=attrs,
            ),
        )
        # Re-up the expiry of the stream event
        event.expires = now() + timedelta_from_string(event.stream.provider.event_retention)
        event.status = SSFEventStatus.PENDING_FAILED
        event.save()
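A hedged sketch of calling the fan-out wrapper above (not part of the diff; the provider name and email are illustrative). `send_ssf_event` always adds `events_requested__contains=[event_type]` on top of whatever filter the caller supplies, so only streams that subscribed to the event type receive it.

```python
from authentik.enterprise.providers.ssf.models import EventTypes
from authentik.enterprise.providers.ssf.tasks import send_ssf_event

send_ssf_event(
    EventTypes.CAEP_SESSION_REVOKED,
    {"initiating_entity": "admin"},
    stream_filter={"provider__name": "my-ssf-provider"},  # hypothetical extra filter
    sub_id={"format": "email", "email": "user@example.com"},  # lands in the SET claims
)
```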
46  authentik/enterprise/providers/ssf/tests/test_config.py  Normal file
@@ -0,0 +1,46 @@
import json

from django.urls import reverse
from rest_framework.test import APITestCase

from authentik.core.models import Application
from authentik.core.tests.utils import create_test_cert
from authentik.enterprise.providers.ssf.models import (
    SSFProvider,
)
from authentik.lib.generators import generate_id


class TestConfiguration(APITestCase):
    def setUp(self):
        self.application = Application.objects.create(name=generate_id(), slug=generate_id())
        self.provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
            backchannel_application=self.application,
        )

    def test_config_fetch(self):
        """test SSF configuration (unauthenticated)"""
        res = self.client.get(
            reverse(
                "authentik_providers_ssf:configuration",
                kwargs={"application_slug": self.application.slug},
            ),
        )
        self.assertEqual(res.status_code, 200)
        content = json.loads(res.content)
        self.assertEqual(content["spec_version"], "1_0-ID2")

    def test_config_fetch_authenticated(self):
        """test SSF configuration (authenticated)"""
        res = self.client.get(
            reverse(
                "authentik_providers_ssf:configuration",
                kwargs={"application_slug": self.application.slug},
            ),
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        self.assertEqual(res.status_code, 200)
        content = json.loads(res.content)
        self.assertEqual(content["spec_version"], "1_0-ID2")
51  authentik/enterprise/providers/ssf/tests/test_jwks.py  Normal file
@@ -0,0 +1,51 @@
"""JWKS tests"""

import base64
import json

from cryptography.hazmat.backends import default_backend
from cryptography.x509 import load_der_x509_certificate
from django.test import TestCase
from django.urls.base import reverse
from jwt import PyJWKSet

from authentik.core.models import Application
from authentik.core.tests.utils import create_test_cert
from authentik.enterprise.providers.ssf.models import SSFProvider
from authentik.lib.generators import generate_id


class TestJWKS(TestCase):
    """Test JWKS view"""

    def test_rs256(self):
        """Test JWKS request with RS256"""
        provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
        )
        app = Application.objects.create(name=generate_id(), slug=generate_id())
        app.backchannel_providers.add(provider)
        response = self.client.get(
            reverse("authentik_providers_ssf:jwks", kwargs={"application_slug": app.slug})
        )
        body = json.loads(response.content.decode())
        self.assertEqual(len(body["keys"]), 1)
        PyJWKSet.from_dict(body)
        key = body["keys"][0]
        load_der_x509_certificate(base64.b64decode(key["x5c"][0]), default_backend()).public_key()

    def test_es256(self):
        """Test JWKS request with ES256"""
        provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
        )
        app = Application.objects.create(name=generate_id(), slug=generate_id())
        app.backchannel_providers.add(provider)
        response = self.client.get(
            reverse("authentik_providers_ssf:jwks", kwargs={"application_slug": app.slug})
        )
        body = json.loads(response.content.decode())
        self.assertEqual(len(body["keys"]), 1)
        PyJWKSet.from_dict(body)
168  authentik/enterprise/providers/ssf/tests/test_signals.py  Normal file
@@ -0,0 +1,168 @@
from uuid import uuid4

from django.urls import reverse
from rest_framework.test import APITestCase

from authentik.core.models import Application, Group
from authentik.core.tests.utils import (
    create_test_cert,
    create_test_user,
)
from authentik.enterprise.providers.ssf.models import (
    EventTypes,
    SSFEventStatus,
    SSFProvider,
    Stream,
    StreamEvent,
)
from authentik.lib.generators import generate_id
from authentik.policies.models import PolicyBinding
from authentik.stages.authenticator_webauthn.models import WebAuthnDevice


class TestSignals(APITestCase):
    """Test individual SSF Signals"""

    def setUp(self):
        self.application = Application.objects.create(name=generate_id(), slug=generate_id())
        self.provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
            backchannel_application=self.application,
        )
        res = self.client.post(
            reverse(
                "authentik_providers_ssf:stream",
                kwargs={"application_slug": self.application.slug},
            ),
            data={
                "iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
                "aud": ["https://app.authentik.company"],
                "delivery": {
                    "method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
                    "endpoint_url": "https://app.authentik.company",
                },
                "events_requested": [
                    "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                    "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                ],
                "format": "iss_sub",
            },
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        self.assertEqual(res.status_code, 201, res.content)

    def test_signal_logout(self):
        """Test user logout"""
        user = create_test_user()
        self.client.force_login(user)
        self.client.logout()

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        event_payload = event.payload["events"][
            "https://schemas.openid.net/secevent/caep/event-type/session-revoked"
        ]
        self.assertEqual(event_payload["initiating_entity"], "user")
        self.assertEqual(event.payload["sub_id"]["format"], "complex")
        self.assertEqual(event.payload["sub_id"]["session"]["format"], "opaque")
        self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
        self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)

    def test_signal_password_change(self):
        """Test user password change"""
        user = create_test_user()
        self.client.force_login(user)
        user.set_password(generate_id())
        user.save()

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        event_payload = event.payload["events"][
            "https://schemas.openid.net/secevent/caep/event-type/credential-change"
        ]
        self.assertEqual(event_payload["change_type"], "update")
        self.assertEqual(event_payload["credential_type"], "password")
        self.assertEqual(event.payload["sub_id"]["format"], "complex")
        self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
        self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)

    def test_signal_authenticator_added(self):
        """Test authenticator creation signal"""
        user = create_test_user()
        self.client.force_login(user)
        dev = WebAuthnDevice.objects.create(
            user=user,
            name=generate_id(),
            credential_id=generate_id(),
            public_key=generate_id(),
            aaguid=str(uuid4()),
        )

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(stream=stream).exclude().first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        event_payload = event.payload["events"][
            "https://schemas.openid.net/secevent/caep/event-type/credential-change"
        ]
        self.assertEqual(event_payload["change_type"], "create")
        self.assertEqual(event_payload["fido2_aaguid"], dev.aaguid)
        self.assertEqual(event_payload["friendly_name"], dev.name)
        self.assertEqual(event_payload["credential_type"], "fido-u2f")
        self.assertEqual(event.payload["sub_id"]["format"], "complex")
        self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
        self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)

    def test_signal_authenticator_deleted(self):
        """Test authenticator deletion signal"""
        user = create_test_user()
        self.client.force_login(user)
        dev = WebAuthnDevice.objects.create(
            user=user,
            name=generate_id(),
            credential_id=generate_id(),
            public_key=generate_id(),
            aaguid=str(uuid4()),
        )
        dev.delete()

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(stream=stream).exclude().first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        event_payload = event.payload["events"][
            "https://schemas.openid.net/secevent/caep/event-type/credential-change"
        ]
        self.assertEqual(event_payload["change_type"], "delete")
        self.assertEqual(event_payload["fido2_aaguid"], dev.aaguid)
        self.assertEqual(event_payload["friendly_name"], dev.name)
        self.assertEqual(event_payload["credential_type"], "fido-u2f")
        self.assertEqual(event.payload["sub_id"]["format"], "complex")
        self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
        self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)

    def test_signal_policy_ignore(self):
        """Test event not being created for user that doesn't have access to the application"""
        PolicyBinding.objects.create(
            target=self.application, group=Group.objects.create(name=generate_id()), order=0
        )
        user = create_test_user()
        self.client.force_login(user)
        user.set_password(generate_id())
        user.save()

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(
            stream=stream, type=EventTypes.CAEP_CREDENTIAL_CHANGE
        ).first()
        self.assertIsNone(event)
154  authentik/enterprise/providers/ssf/tests/test_stream.py  Normal file
@@ -0,0 +1,154 @@
import json
from dataclasses import asdict

from django.urls import reverse
from django.utils import timezone
from rest_framework.test import APITestCase

from authentik.core.models import Application
from authentik.core.tests.utils import create_test_admin_user, create_test_cert, create_test_flow
from authentik.enterprise.providers.ssf.models import (
    SSFEventStatus,
    SSFProvider,
    Stream,
    StreamEvent,
)
from authentik.lib.generators import generate_id
from authentik.providers.oauth2.id_token import IDToken
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider


class TestStream(APITestCase):
    def setUp(self):
        self.application = Application.objects.create(name=generate_id(), slug=generate_id())
        self.provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
            backchannel_application=self.application,
        )

    def test_stream_add_token(self):
        """test stream add (token auth)"""
        res = self.client.post(
            reverse(
                "authentik_providers_ssf:stream",
                kwargs={"application_slug": self.application.slug},
            ),
            data={
                "iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
                "aud": ["https://app.authentik.company"],
                "delivery": {
                    "method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
                    "endpoint_url": "https://app.authentik.company",
                },
                "events_requested": [
                    "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                    "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                ],
                "format": "iss_sub",
            },
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        self.assertEqual(res.status_code, 201)
        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        self.assertEqual(
            event.payload["events"],
            {"https://schemas.openid.net/secevent/ssf/event-type/verification": {"state": None}},
        )

    def test_stream_add_poll(self):
        """test stream add - poll method"""
        res = self.client.post(
            reverse(
                "authentik_providers_ssf:stream",
                kwargs={"application_slug": self.application.slug},
            ),
            data={
                "iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
                "aud": ["https://app.authentik.company"],
                "delivery": {
                    "method": "https://schemas.openid.net/secevent/risc/delivery-method/poll",
                },
                "events_requested": [
                    "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                    "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                ],
                "format": "iss_sub",
            },
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        self.assertEqual(res.status_code, 400)
        self.assertJSONEqual(
            res.content,
            {"delivery": {"method": ["Polling for SSF events is not currently supported."]}},
        )

    def test_stream_add_oidc(self):
        """test stream add (oidc auth)"""
        provider = OAuth2Provider.objects.create(
            name=generate_id(),
            authorization_flow=create_test_flow(),
        )
        self.application.provider = provider
        self.application.save()
        user = create_test_admin_user()
        token = AccessToken.objects.create(
            provider=provider,
            user=user,
            token=generate_id(),
            auth_time=timezone.now(),
            _scope="openid user profile",
            _id_token=json.dumps(
                asdict(
                    IDToken("foo", "bar"),
                )
            ),
        )

        res = self.client.post(
            reverse(
                "authentik_providers_ssf:stream",
                kwargs={"application_slug": self.application.slug},
            ),
            data={
                "iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
                "aud": ["https://app.authentik.company"],
                "delivery": {
                    "method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
                    "endpoint_url": "https://app.authentik.company",
                },
                "events_requested": [
                    "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                    "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                ],
                "format": "iss_sub",
            },
            HTTP_AUTHORIZATION=f"Bearer {token.token}",
        )
        self.assertEqual(res.status_code, 201)
        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        self.assertEqual(
            event.payload["events"],
            {"https://schemas.openid.net/secevent/ssf/event-type/verification": {"state": None}},
        )

    def test_stream_delete(self):
        """delete stream"""
        stream = Stream.objects.create(provider=self.provider)
        res = self.client.delete(
            reverse(
                "authentik_providers_ssf:stream",
                kwargs={"application_slug": self.application.slug},
            ),
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        self.assertEqual(res.status_code, 204)
        self.assertFalse(Stream.objects.filter(pk=stream.pk).exists())
32  authentik/enterprise/providers/ssf/urls.py  Normal file
@@ -0,0 +1,32 @@
"""SSF provider URLs"""

from django.urls import path

from authentik.enterprise.providers.ssf.api.providers import SSFProviderViewSet
from authentik.enterprise.providers.ssf.api.streams import SSFStreamViewSet
from authentik.enterprise.providers.ssf.views.configuration import ConfigurationView
from authentik.enterprise.providers.ssf.views.jwks import JWKSview
from authentik.enterprise.providers.ssf.views.stream import StreamView

urlpatterns = [
    path(
        "application/ssf/<slug:application_slug>/ssf-jwks/",
        JWKSview.as_view(),
        name="jwks",
    ),
    path(
        ".well-known/ssf-configuration/<slug:application_slug>",
        ConfigurationView.as_view(),
        name="configuration",
    ),
    path(
        "application/ssf/<slug:application_slug>/stream/",
        StreamView.as_view(),
        name="stream",
    ),
]

api_urlpatterns = [
    ("providers/ssf", SSFProviderViewSet),
    ("ssf/streams", SSFStreamViewSet),
]
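A sketch of discovering the endpoints above (host and application slug are assumptions). Because the app config sets `mountpoint = ""`, the configuration document is served from the well-known path at the root of the instance.

```python
import requests

conf = requests.get(
    "https://authentik.company/.well-known/ssf-configuration/my-app"
).json()
print(conf["jwks_uri"])                # .../application/ssf/my-app/ssf-jwks/
print(conf["configuration_endpoint"])  # .../application/ssf/my-app/stream/
```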
66  authentik/enterprise/providers/ssf/views/auth.py  Normal file
@@ -0,0 +1,66 @@
"""SSF Token auth"""

from typing import TYPE_CHECKING, Any

from django.db.models import Q
from rest_framework.authentication import BaseAuthentication, get_authorization_header
from rest_framework.request import Request

from authentik.core.models import Token, TokenIntents, User
from authentik.enterprise.providers.ssf.models import SSFProvider
from authentik.providers.oauth2.models import AccessToken

if TYPE_CHECKING:
    from authentik.enterprise.providers.ssf.views.base import SSFView


class SSFTokenAuth(BaseAuthentication):
    """SSF Token auth"""

    view: "SSFView"

    def __init__(self, view: "SSFView") -> None:
        super().__init__()
        self.view = view

    def check_token(self, key: str) -> Token | None:
        """Check that a token exists, is not expired, and is assigned to the correct provider"""
        token = Token.filter_not_expired(key=key, intent=TokenIntents.INTENT_API).first()
        if not token:
            return None
        provider: SSFProvider = token.ssfprovider_set.first()
        if not provider:
            return None
        self.view.application = provider.backchannel_application
        self.view.provider = provider
        return token

    def check_jwt(self, jwt: str) -> AccessToken | None:
        """Check JWT-based authentication, this supports tokens issued either by providers
        configured directly in the provider, and by providers assigned to the application
        that the SSF provider is a backchannel provider of."""
        token = AccessToken.filter_not_expired(token=jwt, revoked=False).first()
        if not token:
            return None
        ssf_provider = SSFProvider.objects.filter(
            Q(oidc_auth_providers__in=[token.provider])
            | Q(backchannel_application__provider__in=[token.provider]),
        ).first()
        if not ssf_provider:
            return None
        self.view.application = ssf_provider.backchannel_application
        self.view.provider = ssf_provider
        return token

    def authenticate(self, request: Request) -> tuple[User, Any] | None:
        auth = get_authorization_header(request).decode()
        auth_type, _, key = auth.partition(" ")
        if auth_type != "Bearer":
            return None
        token = self.check_token(key)
        if token:
            return (token.user, token)
        jwt_token = self.check_jwt(key)
        if jwt_token:
            # Return the access token itself; `token` is None on this path
            return (jwt_token.user, jwt_token)
        return None
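A sketch of what the authenticator above accepts (host, slug, and the `provider` object are assumptions). Both credential types travel in the same `Authorization` header; whichever branch matches determines whether `request.auth` ends up being a core `Token` (service-account key) or an OAuth2 `AccessToken` (JWT).

```python
import requests

# Either the provider's service-account token key or an OAuth2 access token
headers = {"Authorization": f"Bearer {provider.token.key}"}
requests.delete(
    "https://authentik.company/application/ssf/my-app/stream/",  # assumed host/slug
    headers=headers,
)
```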
23  authentik/enterprise/providers/ssf/views/base.py  Normal file
@@ -0,0 +1,23 @@
from django.http import HttpRequest
from rest_framework.permissions import IsAuthenticated
from rest_framework.views import APIView
from structlog.stdlib import BoundLogger, get_logger

from authentik.core.models import Application
from authentik.enterprise.providers.ssf.models import SSFProvider
from authentik.enterprise.providers.ssf.views.auth import SSFTokenAuth


class SSFView(APIView):
    application: Application
    provider: SSFProvider
    logger: BoundLogger

    permission_classes = [IsAuthenticated]

    def setup(self, request: HttpRequest, *args, **kwargs) -> None:
        self.logger = get_logger().bind()
        super().setup(request, *args, **kwargs)

    def get_authenticators(self):
        return [SSFTokenAuth(self)]
55 authentik/enterprise/providers/ssf/views/configuration.py Normal file
@ -0,0 +1,55 @@
from django.http import Http404, HttpRequest, HttpResponse, JsonResponse
from django.shortcuts import get_object_or_404
from django.urls import reverse
from rest_framework.permissions import AllowAny

from authentik.core.models import Application
from authentik.enterprise.providers.ssf.models import DeliveryMethods, SSFProvider
from authentik.enterprise.providers.ssf.views.base import SSFView


class ConfigurationView(SSFView):
    """SSF configuration endpoint"""

    permission_classes = [AllowAny]

    def get_authenticators(self):
        return []

    def get(self, request: HttpRequest, application_slug: str, *args, **kwargs) -> HttpResponse:
        application = get_object_or_404(Application, slug=application_slug)
        provider = application.backchannel_provider_for(SSFProvider)
        if not provider:
            raise Http404
        data = {
            "spec_version": "1_0-ID2",
            "issuer": self.request.build_absolute_uri(
                reverse(
                    "authentik_providers_ssf:configuration",
                    kwargs={
                        "application_slug": application.slug,
                    },
                )
            ),
            "jwks_uri": self.request.build_absolute_uri(
                reverse(
                    "authentik_providers_ssf:jwks",
                    kwargs={
                        "application_slug": application.slug,
                    },
                )
            ),
            "configuration_endpoint": self.request.build_absolute_uri(
                reverse(
                    "authentik_providers_ssf:stream",
                    kwargs={
                        "application_slug": application.slug,
                    },
                )
            ),
            "delivery_methods_supported": [
                DeliveryMethods.RISC_PUSH,
            ],
            "authorization_schemes": [{"spec_urn": "urn:ietf:rfc:6749"}],
        }
        return JsonResponse(data)
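Put together, a GET against this endpoint yields a transmitter-metadata document. A sketch of the shape, assuming a hypothetical instance at https://auth.example.com with application slug my-app (the URL paths and the serialized enum value are assumptions, not taken from this diff):

```python
# Shape of the SSF configuration response; all URLs below are hypothetical
ssf_configuration = {
    "spec_version": "1_0-ID2",
    "issuer": "https://auth.example.com/ssf/my-app/",
    "jwks_uri": "https://auth.example.com/ssf/my-app/jwks/",
    "configuration_endpoint": "https://auth.example.com/ssf/my-app/stream/",
    "delivery_methods_supported": [
        # assumed serialized value of DeliveryMethods.RISC_PUSH
        "https://schemas.openid.net/secevent/risc/delivery-method/push",
    ],
    "authorization_schemes": [{"spec_urn": "urn:ietf:rfc:6749"}],
}
```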
31 authentik/enterprise/providers/ssf/views/jwks.py Normal file
@ -0,0 +1,31 @@
from django.http import Http404, HttpRequest, HttpResponse, JsonResponse
from django.shortcuts import get_object_or_404
from django.views import View

from authentik.core.models import Application
from authentik.crypto.models import CertificateKeyPair
from authentik.enterprise.providers.ssf.models import SSFProvider
from authentik.providers.oauth2.views.jwks import JWKSView as OAuthJWKSView


class JWKSview(View):
    """SSF JWKS endpoint, similar to the OAuth2 provider's endpoint"""

    def get(self, request: HttpRequest, application_slug: str) -> HttpResponse:
        """Show JWK Key data for Provider"""
        application = get_object_or_404(Application, slug=application_slug)
        provider = application.backchannel_provider_for(SSFProvider)
        if not provider:
            raise Http404
        signing_key: CertificateKeyPair = provider.signing_key

        response_data = {}

        jwk = OAuthJWKSView.get_jwk_for_key(signing_key, "sig")
        if jwk:
            response_data["keys"] = [jwk]

        response = JsonResponse(response_data)
        response["Access-Control-Allow-Origin"] = "*"

        return response
130 authentik/enterprise/providers/ssf/views/stream.py Normal file
@ -0,0 +1,130 @@
from django.http import HttpRequest
from django.urls import reverse
from rest_framework.exceptions import PermissionDenied, ValidationError
from rest_framework.fields import CharField, ChoiceField, ListField, SerializerMethodField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ModelSerializer
from structlog.stdlib import get_logger

from authentik.core.api.utils import PassiveSerializer
from authentik.enterprise.providers.ssf.models import (
    DeliveryMethods,
    EventTypes,
    SSFProvider,
    Stream,
)
from authentik.enterprise.providers.ssf.tasks import send_ssf_event
from authentik.enterprise.providers.ssf.views.base import SSFView

LOGGER = get_logger()


class StreamDeliverySerializer(PassiveSerializer):
    method = ChoiceField(choices=[(x.value, x.value) for x in DeliveryMethods])
    endpoint_url = CharField(required=False)

    def validate_method(self, method: DeliveryMethods):
        """Currently only push is supported"""
        if method == DeliveryMethods.RISC_POLL:
            raise ValidationError("Polling for SSF events is not currently supported.")
        return method

    def validate(self, attrs: dict) -> dict:
        if attrs["method"] == DeliveryMethods.RISC_PUSH:
            if not attrs.get("endpoint_url"):
                raise ValidationError("Endpoint URL is required when using push.")
        return attrs


class StreamSerializer(ModelSerializer):
    delivery = StreamDeliverySerializer()
    events_requested = ListField(
        child=ChoiceField(choices=[(x.value, x.value) for x in EventTypes])
    )
    format = CharField()
    aud = ListField(child=CharField())

    def create(self, validated_data):
        provider: SSFProvider = validated_data["provider"]
        request: HttpRequest = self.context["request"]
        iss = request.build_absolute_uri(
            reverse(
                "authentik_providers_ssf:configuration",
                kwargs={
                    "application_slug": provider.backchannel_application.slug,
                },
            )
        )
        # Ensure that streams always get SET verification events sent to them
        validated_data["events_requested"].append(EventTypes.SET_VERIFICATION)
        return super().create(
            {
                "delivery_method": validated_data["delivery"]["method"],
                "endpoint_url": validated_data["delivery"].get("endpoint_url"),
                "format": validated_data["format"],
                "provider": validated_data["provider"],
                "events_requested": validated_data["events_requested"],
                "aud": validated_data["aud"],
                "iss": iss,
            }
        )

    class Meta:
        model = Stream
        fields = [
            "delivery",
            "events_requested",
            "format",
            "aud",
        ]


class StreamResponseSerializer(PassiveSerializer):
    stream_id = CharField(source="pk")
    iss = CharField()
    aud = ListField(child=CharField())
    delivery = SerializerMethodField()
    format = CharField()

    events_requested = ListField(child=CharField())
    events_supported = SerializerMethodField()
    events_delivered = ListField(child=CharField(), source="events_requested")

    def get_delivery(self, instance: Stream) -> StreamDeliverySerializer:
        return {
            "method": instance.delivery_method,
            "endpoint_url": instance.endpoint_url,
        }

    def get_events_supported(self, instance: Stream) -> list[str]:
        return [x.value for x in EventTypes]


class StreamView(SSFView):
    def post(self, request: Request, *args, **kwargs) -> Response:
        stream = StreamSerializer(data=request.data, context={"request": request})
        stream.is_valid(raise_exception=True)
        if not request.user.has_perm("authentik_providers_ssf.add_stream", self.provider):
            raise PermissionDenied(
                "User does not have permission to create stream for this provider."
            )
        instance: Stream = stream.save(provider=self.provider)
        send_ssf_event(
            EventTypes.SET_VERIFICATION,
            {
                "state": None,
            },
            stream_filter={"pk": instance.uuid},
            sub_id={"format": "opaque", "id": str(instance.uuid)},
        )
        response = StreamResponseSerializer(instance=instance, context={"request": request}).data
        return Response(response, status=201)

    def delete(self, request: Request, *args, **kwargs) -> Response:
        streams = Stream.objects.filter(provider=self.provider)
        # Technically this parameter is required by the spec...
        if "stream_id" in request.query_params:
            streams = streams.filter(stream_id=request.query_params["stream_id"])
        streams.delete()
        return Response(status=204)
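For context, a stream-creation request accepted by `StreamSerializer` would look roughly like the payload below; the receiver URL and the exact delivery-method/event-type URIs are illustrative assumptions:

```python
# Hypothetical payload for a POST to the stream endpoint
create_stream_request = {
    "delivery": {
        # push is the only delivery method the serializer accepts today
        "method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
        "endpoint_url": "https://receiver.example.com/ssf/events",
    },
    "events_requested": [
        # any EventTypes value; this CAEP event URI is an example
        "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
    ],
    "format": "opaque",
    "aud": ["https://receiver.example.com"],
}
# On success the view answers 201 with the StreamResponseSerializer payload,
# and a SET verification event is pushed to endpoint_url.
```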
@ -16,7 +16,7 @@ TENANT_APPS = [
    "authentik.enterprise.audit",
    "authentik.enterprise.providers.google_workspace",
    "authentik.enterprise.providers.microsoft_entra",
    "authentik.enterprise.providers.rac",
    "authentik.enterprise.providers.ssf",
    "authentik.enterprise.stages.authenticator_endpoint_gdtc",
    "authentik.enterprise.stages.source",
]
@ -9,13 +9,16 @@ from django.utils.timezone import now
from guardian.shortcuts import get_anonymous_user

from authentik.core.models import Source, User
from authentik.core.sources.flow_manager import SESSION_KEY_OVERRIDE_FLOW_TOKEN
from authentik.core.sources.flow_manager import (
    SESSION_KEY_OVERRIDE_FLOW_TOKEN,
    SESSION_KEY_SOURCE_FLOW_STAGES,
)
from authentik.core.types import UILoginButton
from authentik.enterprise.stages.source.models import SourceStage
from authentik.flows.challenge import Challenge, ChallengeResponse
from authentik.flows.models import FlowToken
from authentik.flows.models import FlowToken, in_memory_stage
from authentik.flows.planner import PLAN_CONTEXT_IS_RESTORED
from authentik.flows.stage import ChallengeStageView
from authentik.flows.stage import ChallengeStageView, StageView
from authentik.lib.utils.time import timedelta_from_string

PLAN_CONTEXT_RESUME_TOKEN = "resume_token"  # nosec
@ -49,6 +52,7 @@ class SourceStageView(ChallengeStageView):
    def get_challenge(self, *args, **kwargs) -> Challenge:
        resume_token = self.create_flow_token()
        self.request.session[SESSION_KEY_OVERRIDE_FLOW_TOKEN] = resume_token
        self.request.session[SESSION_KEY_SOURCE_FLOW_STAGES] = [in_memory_stage(SourceStageFinal)]
        return self.login_button.challenge

    def create_flow_token(self) -> FlowToken:
@ -77,3 +81,19 @@ class SourceStageView(ChallengeStageView):

    def challenge_valid(self, response: ChallengeResponse) -> HttpResponse:
        return self.executor.stage_ok()


class SourceStageFinal(StageView):
    """Dynamic stage injected in the source flow manager. This is injected in the
    flow the source flow manager picks (authentication or enrollment), and will run at the end.
    This stage uses the override flow token to resume execution of the initial flow the
    source stage is bound to."""

    def dispatch(self, request, *args, **kwargs) -> HttpResponse:
        token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN)
        self._logger.info("Replacing source flow with overridden flow", flow=token.flow.slug)
        plan = token.plan
        plan.context[PLAN_CONTEXT_IS_RESTORED] = token
        response = plan.to_redirect(self.request, token.flow)
        token.delete()
        return response
@ -0,0 +1,41 @@
# Generated by Django 5.0.10 on 2025-01-13 18:05

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_events", "0007_event_authentik_e_action_9a9dd9_idx_and_more"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="event",
            index=models.Index(fields=["expires"], name="authentik_e_expires_8c73a8_idx"),
        ),
        migrations.AddIndex(
            model_name="event",
            index=models.Index(fields=["expiring"], name="authentik_e_expirin_b5cb5e_idx"),
        ),
        migrations.AddIndex(
            model_name="event",
            index=models.Index(
                fields=["expiring", "expires"], name="authentik_e_expirin_e37180_idx"
            ),
        ),
        migrations.AddIndex(
            model_name="systemtask",
            index=models.Index(fields=["expires"], name="authentik_e_expires_4d3985_idx"),
        ),
        migrations.AddIndex(
            model_name="systemtask",
            index=models.Index(fields=["expiring"], name="authentik_e_expirin_81d649_idx"),
        ),
        migrations.AddIndex(
            model_name="systemtask",
            index=models.Index(
                fields=["expiring", "expires"], name="authentik_e_expirin_eb3598_idx"
            ),
        ),
    ]
@ -306,7 +306,7 @@ class Event(SerializerModel, ExpiringModel):
    class Meta:
        verbose_name = _("Event")
        verbose_name_plural = _("Events")
        indexes = [
        indexes = ExpiringModel.Meta.indexes + [
            models.Index(fields=["action"]),
            models.Index(fields=["user"]),
            models.Index(fields=["app"]),
@ -694,3 +694,4 @@ class SystemTask(SerializerModel, ExpiringModel):
        permissions = [("run_task", _("Run task"))]
        verbose_name = _("System Task")
        verbose_name_plural = _("System Tasks")
        indexes = ExpiringModel.Meta.indexes
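The `ExpiringModel.Meta.indexes + [...]` pattern extends the base class's index list instead of replacing it. Judging from the AddIndex migrations above, the inherited list covers `expires`, `expiring`, and the combined pair. A minimal sketch of the pattern, usable within a configured Django app (the base Meta body is inferred from those migrations, not shown in this diff):

```python
from django.db import models

class ExpiringModel(models.Model):
    expires = models.DateTimeField(null=True)
    expiring = models.BooleanField(default=True)

    class Meta:
        abstract = True
        # inferred from the AddIndex migrations above
        indexes = [
            models.Index(fields=["expires"]),
            models.Index(fields=["expiring"]),
            models.Index(fields=["expiring", "expires"]),
        ]


class Event(ExpiringModel):
    action = models.TextField()

    class Meta:
        # extend rather than replace the inherited index list
        indexes = ExpiringModel.Meta.indexes + [
            models.Index(fields=["action"]),
        ]
```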
@ -106,9 +106,9 @@ def on_invitation_used(sender, request: HttpRequest, invitation: Invitation, **_


@receiver(password_changed)
def on_password_changed(sender, user: User, password: str, **_):
def on_password_changed(sender, user: User, password: str, request: HttpRequest | None, **_):
    """Log password change"""
    Event.new(EventAction.PASSWORD_SET).from_http(None, user=user)
    Event.new(EventAction.PASSWORD_SET).from_http(request, user=user)


@receiver(post_save, sender=Event)
@ -53,12 +53,13 @@ class SystemTask(TenantTask):
        if not isinstance(msg, LogEvent):
            self._messages[idx] = LogEvent(msg, logger=self.__name__, log_level="info")

    def set_error(self, exception: Exception):
    def set_error(self, exception: Exception, *messages: LogEvent):
        """Set result to error and save exception"""
        self._status = TaskStatus.ERROR
        self._messages = [
            LogEvent(exception_to_string(exception), logger=self.__name__, log_level="error")
        ]
        self._messages = list(messages)
        self._messages.extend(
            [LogEvent(exception_to_string(exception), logger=self.__name__, log_level="error")]
        )

    def before_start(self, task_id, args, kwargs):
        self._start_precise = perf_counter()
@ -3,6 +3,7 @@

from dataclasses import dataclass
from typing import TYPE_CHECKING

from django.contrib.messages import INFO, add_message
from django.http.request import HttpRequest
from structlog.stdlib import get_logger

@ -61,6 +62,8 @@ class ReevaluateMarker(StageMarker):
        engine.request.context.update(plan.context)
        engine.build()
        result = engine.result
        for message in result.messages:
            add_message(http_request, INFO, message)
        if result.passing:
            return binding
        LOGGER.warning(
@ -109,6 +109,8 @@ class FlowPlan:

    def pop(self):
        """Pop next pending stage from bottom of list"""
        if not self.markers and not self.bindings:
            return
        self.markers.pop(0)
        self.bindings.pop(0)

@ -156,12 +158,25 @@ class FlowPlan:
            final_stage: type[StageView] = self.bindings[-1].stage.view
            temp_exec = FlowExecutorView(flow=flow, request=request, plan=self)
            temp_exec.current_stage = self.bindings[-1].stage
            temp_exec.current_stage_view = final_stage
            temp_exec.setup(request, flow.slug)
            stage = final_stage(request=request, executor=temp_exec)
            return stage.dispatch(request)
            response = stage.dispatch(request)
            # Ensure we clean the flow state we have in the session before we redirect away
            temp_exec.stage_ok()
            return response

        get_qs = request.GET.copy()
        if request.user.is_authenticated and (
            # Object-scoped permission or global permission
            request.user.has_perm("authentik_flows.inspect_flow", flow)
            or request.user.has_perm("authentik_flows.inspect_flow")
        ):
            get_qs["inspector"] = "available"

        return redirect_with_qs(
            "authentik_core:if-flow",
            request.GET,
            get_qs,
            flow_slug=flow.slug,
        )
@ -103,7 +103,7 @@ class FlowExecutorView(APIView):

    permission_classes = [AllowAny]

    flow: Flow
    flow: Flow = None

    plan: FlowPlan | None = None
    current_binding: FlowStageBinding | None = None
@ -114,7 +114,8 @@ class FlowExecutorView(APIView):

    def setup(self, request: HttpRequest, flow_slug: str):
        super().setup(request, flow_slug=flow_slug)
        self.flow = get_object_or_404(Flow.objects.select_related(), slug=flow_slug)
        if not self.flow:
            self.flow = get_object_or_404(Flow.objects.select_related(), slug=flow_slug)
        self._logger = get_logger().bind(flow_slug=flow_slug)
        set_tag("authentik.flow", self.flow.slug)
@ -78,7 +78,9 @@ class FlowInspectorView(APIView):
        self.flow = get_object_or_404(Flow.objects.select_related(), slug=flow_slug)
        if settings.DEBUG:
            return
        if request.user.has_perm("authentik_flow.inspect_flow", self.flow):
        if request.user.has_perm(
            "authentik_flows.inspect_flow", self.flow
        ) or request.user.has_perm("authentik_flows.inspect_flow"):
            return
        raise Http404

@ -94,6 +96,9 @@ class FlowInspectorView(APIView):
        """Get current flow state and record it"""
        plans = []
        for plan in request.session.get(SESSION_KEY_HISTORY, []):
            plan: FlowPlan
            if plan.flow_pk != self.flow.pk.hex:
                continue
            plan_serializer = FlowInspectorPlanSerializer(
                instance=plan, context={"request": request}
            )
@ -283,12 +283,15 @@ class ConfigLoader:
    def get_optional_int(self, path: str, default=None) -> int | None:
        """Wrapper for get that converts value into int or None if set"""
        value = self.get(path, default)

        if value is UNSET:
            return default
        try:
            return int(value)
        except (ValueError, TypeError) as exc:
            if value is None or (isinstance(value, str) and value.lower() == "null"):
                return None
            return default
            if value is UNSET:
                return default
            self.log("warning", "Failed to parse config as int", path=path, exc=str(exc))
            return default
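A standalone sketch of the conversion rules implemented above, with a stand-in `UNSET` sentinel and a made-up helper name (both assumptions; the real sentinel lives in authentik.lib.config):

```python
UNSET = object()  # stand-in sentinel for "key not present"

def to_optional_int(value, default=None):
    """Mirrors get_optional_int's conversion rules (helper name is made up)"""
    if value is UNSET:
        return default
    try:
        return int(value)
    except (ValueError, TypeError):
        # explicit null-ish values map to None, anything else to the default
        if value is None or (isinstance(value, str) and value.lower() == "null"):
            return None
        return default

assert to_optional_int("42") == 42
assert to_optional_int("null") is None
assert to_optional_int(None) is None
assert to_optional_int("oops", default=8) == 8
assert to_optional_int(UNSET, default=8) == 8
```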
@ -421,4 +424,4 @@ if __name__ == "__main__":
    if len(argv) < 2:  # noqa: PLR2004
        print(dumps(CONFIG.raw, indent=4, cls=AttrEncoder))
    else:
        print(CONFIG.get(argv[1]))
        print(CONFIG.get(argv[-1]))
26 authentik/lib/debug.py Normal file
@ -0,0 +1,26 @@
from structlog.stdlib import get_logger

from authentik.lib.config import CONFIG

LOGGER = get_logger()


def start_debug_server(**kwargs) -> bool:
    """Attempt to start a debugpy server in the current process.
    Returns true if the server was started successfully, otherwise false"""
    if not CONFIG.get_bool("debug") and not CONFIG.get_bool("debugger"):
        return False
    try:
        import debugpy
    except ImportError:
        LOGGER.warning(
            "Failed to import debugpy. debugpy is not included "
            "in the default release dependencies and must be installed manually"
        )
        return False

    listen: str = CONFIG.get("listen.listen_debug_py", "127.0.0.1:9901")
    host, _, port = listen.rpartition(":")
    debugpy.listen((host, int(port)), **kwargs)  # nosec
    LOGGER.debug("Starting debug server", host=host, port=port)
    return True
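A possible call site, assuming debugpy is installed and either the `debug` or `debugger` flag is enabled in the config:

```python
# Hypothetical entrypoint wiring for the helper above
from authentik.lib.debug import start_debug_server

if start_debug_server():
    # debugpy is now listening on listen.listen_debug_py (127.0.0.1:9901 in code,
    # 0.0.0.0:9901 in the shipped config); attach e.g. from VS Code
    print("debugpy listening, waiting for a client to attach")
```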
@ -8,6 +8,7 @@ postgresql:
    password: "env://POSTGRES_PASSWORD"
    test:
        name: test_authentik
        default_schema: public
    read_replicas: {}
    # For example
    # 0:
@ -21,6 +22,7 @@ listen:
    listen_radius: 0.0.0.0:1812
    listen_metrics: 0.0.0.0:9300
    listen_debug: 0.0.0.0:9900
    listen_debug_py: 0.0.0.0:9901
    trusted_proxy_cidrs:
        - 127.0.0.0/8
        - 10.0.0.0/8
@ -57,11 +59,13 @@ cache:
    # transport_options: ""

debug: false
remote_debug: false
debugger: false

log_level: info

session_storage: cache
sessions:
    unauthenticated_age: days=1

error_reporting:
    enabled: false
@ -22,9 +22,9 @@ class OutgoingSyncProvider(Model):
    class Meta:
        abstract = True

    def client_for_model[
        T: User | Group
    ](self, model: type[T]) -> BaseOutgoingSyncClient[T, Any, Any, Self]:
    def client_for_model[T: User | Group](
        self, model: type[T]
    ) -> BaseOutgoingSyncClient[T, Any, Any, Self]:
        raise NotImplementedError

    def get_object_qs[T: User | Group](self, type: type[T]) -> QuerySet[T]:
54 authentik/lib/utils/email.py Normal file
@ -0,0 +1,54 @@
"""Email utility functions"""


def mask_email(email: str | None) -> str | None:
    """Mask email address for privacy

    Args:
        email: Email address to mask
    Returns:
        Masked email address or None if input is None
    Example:
        mask_email("myname@company.org")
        'm*****@c******.org'
    """
    if not email:
        return None

    # Basic email format validation
    if email.count("@") != 1:
        raise ValueError("Invalid email format: Must contain exactly one '@' symbol")

    local, domain = email.split("@")
    if not local or not domain:
        raise ValueError("Invalid email format: Local and domain parts cannot be empty")

    domain_parts = domain.split(".")
    if len(domain_parts) < 2:  # noqa: PLR2004
        raise ValueError("Invalid email format: Domain must contain at least one dot")

    limit = 2

    # Mask local part (keep first char)
    if len(local) <= limit:
        masked_local = "*" * len(local)
    else:
        masked_local = local[0] + "*" * (len(local) - 1)

    # Mask each domain part except the last one (TLD)
    masked_domain_parts = []
    for part in domain_parts[:-1]:  # Process all parts except TLD
        if not part:  # Check for empty parts (consecutive dots)
            raise ValueError("Invalid email format: Domain parts cannot be empty")
        if len(part) <= limit:
            masked_part = "*" * len(part)
        else:
            masked_part = part[0] + "*" * (len(part) - 1)
        masked_domain_parts.append(masked_part)

    # Add TLD unchanged
    if not domain_parts[-1]:  # Check if TLD is empty
        raise ValueError("Invalid email format: TLD cannot be empty")
    masked_domain_parts.append(domain_parts[-1])

    return f"{masked_local}@{'.'.join(masked_domain_parts)}"
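A few worked inputs for the masking rules above; the outputs follow directly from the keep-first-character logic and are runnable as-is:

```python
# Assumes mask_email is importable from the module added above
from authentik.lib.utils.email import mask_email

assert mask_email("myname@company.org") == "m*****@c******.org"
assert mask_email("ab@cd.org") == "**@**.org"  # parts of length <= 2 are fully masked
assert mask_email("user@mail.example.com") == "u***@m***.e******.com"
assert mask_email(None) is None
```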
@ -42,6 +42,8 @@ class DebugSession(Session):

def get_http_session() -> Session:
    """Get a requests session with common headers"""
    session = DebugSession() if CONFIG.get_bool("debug") else Session()
    session = Session()
    if CONFIG.get_bool("debug") or CONFIG.get("log_level") == "trace":
        session = DebugSession()
    session.headers["User-Agent"] = authentik_user_agent()
    return session
@ -19,7 +19,6 @@ from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer
from authentik.core.models import Provider
from authentik.enterprise.license import LicenseKey
from authentik.enterprise.providers.rac.models import RACProvider
from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator
from authentik.outposts.api.service_connections import ServiceConnectionSerializer
from authentik.outposts.apps import MANAGED_OUTPOST, MANAGED_OUTPOST_NAME
@ -31,6 +30,7 @@ from authentik.outposts.models import (
)
from authentik.providers.ldap.models import LDAPProvider
from authentik.providers.proxy.models import ProxyProvider
from authentik.providers.rac.models import RACProvider
from authentik.providers.radius.models import RadiusProvider
@ -18,8 +18,6 @@ from kubernetes.config.kube_config import KUBE_CONFIG_DEFAULT_LOCATION
from structlog.stdlib import get_logger
from yaml import safe_load

from authentik.enterprise.providers.rac.controllers.docker import RACDockerController
from authentik.enterprise.providers.rac.controllers.kubernetes import RACKubernetesController
from authentik.events.models import TaskStatus
from authentik.events.system_tasks import SystemTask, prefill_task
from authentik.lib.config import CONFIG
@ -41,6 +39,8 @@ from authentik.providers.ldap.controllers.docker import LDAPDockerController
from authentik.providers.ldap.controllers.kubernetes import LDAPKubernetesController
from authentik.providers.proxy.controllers.docker import ProxyDockerController
from authentik.providers.proxy.controllers.kubernetes import ProxyKubernetesController
from authentik.providers.rac.controllers.docker import RACDockerController
from authentik.providers.rac.controllers.kubernetes import RACKubernetesController
from authentik.providers.radius.controllers.docker import RadiusDockerController
from authentik.providers.radius.controllers.kubernetes import RadiusKubernetesController
from authentik.root.celery import CELERY_APP
@ -42,6 +42,12 @@ class GeoIPPolicySerializer(CountryFieldMixin, PolicySerializer):
            "asns",
            "countries",
            "countries_obj",
            "check_history_distance",
            "history_max_distance_km",
            "distance_tolerance_km",
            "history_login_count",
            "check_impossible_travel",
            "impossible_tolerance_km",
        ]
@ -0,0 +1,43 @@
# Generated by Django 5.0.10 on 2025-01-02 20:40

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_policies_geoip", "0001_initial"),
    ]

    operations = [
        migrations.AddField(
            model_name="geoippolicy",
            name="check_history_distance",
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name="geoippolicy",
            name="check_impossible_travel",
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name="geoippolicy",
            name="distance_tolerance_km",
            field=models.PositiveIntegerField(default=50),
        ),
        migrations.AddField(
            model_name="geoippolicy",
            name="history_login_count",
            field=models.PositiveIntegerField(default=5),
        ),
        migrations.AddField(
            model_name="geoippolicy",
            name="history_max_distance_km",
            field=models.PositiveBigIntegerField(default=100),
        ),
        migrations.AddField(
            model_name="geoippolicy",
            name="impossible_tolerance_km",
            field=models.PositiveIntegerField(default=100),
        ),
    ]
@ -4,15 +4,21 @@ from itertools import chain

from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.utils.timezone import now
from django.utils.translation import gettext as _
from django_countries.fields import CountryField
from geopy import distance
from rest_framework.serializers import BaseSerializer

from authentik.events.context_processors.geoip import GeoIPDict
from authentik.events.models import Event, EventAction
from authentik.policies.exceptions import PolicyException
from authentik.policies.geoip.exceptions import GeoIPNotFoundException
from authentik.policies.models import Policy
from authentik.policies.types import PolicyRequest, PolicyResult

MAX_DISTANCE_HOUR_KM = 1000


class GeoIPPolicy(Policy):
    """Ensure the user satisfies requirements of geography or network topology, based on IP
@ -21,6 +27,15 @@ class GeoIPPolicy(Policy):
    asns = ArrayField(models.IntegerField(), blank=True, default=list)
    countries = CountryField(multiple=True, blank=True)

    distance_tolerance_km = models.PositiveIntegerField(default=50)

    check_history_distance = models.BooleanField(default=False)
    history_max_distance_km = models.PositiveBigIntegerField(default=100)
    history_login_count = models.PositiveIntegerField(default=5)

    check_impossible_travel = models.BooleanField(default=False)
    impossible_tolerance_km = models.PositiveIntegerField(default=100)

    @property
    def serializer(self) -> type[BaseSerializer]:
        from authentik.policies.geoip.api import GeoIPPolicySerializer
@ -37,21 +52,27 @@ class GeoIPPolicy(Policy):
        - the client IP is advertised by an autonomous system with ASN in the `asns`
        - the client IP is geolocated in a country of `countries`
        """
        results: list[PolicyResult] = []
        static_results: list[PolicyResult] = []
        dynamic_results: list[PolicyResult] = []

        if self.asns:
            results.append(self.passes_asn(request))
            static_results.append(self.passes_asn(request))
        if self.countries:
            results.append(self.passes_country(request))
            static_results.append(self.passes_country(request))

        if not results:
        if self.check_history_distance or self.check_impossible_travel:
            dynamic_results.append(self.passes_distance(request))

        if not static_results and not dynamic_results:
            return PolicyResult(True)

        passing = any(r.passing for r in results)
        messages = chain(*[r.messages for r in results])
        passing = any(r.passing for r in static_results) and all(r.passing for r in dynamic_results)
        messages = chain(
            *[r.messages for r in static_results], *[r.messages for r in dynamic_results]
        )

        result = PolicyResult(passing, *messages)
        result.source_results = results
        result.source_results = list(chain(static_results, dynamic_results))

        return result

@ -73,7 +94,7 @@ class GeoIPPolicy(Policy):

    def passes_country(self, request: PolicyRequest) -> PolicyResult:
        # This is not a single get chain because `request.context` can contain `{ "geoip": None }`.
        geoip_data = request.context.get("geoip")
        geoip_data: GeoIPDict | None = request.context.get("geoip")
        country = geoip_data.get("country") if geoip_data else None

        if not country:
@ -87,6 +108,42 @@ class GeoIPPolicy(Policy):

        return PolicyResult(True)

    def passes_distance(self, request: PolicyRequest) -> PolicyResult:
        """Check if current policy execution is out of distance range compared
        to previous authentication requests"""
        # Get previous login event and GeoIP data
        previous_logins = Event.objects.filter(
            action=EventAction.LOGIN, user__pk=request.user.pk, context__geo__isnull=False
        ).order_by("-created")[: self.history_login_count]
        _now = now()
        geoip_data: GeoIPDict | None = request.context.get("geoip")
        if not geoip_data:
            return PolicyResult(False)
        for previous_login in previous_logins:
            previous_login_geoip: GeoIPDict = previous_login.context["geo"]

            # Figure out distance
            dist = distance.geodesic(
                (previous_login_geoip["lat"], previous_login_geoip["long"]),
                (geoip_data["lat"], geoip_data["long"]),
            )
            if self.check_history_distance and dist.km >= (
                self.history_max_distance_km + self.distance_tolerance_km
            ):
                return PolicyResult(
                    False, _("Distance from previous authentication is larger than threshold.")
                )
            # Check if distance between `previous_login` and now is more
            # than max distance per hour times the amount of hours since the previous login
            # (round down to the lowest closest time of hours)
            # clamped to be at least 1 hour
            rel_time_hours = max(int((_now - previous_login.created).total_seconds() / 3600), 1)
            if self.check_impossible_travel and dist.km >= (
                (MAX_DISTANCE_HOUR_KM * rel_time_hours) + self.distance_tolerance_km
            ):
                return PolicyResult(False, _("Distance is further than possible."))
        return PolicyResult(True)

    class Meta(Policy.PolicyMeta):
        verbose_name = _("GeoIP Policy")
        verbose_name_plural = _("GeoIP Policies")
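To make the impossible-travel arithmetic concrete, here is a worked check with the defaults above (50 km tolerance, 1000 km/h budget), reusing the Canada/Poland coordinate pair from the tests that follow; the two-hour gap is an assumed scenario:

```python
from geopy import distance

# Previous login (Canada) vs. current request (Poland), as in the tests below
dist_km = distance.geodesic((55.868351, -104.441011), (50.950613, 20.363679)).km
# on the order of 7,100 km

rel_time_hours = max(int(7200 / 3600), 1)  # assumed: previous login two hours ago -> 2
allowed_km = 1000 * rel_time_hours + 50    # MAX_DISTANCE_HOUR_KM * hours + distance_tolerance_km
assert dist_km >= allowed_km               # fails the policy: "Distance is further than possible."
```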
@ -1,8 +1,10 @@
"""geoip policy tests"""

from django.test import TestCase
from guardian.shortcuts import get_anonymous_user

from authentik.core.tests.utils import create_test_user
from authentik.events.models import Event, EventAction
from authentik.events.utils import get_user
from authentik.policies.engine import PolicyRequest, PolicyResult
from authentik.policies.exceptions import PolicyException
from authentik.policies.geoip.exceptions import GeoIPNotFoundException
@ -14,8 +16,8 @@ class TestGeoIPPolicy(TestCase):

    def setUp(self):
        super().setUp()

        self.request = PolicyRequest(get_anonymous_user())
        self.user = create_test_user()
        self.request = PolicyRequest(self.user)

        self.context_disabled_geoip = {}
        self.context_unknown_ip = {"asn": None, "geoip": None}
@ -126,3 +128,70 @@ class TestGeoIPPolicy(TestCase):
        result: PolicyResult = policy.passes(self.request)

        self.assertTrue(result.passing)

    def test_history(self):
        """Test history checks"""
        Event.objects.create(
            action=EventAction.LOGIN,
            user=get_user(self.user),
            context={
                # Random location in Canada
                "geo": {"lat": 55.868351, "long": -104.441011},
            },
        )
        # Random location in Poland
        self.request.context["geoip"] = {"lat": 50.950613, "long": 20.363679}

        policy = GeoIPPolicy.objects.create(check_history_distance=True)

        result: PolicyResult = policy.passes(self.request)
        self.assertFalse(result.passing)

    def test_history_no_data(self):
        """Test history checks (with no geoip data in context)"""
        Event.objects.create(
            action=EventAction.LOGIN,
            user=get_user(self.user),
            context={
                # Random location in Canada
                "geo": {"lat": 55.868351, "long": -104.441011},
            },
        )

        policy = GeoIPPolicy.objects.create(check_history_distance=True)

        result: PolicyResult = policy.passes(self.request)
        self.assertFalse(result.passing)

    def test_history_impossible_travel(self):
        """Test impossible travel checks"""
        Event.objects.create(
            action=EventAction.LOGIN,
            user=get_user(self.user),
            context={
                # Random location in Canada
                "geo": {"lat": 55.868351, "long": -104.441011},
            },
        )
        # Random location in Poland
        self.request.context["geoip"] = {"lat": 50.950613, "long": 20.363679}

        policy = GeoIPPolicy.objects.create(check_impossible_travel=True)

        result: PolicyResult = policy.passes(self.request)
        self.assertFalse(result.passing)

    def test_history_no_geoip(self):
        """Test history checks (previous login with no geoip data)"""
        Event.objects.create(
            action=EventAction.LOGIN,
            user=get_user(self.user),
            context={},
        )
        # Random location in Poland
        self.request.context["geoip"] = {"lat": 50.950613, "long": 20.363679}

        policy = GeoIPPolicy.objects.create(check_history_distance=True)

        result: PolicyResult = policy.passes(self.request)
        self.assertFalse(result.passing)
@ -148,10 +148,10 @@ class PasswordPolicy(Policy):
            user_inputs.append(request.user.email)
        if request.http_request:
            user_inputs.append(request.http_request.brand.branding_title)
        # Only calculate result for the first 100 characters, as with over 100 char
        # Only calculate result for the first 72 characters, as with over 72 char
        # long passwords we can be reasonably sure that they'll surpass the score anyways
        # See https://github.com/dropbox/zxcvbn#runtime-latency
        results = zxcvbn(password[:100], user_inputs)
        results = zxcvbn(password[:72], user_inputs)
        LOGGER.debug("password failed", check="zxcvbn", score=results["score"])
        result = PolicyResult(results["score"] > self.zxcvbn_score_threshold)
        if not result.passing:
@ -0,0 +1,30 @@
# Generated by Django 5.0.10 on 2025-01-13 18:05

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        (
            "authentik_policies_reputation",
            "0007_reputation_authentik_p_identif_9434d7_idx_and_more",
        ),
    ]

    operations = [
        migrations.AddIndex(
            model_name="reputation",
            index=models.Index(fields=["expires"], name="authentik_p_expires_da493f_idx"),
        ),
        migrations.AddIndex(
            model_name="reputation",
            index=models.Index(fields=["expiring"], name="authentik_p_expirin_2ab34f_idx"),
        ),
        migrations.AddIndex(
            model_name="reputation",
            index=models.Index(
                fields=["expiring", "expires"], name="authentik_p_expirin_2a8ec7_idx"
            ),
        ),
    ]
@ -96,7 +96,7 @@ class Reputation(ExpiringModel, SerializerModel):
        verbose_name = _("Reputation Score")
        verbose_name_plural = _("Reputation Scores")
        unique_together = ("identifier", "ip")
        indexes = [
        indexes = ExpiringModel.Meta.indexes + [
            models.Index(fields=["identifier"]),
            models.Index(fields=["ip"]),
            models.Index(fields=["ip", "identifier"]),
@ -0,0 +1,72 @@
# Generated by Django 5.0.10 on 2025-01-13 18:05

from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0042_authenticatedsession_authentik_c_expires_08251d_idx_and_more"),
        ("authentik_providers_oauth2", "0026_alter_accesstoken_session_and_more"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.AddIndex(
            model_name="accesstoken",
            index=models.Index(fields=["expires"], name="authentik_p_expires_9f24a5_idx"),
        ),
        migrations.AddIndex(
            model_name="accesstoken",
            index=models.Index(fields=["expiring"], name="authentik_p_expirin_2d9205_idx"),
        ),
        migrations.AddIndex(
            model_name="accesstoken",
            index=models.Index(
                fields=["expiring", "expires"], name="authentik_p_expirin_c74005_idx"
            ),
        ),
        migrations.AddIndex(
            model_name="authorizationcode",
            index=models.Index(fields=["expires"], name="authentik_p_expires_f594b2_idx"),
        ),
        migrations.AddIndex(
            model_name="authorizationcode",
            index=models.Index(fields=["expiring"], name="authentik_p_expirin_6a5e2c_idx"),
        ),
        migrations.AddIndex(
            model_name="authorizationcode",
            index=models.Index(
                fields=["expiring", "expires"], name="authentik_p_expirin_c0f353_idx"
            ),
        ),
        migrations.AddIndex(
            model_name="devicetoken",
            index=models.Index(fields=["expires"], name="authentik_p_expires_961437_idx"),
        ),
        migrations.AddIndex(
            model_name="devicetoken",
            index=models.Index(fields=["expiring"], name="authentik_p_expirin_4fd278_idx"),
        ),
        migrations.AddIndex(
            model_name="devicetoken",
            index=models.Index(
                fields=["expiring", "expires"], name="authentik_p_expirin_cd6b1c_idx"
            ),
        ),
        migrations.AddIndex(
            model_name="refreshtoken",
            index=models.Index(fields=["expires"], name="authentik_p_expires_c479a7_idx"),
        ),
        migrations.AddIndex(
            model_name="refreshtoken",
            index=models.Index(fields=["expiring"], name="authentik_p_expirin_d4d17f_idx"),
        ),
        migrations.AddIndex(
            model_name="refreshtoken",
            index=models.Index(
                fields=["expiring", "expires"], name="authentik_p_expirin_acb4a5_idx"
            ),
        ),
    ]
@ -281,7 +281,6 @@ class OAuth2Provider(WebfingerProvider, Provider):
                },
            )
            return request.build_absolute_uri(url)

        except Provider.application.RelatedObjectDoesNotExist:
            return None

@ -425,6 +424,7 @@ class AuthorizationCode(SerializerModel, ExpiringModel, BaseGrantModel):
    class Meta:
        verbose_name = _("Authorization Code")
        verbose_name_plural = _("Authorization Codes")
        indexes = ExpiringModel.Meta.indexes

    def __str__(self):
        return f"Authorization code for {self.provider_id} for user {self.user_id}"
@ -453,7 +453,7 @@ class AccessToken(SerializerModel, ExpiringModel, BaseGrantModel):
    _id_token = models.TextField()

    class Meta:
        indexes = [
        indexes = ExpiringModel.Meta.indexes + [
            HashIndex(fields=["token"]),
        ]
        verbose_name = _("OAuth2 Access Token")
@ -504,7 +504,7 @@ class RefreshToken(SerializerModel, ExpiringModel, BaseGrantModel):
    )

    class Meta:
        indexes = [
        indexes = ExpiringModel.Meta.indexes + [
            HashIndex(fields=["token"]),
        ]
        verbose_name = _("OAuth2 Refresh Token")
@ -556,6 +556,7 @@ class DeviceToken(ExpiringModel):
    class Meta:
        verbose_name = _("Device Token")
        verbose_name_plural = _("Device Tokens")
        indexes = ExpiringModel.Meta.indexes

    def __str__(self):
        return f"Device Token for {self.provider_id}"
@ -1,9 +1,10 @@
from django.contrib.auth.signals import user_logged_out
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.http import HttpRequest

from authentik.core.models import User
from authentik.providers.oauth2.models import AccessToken
from authentik.providers.oauth2.models import AccessToken, DeviceToken, RefreshToken


@receiver(user_logged_out)
@ -12,3 +13,13 @@ def user_logged_out_oauth_access_token(sender, request: HttpRequest, user: User,
    if not request.session or not request.session.session_key:
        return
    AccessToken.objects.filter(user=user, session__session_key=request.session.session_key).delete()


@receiver(post_save, sender=User)
def user_deactivated(sender, instance: User, **_):
    """Remove user tokens when deactivated"""
    if instance.is_active:
        return
    AccessToken.objects.filter(session__user=instance).delete()
    RefreshToken.objects.filter(session__user=instance).delete()
    DeviceToken.objects.filter(session__user=instance).delete()
@ -49,7 +49,9 @@ class TesOAuth2DeviceInit(OAuthTestCase):
                kwargs={
                    "flow_slug": self.device_flow.slug,
                },
            ),
            )
            + "?"
            + urlencode({"inspector": "available"}),
        )

    def test_device_init_post(self):
@ -63,7 +65,9 @@ class TesOAuth2DeviceInit(OAuthTestCase):
                kwargs={
                    "flow_slug": self.device_flow.slug,
                },
            ),
            )
            + "?"
            + urlencode({"inspector": "available"}),
        )
        res = self.api_client.get(
            reverse(
@ -118,7 +122,9 @@ class TesOAuth2DeviceInit(OAuthTestCase):
                    kwargs={
                        "flow_slug": provider.authorization_flow.slug,
                    },
                ),
                )
                + "?"
                + urlencode({"inspector": "available"}),
            },
        )

@ -150,7 +156,7 @@ class TesOAuth2DeviceInit(OAuthTestCase):
                },
            )
            + "?"
            + urlencode({QS_KEY_CODE: token.user_code}),
            + urlencode({QS_KEY_CODE: token.user_code, "inspector": "available"}),
        )

    def test_device_init_denied(self):
@ -12,6 +12,7 @@ from authentik.core.tests.utils import create_test_admin_user, create_test_cert,
from authentik.lib.generators import generate_id
from authentik.providers.oauth2.models import (
    AccessToken,
    ClientTypes,
    IDToken,
    OAuth2Provider,
    RedirectURI,
@ -108,3 +109,29 @@ class TesOAuth2Revoke(OAuthTestCase):
            },
        )
        self.assertEqual(res.status_code, 401)

    def test_revoke_public(self):
        """Test revoke public client"""
        self.provider.client_type = ClientTypes.PUBLIC
        self.provider.save()
        token: AccessToken = AccessToken.objects.create(
            provider=self.provider,
            user=self.user,
            token=generate_id(),
            auth_time=timezone.now(),
            _scope="openid user profile",
            _id_token=json.dumps(
                asdict(
                    IDToken("foo", "bar"),
                )
            ),
        )
        auth_public = b64encode(f"{self.provider.client_id}:{generate_id()}".encode()).decode()
        res = self.client.post(
            reverse("authentik_providers_oauth2:token-revoke"),
            HTTP_AUTHORIZATION=f"Basic {auth_public}",
            data={
                "token": token.token,
            },
        )
        self.assertEqual(res.status_code, 200)
@ -150,6 +150,7 @@ class TestToken(OAuthTestCase):
                "id_token": provider.encode(
                    access.id_token.to_dict(),
                ),
                "scope": "",
            },
        )
        self.validate_jwt(access, provider)
@ -242,6 +243,7 @@ class TestToken(OAuthTestCase):
                "id_token": provider.encode(
                    access.id_token.to_dict(),
                ),
                "scope": "offline_access",
            },
        )
        self.validate_jwt(access, provider)
@ -301,6 +303,7 @@ class TestToken(OAuthTestCase):
                "id_token": provider.encode(
                    access.id_token.to_dict(),
                ),
                "scope": "offline_access",
            },
        )