Compare commits
173 Commits
root/move-
...
policies/p
Author | SHA1 | Date | |
---|---|---|---|
b3883f7fbf | |||
87c6b0128a | |||
b243c97916 | |||
3f66527521 | |||
2f7c258657 | |||
917c90374f | |||
e9c944c0d5 | |||
b865e97973 | |||
24a364bd6b | |||
65579c0a2b | |||
de20897321 | |||
39f7bc8e9b | |||
4ade549ce2 | |||
a4d87ef011 | |||
b851c3daaf | |||
198af84b3b | |||
69ced3ae02 | |||
4a2f58561b | |||
8becaf3418 | |||
bcfbc46839 | |||
af287ee7b0 | |||
ebf3d12874 | |||
7fbdd0452e | |||
18298a856f | |||
ef6836207a | |||
5ad176adf2 | |||
011afc8b2f | |||
4c32c1503b | |||
774a8e6eeb | |||
297d7f100a | |||
0d3692a619 | |||
ba20748b07 | |||
3fc296ad0b | |||
0aba428787 | |||
4a88e29de6 | |||
0d6fced7d8 | |||
29c6c1e33b | |||
e2e8b7c114 | |||
bf2e854f12 | |||
3fbc059f2d | |||
e051e8ebd8 | |||
880a99efe5 | |||
27d5063d16 | |||
e130bca344 | |||
325d590679 | |||
f40a4b5076 | |||
89a19f6e4c | |||
9bc51c683e | |||
3d2bd4d8dd | |||
46a968d1dd | |||
49cc70eb96 | |||
143b02b51a | |||
5904fae80b | |||
6f9479a085 | |||
ce10dbfa4e | |||
394881dcd3 | |||
a6e322507c | |||
755e2f1507 | |||
d41c9eb442 | |||
dea48e6ac7 | |||
1614f3174f | |||
d18950f7bb | |||
4fe533a92f | |||
82d4e8aa4e | |||
98129d3e9a | |||
98f3b9ae97 | |||
bd69dbc0e1 | |||
ac4d6ae9f6 | |||
cdc0d0a857 | |||
3656c38aa0 | |||
fe4e364492 | |||
ce86cbe2a0 | |||
8f0e9ff534 | |||
ff60607851 | |||
b6cf27b421 | |||
9457c80d62 | |||
409035b692 | |||
7798d16e01 | |||
8f16a182aa | |||
50c68df0a1 | |||
556248c7c9 | |||
ed2e2380cc | |||
1f79b5acb7 | |||
6185e7cdc7 | |||
aedce2a6a1 | |||
fefa189ff4 | |||
b5bdad6804 | |||
1d03f92dee | |||
01b20153ca | |||
83a2728500 | |||
c57f17bff8 | |||
5533f7dd7a | |||
daebeb1192 | |||
26a08fcaac | |||
330fc8cee3 | |||
205c01038f | |||
23eb93c981 | |||
5679352c15 | |||
fb7d637da1 | |||
cee48909e9 | |||
6549b303d5 | |||
e2d6d3860c | |||
91155f9ce3 | |||
bdcd1059dd | |||
e4b6df3f27 | |||
7a6d7919c8 | |||
fda9b137a7 | |||
7686d12f1b | |||
34ee29227a | |||
334e2c466f | |||
7c944b954c | |||
427a8c91c8 | |||
22d6dd3098 | |||
36c81a30ad | |||
f7dc7faea5 | |||
62720e6c51 | |||
64dfe7e3c2 | |||
c803b4da51 | |||
3568cd601f | |||
8cad66536c | |||
220e79e668 | |||
316f43e6eb | |||
b7053dfffd | |||
fccdaaf210 | |||
cf530c6f31 | |||
94d84ae1dc | |||
de1bb03619 | |||
e41d86bd2a | |||
a10e6b7fd7 | |||
92d6d74c2d | |||
773c57b8d7 | |||
692a6be07f | |||
645323cd02 | |||
06d57a7574 | |||
102c7e4c5c | |||
7e7ed83dfe | |||
141ced8317 | |||
5109af0ab4 | |||
1a1912e391 | |||
6702652824 | |||
b04ff5bbee | |||
3daa39080a | |||
d3d6040e23 | |||
e08ccf4ca0 | |||
0e346c6e7c | |||
62187e60d4 | |||
467b1fcd14 | |||
9e2fccb045 | |||
39d8b41357 | |||
0a0f8433c6 | |||
3b61e08d3d | |||
921e1923b0 | |||
a666c20c40 | |||
1ed96fd5a5 | |||
f245dada2c | |||
7d8094d9c4 | |||
d63cba0a9d | |||
fdc3de8646 | |||
7163d333dc | |||
02bdf093e0 | |||
1ce3dfd17f | |||
ce7e539f59 | |||
12e6282316 | |||
3253de73ec | |||
afe8ab7850 | |||
f2e3199050 | |||
04148e08a7 | |||
656b296d6e | |||
f76014710c | |||
04517d46b0 | |||
365e9c9ca3 | |||
5b01f44333 | |||
388b29ef87 |
@ -1,5 +1,5 @@
|
||||
[bumpversion]
|
||||
current_version = 2024.12.2
|
||||
current_version = 2024.12.3
|
||||
tag = True
|
||||
commit = True
|
||||
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
|
||||
|
@ -9,6 +9,9 @@ inputs:
|
||||
image-arch:
|
||||
required: false
|
||||
description: "Docker image arch"
|
||||
release:
|
||||
required: true
|
||||
description: "True if this is a release build, false if this is a dev/PR build"
|
||||
|
||||
outputs:
|
||||
shouldPush:
|
||||
@ -44,6 +47,9 @@ outputs:
|
||||
imageMainName:
|
||||
description: "Docker image main name"
|
||||
value: ${{ steps.ev.outputs.imageMainName }}
|
||||
imageBuildArgs:
|
||||
description: "Docker image build args"
|
||||
value: ${{ steps.ev.outputs.imageBuildArgs }}
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
@ -54,6 +60,8 @@ runs:
|
||||
env:
|
||||
IMAGE_NAME: ${{ inputs.image-name }}
|
||||
IMAGE_ARCH: ${{ inputs.image-arch }}
|
||||
RELEASE: ${{ inputs.release }}
|
||||
PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
|
||||
REF: ${{ github.ref }}
|
||||
run: |
|
||||
python3 ${{ github.action_path }}/push_vars.py
|
||||
|
@ -80,6 +80,13 @@ if should_push:
|
||||
cache_to = f"type=registry,ref={get_attest_image_names(image_tags)}:{_cache_tag},mode=max"
|
||||
|
||||
|
||||
image_build_args = []
|
||||
if os.getenv("RELEASE", "false").lower() == "true":
|
||||
image_build_args = [f"VERSION={os.getenv('REF')}"]
|
||||
else:
|
||||
image_build_args = [f"GIT_BUILD_HASH={sha}"]
|
||||
image_build_args = "\n".join(image_build_args)
|
||||
|
||||
with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
|
||||
print(f"shouldPush={str(should_push).lower()}", file=_output)
|
||||
print(f"sha={sha}", file=_output)
|
||||
@ -91,3 +98,4 @@ with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
|
||||
print(f"imageMainTag={image_main_tag}", file=_output)
|
||||
print(f"imageMainName={image_tags[0]}", file=_output)
|
||||
print(f"cacheTo={cache_to}", file=_output)
|
||||
print(f"imageBuildArgs={image_build_args}", file=_output)
|
||||
|
@ -40,7 +40,7 @@ jobs:
|
||||
attestations: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: docker/setup-qemu-action@v3.3.0
|
||||
- uses: docker/setup-qemu-action@v3.4.0
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
- name: prepare variables
|
||||
uses: ./.github/actions/docker-push-variables
|
||||
@ -50,6 +50,7 @@ jobs:
|
||||
with:
|
||||
image-name: ${{ inputs.image_name }}
|
||||
image-arch: ${{ inputs.image_arch }}
|
||||
release: ${{ inputs.release }}
|
||||
- name: Login to Docker Hub
|
||||
if: ${{ inputs.registry_dockerhub }}
|
||||
uses: docker/login-action@v3
|
||||
@ -76,18 +77,19 @@ jobs:
|
||||
id: push
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
push: ${{ steps.ev.outputs.shouldPush == 'true' }}
|
||||
secrets: |
|
||||
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
|
||||
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
|
||||
build-args: |
|
||||
VERSION=${{ github.ref }}
|
||||
${{ steps.ev.outputs.imageBuildArgs }}
|
||||
tags: ${{ steps.ev.outputs.imageTags }}
|
||||
platforms: linux/${{ inputs.image_arch }}
|
||||
cache-from: type=registry,ref=${{ steps.ev.outputs.attestImageNames }}:buildcache-${{ inputs.image_arch }}
|
||||
cache-to: ${{ steps.ev.outputs.cacheTo }}
|
||||
- uses: actions/attest-build-provenance@v2
|
||||
id: attest
|
||||
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
|
||||
with:
|
||||
subject-name: ${{ steps.ev.outputs.attestImageNames }}
|
||||
subject-digest: ${{ steps.push.outputs.digest }}
|
||||
|
@ -46,6 +46,7 @@ jobs:
|
||||
- build-server-arm64
|
||||
outputs:
|
||||
tags: ${{ steps.ev.outputs.imageTagsJSON }}
|
||||
shouldPush: ${{ steps.ev.outputs.shouldPush }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: prepare variables
|
||||
@ -57,6 +58,7 @@ jobs:
|
||||
image-name: ${{ inputs.image_name }}
|
||||
merge-server:
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ needs.get-tags.outputs.shouldPush == 'true' }}
|
||||
needs:
|
||||
- get-tags
|
||||
- build-server-amd64
|
||||
|
28
.github/workflows/ci-main-daily.yml
vendored
Normal file
28
.github/workflows/ci-main-daily.yml
vendored
Normal file
@ -0,0 +1,28 @@
|
||||
---
|
||||
name: authentik-ci-main-daily
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
# Every night at 3am
|
||||
- cron: "0 3 * * *"
|
||||
|
||||
jobs:
|
||||
test-container:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
version:
|
||||
- docs
|
||||
- version-2024-12
|
||||
- version-2024-10
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- run: |
|
||||
current="$(pwd)"
|
||||
dir="/tmp/authentik/${{ matrix.version }}"
|
||||
mkdir -p $dir
|
||||
cd $dir
|
||||
wget https://${{ matrix.version }}.goauthentik.io/docker-compose.yml
|
||||
${current}/scripts/test_docker.sh
|
33
.github/workflows/ci-main.yml
vendored
33
.github/workflows/ci-main.yml
vendored
@ -43,15 +43,26 @@ jobs:
|
||||
uses: ./.github/actions/setup
|
||||
- name: run migrations
|
||||
run: poetry run python -m lifecycle.migrate
|
||||
test-migrations-from-stable:
|
||||
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }}
|
||||
test-make-seed:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- id: seed
|
||||
run: |
|
||||
echo "seed=$(printf "%d\n" "0x$(openssl rand -hex 4)")" >> "$GITHUB_OUTPUT"
|
||||
outputs:
|
||||
seed: ${{ steps.seed.outputs.seed }}
|
||||
test-migrations-from-stable:
|
||||
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 20
|
||||
needs: test-make-seed
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
psql:
|
||||
- 15-alpine
|
||||
- 16-alpine
|
||||
run_id: [1, 2, 3, 4, 5]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
@ -93,18 +104,23 @@ jobs:
|
||||
env:
|
||||
# Test in the main database that we just migrated from the previous stable version
|
||||
AUTHENTIK_POSTGRESQL__TEST__NAME: authentik
|
||||
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
|
||||
CI_RUN_ID: ${{ matrix.run_id }}
|
||||
CI_TOTAL_RUNS: "5"
|
||||
run: |
|
||||
poetry run make test
|
||||
poetry run make ci-test
|
||||
test-unittest:
|
||||
name: test-unittest - PostgreSQL ${{ matrix.psql }}
|
||||
name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
timeout-minutes: 20
|
||||
needs: test-make-seed
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
psql:
|
||||
- 15-alpine
|
||||
- 16-alpine
|
||||
run_id: [1, 2, 3, 4, 5]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup authentik env
|
||||
@ -112,9 +128,12 @@ jobs:
|
||||
with:
|
||||
postgresql_version: ${{ matrix.psql }}
|
||||
- name: run unittest
|
||||
env:
|
||||
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
|
||||
CI_RUN_ID: ${{ matrix.run_id }}
|
||||
CI_TOTAL_RUNS: "5"
|
||||
run: |
|
||||
poetry run make test
|
||||
poetry run coverage xml
|
||||
poetry run make ci-test
|
||||
- if: ${{ always() }}
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
|
2
.github/workflows/ci-outpost.yml
vendored
2
.github/workflows/ci-outpost.yml
vendored
@ -82,7 +82,7 @@ jobs:
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3.3.0
|
||||
uses: docker/setup-qemu-action@v3.4.0
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
- name: prepare variables
|
||||
|
10
.github/workflows/release-publish.yml
vendored
10
.github/workflows/release-publish.yml
vendored
@ -9,9 +9,17 @@ jobs:
|
||||
build-server:
|
||||
uses: ./.github/workflows/_reusable-docker-build.yaml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
# Needed to upload container images to ghcr.io
|
||||
packages: write
|
||||
# Needed for attestation
|
||||
id-token: write
|
||||
attestations: write
|
||||
with:
|
||||
image_name: ghcr.io/goauthentik/server,beryju/authentik
|
||||
release: true
|
||||
registry_dockerhub: true
|
||||
registry_ghcr: true
|
||||
build-outpost:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
@ -34,7 +42,7 @@ jobs:
|
||||
with:
|
||||
go-version-file: "go.mod"
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3.3.0
|
||||
uses: docker/setup-qemu-action@v3.4.0
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
- name: prepare variables
|
||||
|
11
.github/workflows/release-tag.yml
vendored
11
.github/workflows/release-tag.yml
vendored
@ -14,16 +14,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Pre-release test
|
||||
run: |
|
||||
echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env
|
||||
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env
|
||||
docker buildx install
|
||||
mkdir -p ./gen-ts-api
|
||||
docker build -t testing:latest .
|
||||
echo "AUTHENTIK_IMAGE=testing" >> .env
|
||||
echo "AUTHENTIK_TAG=latest" >> .env
|
||||
docker compose up --no-start
|
||||
docker compose start postgresql redis
|
||||
docker compose run -u root server test-all
|
||||
make test-docker
|
||||
- id: generate_token
|
||||
uses: tibdex/github-app-token@v2
|
||||
with:
|
||||
|
6
.github/workflows/repo-stale.yml
vendored
6
.github/workflows/repo-stale.yml
vendored
@ -1,8 +1,8 @@
|
||||
name: 'authentik-repo-stale'
|
||||
name: "authentik-repo-stale"
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '30 1 * * *'
|
||||
- cron: "30 1 * * *"
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
@ -25,7 +25,7 @@ jobs:
|
||||
days-before-stale: 60
|
||||
days-before-close: 7
|
||||
exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing
|
||||
stale-issue-label: wontfix
|
||||
stale-issue-label: status/stale
|
||||
stale-issue-message: >
|
||||
This issue has been automatically marked as stale because it has not had
|
||||
recent activity. It will be closed if no further activity occurs. Thank you
|
||||
|
3
.gitignore
vendored
3
.gitignore
vendored
@ -209,3 +209,6 @@ source_docs/
|
||||
|
||||
### Golang ###
|
||||
/vendor/
|
||||
|
||||
### Docker ###
|
||||
docker-compose.override.yml
|
||||
|
7
.vscode/extensions.json
vendored
7
.vscode/extensions.json
vendored
@ -2,6 +2,7 @@
|
||||
"recommendations": [
|
||||
"bashmish.es6-string-css",
|
||||
"bpruitt-goddard.mermaid-markdown-syntax-highlighting",
|
||||
"charliermarsh.ruff",
|
||||
"dbaeumer.vscode-eslint",
|
||||
"EditorConfig.EditorConfig",
|
||||
"esbenp.prettier-vscode",
|
||||
@ -10,12 +11,12 @@
|
||||
"Gruntfuggly.todo-tree",
|
||||
"mechatroner.rainbow-csv",
|
||||
"ms-python.black-formatter",
|
||||
"charliermarsh.ruff",
|
||||
"ms-python.black-formatter",
|
||||
"ms-python.debugpy",
|
||||
"ms-python.python",
|
||||
"ms-python.vscode-pylance",
|
||||
"ms-python.black-formatter",
|
||||
"redhat.vscode-yaml",
|
||||
"Tobermory.es6-string-html",
|
||||
"unifiedjs.vscode-mdx"
|
||||
"unifiedjs.vscode-mdx",
|
||||
]
|
||||
}
|
||||
|
66
.vscode/launch.json
vendored
66
.vscode/launch.json
vendored
@ -2,26 +2,76 @@
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Python: PDB attach Server",
|
||||
"type": "python",
|
||||
"name": "Debug: Attach Server Core",
|
||||
"type": "debugpy",
|
||||
"request": "attach",
|
||||
"connect": {
|
||||
"host": "localhost",
|
||||
"port": 6800
|
||||
"port": 9901
|
||||
},
|
||||
"justMyCode": true,
|
||||
"pathMappings": [
|
||||
{
|
||||
"localRoot": "${workspaceFolder}",
|
||||
"remoteRoot": "."
|
||||
}
|
||||
],
|
||||
"django": true
|
||||
},
|
||||
{
|
||||
"name": "Python: PDB attach Worker",
|
||||
"type": "python",
|
||||
"name": "Debug: Attach Worker",
|
||||
"type": "debugpy",
|
||||
"request": "attach",
|
||||
"connect": {
|
||||
"host": "localhost",
|
||||
"port": 6900
|
||||
"port": 9901
|
||||
},
|
||||
"justMyCode": true,
|
||||
"pathMappings": [
|
||||
{
|
||||
"localRoot": "${workspaceFolder}",
|
||||
"remoteRoot": "."
|
||||
}
|
||||
],
|
||||
"django": true
|
||||
},
|
||||
{
|
||||
"name": "Debug: Start Server Router",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "auto",
|
||||
"program": "${workspaceFolder}/cmd/server",
|
||||
"cwd": "${workspaceFolder}"
|
||||
},
|
||||
{
|
||||
"name": "Debug: Start LDAP Outpost",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "auto",
|
||||
"program": "${workspaceFolder}/cmd/ldap",
|
||||
"cwd": "${workspaceFolder}"
|
||||
},
|
||||
{
|
||||
"name": "Debug: Start Proxy Outpost",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "auto",
|
||||
"program": "${workspaceFolder}/cmd/proxy",
|
||||
"cwd": "${workspaceFolder}"
|
||||
},
|
||||
{
|
||||
"name": "Debug: Start RAC Outpost",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "auto",
|
||||
"program": "${workspaceFolder}/cmd/rac",
|
||||
"cwd": "${workspaceFolder}"
|
||||
},
|
||||
{
|
||||
"name": "Debug: Start Radius Outpost",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "auto",
|
||||
"program": "${workspaceFolder}/cmd/radius",
|
||||
"cwd": "${workspaceFolder}"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
11
Dockerfile
11
Dockerfile
@ -94,7 +94,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
|
||||
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
|
||||
|
||||
# Stage 5: Python dependencies
|
||||
FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips AS python-deps
|
||||
FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS python-deps
|
||||
|
||||
ARG TARGETARCH
|
||||
ARG TARGETVARIANT
|
||||
@ -132,13 +132,14 @@ RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
|
||||
. "$HOME/.cargo/env" && \
|
||||
python -m venv /ak-root/venv/ && \
|
||||
bash -c "source ${VENV_PATH}/bin/activate && \
|
||||
pip3 install --upgrade pip && \
|
||||
pip3 install poetry && \
|
||||
pip3 install --upgrade pip poetry && \
|
||||
poetry config --local installer.no-binary cryptography,xmlsec,lxml,python-kadmin-rs && \
|
||||
poetry install --only=main --no-ansi --no-interaction --no-root && \
|
||||
pip uninstall cryptography -y && \
|
||||
poetry install --only=main --no-ansi --no-interaction --no-root"
|
||||
|
||||
# Stage 6: Run
|
||||
FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips AS final-image
|
||||
FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS final-image
|
||||
|
||||
ARG VERSION
|
||||
ARG GIT_BUILD_HASH
|
||||
@ -154,10 +155,12 @@ WORKDIR /
|
||||
|
||||
# We cannot cache this layer otherwise we'll end up with a bigger image
|
||||
RUN apt-get update && \
|
||||
apt-get upgrade -y && \
|
||||
# Required for runtime
|
||||
apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates libkrb5-3 libkadm5clnt-mit12 libkdb5-10 libltdl7 libxslt1.1 && \
|
||||
# Required for bootstrap & healtcheck
|
||||
apt-get install -y --no-install-recommends runit && \
|
||||
pip3 install --no-cache-dir --upgrade pip && \
|
||||
apt-get clean && \
|
||||
rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
|
||||
adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
|
||||
|
32
Makefile
32
Makefile
@ -6,6 +6,8 @@ UID = $(shell id -u)
|
||||
GID = $(shell id -g)
|
||||
NPM_VERSION = $(shell python -m scripts.npm_version)
|
||||
PY_SOURCES = authentik tests scripts lifecycle .github
|
||||
GO_SOURCES = cmd internal
|
||||
WEB_SOURCES = web/src web/packages
|
||||
DOCKER_IMAGE ?= "authentik:test"
|
||||
|
||||
GEN_API_TS = "gen-ts-api"
|
||||
@ -19,11 +21,12 @@ pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)
|
||||
CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
|
||||
-I .github/codespell-words.txt \
|
||||
-S 'web/src/locales/**' \
|
||||
-S 'website/docs/developer-docs/api/reference/**' \
|
||||
authentik \
|
||||
internal \
|
||||
cmd \
|
||||
web/src \
|
||||
-S 'website/developer-docs/api/reference/**' \
|
||||
-S '**/node_modules/**' \
|
||||
-S '**/dist/**' \
|
||||
$(PY_SOURCES) \
|
||||
$(GO_SOURCES) \
|
||||
$(WEB_SOURCES) \
|
||||
website/src \
|
||||
website/blog \
|
||||
website/docs \
|
||||
@ -45,15 +48,6 @@ help: ## Show this help
|
||||
go-test:
|
||||
go test -timeout 0 -v -race -cover ./...
|
||||
|
||||
test-docker: ## Run all tests in a docker-compose
|
||||
echo "PG_PASS=$(shell openssl rand 32 | base64 -w 0)" >> .env
|
||||
echo "AUTHENTIK_SECRET_KEY=$(shell openssl rand 32 | base64 -w 0)" >> .env
|
||||
docker compose pull -q
|
||||
docker compose up --no-start
|
||||
docker compose start postgresql redis
|
||||
docker compose run -u root server test-all
|
||||
rm -f .env
|
||||
|
||||
test: ## Run the server tests and produce a coverage report (locally)
|
||||
coverage run manage.py test --keepdb authentik
|
||||
coverage html
|
||||
@ -152,7 +146,7 @@ gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescri
|
||||
docker run \
|
||||
--rm -v ${PWD}:/local \
|
||||
--user ${UID}:${GID} \
|
||||
docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
|
||||
docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
|
||||
-i /local/schema.yml \
|
||||
-g typescript-fetch \
|
||||
-o /local/${GEN_API_TS} \
|
||||
@ -263,6 +257,9 @@ docker: ## Build a docker image of the current source tree
|
||||
mkdir -p ${GEN_API_TS}
|
||||
DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}
|
||||
|
||||
test-docker:
|
||||
BUILD=true ./scripts/test_docker.sh
|
||||
|
||||
#########################
|
||||
## CI
|
||||
#########################
|
||||
@ -287,3 +284,8 @@ ci-bandit: ci--meta-debug
|
||||
|
||||
ci-pending-migrations: ci--meta-debug
|
||||
ak makemigrations --check
|
||||
|
||||
ci-test: ci--meta-debug
|
||||
coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik
|
||||
coverage report
|
||||
coverage xml
|
||||
|
@ -2,7 +2,7 @@
|
||||
|
||||
from os import environ
|
||||
|
||||
__version__ = "2024.12.2"
|
||||
__version__ = "2024.12.3"
|
||||
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
|
||||
|
||||
|
||||
|
@ -51,6 +51,7 @@ from authentik.enterprise.providers.microsoft_entra.models import (
|
||||
MicrosoftEntraProviderUser,
|
||||
)
|
||||
from authentik.enterprise.providers.rac.models import ConnectionToken
|
||||
from authentik.enterprise.providers.ssf.models import StreamEvent
|
||||
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import (
|
||||
EndpointDevice,
|
||||
EndpointDeviceConnection,
|
||||
@ -131,6 +132,7 @@ def excluded_models() -> list[type[Model]]:
|
||||
EndpointDevice,
|
||||
EndpointDeviceConnection,
|
||||
DeviceToken,
|
||||
StreamEvent,
|
||||
)
|
||||
|
||||
|
||||
|
@ -3,6 +3,7 @@
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from drf_spectacular.utils import OpenApiParameter, extend_schema
|
||||
from guardian.shortcuts import get_objects_for_user
|
||||
from rest_framework.fields import (
|
||||
BooleanField,
|
||||
CharField,
|
||||
@ -16,7 +17,6 @@ from rest_framework.viewsets import ViewSet
|
||||
|
||||
from authentik.core.api.utils import MetaNameSerializer
|
||||
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice
|
||||
from authentik.rbac.decorators import permission_required
|
||||
from authentik.stages.authenticator import device_classes, devices_for_user
|
||||
from authentik.stages.authenticator.models import Device
|
||||
from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
|
||||
@ -73,7 +73,9 @@ class AdminDeviceViewSet(ViewSet):
|
||||
def get_devices(self, **kwargs):
|
||||
"""Get all devices in all child classes"""
|
||||
for model in device_classes():
|
||||
device_set = model.objects.filter(**kwargs)
|
||||
device_set = get_objects_for_user(
|
||||
self.request.user, f"{model._meta.app_label}.view_{model._meta.model_name}", model
|
||||
).filter(**kwargs)
|
||||
yield from device_set
|
||||
|
||||
@extend_schema(
|
||||
@ -86,10 +88,6 @@ class AdminDeviceViewSet(ViewSet):
|
||||
],
|
||||
responses={200: DeviceSerializer(many=True)},
|
||||
)
|
||||
@permission_required(
|
||||
None,
|
||||
[f"{model._meta.app_label}.view_{model._meta.model_name}" for model in device_classes()],
|
||||
)
|
||||
def list(self, request: Request) -> Response:
|
||||
"""Get all devices for current user"""
|
||||
kwargs = {}
|
||||
|
@ -85,7 +85,7 @@ class SourceViewSet(
|
||||
serializer_class = SourceSerializer
|
||||
lookup_field = "slug"
|
||||
search_fields = ["slug", "name"]
|
||||
filterset_fields = ["slug", "name", "managed"]
|
||||
filterset_fields = ["slug", "name", "managed", "pbm_uuid"]
|
||||
|
||||
def get_queryset(self): # pragma: no cover
|
||||
return Source.objects.select_subclasses()
|
||||
|
@ -236,9 +236,11 @@ class UserSerializer(ModelSerializer):
|
||||
"path",
|
||||
"type",
|
||||
"uuid",
|
||||
"password_change_date",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"name": {"allow_blank": True},
|
||||
"password_change_date": {"read_only": True},
|
||||
}
|
||||
|
||||
|
||||
|
@ -5,6 +5,7 @@ from typing import TextIO
|
||||
from daphne.management.commands.runserver import Command as RunServer
|
||||
from daphne.server import Server
|
||||
|
||||
from authentik.lib.debug import start_debug_server
|
||||
from authentik.root.signals import post_startup, pre_startup, startup
|
||||
|
||||
|
||||
@ -13,6 +14,7 @@ class SignalServer(Server):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
start_debug_server()
|
||||
|
||||
def ready_callable():
|
||||
pre_startup.send(sender=self)
|
||||
|
@ -9,6 +9,7 @@ from django.db import close_old_connections
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.debug import start_debug_server
|
||||
from authentik.root.celery import CELERY_APP
|
||||
|
||||
LOGGER = get_logger()
|
||||
@ -28,10 +29,7 @@ class Command(BaseCommand):
|
||||
def handle(self, **options):
|
||||
LOGGER.debug("Celery options", **options)
|
||||
close_old_connections()
|
||||
if CONFIG.get_bool("remote_debug"):
|
||||
import debugpy
|
||||
|
||||
debugpy.listen(("0.0.0.0", 6900)) # nosec
|
||||
start_debug_server()
|
||||
worker: Worker = CELERY_APP.Worker(
|
||||
no_color=False,
|
||||
quiet=True,
|
||||
|
@ -599,6 +599,14 @@ class Application(SerializerModel, PolicyBindingModel):
|
||||
return None
|
||||
return candidates[-1]
|
||||
|
||||
def backchannel_provider_for[T: Provider](self, provider_type: type[T], **kwargs) -> T | None:
|
||||
"""Get Backchannel provider for a specific type"""
|
||||
providers = self.backchannel_providers.filter(
|
||||
**{f"{provider_type._meta.model_name}__isnull": False},
|
||||
**kwargs,
|
||||
)
|
||||
return getattr(providers.first(), provider_type._meta.model_name)
|
||||
|
||||
def __str__(self):
|
||||
return str(self.name)
|
||||
|
||||
|
0
authentik/enterprise/providers/ssf/__init__.py
Normal file
0
authentik/enterprise/providers/ssf/__init__.py
Normal file
0
authentik/enterprise/providers/ssf/api/__init__.py
Normal file
0
authentik/enterprise/providers/ssf/api/__init__.py
Normal file
64
authentik/enterprise/providers/ssf/api/providers.py
Normal file
64
authentik/enterprise/providers/ssf/api/providers.py
Normal file
@ -0,0 +1,64 @@
|
||||
"""SSF Provider API Views"""
|
||||
|
||||
from django.urls import reverse
|
||||
from rest_framework.fields import SerializerMethodField
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.core.api.providers import ProviderSerializer
|
||||
from authentik.core.api.tokens import TokenSerializer
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.enterprise.api import EnterpriseRequiredMixin
|
||||
from authentik.enterprise.providers.ssf.models import SSFProvider
|
||||
|
||||
|
||||
class SSFProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
|
||||
"""SSFProvider Serializer"""
|
||||
|
||||
ssf_url = SerializerMethodField()
|
||||
token_obj = TokenSerializer(source="token", required=False, read_only=True)
|
||||
|
||||
def get_ssf_url(self, instance: SSFProvider) -> str | None:
|
||||
request: Request = self._context.get("request")
|
||||
if not request:
|
||||
return None
|
||||
if not instance.backchannel_application:
|
||||
return None
|
||||
return request.build_absolute_uri(
|
||||
reverse(
|
||||
"authentik_providers_ssf:configuration",
|
||||
kwargs={
|
||||
"application_slug": instance.backchannel_application.slug,
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = SSFProvider
|
||||
fields = [
|
||||
"pk",
|
||||
"name",
|
||||
"component",
|
||||
"verbose_name",
|
||||
"verbose_name_plural",
|
||||
"meta_model_name",
|
||||
"signing_key",
|
||||
"token_obj",
|
||||
"oidc_auth_providers",
|
||||
"ssf_url",
|
||||
"event_retention",
|
||||
]
|
||||
extra_kwargs = {}
|
||||
|
||||
|
||||
class SSFProviderViewSet(UsedByMixin, ModelViewSet):
|
||||
"""SSFProvider Viewset"""
|
||||
|
||||
queryset = SSFProvider.objects.all()
|
||||
serializer_class = SSFProviderSerializer
|
||||
filterset_fields = {
|
||||
"application": ["isnull"],
|
||||
"name": ["iexact"],
|
||||
}
|
||||
search_fields = ["name"]
|
||||
ordering = ["name"]
|
37
authentik/enterprise/providers/ssf/api/streams.py
Normal file
37
authentik/enterprise/providers/ssf/api/streams.py
Normal file
@ -0,0 +1,37 @@
|
||||
"""SSF Stream API Views"""
|
||||
|
||||
from rest_framework.viewsets import ReadOnlyModelViewSet
|
||||
|
||||
from authentik.core.api.utils import ModelSerializer
|
||||
from authentik.enterprise.providers.ssf.api.providers import SSFProviderSerializer
|
||||
from authentik.enterprise.providers.ssf.models import Stream
|
||||
|
||||
|
||||
class SSFStreamSerializer(ModelSerializer):
|
||||
"""SSFStream Serializer"""
|
||||
|
||||
provider_obj = SSFProviderSerializer(source="provider", read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Stream
|
||||
fields = [
|
||||
"pk",
|
||||
"provider",
|
||||
"provider_obj",
|
||||
"delivery_method",
|
||||
"endpoint_url",
|
||||
"events_requested",
|
||||
"format",
|
||||
"aud",
|
||||
"iss",
|
||||
]
|
||||
|
||||
|
||||
class SSFStreamViewSet(ReadOnlyModelViewSet):
|
||||
"""SSFStream Viewset"""
|
||||
|
||||
queryset = Stream.objects.all()
|
||||
serializer_class = SSFStreamSerializer
|
||||
filterset_fields = ["provider", "endpoint_url", "delivery_method"]
|
||||
search_fields = ["provider__name", "endpoint_url"]
|
||||
ordering = ["provider", "uuid"]
|
13
authentik/enterprise/providers/ssf/apps.py
Normal file
13
authentik/enterprise/providers/ssf/apps.py
Normal file
@ -0,0 +1,13 @@
|
||||
"""SSF app config"""
|
||||
|
||||
from authentik.enterprise.apps import EnterpriseConfig
|
||||
|
||||
|
||||
class AuthentikEnterpriseProviderSSF(EnterpriseConfig):
|
||||
"""authentik enterprise ssf app config"""
|
||||
|
||||
name = "authentik.enterprise.providers.ssf"
|
||||
label = "authentik_providers_ssf"
|
||||
verbose_name = "authentik Enterprise.Providers.SSF"
|
||||
default = True
|
||||
mountpoint = ""
|
201
authentik/enterprise/providers/ssf/migrations/0001_initial.py
Normal file
201
authentik/enterprise/providers/ssf/migrations/0001_initial.py
Normal file
@ -0,0 +1,201 @@
|
||||
# Generated by Django 5.0.11 on 2025-02-05 16:20
|
||||
|
||||
import authentik.lib.utils.time
|
||||
import django.contrib.postgres.fields
|
||||
import django.db.models.deletion
|
||||
import uuid
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
("authentik_core", "0042_authenticatedsession_authentik_c_expires_08251d_idx_and_more"),
|
||||
("authentik_crypto", "0004_alter_certificatekeypair_name"),
|
||||
("authentik_providers_oauth2", "0027_accesstoken_authentik_p_expires_9f24a5_idx_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="SSFProvider",
|
||||
fields=[
|
||||
(
|
||||
"provider_ptr",
|
||||
models.OneToOneField(
|
||||
auto_created=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
parent_link=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
to="authentik_core.provider",
|
||||
),
|
||||
),
|
||||
(
|
||||
"event_retention",
|
||||
models.TextField(
|
||||
default="days=30",
|
||||
validators=[authentik.lib.utils.time.timedelta_string_validator],
|
||||
),
|
||||
),
|
||||
(
|
||||
"oidc_auth_providers",
|
||||
models.ManyToManyField(
|
||||
blank=True, default=None, to="authentik_providers_oauth2.oauth2provider"
|
||||
),
|
||||
),
|
||||
(
|
||||
"signing_key",
|
||||
models.ForeignKey(
|
||||
help_text="Key used to sign the SSF Events.",
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="authentik_crypto.certificatekeypair",
|
||||
verbose_name="Signing Key",
|
||||
),
|
||||
),
|
||||
(
|
||||
"token",
|
||||
models.ForeignKey(
|
||||
default=None,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="authentik_core.token",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Shared Signals Framework Provider",
|
||||
"verbose_name_plural": "Shared Signals Framework Providers",
|
||||
"permissions": [("add_stream", "Add stream to SSF provider")],
|
||||
},
|
||||
bases=("authentik_core.provider",),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="Stream",
|
||||
fields=[
|
||||
(
|
||||
"uuid",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4, editable=False, primary_key=True, serialize=False
|
||||
),
|
||||
),
|
||||
(
|
||||
"delivery_method",
|
||||
models.TextField(
|
||||
choices=[
|
||||
(
|
||||
"https://schemas.openid.net/secevent/risc/delivery-method/push",
|
||||
"Risc Push",
|
||||
),
|
||||
(
|
||||
"https://schemas.openid.net/secevent/risc/delivery-method/poll",
|
||||
"Risc Poll",
|
||||
),
|
||||
]
|
||||
),
|
||||
),
|
||||
("endpoint_url", models.TextField(null=True)),
|
||||
(
|
||||
"events_requested",
|
||||
django.contrib.postgres.fields.ArrayField(
|
||||
base_field=models.TextField(
|
||||
choices=[
|
||||
(
|
||||
"https://schemas.openid.net/secevent/caep/event-type/session-revoked",
|
||||
"Caep Session Revoked",
|
||||
),
|
||||
(
|
||||
"https://schemas.openid.net/secevent/caep/event-type/credential-change",
|
||||
"Caep Credential Change",
|
||||
),
|
||||
(
|
||||
"https://schemas.openid.net/secevent/ssf/event-type/verification",
|
||||
"Set Verification",
|
||||
),
|
||||
]
|
||||
),
|
||||
default=list,
|
||||
size=None,
|
||||
),
|
||||
),
|
||||
("format", models.TextField()),
|
||||
(
|
||||
"aud",
|
||||
django.contrib.postgres.fields.ArrayField(
|
||||
base_field=models.TextField(), default=list, size=None
|
||||
),
|
||||
),
|
||||
("iss", models.TextField()),
|
||||
(
|
||||
"provider",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="authentik_providers_ssf.ssfprovider",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "SSF Stream",
|
||||
"verbose_name_plural": "SSF Streams",
|
||||
"default_permissions": ["change", "delete", "view"],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="StreamEvent",
|
||||
fields=[
|
||||
("created", models.DateTimeField(auto_now_add=True)),
|
||||
("last_updated", models.DateTimeField(auto_now=True)),
|
||||
("expires", models.DateTimeField(default=None, null=True)),
|
||||
("expiring", models.BooleanField(default=True)),
|
||||
(
|
||||
"uuid",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4, editable=False, primary_key=True, serialize=False
|
||||
),
|
||||
),
|
||||
(
|
||||
"status",
|
||||
models.TextField(
|
||||
choices=[
|
||||
("pending_new", "Pending New"),
|
||||
("pending_failed", "Pending Failed"),
|
||||
("sent", "Sent"),
|
||||
]
|
||||
),
|
||||
),
|
||||
(
|
||||
"type",
|
||||
models.TextField(
|
||||
choices=[
|
||||
(
|
||||
"https://schemas.openid.net/secevent/caep/event-type/session-revoked",
|
||||
"Caep Session Revoked",
|
||||
),
|
||||
(
|
||||
"https://schemas.openid.net/secevent/caep/event-type/credential-change",
|
||||
"Caep Credential Change",
|
||||
),
|
||||
(
|
||||
"https://schemas.openid.net/secevent/ssf/event-type/verification",
|
||||
"Set Verification",
|
||||
),
|
||||
]
|
||||
),
|
||||
),
|
||||
("payload", models.JSONField(default=dict)),
|
||||
(
|
||||
"stream",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="authentik_providers_ssf.stream",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "SSF Stream Event",
|
||||
"verbose_name_plural": "SSF Stream Events",
|
||||
"ordering": ("-created",),
|
||||
},
|
||||
),
|
||||
]
|
178
authentik/enterprise/providers/ssf/models.py
Normal file
178
authentik/enterprise/providers/ssf/models.py
Normal file
@ -0,0 +1,178 @@
|
||||
from datetime import datetime
|
||||
from functools import cached_property
|
||||
from uuid import uuid4
|
||||
|
||||
from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey
|
||||
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
|
||||
from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.templatetags.static import static
|
||||
from django.utils.timezone import now
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from jwt import encode
|
||||
|
||||
from authentik.core.models import BackchannelProvider, ExpiringModel, Token
|
||||
from authentik.crypto.models import CertificateKeyPair
|
||||
from authentik.lib.models import CreatedUpdatedModel
|
||||
from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator
|
||||
from authentik.providers.oauth2.models import JWTAlgorithms, OAuth2Provider
|
||||
|
||||
|
||||
class EventTypes(models.TextChoices):
|
||||
"""SSF Event types supported by authentik"""
|
||||
|
||||
CAEP_SESSION_REVOKED = "https://schemas.openid.net/secevent/caep/event-type/session-revoked"
|
||||
CAEP_CREDENTIAL_CHANGE = "https://schemas.openid.net/secevent/caep/event-type/credential-change"
|
||||
SET_VERIFICATION = "https://schemas.openid.net/secevent/ssf/event-type/verification"
|
||||
|
||||
|
||||
class DeliveryMethods(models.TextChoices):
|
||||
"""SSF Delivery methods"""
|
||||
|
||||
RISC_PUSH = "https://schemas.openid.net/secevent/risc/delivery-method/push"
|
||||
RISC_POLL = "https://schemas.openid.net/secevent/risc/delivery-method/poll"
|
||||
|
||||
|
||||
class SSFEventStatus(models.TextChoices):
|
||||
"""SSF Event status"""
|
||||
|
||||
PENDING_NEW = "pending_new"
|
||||
PENDING_FAILED = "pending_failed"
|
||||
SENT = "sent"
|
||||
|
||||
|
||||
class SSFProvider(BackchannelProvider):
|
||||
"""Shared Signals Framework provider to allow applications to
|
||||
receive user events from authentik."""
|
||||
|
||||
signing_key = models.ForeignKey(
|
||||
CertificateKeyPair,
|
||||
verbose_name=_("Signing Key"),
|
||||
on_delete=models.CASCADE,
|
||||
help_text=_("Key used to sign the SSF Events."),
|
||||
)
|
||||
|
||||
oidc_auth_providers = models.ManyToManyField(OAuth2Provider, blank=True, default=None)
|
||||
|
||||
token = models.ForeignKey(Token, on_delete=models.CASCADE, null=True, default=None)
|
||||
|
||||
event_retention = models.TextField(
|
||||
default="days=30",
|
||||
validators=[timedelta_string_validator],
|
||||
)
|
||||
|
||||
@cached_property
|
||||
def jwt_key(self) -> tuple[PrivateKeyTypes, str]:
|
||||
"""Get either the configured certificate or the client secret"""
|
||||
key: CertificateKeyPair = self.signing_key
|
||||
private_key = key.private_key
|
||||
if isinstance(private_key, RSAPrivateKey):
|
||||
return private_key, JWTAlgorithms.RS256
|
||||
if isinstance(private_key, EllipticCurvePrivateKey):
|
||||
return private_key, JWTAlgorithms.ES256
|
||||
raise ValueError(f"Invalid private key type: {type(private_key)}")
|
||||
|
||||
@property
|
||||
def service_account_identifier(self) -> str:
|
||||
return f"ak-providers-ssf-{self.pk}"
|
||||
|
||||
@property
|
||||
def serializer(self):
|
||||
from authentik.enterprise.providers.ssf.api.providers import SSFProviderSerializer
|
||||
|
||||
return SSFProviderSerializer
|
||||
|
||||
@property
|
||||
def icon_url(self) -> str | None:
|
||||
return static("authentik/sources/ssf.svg")
|
||||
|
||||
@property
|
||||
def component(self) -> str:
|
||||
return "ak-provider-ssf-form"
|
||||
|
||||
class Meta:
|
||||
verbose_name = _("Shared Signals Framework Provider")
|
||||
verbose_name_plural = _("Shared Signals Framework Providers")
|
||||
permissions = [
|
||||
# This overrides the default "add_stream" permission of the Stream object,
|
||||
# as the user requesting to add a stream must have the permission on the provider
|
||||
("add_stream", _("Add stream to SSF provider")),
|
||||
]
|
||||
|
||||
|
||||
class Stream(models.Model):
|
||||
"""SSF Stream"""
|
||||
|
||||
uuid = models.UUIDField(default=uuid4, primary_key=True, editable=False)
|
||||
provider = models.ForeignKey(SSFProvider, on_delete=models.CASCADE)
|
||||
|
||||
delivery_method = models.TextField(choices=DeliveryMethods.choices)
|
||||
endpoint_url = models.TextField(null=True)
|
||||
|
||||
events_requested = ArrayField(models.TextField(choices=EventTypes.choices), default=list)
|
||||
format = models.TextField()
|
||||
aud = ArrayField(models.TextField(), default=list)
|
||||
|
||||
iss = models.TextField()
|
||||
|
||||
class Meta:
|
||||
verbose_name = _("SSF Stream")
|
||||
verbose_name_plural = _("SSF Streams")
|
||||
default_permissions = ["change", "delete", "view"]
|
||||
|
||||
def __str__(self) -> str:
|
||||
return "SSF Stream"
|
||||
|
||||
def prepare_event_payload(self, type: EventTypes, event_data: dict, **kwargs) -> dict:
|
||||
jti = uuid4()
|
||||
_now = now()
|
||||
return {
|
||||
"uuid": jti,
|
||||
"stream_id": str(self.pk),
|
||||
"type": type,
|
||||
"expiring": True,
|
||||
"status": SSFEventStatus.PENDING_NEW,
|
||||
"expires": _now + timedelta_from_string(self.provider.event_retention),
|
||||
"payload": {
|
||||
"jti": jti.hex,
|
||||
"aud": self.aud,
|
||||
"iat": int(datetime.now().timestamp()),
|
||||
"iss": self.iss,
|
||||
"events": {type: event_data},
|
||||
**kwargs,
|
||||
},
|
||||
}
|
||||
|
||||
def encode(self, data: dict) -> str:
|
||||
headers = {}
|
||||
if self.provider.signing_key:
|
||||
headers["kid"] = self.provider.signing_key.kid
|
||||
key, alg = self.provider.jwt_key
|
||||
return encode(data, key, algorithm=alg, headers=headers)
|
||||
|
||||
|
||||
class StreamEvent(CreatedUpdatedModel, ExpiringModel):
|
||||
"""Single stream event to be sent"""
|
||||
|
||||
uuid = models.UUIDField(default=uuid4, primary_key=True, editable=False)
|
||||
|
||||
stream = models.ForeignKey(Stream, on_delete=models.CASCADE)
|
||||
status = models.TextField(choices=SSFEventStatus.choices)
|
||||
|
||||
type = models.TextField(choices=EventTypes.choices)
|
||||
payload = models.JSONField(default=dict)
|
||||
|
||||
def expire_action(self, *args, **kwargs):
|
||||
"""Only allow automatic cleanup of successfully sent event"""
|
||||
if self.status != SSFEventStatus.SENT:
|
||||
return
|
||||
return super().expire_action(*args, **kwargs)
|
||||
|
||||
def __str__(self):
|
||||
return f"Stream event {self.type}"
|
||||
|
||||
class Meta:
|
||||
verbose_name = _("SSF Stream Event")
|
||||
verbose_name_plural = _("SSF Stream Events")
|
||||
ordering = ("-created",)
|
193
authentik/enterprise/providers/ssf/signals.py
Normal file
193
authentik/enterprise/providers/ssf/signals.py
Normal file
@ -0,0 +1,193 @@
|
||||
from hashlib import sha256
|
||||
|
||||
from django.contrib.auth.signals import user_logged_out
|
||||
from django.db.models import Model
|
||||
from django.db.models.signals import post_delete, post_save, pre_delete
|
||||
from django.dispatch import receiver
|
||||
from django.http.request import HttpRequest
|
||||
from guardian.shortcuts import assign_perm
|
||||
|
||||
from authentik.core.models import (
|
||||
USER_PATH_SYSTEM_PREFIX,
|
||||
AuthenticatedSession,
|
||||
Token,
|
||||
TokenIntents,
|
||||
User,
|
||||
UserTypes,
|
||||
)
|
||||
from authentik.core.signals import password_changed
|
||||
from authentik.enterprise.providers.ssf.models import (
|
||||
EventTypes,
|
||||
SSFProvider,
|
||||
)
|
||||
from authentik.enterprise.providers.ssf.tasks import send_ssf_event
|
||||
from authentik.events.middleware import audit_ignore
|
||||
from authentik.stages.authenticator.models import Device
|
||||
from authentik.stages.authenticator_duo.models import DuoDevice
|
||||
from authentik.stages.authenticator_static.models import StaticDevice
|
||||
from authentik.stages.authenticator_totp.models import TOTPDevice
|
||||
from authentik.stages.authenticator_webauthn.models import (
|
||||
UNKNOWN_DEVICE_TYPE_AAGUID,
|
||||
WebAuthnDevice,
|
||||
)
|
||||
|
||||
USER_PATH_PROVIDERS_SSF = USER_PATH_SYSTEM_PREFIX + "/providers/ssf"
|
||||
|
||||
|
||||
@receiver(post_save, sender=SSFProvider)
|
||||
def ssf_providers_post_save(sender: type[Model], instance: SSFProvider, created: bool, **_):
|
||||
"""Create service account before provider is saved"""
|
||||
identifier = instance.service_account_identifier
|
||||
user, _ = User.objects.update_or_create(
|
||||
username=identifier,
|
||||
defaults={
|
||||
"name": f"SSF Provider {instance.name} Service-Account",
|
||||
"type": UserTypes.INTERNAL_SERVICE_ACCOUNT,
|
||||
"path": USER_PATH_PROVIDERS_SSF,
|
||||
},
|
||||
)
|
||||
assign_perm("add_stream", user, instance)
|
||||
token, token_created = Token.objects.update_or_create(
|
||||
identifier=identifier,
|
||||
defaults={
|
||||
"user": user,
|
||||
"intent": TokenIntents.INTENT_API,
|
||||
"expiring": False,
|
||||
"managed": f"goauthentik.io/providers/ssf/{instance.pk}",
|
||||
},
|
||||
)
|
||||
if created or token_created:
|
||||
with audit_ignore():
|
||||
instance.token = token
|
||||
instance.save()
|
||||
|
||||
|
||||
@receiver(user_logged_out)
|
||||
def ssf_user_logged_out_session_revoked(sender, request: HttpRequest, user: User, **_):
|
||||
"""Session revoked trigger (user logged out)"""
|
||||
if not request.session or not request.session.session_key or not user:
|
||||
return
|
||||
send_ssf_event(
|
||||
EventTypes.CAEP_SESSION_REVOKED,
|
||||
{
|
||||
"initiating_entity": "user",
|
||||
},
|
||||
sub_id={
|
||||
"format": "complex",
|
||||
"session": {
|
||||
"format": "opaque",
|
||||
"id": sha256(request.session.session_key.encode("ascii")).hexdigest(),
|
||||
},
|
||||
"user": {
|
||||
"format": "email",
|
||||
"email": user.email,
|
||||
},
|
||||
},
|
||||
request=request,
|
||||
)
|
||||
|
||||
|
||||
@receiver(pre_delete, sender=AuthenticatedSession)
|
||||
def ssf_user_session_delete_session_revoked(sender, instance: AuthenticatedSession, **_):
|
||||
"""Session revoked trigger (users' session has been deleted)
|
||||
|
||||
As this signal is also triggered with a regular logout, we can't be sure
|
||||
if the session has been deleted by an admin or by the user themselves."""
|
||||
send_ssf_event(
|
||||
EventTypes.CAEP_SESSION_REVOKED,
|
||||
{
|
||||
"initiating_entity": "user",
|
||||
},
|
||||
sub_id={
|
||||
"format": "complex",
|
||||
"session": {
|
||||
"format": "opaque",
|
||||
"id": sha256(instance.session_key.encode("ascii")).hexdigest(),
|
||||
},
|
||||
"user": {
|
||||
"format": "email",
|
||||
"email": instance.user.email,
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@receiver(password_changed)
|
||||
def ssf_password_changed_cred_change(sender, user: User, password: str | None, **_):
|
||||
"""Credential change trigger (password changed)"""
|
||||
send_ssf_event(
|
||||
EventTypes.CAEP_CREDENTIAL_CHANGE,
|
||||
{
|
||||
"credential_type": "password",
|
||||
"change_type": "revoke" if password is None else "update",
|
||||
},
|
||||
sub_id={
|
||||
"format": "complex",
|
||||
"user": {
|
||||
"format": "email",
|
||||
"email": user.email,
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
device_type_map = {
|
||||
StaticDevice: "pin",
|
||||
TOTPDevice: "pin",
|
||||
WebAuthnDevice: "fido-u2f",
|
||||
DuoDevice: "app",
|
||||
}
|
||||
|
||||
|
||||
@receiver(post_save)
|
||||
def ssf_device_post_save(sender: type[Model], instance: Device, created: bool, **_):
|
||||
if not isinstance(instance, Device):
|
||||
return
|
||||
if not instance.confirmed:
|
||||
return
|
||||
device_type = device_type_map.get(instance.__class__)
|
||||
data = {
|
||||
"credential_type": device_type,
|
||||
"change_type": "create" if created else "update",
|
||||
"friendly_name": instance.name,
|
||||
}
|
||||
if isinstance(instance, WebAuthnDevice) and instance.aaguid != UNKNOWN_DEVICE_TYPE_AAGUID:
|
||||
data["fido2_aaguid"] = instance.aaguid
|
||||
send_ssf_event(
|
||||
EventTypes.CAEP_CREDENTIAL_CHANGE,
|
||||
data,
|
||||
sub_id={
|
||||
"format": "complex",
|
||||
"user": {
|
||||
"format": "email",
|
||||
"email": instance.user.email,
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@receiver(post_delete)
|
||||
def ssf_device_post_delete(sender: type[Model], instance: Device, **_):
|
||||
if not isinstance(instance, Device):
|
||||
return
|
||||
if not instance.confirmed:
|
||||
return
|
||||
device_type = device_type_map.get(instance.__class__)
|
||||
data = {
|
||||
"credential_type": device_type,
|
||||
"change_type": "delete",
|
||||
"friendly_name": instance.name,
|
||||
}
|
||||
if isinstance(instance, WebAuthnDevice) and instance.aaguid != UNKNOWN_DEVICE_TYPE_AAGUID:
|
||||
data["fido2_aaguid"] = instance.aaguid
|
||||
send_ssf_event(
|
||||
EventTypes.CAEP_CREDENTIAL_CHANGE,
|
||||
data,
|
||||
sub_id={
|
||||
"format": "complex",
|
||||
"user": {
|
||||
"format": "email",
|
||||
"email": instance.user.email,
|
||||
},
|
||||
},
|
||||
)
|
136
authentik/enterprise/providers/ssf/tasks.py
Normal file
136
authentik/enterprise/providers/ssf/tasks.py
Normal file
@ -0,0 +1,136 @@
|
||||
from celery import group
|
||||
from django.http import HttpRequest
|
||||
from django.utils.timezone import now
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from requests.exceptions import RequestException
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.models import User
|
||||
from authentik.enterprise.providers.ssf.models import (
|
||||
DeliveryMethods,
|
||||
EventTypes,
|
||||
SSFEventStatus,
|
||||
Stream,
|
||||
StreamEvent,
|
||||
)
|
||||
from authentik.events.logs import LogEvent
|
||||
from authentik.events.models import TaskStatus
|
||||
from authentik.events.system_tasks import SystemTask
|
||||
from authentik.lib.utils.http import get_http_session
|
||||
from authentik.lib.utils.time import timedelta_from_string
|
||||
from authentik.policies.engine import PolicyEngine
|
||||
from authentik.root.celery import CELERY_APP
|
||||
|
||||
session = get_http_session()
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
def send_ssf_event(
|
||||
event_type: EventTypes,
|
||||
data: dict,
|
||||
stream_filter: dict | None = None,
|
||||
request: HttpRequest | None = None,
|
||||
**extra_data,
|
||||
):
|
||||
"""Wrapper to send an SSF event to multiple streams"""
|
||||
payload = []
|
||||
if not stream_filter:
|
||||
stream_filter = {}
|
||||
stream_filter["events_requested__contains"] = [event_type]
|
||||
if request and hasattr(request, "request_id"):
|
||||
extra_data.setdefault("txn", request.request_id)
|
||||
for stream in Stream.objects.filter(**stream_filter):
|
||||
event_data = stream.prepare_event_payload(event_type, data, **extra_data)
|
||||
payload.append((str(stream.uuid), event_data))
|
||||
return _send_ssf_event.delay(payload)
|
||||
|
||||
|
||||
def _check_app_access(stream_uuid: str, event_data: dict) -> bool:
|
||||
"""Check if event is related to user and if so, check
|
||||
if the user has access to the application"""
|
||||
stream = Stream.objects.filter(pk=stream_uuid).first()
|
||||
if not stream:
|
||||
return False
|
||||
# `event_data` is a dict version of a StreamEvent
|
||||
sub_id = event_data.get("payload", {}).get("sub_id", {})
|
||||
email = sub_id.get("user", {}).get("email", None)
|
||||
if not email:
|
||||
return True
|
||||
user = User.objects.filter(email=email).first()
|
||||
if not user:
|
||||
return True
|
||||
engine = PolicyEngine(stream.provider.backchannel_application, user)
|
||||
engine.use_cache = False
|
||||
engine.build()
|
||||
return engine.passing
|
||||
|
||||
|
||||
@CELERY_APP.task()
|
||||
def _send_ssf_event(event_data: list[tuple[str, dict]]):
|
||||
tasks = []
|
||||
for stream, data in event_data:
|
||||
if not _check_app_access(stream, data):
|
||||
continue
|
||||
event = StreamEvent.objects.create(**data)
|
||||
tasks.extend(send_single_ssf_event(stream, str(event.uuid)))
|
||||
main_task = group(*tasks)
|
||||
main_task()
|
||||
|
||||
|
||||
def send_single_ssf_event(stream_id: str, evt_id: str):
|
||||
stream = Stream.objects.filter(pk=stream_id).first()
|
||||
if not stream:
|
||||
return
|
||||
event = StreamEvent.objects.filter(pk=evt_id).first()
|
||||
if not event:
|
||||
return
|
||||
if event.status == SSFEventStatus.SENT:
|
||||
return
|
||||
if stream.delivery_method == DeliveryMethods.RISC_PUSH:
|
||||
return [ssf_push_event.si(str(event.pk))]
|
||||
return []
|
||||
|
||||
|
||||
@CELERY_APP.task(bind=True, base=SystemTask)
|
||||
def ssf_push_event(self: SystemTask, event_id: str):
|
||||
self.save_on_success = False
|
||||
event = StreamEvent.objects.filter(pk=event_id).first()
|
||||
if not event:
|
||||
return
|
||||
self.set_uid(event_id)
|
||||
if event.status == SSFEventStatus.SENT:
|
||||
self.set_status(TaskStatus.SUCCESSFUL)
|
||||
return
|
||||
try:
|
||||
response = session.post(
|
||||
event.stream.endpoint_url,
|
||||
data=event.stream.encode(event.payload),
|
||||
headers={"Content-Type": "application/secevent+jwt", "Accept": "application/json"},
|
||||
)
|
||||
response.raise_for_status()
|
||||
event.status = SSFEventStatus.SENT
|
||||
event.save()
|
||||
self.set_status(TaskStatus.SUCCESSFUL)
|
||||
return
|
||||
except RequestException as exc:
|
||||
LOGGER.warning("Failed to send SSF event", exc=exc)
|
||||
self.set_status(TaskStatus.ERROR)
|
||||
attrs = {}
|
||||
if exc.response:
|
||||
attrs["response"] = {
|
||||
"content": exc.response.text,
|
||||
"status": exc.response.status_code,
|
||||
}
|
||||
self.set_error(
|
||||
exc,
|
||||
LogEvent(
|
||||
_("Failed to send request"),
|
||||
log_level="warning",
|
||||
logger=self.__name__,
|
||||
attributes=attrs,
|
||||
),
|
||||
)
|
||||
# Re-up the expiry of the stream event
|
||||
event.expires = now() + timedelta_from_string(event.stream.provider.event_retention)
|
||||
event.status = SSFEventStatus.PENDING_FAILED
|
||||
event.save()
|
46
authentik/enterprise/providers/ssf/tests/test_config.py
Normal file
46
authentik/enterprise/providers/ssf/tests/test_config.py
Normal file
@ -0,0 +1,46 @@
|
||||
import json
|
||||
|
||||
from django.urls import reverse
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.models import Application
|
||||
from authentik.core.tests.utils import create_test_cert
|
||||
from authentik.enterprise.providers.ssf.models import (
|
||||
SSFProvider,
|
||||
)
|
||||
from authentik.lib.generators import generate_id
|
||||
|
||||
|
||||
class TestConfiguration(APITestCase):
|
||||
def setUp(self):
|
||||
self.application = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
self.provider = SSFProvider.objects.create(
|
||||
name=generate_id(),
|
||||
signing_key=create_test_cert(),
|
||||
backchannel_application=self.application,
|
||||
)
|
||||
|
||||
def test_config_fetch(self):
|
||||
"""test SSF configuration (unauthenticated)"""
|
||||
res = self.client.get(
|
||||
reverse(
|
||||
"authentik_providers_ssf:configuration",
|
||||
kwargs={"application_slug": self.application.slug},
|
||||
),
|
||||
)
|
||||
self.assertEqual(res.status_code, 200)
|
||||
content = json.loads(res.content)
|
||||
self.assertEqual(content["spec_version"], "1_0-ID2")
|
||||
|
||||
def test_config_fetch_authenticated(self):
|
||||
"""test SSF configuration (authenticated)"""
|
||||
res = self.client.get(
|
||||
reverse(
|
||||
"authentik_providers_ssf:configuration",
|
||||
kwargs={"application_slug": self.application.slug},
|
||||
),
|
||||
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
|
||||
)
|
||||
self.assertEqual(res.status_code, 200)
|
||||
content = json.loads(res.content)
|
||||
self.assertEqual(content["spec_version"], "1_0-ID2")
|
51
authentik/enterprise/providers/ssf/tests/test_jwks.py
Normal file
51
authentik/enterprise/providers/ssf/tests/test_jwks.py
Normal file
@ -0,0 +1,51 @@
|
||||
"""JWKS tests"""
|
||||
|
||||
import base64
|
||||
import json
|
||||
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.x509 import load_der_x509_certificate
|
||||
from django.test import TestCase
|
||||
from django.urls.base import reverse
|
||||
from jwt import PyJWKSet
|
||||
|
||||
from authentik.core.models import Application
|
||||
from authentik.core.tests.utils import create_test_cert
|
||||
from authentik.enterprise.providers.ssf.models import SSFProvider
|
||||
from authentik.lib.generators import generate_id
|
||||
|
||||
|
||||
class TestJWKS(TestCase):
|
||||
"""Test JWKS view"""
|
||||
|
||||
def test_rs256(self):
|
||||
"""Test JWKS request with RS256"""
|
||||
provider = SSFProvider.objects.create(
|
||||
name=generate_id(),
|
||||
signing_key=create_test_cert(),
|
||||
)
|
||||
app = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
app.backchannel_providers.add(provider)
|
||||
response = self.client.get(
|
||||
reverse("authentik_providers_ssf:jwks", kwargs={"application_slug": app.slug})
|
||||
)
|
||||
body = json.loads(response.content.decode())
|
||||
self.assertEqual(len(body["keys"]), 1)
|
||||
PyJWKSet.from_dict(body)
|
||||
key = body["keys"][0]
|
||||
load_der_x509_certificate(base64.b64decode(key["x5c"][0]), default_backend()).public_key()
|
||||
|
||||
def test_es256(self):
|
||||
"""Test JWKS request with ES256"""
|
||||
provider = SSFProvider.objects.create(
|
||||
name=generate_id(),
|
||||
signing_key=create_test_cert(),
|
||||
)
|
||||
app = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
app.backchannel_providers.add(provider)
|
||||
response = self.client.get(
|
||||
reverse("authentik_providers_ssf:jwks", kwargs={"application_slug": app.slug})
|
||||
)
|
||||
body = json.loads(response.content.decode())
|
||||
self.assertEqual(len(body["keys"]), 1)
|
||||
PyJWKSet.from_dict(body)
|
168
authentik/enterprise/providers/ssf/tests/test_signals.py
Normal file
@ -0,0 +1,168 @@
|
||||
from uuid import uuid4
|
||||
|
||||
from django.urls import reverse
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.models import Application, Group
|
||||
from authentik.core.tests.utils import (
|
||||
create_test_cert,
|
||||
create_test_user,
|
||||
)
|
||||
from authentik.enterprise.providers.ssf.models import (
|
||||
EventTypes,
|
||||
SSFEventStatus,
|
||||
SSFProvider,
|
||||
Stream,
|
||||
StreamEvent,
|
||||
)
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.policies.models import PolicyBinding
|
||||
from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
|
||||
|
||||
|
||||
class TestSignals(APITestCase):
|
||||
"""Test individual SSF Signals"""
|
||||
|
||||
def setUp(self):
|
||||
self.application = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
self.provider = SSFProvider.objects.create(
|
||||
name=generate_id(),
|
||||
signing_key=create_test_cert(),
|
||||
backchannel_application=self.application,
|
||||
)
|
||||
res = self.client.post(
|
||||
reverse(
|
||||
"authentik_providers_ssf:stream",
|
||||
kwargs={"application_slug": self.application.slug},
|
||||
),
|
||||
data={
|
||||
"iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
|
||||
"aud": ["https://app.authentik.company"],
|
||||
"delivery": {
|
||||
"method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
|
||||
"endpoint_url": "https://app.authentik.company",
|
||||
},
|
||||
"events_requested": [
|
||||
"https://schemas.openid.net/secevent/caep/event-type/credential-change",
|
||||
"https://schemas.openid.net/secevent/caep/event-type/session-revoked",
|
||||
],
|
||||
"format": "iss_sub",
|
||||
},
|
||||
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
|
||||
)
|
||||
self.assertEqual(res.status_code, 201, res.content)
|
||||
|
||||
def test_signal_logout(self):
|
||||
"""Test user logout"""
|
||||
user = create_test_user()
|
||||
self.client.force_login(user)
|
||||
self.client.logout()
|
||||
|
||||
stream = Stream.objects.filter(provider=self.provider).first()
|
||||
self.assertIsNotNone(stream)
|
||||
event = StreamEvent.objects.filter(stream=stream).first()
|
||||
self.assertIsNotNone(event)
|
||||
self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
|
||||
event_payload = event.payload["events"][
|
||||
"https://schemas.openid.net/secevent/caep/event-type/session-revoked"
|
||||
]
|
||||
self.assertEqual(event_payload["initiating_entity"], "user")
|
||||
self.assertEqual(event.payload["sub_id"]["format"], "complex")
|
||||
self.assertEqual(event.payload["sub_id"]["session"]["format"], "opaque")
|
||||
self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
|
||||
self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)
|
||||
|
||||
def test_signal_password_change(self):
|
||||
"""Test user password change"""
|
||||
user = create_test_user()
|
||||
self.client.force_login(user)
|
||||
user.set_password(generate_id())
|
||||
user.save()
|
||||
|
||||
stream = Stream.objects.filter(provider=self.provider).first()
|
||||
self.assertIsNotNone(stream)
|
||||
event = StreamEvent.objects.filter(stream=stream).first()
|
||||
self.assertIsNotNone(event)
|
||||
self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
|
||||
event_payload = event.payload["events"][
|
||||
"https://schemas.openid.net/secevent/caep/event-type/credential-change"
|
||||
]
|
||||
self.assertEqual(event_payload["change_type"], "update")
|
||||
self.assertEqual(event_payload["credential_type"], "password")
|
||||
self.assertEqual(event.payload["sub_id"]["format"], "complex")
|
||||
self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
|
||||
self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)
|
||||
|
||||
def test_signal_authenticator_added(self):
|
||||
"""Test authenticator creation signal"""
|
||||
user = create_test_user()
|
||||
self.client.force_login(user)
|
||||
dev = WebAuthnDevice.objects.create(
|
||||
user=user,
|
||||
name=generate_id(),
|
||||
credential_id=generate_id(),
|
||||
public_key=generate_id(),
|
||||
aaguid=str(uuid4()),
|
||||
)
|
||||
|
||||
stream = Stream.objects.filter(provider=self.provider).first()
|
||||
self.assertIsNotNone(stream)
|
||||
event = StreamEvent.objects.filter(stream=stream).exclude().first()
|
||||
self.assertIsNotNone(event)
|
||||
self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
|
||||
event_payload = event.payload["events"][
|
||||
"https://schemas.openid.net/secevent/caep/event-type/credential-change"
|
||||
]
|
||||
self.assertEqual(event_payload["change_type"], "create")
|
||||
self.assertEqual(event_payload["fido2_aaguid"], dev.aaguid)
|
||||
self.assertEqual(event_payload["friendly_name"], dev.name)
|
||||
self.assertEqual(event_payload["credential_type"], "fido-u2f")
|
||||
self.assertEqual(event.payload["sub_id"]["format"], "complex")
|
||||
self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
|
||||
self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)
|
||||
|
||||
def test_signal_authenticator_deleted(self):
|
||||
"""Test authenticator deletion signal"""
|
||||
user = create_test_user()
|
||||
self.client.force_login(user)
|
||||
dev = WebAuthnDevice.objects.create(
|
||||
user=user,
|
||||
name=generate_id(),
|
||||
credential_id=generate_id(),
|
||||
public_key=generate_id(),
|
||||
aaguid=str(uuid4()),
|
||||
)
|
||||
dev.delete()
|
||||
|
||||
stream = Stream.objects.filter(provider=self.provider).first()
|
||||
self.assertIsNotNone(stream)
|
||||
event = StreamEvent.objects.filter(stream=stream).exclude().first()
|
||||
self.assertIsNotNone(event)
|
||||
self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
|
||||
event_payload = event.payload["events"][
|
||||
"https://schemas.openid.net/secevent/caep/event-type/credential-change"
|
||||
]
|
||||
self.assertEqual(event_payload["change_type"], "delete")
|
||||
self.assertEqual(event_payload["fido2_aaguid"], dev.aaguid)
|
||||
self.assertEqual(event_payload["friendly_name"], dev.name)
|
||||
self.assertEqual(event_payload["credential_type"], "fido-u2f")
|
||||
self.assertEqual(event.payload["sub_id"]["format"], "complex")
|
||||
self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
|
||||
self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)
|
||||
|
||||
def test_signal_policy_ignore(self):
|
||||
"""Test event not being created for user that doesn't have access to the application"""
|
||||
PolicyBinding.objects.create(
|
||||
target=self.application, group=Group.objects.create(name=generate_id()), order=0
|
||||
)
|
||||
user = create_test_user()
|
||||
self.client.force_login(user)
|
||||
user.set_password(generate_id())
|
||||
user.save()
|
||||
|
||||
stream = Stream.objects.filter(provider=self.provider).first()
|
||||
self.assertIsNotNone(stream)
|
||||
event = StreamEvent.objects.filter(
|
||||
stream=stream, type=EventTypes.CAEP_CREDENTIAL_CHANGE
|
||||
).first()
|
||||
self.assertIsNone(event)
|
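The assertions above pin down the CAEP payload shape; roughly, the session-revoked event queued on logout looks like the following. This is a hand-written illustration assembled from the assertions, with placeholder identifiers, not output captured from authentik:

# Hypothetical session-revoked payload, reconstructed from the assertions above
example_payload = {
    "sub_id": {
        "format": "complex",
        "session": {"format": "opaque", "id": "<session key>"},
        "user": {"format": "email", "email": "user@example.com"},
    },
    "events": {
        "https://schemas.openid.net/secevent/caep/event-type/session-revoked": {
            "initiating_entity": "user",
        },
    },
}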
154
authentik/enterprise/providers/ssf/tests/test_stream.py
Normal file
@ -0,0 +1,154 @@
|
||||
import json
|
||||
from dataclasses import asdict
|
||||
|
||||
from django.urls import reverse
|
||||
from django.utils import timezone
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.models import Application
|
||||
from authentik.core.tests.utils import create_test_admin_user, create_test_cert, create_test_flow
|
||||
from authentik.enterprise.providers.ssf.models import (
|
||||
SSFEventStatus,
|
||||
SSFProvider,
|
||||
Stream,
|
||||
StreamEvent,
|
||||
)
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.providers.oauth2.id_token import IDToken
|
||||
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider
|
||||
|
||||
|
||||
class TestStream(APITestCase):
|
||||
def setUp(self):
|
||||
self.application = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
self.provider = SSFProvider.objects.create(
|
||||
name=generate_id(),
|
||||
signing_key=create_test_cert(),
|
||||
backchannel_application=self.application,
|
||||
)
|
||||
|
||||
def test_stream_add_token(self):
|
||||
"""test stream add (token auth)"""
|
||||
res = self.client.post(
|
||||
reverse(
|
||||
"authentik_providers_ssf:stream",
|
||||
kwargs={"application_slug": self.application.slug},
|
||||
),
|
||||
data={
|
||||
"iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
|
||||
"aud": ["https://app.authentik.company"],
|
||||
"delivery": {
|
||||
"method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
|
||||
"endpoint_url": "https://app.authentik.company",
|
||||
},
|
||||
"events_requested": [
|
||||
"https://schemas.openid.net/secevent/caep/event-type/credential-change",
|
||||
"https://schemas.openid.net/secevent/caep/event-type/session-revoked",
|
||||
],
|
||||
"format": "iss_sub",
|
||||
},
|
||||
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
|
||||
)
|
||||
self.assertEqual(res.status_code, 201)
|
||||
stream = Stream.objects.filter(provider=self.provider).first()
|
||||
self.assertIsNotNone(stream)
|
||||
event = StreamEvent.objects.filter(stream=stream).first()
|
||||
self.assertIsNotNone(event)
|
||||
self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
|
||||
self.assertEqual(
|
||||
event.payload["events"],
|
||||
{"https://schemas.openid.net/secevent/ssf/event-type/verification": {"state": None}},
|
||||
)
|
||||
|
||||
def test_stream_add_poll(self):
|
||||
"""test stream add - poll method"""
|
||||
res = self.client.post(
|
||||
reverse(
|
||||
"authentik_providers_ssf:stream",
|
||||
kwargs={"application_slug": self.application.slug},
|
||||
),
|
||||
data={
|
||||
"iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
|
||||
"aud": ["https://app.authentik.company"],
|
||||
"delivery": {
|
||||
"method": "https://schemas.openid.net/secevent/risc/delivery-method/poll",
|
||||
},
|
||||
"events_requested": [
|
||||
"https://schemas.openid.net/secevent/caep/event-type/credential-change",
|
||||
"https://schemas.openid.net/secevent/caep/event-type/session-revoked",
|
||||
],
|
||||
"format": "iss_sub",
|
||||
},
|
||||
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
|
||||
)
|
||||
self.assertEqual(res.status_code, 400)
|
||||
self.assertJSONEqual(
|
||||
res.content,
|
||||
{"delivery": {"method": ["Polling for SSF events is not currently supported."]}},
|
||||
)
|
||||
|
||||
def test_stream_add_oidc(self):
|
||||
"""test stream add (oidc auth)"""
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name=generate_id(),
|
||||
authorization_flow=create_test_flow(),
|
||||
)
|
||||
self.application.provider = provider
|
||||
self.application.save()
|
||||
user = create_test_admin_user()
|
||||
token = AccessToken.objects.create(
|
||||
provider=provider,
|
||||
user=user,
|
||||
token=generate_id(),
|
||||
auth_time=timezone.now(),
|
||||
_scope="openid user profile",
|
||||
_id_token=json.dumps(
|
||||
asdict(
|
||||
IDToken("foo", "bar"),
|
||||
)
|
||||
),
|
||||
)
|
||||
|
||||
res = self.client.post(
|
||||
reverse(
|
||||
"authentik_providers_ssf:stream",
|
||||
kwargs={"application_slug": self.application.slug},
|
||||
),
|
||||
data={
|
||||
"iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
|
||||
"aud": ["https://app.authentik.company"],
|
||||
"delivery": {
|
||||
"method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
|
||||
"endpoint_url": "https://app.authentik.company",
|
||||
},
|
||||
"events_requested": [
|
||||
"https://schemas.openid.net/secevent/caep/event-type/credential-change",
|
||||
"https://schemas.openid.net/secevent/caep/event-type/session-revoked",
|
||||
],
|
||||
"format": "iss_sub",
|
||||
},
|
||||
HTTP_AUTHORIZATION=f"Bearer {token.token}",
|
||||
)
|
||||
self.assertEqual(res.status_code, 201)
|
||||
stream = Stream.objects.filter(provider=self.provider).first()
|
||||
self.assertIsNotNone(stream)
|
||||
event = StreamEvent.objects.filter(stream=stream).first()
|
||||
self.assertIsNotNone(event)
|
||||
self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
|
||||
self.assertEqual(
|
||||
event.payload["events"],
|
||||
{"https://schemas.openid.net/secevent/ssf/event-type/verification": {"state": None}},
|
||||
)
|
||||
|
||||
def test_stream_delete(self):
|
||||
"""delete stream"""
|
||||
stream = Stream.objects.create(provider=self.provider)
|
||||
res = self.client.delete(
|
||||
reverse(
|
||||
"authentik_providers_ssf:stream",
|
||||
kwargs={"application_slug": self.application.slug},
|
||||
),
|
||||
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
|
||||
)
|
||||
self.assertEqual(res.status_code, 204)
|
||||
self.assertFalse(Stream.objects.filter(pk=stream.pk).exists())
|
32
authentik/enterprise/providers/ssf/urls.py
Normal file
@ -0,0 +1,32 @@
"""SSF provider URLs"""

from django.urls import path

from authentik.enterprise.providers.ssf.api.providers import SSFProviderViewSet
from authentik.enterprise.providers.ssf.api.streams import SSFStreamViewSet
from authentik.enterprise.providers.ssf.views.configuration import ConfigurationView
from authentik.enterprise.providers.ssf.views.jwks import JWKSview
from authentik.enterprise.providers.ssf.views.stream import StreamView

urlpatterns = [
    path(
        "application/ssf/<slug:application_slug>/ssf-jwks/",
        JWKSview.as_view(),
        name="jwks",
    ),
    path(
        ".well-known/ssf-configuration/<slug:application_slug>",
        ConfigurationView.as_view(),
        name="configuration",
    ),
    path(
        "application/ssf/<slug:application_slug>/stream/",
        StreamView.as_view(),
        name="stream",
    ),
]

api_urlpatterns = [
    ("providers/ssf", SSFProviderViewSet),
    ("ssf/streams", SSFStreamViewSet),
]
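For an application with the slug my-app (a made-up example), the three routes above reverse to paths along these lines; the exact prefix depends on where this URL conf is mounted:

from django.urls import reverse

slug = "my-app"  # assumed example slug
reverse("authentik_providers_ssf:jwks", kwargs={"application_slug": slug})
# e.g. "/application/ssf/my-app/ssf-jwks/"
reverse("authentik_providers_ssf:configuration", kwargs={"application_slug": slug})
# e.g. "/.well-known/ssf-configuration/my-app"
reverse("authentik_providers_ssf:stream", kwargs={"application_slug": slug})
# e.g. "/application/ssf/my-app/stream/"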
66
authentik/enterprise/providers/ssf/views/auth.py
Normal file
@ -0,0 +1,66 @@
"""SSF Token auth"""

from typing import TYPE_CHECKING, Any

from django.db.models import Q
from rest_framework.authentication import BaseAuthentication, get_authorization_header
from rest_framework.request import Request

from authentik.core.models import Token, TokenIntents, User
from authentik.enterprise.providers.ssf.models import SSFProvider
from authentik.providers.oauth2.models import AccessToken

if TYPE_CHECKING:
    from authentik.enterprise.providers.ssf.views.base import SSFView


class SSFTokenAuth(BaseAuthentication):
    """SSF Token auth"""

    view: "SSFView"

    def __init__(self, view: "SSFView") -> None:
        super().__init__()
        self.view = view

    def check_token(self, key: str) -> Token | None:
        """Check that a token exists, is not expired, and is assigned to the correct provider"""
        token = Token.filter_not_expired(key=key, intent=TokenIntents.INTENT_API).first()
        if not token:
            return None
        provider: SSFProvider = token.ssfprovider_set.first()
        if not provider:
            return None
        self.view.application = provider.backchannel_application
        self.view.provider = provider
        return token

    def check_jwt(self, jwt: str) -> AccessToken | None:
        """Check JWT-based authentication. This supports tokens issued either by OIDC providers
        configured directly on the SSF provider, or by the provider of the application that the
        SSF provider is a backchannel provider of."""
        token = AccessToken.filter_not_expired(token=jwt, revoked=False).first()
        if not token:
            return None
        ssf_provider = SSFProvider.objects.filter(
            Q(oidc_auth_providers__in=[token.provider])
            | Q(backchannel_application__provider__in=[token.provider]),
        ).first()
        if not ssf_provider:
            return None
        self.view.application = ssf_provider.backchannel_application
        self.view.provider = ssf_provider
        return token

    def authenticate(self, request: Request) -> tuple[User, Any] | None:
        auth = get_authorization_header(request).decode()
        auth_type, _, key = auth.partition(" ")
        if auth_type != "Bearer":
            return None
        token = self.check_token(key)
        if token:
            return (token.user, token)
        jwt_token = self.check_jwt(key)
        if jwt_token:
            return (jwt_token.user, jwt_token)
        return None
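Both branches of authenticate() consume a plain Bearer header; a minimal sketch of what the view accepts (all values are placeholders):

# Either header form is accepted by SSFTokenAuth:
#   Authorization: Bearer <authentik API token linked to the SSF provider>
#   Authorization: Bearer <OAuth2 access token from a linked OIDC provider>
auth_header = "Bearer <token>"  # placeholder
auth_type, _, key = auth_header.partition(" ")
assert auth_type == "Bearer"
# `key` is first tried as an API token (check_token), then as a JWT (check_jwt)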
23
authentik/enterprise/providers/ssf/views/base.py
Normal file
@ -0,0 +1,23 @@
from django.http import HttpRequest
from rest_framework.permissions import IsAuthenticated
from rest_framework.views import APIView
from structlog.stdlib import BoundLogger, get_logger

from authentik.core.models import Application
from authentik.enterprise.providers.ssf.models import SSFProvider
from authentik.enterprise.providers.ssf.views.auth import SSFTokenAuth


class SSFView(APIView):
    application: Application
    provider: SSFProvider
    logger: BoundLogger

    permission_classes = [IsAuthenticated]

    def setup(self, request: HttpRequest, *args, **kwargs) -> None:
        self.logger = get_logger().bind()
        super().setup(request, *args, **kwargs)

    def get_authenticators(self):
        return [SSFTokenAuth(self)]
55
authentik/enterprise/providers/ssf/views/configuration.py
Normal file
@ -0,0 +1,55 @@
from django.http import Http404, HttpRequest, HttpResponse, JsonResponse
from django.shortcuts import get_object_or_404
from django.urls import reverse
from rest_framework.permissions import AllowAny

from authentik.core.models import Application
from authentik.enterprise.providers.ssf.models import DeliveryMethods, SSFProvider
from authentik.enterprise.providers.ssf.views.base import SSFView


class ConfigurationView(SSFView):
    """SSF configuration endpoint"""

    permission_classes = [AllowAny]

    def get_authenticators(self):
        return []

    def get(self, request: HttpRequest, application_slug: str, *args, **kwargs) -> HttpResponse:
        application = get_object_or_404(Application, slug=application_slug)
        provider = application.backchannel_provider_for(SSFProvider)
        if not provider:
            raise Http404
        data = {
            "spec_version": "1_0-ID2",
            "issuer": self.request.build_absolute_uri(
                reverse(
                    "authentik_providers_ssf:configuration",
                    kwargs={
                        "application_slug": application.slug,
                    },
                )
            ),
            "jwks_uri": self.request.build_absolute_uri(
                reverse(
                    "authentik_providers_ssf:jwks",
                    kwargs={
                        "application_slug": application.slug,
                    },
                )
            ),
            "configuration_endpoint": self.request.build_absolute_uri(
                reverse(
                    "authentik_providers_ssf:stream",
                    kwargs={
                        "application_slug": application.slug,
                    },
                )
            ),
            "delivery_methods_supported": [
                DeliveryMethods.RISC_PUSH,
            ],
            "authorization_schemes": [{"spec_urn": "urn:ietf:rfc:6749"}],
        }
        return JsonResponse(data)
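For an application with slug my-app served at authentik.company (both made up here), the metadata document built above would look roughly like this:

ssf_configuration = {
    "spec_version": "1_0-ID2",
    "issuer": "https://authentik.company/.well-known/ssf-configuration/my-app",
    "jwks_uri": "https://authentik.company/application/ssf/my-app/ssf-jwks/",
    "configuration_endpoint": "https://authentik.company/application/ssf/my-app/stream/",
    "delivery_methods_supported": [
        "https://schemas.openid.net/secevent/risc/delivery-method/push",
    ],
    "authorization_schemes": [{"spec_urn": "urn:ietf:rfc:6749"}],
}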
31
authentik/enterprise/providers/ssf/views/jwks.py
Normal file
@ -0,0 +1,31 @@
from django.http import Http404, HttpRequest, HttpResponse, JsonResponse
from django.shortcuts import get_object_or_404
from django.views import View

from authentik.core.models import Application
from authentik.crypto.models import CertificateKeyPair
from authentik.enterprise.providers.ssf.models import SSFProvider
from authentik.providers.oauth2.views.jwks import JWKSView as OAuthJWKSView


class JWKSview(View):
    """SSF JWKS endpoint, similar to the OAuth2 provider's endpoint"""

    def get(self, request: HttpRequest, application_slug: str) -> HttpResponse:
        """Show JWK Key data for Provider"""
        application = get_object_or_404(Application, slug=application_slug)
        provider = application.backchannel_provider_for(SSFProvider)
        if not provider:
            raise Http404
        signing_key: CertificateKeyPair = provider.signing_key

        response_data = {}

        jwk = OAuthJWKSView.get_jwk_for_key(signing_key, "sig")
        if jwk:
            response_data["keys"] = [jwk]

        response = JsonResponse(response_data)
        response["Access-Control-Allow-Origin"] = "*"

        return response
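A receiver can verify pushed SETs against this endpoint; a minimal PyJWT sketch, assuming the SET carries a kid header and the stream's aud matches (URLs and the token variable are placeholders):

import jwt  # PyJWT

jwks_url = "https://authentik.company/application/ssf/my-app/ssf-jwks/"  # placeholder
set_token = "<body of the application/secevent+jwt push>"  # placeholder

signing_key = jwt.PyJWKClient(jwks_url).get_signing_key_from_jwt(set_token)
claims = jwt.decode(
    set_token,
    signing_key.key,
    algorithms=["RS256", "ES256"],
    audience="https://app.authentik.company",  # must match the stream's aud
)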
130
authentik/enterprise/providers/ssf/views/stream.py
Normal file
@ -0,0 +1,130 @@
from django.http import HttpRequest
from django.urls import reverse
from rest_framework.exceptions import PermissionDenied, ValidationError
from rest_framework.fields import CharField, ChoiceField, ListField, SerializerMethodField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ModelSerializer
from structlog.stdlib import get_logger

from authentik.core.api.utils import PassiveSerializer
from authentik.enterprise.providers.ssf.models import (
    DeliveryMethods,
    EventTypes,
    SSFProvider,
    Stream,
)
from authentik.enterprise.providers.ssf.tasks import send_ssf_event
from authentik.enterprise.providers.ssf.views.base import SSFView

LOGGER = get_logger()


class StreamDeliverySerializer(PassiveSerializer):
    method = ChoiceField(choices=[(x.value, x.value) for x in DeliveryMethods])
    endpoint_url = CharField(required=False)

    def validate_method(self, method: DeliveryMethods):
        """Currently only push is supported"""
        if method == DeliveryMethods.RISC_POLL:
            raise ValidationError("Polling for SSF events is not currently supported.")
        return method

    def validate(self, attrs: dict) -> dict:
        if attrs["method"] == DeliveryMethods.RISC_PUSH:
            if not attrs.get("endpoint_url"):
                raise ValidationError("Endpoint URL is required when using push.")
        return attrs


class StreamSerializer(ModelSerializer):
    delivery = StreamDeliverySerializer()
    events_requested = ListField(
        child=ChoiceField(choices=[(x.value, x.value) for x in EventTypes])
    )
    format = CharField()
    aud = ListField(child=CharField())

    def create(self, validated_data):
        provider: SSFProvider = validated_data["provider"]
        request: HttpRequest = self.context["request"]
        iss = request.build_absolute_uri(
            reverse(
                "authentik_providers_ssf:configuration",
                kwargs={
                    "application_slug": provider.backchannel_application.slug,
                },
            )
        )
        # Ensure that streams always get SET verification events sent to them
        validated_data["events_requested"].append(EventTypes.SET_VERIFICATION)
        return super().create(
            {
                "delivery_method": validated_data["delivery"]["method"],
                "endpoint_url": validated_data["delivery"].get("endpoint_url"),
                "format": validated_data["format"],
                "provider": validated_data["provider"],
                "events_requested": validated_data["events_requested"],
                "aud": validated_data["aud"],
                "iss": iss,
            }
        )

    class Meta:
        model = Stream
        fields = [
            "delivery",
            "events_requested",
            "format",
            "aud",
        ]


class StreamResponseSerializer(PassiveSerializer):
    stream_id = CharField(source="pk")
    iss = CharField()
    aud = ListField(child=CharField())
    delivery = SerializerMethodField()
    format = CharField()

    events_requested = ListField(child=CharField())
    events_supported = SerializerMethodField()
    events_delivered = ListField(child=CharField(), source="events_requested")

    def get_delivery(self, instance: Stream) -> StreamDeliverySerializer:
        return {
            "method": instance.delivery_method,
            "endpoint_url": instance.endpoint_url,
        }

    def get_events_supported(self, instance: Stream) -> list[str]:
        return [x.value for x in EventTypes]


class StreamView(SSFView):
    def post(self, request: Request, *args, **kwargs) -> Response:
        stream = StreamSerializer(data=request.data, context={"request": request})
        stream.is_valid(raise_exception=True)
        if not request.user.has_perm("authentik_providers_ssf.add_stream", self.provider):
            raise PermissionDenied(
                "User does not have permission to create stream for this provider."
            )
        instance: Stream = stream.save(provider=self.provider)
        send_ssf_event(
            EventTypes.SET_VERIFICATION,
            {
                "state": None,
            },
            stream_filter={"pk": instance.uuid},
            sub_id={"format": "opaque", "id": str(instance.uuid)},
        )
        response = StreamResponseSerializer(instance=instance, context={"request": request}).data
        return Response(response, status=201)

    def delete(self, request: Request, *args, **kwargs) -> Response:
        streams = Stream.objects.filter(provider=self.provider)
        # Technically this parameter is required by the spec...
        if "stream_id" in request.query_params:
            streams = streams.filter(stream_id=request.query_params["stream_id"])
        streams.delete()
        return Response(status=204)
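End to end, a receiver would create a stream against the view above roughly like this; host, slug and token are placeholders, and the body mirrors the request payloads used in the tests:

import requests

res = requests.post(
    "https://authentik.company/application/ssf/my-app/stream/",  # placeholder
    headers={"Authorization": "Bearer <token>"},
    json={
        "delivery": {
            "method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
            "endpoint_url": "https://app.authentik.company",
        },
        "events_requested": [
            "https://schemas.openid.net/secevent/caep/event-type/credential-change",
            "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
        ],
        "format": "iss_sub",
        "aud": ["https://app.authentik.company"],
    },
    timeout=10,
)
assert res.status_code == 201  # a SET verification event is queued for the new stream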
@ -17,6 +17,7 @@ TENANT_APPS = [
|
||||
"authentik.enterprise.providers.google_workspace",
|
||||
"authentik.enterprise.providers.microsoft_entra",
|
||||
"authentik.enterprise.providers.rac",
|
||||
"authentik.enterprise.providers.ssf",
|
||||
"authentik.enterprise.stages.authenticator_endpoint_gdtc",
|
||||
"authentik.enterprise.stages.source",
|
||||
]
|
||||
|
@ -53,12 +53,13 @@ class SystemTask(TenantTask):
|
||||
if not isinstance(msg, LogEvent):
|
||||
self._messages[idx] = LogEvent(msg, logger=self.__name__, log_level="info")
|
||||
|
||||
def set_error(self, exception: Exception):
|
||||
def set_error(self, exception: Exception, *messages: LogEvent):
|
||||
"""Set result to error and save exception"""
|
||||
self._status = TaskStatus.ERROR
|
||||
self._messages = [
|
||||
LogEvent(exception_to_string(exception), logger=self.__name__, log_level="error")
|
||||
]
|
||||
self._messages = list(messages)
|
||||
self._messages.extend(
|
||||
[LogEvent(exception_to_string(exception), logger=self.__name__, log_level="error")]
|
||||
)
|
||||
|
||||
def before_start(self, task_id, args, kwargs):
|
||||
self._start_precise = perf_counter()
|
||||
|
@ -3,6 +3,7 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from django.contrib.messages import INFO, add_message
|
||||
from django.http.request import HttpRequest
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
@ -61,6 +62,8 @@ class ReevaluateMarker(StageMarker):
|
||||
engine.request.context.update(plan.context)
|
||||
engine.build()
|
||||
result = engine.result
|
||||
for message in result.messages:
|
||||
add_message(http_request, INFO, message)
|
||||
if result.passing:
|
||||
return binding
|
||||
LOGGER.warning(
|
||||
|
@ -109,6 +109,8 @@ class FlowPlan:
|
||||
|
||||
def pop(self):
|
||||
"""Pop next pending stage from bottom of list"""
|
||||
if not self.markers and not self.bindings:
|
||||
return
|
||||
self.markers.pop(0)
|
||||
self.bindings.pop(0)
|
||||
|
||||
@ -156,8 +158,13 @@ class FlowPlan:
|
||||
final_stage: type[StageView] = self.bindings[-1].stage.view
|
||||
temp_exec = FlowExecutorView(flow=flow, request=request, plan=self)
|
||||
temp_exec.current_stage = self.bindings[-1].stage
|
||||
temp_exec.current_stage_view = final_stage
|
||||
temp_exec.setup(request, flow.slug)
|
||||
stage = final_stage(request=request, executor=temp_exec)
|
||||
return stage.dispatch(request)
|
||||
response = stage.dispatch(request)
|
||||
# Ensure we clean the flow state we have in the session before we redirect away
|
||||
temp_exec.stage_ok()
|
||||
return response
|
||||
|
||||
get_qs = request.GET.copy()
|
||||
if request.user.is_authenticated and (
|
||||
|
@ -103,7 +103,7 @@ class FlowExecutorView(APIView):
|
||||
|
||||
permission_classes = [AllowAny]
|
||||
|
||||
flow: Flow
|
||||
flow: Flow = None
|
||||
|
||||
plan: FlowPlan | None = None
|
||||
current_binding: FlowStageBinding | None = None
|
||||
@ -114,7 +114,8 @@ class FlowExecutorView(APIView):
|
||||
|
||||
def setup(self, request: HttpRequest, flow_slug: str):
|
||||
super().setup(request, flow_slug=flow_slug)
|
||||
self.flow = get_object_or_404(Flow.objects.select_related(), slug=flow_slug)
|
||||
if not self.flow:
|
||||
self.flow = get_object_or_404(Flow.objects.select_related(), slug=flow_slug)
|
||||
self._logger = get_logger().bind(flow_slug=flow_slug)
|
||||
set_tag("authentik.flow", self.flow.slug)
|
||||
|
||||
|
@ -283,12 +283,15 @@ class ConfigLoader:
|
||||
def get_optional_int(self, path: str, default=None) -> int | None:
|
||||
"""Wrapper for get that converts value into int or None if set"""
|
||||
value = self.get(path, default)
|
||||
|
||||
if value is UNSET:
|
||||
return default
|
||||
try:
|
||||
return int(value)
|
||||
except (ValueError, TypeError) as exc:
|
||||
if value is None or (isinstance(value, str) and value.lower() == "null"):
|
||||
return None
|
||||
return default
|
||||
if value is UNSET:
|
||||
return default
|
||||
self.log("warning", "Failed to parse config as int", path=path, exc=str(exc))
|
||||
return default
|
||||
|
||||
@ -421,4 +424,4 @@ if __name__ == "__main__":
|
||||
if len(argv) < 2: # noqa: PLR2004
|
||||
print(dumps(CONFIG.raw, indent=4, cls=AttrEncoder))
|
||||
else:
|
||||
print(CONFIG.get(argv[1]))
|
||||
print(CONFIG.get(argv[-1]))
|
||||
|
26
authentik/lib/debug.py
Normal file
@ -0,0 +1,26 @@
from structlog.stdlib import get_logger

from authentik.lib.config import CONFIG

LOGGER = get_logger()


def start_debug_server(**kwargs) -> bool:
    """Attempt to start a debugpy server in the current process.
    Returns true if the server was started successfully, otherwise false"""
    if not CONFIG.get_bool("debug") and not CONFIG.get_bool("debugger"):
        return False
    try:
        import debugpy
    except ImportError:
        LOGGER.warning(
            "Failed to import debugpy. debugpy is not included "
            "in the default release dependencies and must be installed manually"
        )
        return False

    listen: str = CONFIG.get("listen.listen_debug_py", "127.0.0.1:9901")
    host, _, port = listen.rpartition(":")
    debugpy.listen((host, int(port)), **kwargs)  # nosec
    LOGGER.debug("Starting debug server", host=host, port=port)
    return True
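A minimal sketch of how the helper would be used; mapping the debugger flag to an AUTHENTIK_DEBUGGER environment variable follows authentik's usual config convention and is an assumption here:

# e.g. in a server/worker entrypoint, with AUTHENTIK_DEBUGGER=true set in the environment
from authentik.lib.debug import start_debug_server

if start_debug_server():
    # debugpy is now listening on listen.listen_debug_py (default 127.0.0.1:9901)
    pass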
@ -8,6 +8,7 @@ postgresql:
|
||||
password: "env://POSTGRES_PASSWORD"
|
||||
test:
|
||||
name: test_authentik
|
||||
default_schema: public
|
||||
read_replicas: {}
|
||||
# For example
|
||||
# 0:
|
||||
@ -21,6 +22,7 @@ listen:
|
||||
listen_radius: 0.0.0.0:1812
|
||||
listen_metrics: 0.0.0.0:9300
|
||||
listen_debug: 0.0.0.0:9900
|
||||
listen_debug_py: 0.0.0.0:9901
|
||||
trusted_proxy_cidrs:
|
||||
- 127.0.0.0/8
|
||||
- 10.0.0.0/8
|
||||
@ -57,7 +59,7 @@ cache:
|
||||
# transport_options: ""
|
||||
|
||||
debug: false
|
||||
remote_debug: false
|
||||
debugger: false
|
||||
|
||||
log_level: info
|
||||
|
||||
|
@ -22,9 +22,9 @@ class OutgoingSyncProvider(Model):
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
def client_for_model[
|
||||
T: User | Group
|
||||
](self, model: type[T]) -> BaseOutgoingSyncClient[T, Any, Any, Self]:
|
||||
def client_for_model[T: User | Group](
|
||||
self, model: type[T]
|
||||
) -> BaseOutgoingSyncClient[T, Any, Any, Self]:
|
||||
raise NotImplementedError
|
||||
|
||||
def get_object_qs[T: User | Group](self, type: type[T]) -> QuerySet[T]:
|
||||
|
@ -42,6 +42,8 @@ class DebugSession(Session):
|
||||
|
||||
def get_http_session() -> Session:
|
||||
"""Get a requests session with common headers"""
|
||||
session = DebugSession() if CONFIG.get_bool("debug") else Session()
|
||||
session = Session()
|
||||
if CONFIG.get_bool("debug") or CONFIG.get("log_level") == "trace":
|
||||
session = DebugSession()
|
||||
session.headers["User-Agent"] = authentik_user_agent()
|
||||
return session
|
||||
|
@ -1,11 +1,26 @@
|
||||
"""Expression Policy API"""
|
||||
|
||||
from drf_spectacular.utils import OpenApiResponse, extend_schema
|
||||
from guardian.shortcuts import get_objects_for_user
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.fields import CharField
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.events.logs import LogEventSerializer, capture_logs
|
||||
from authentik.policies.api.exec import PolicyTestResultSerializer, PolicyTestSerializer
|
||||
from authentik.policies.api.policies import PolicySerializer
|
||||
from authentik.policies.expression.evaluator import PolicyEvaluator
|
||||
from authentik.policies.expression.models import ExpressionPolicy
|
||||
from authentik.policies.models import PolicyBinding
|
||||
from authentik.policies.process import PolicyProcess
|
||||
from authentik.policies.types import PolicyRequest
|
||||
from authentik.rbac.decorators import permission_required
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class ExpressionPolicySerializer(PolicySerializer):
|
||||
@ -30,3 +45,50 @@ class ExpressionPolicyViewSet(UsedByMixin, ModelViewSet):
|
||||
filterset_fields = "__all__"
|
||||
ordering = ["name"]
|
||||
search_fields = ["name"]
|
||||
|
||||
class ExpressionPolicyTestSerializer(PolicyTestSerializer):
|
||||
"""Expression policy test serializer"""
|
||||
|
||||
expression = CharField()
|
||||
|
||||
@permission_required("authentik_policies.view_policy")
|
||||
@extend_schema(
|
||||
request=ExpressionPolicyTestSerializer(),
|
||||
responses={
|
||||
200: PolicyTestResultSerializer(),
|
||||
400: OpenApiResponse(description="Invalid parameters"),
|
||||
},
|
||||
)
|
||||
@action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
|
||||
def test(self, request: Request, pk: str) -> Response:
|
||||
"""Test policy"""
|
||||
policy = self.get_object()
|
||||
test_params = self.ExpressionPolicyTestSerializer(data=request.data)
|
||||
if not test_params.is_valid():
|
||||
return Response(test_params.errors, status=400)
|
||||
|
||||
# User permission check, only allow policy testing for users that are readable
|
||||
users = get_objects_for_user(request.user, "authentik_core.view_user").filter(
|
||||
pk=test_params.validated_data["user"].pk
|
||||
)
|
||||
if not users.exists():
|
||||
return Response(status=400)
|
||||
|
||||
policy.expression = test_params.validated_data["expression"]
|
||||
|
||||
p_request = PolicyRequest(users.first())
|
||||
p_request.debug = True
|
||||
p_request.set_http_request(self.request)
|
||||
p_request.context = test_params.validated_data.get("context", {})
|
||||
|
||||
proc = PolicyProcess(PolicyBinding(policy=policy), p_request, None)
|
||||
with capture_logs() as logs:
|
||||
result = proc.execute()
|
||||
log_messages = []
|
||||
for log in logs:
|
||||
if log.attributes.get("process", "") == "PolicyProcess":
|
||||
continue
|
||||
log_messages.append(LogEventSerializer(log).data)
|
||||
result.log_messages = log_messages
|
||||
response = PolicyTestResultSerializer(result)
|
||||
return Response(response.data)
|
||||
|
@ -281,7 +281,6 @@ class OAuth2Provider(WebfingerProvider, Provider):
|
||||
},
|
||||
)
|
||||
return request.build_absolute_uri(url)
|
||||
|
||||
except Provider.application.RelatedObjectDoesNotExist:
|
||||
return None
|
||||
|
||||
|
@ -1,9 +1,10 @@
|
||||
from django.contrib.auth.signals import user_logged_out
|
||||
from django.db.models.signals import post_save
|
||||
from django.dispatch import receiver
|
||||
from django.http import HttpRequest
|
||||
|
||||
from authentik.core.models import User
|
||||
from authentik.providers.oauth2.models import AccessToken
|
||||
from authentik.providers.oauth2.models import AccessToken, DeviceToken, RefreshToken
|
||||
|
||||
|
||||
@receiver(user_logged_out)
|
||||
@ -12,3 +13,13 @@ def user_logged_out_oauth_access_token(sender, request: HttpRequest, user: User,
|
||||
if not request.session or not request.session.session_key:
|
||||
return
|
||||
AccessToken.objects.filter(user=user, session__session_key=request.session.session_key).delete()
|
||||
|
||||
|
||||
@receiver(post_save, sender=User)
|
||||
def user_deactivated(sender, instance: User, **_):
|
||||
"""Remove user tokens when deactivated"""
|
||||
if instance.is_active:
|
||||
return
|
||||
AccessToken.objects.filter(session__user=instance).delete()
|
||||
RefreshToken.objects.filter(session__user=instance).delete()
|
||||
DeviceToken.objects.filter(session__user=instance).delete()
|
||||
|
@ -150,6 +150,7 @@ class TestToken(OAuthTestCase):
|
||||
"id_token": provider.encode(
|
||||
access.id_token.to_dict(),
|
||||
),
|
||||
"scope": "",
|
||||
},
|
||||
)
|
||||
self.validate_jwt(access, provider)
|
||||
@ -242,6 +243,7 @@ class TestToken(OAuthTestCase):
|
||||
"id_token": provider.encode(
|
||||
access.id_token.to_dict(),
|
||||
),
|
||||
"scope": "offline_access",
|
||||
},
|
||||
)
|
||||
self.validate_jwt(access, provider)
|
||||
@ -301,6 +303,7 @@ class TestToken(OAuthTestCase):
|
||||
"id_token": provider.encode(
|
||||
access.id_token.to_dict(),
|
||||
),
|
||||
"scope": "offline_access",
|
||||
},
|
||||
)
|
||||
|
||||
|
@ -499,11 +499,11 @@ class OAuthFulfillmentStage(StageView):
|
||||
)
|
||||
|
||||
challenge.is_valid()
|
||||
|
||||
self.executor.stage_ok()
|
||||
return HttpChallengeResponse(
|
||||
challenge=challenge,
|
||||
)
|
||||
|
||||
self.executor.stage_ok()
|
||||
return HttpResponseRedirectScheme(uri, allowed_schemes=[parsed.scheme])
|
||||
|
||||
def post(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
|
||||
|
@ -64,7 +64,8 @@ def to_base64url_uint(val: int, min_length: int = 0) -> bytes:
|
||||
class JWKSView(View):
|
||||
"""Show RSA Key data for Provider"""
|
||||
|
||||
def get_jwk_for_key(self, key: CertificateKeyPair, use: str) -> dict | None:
|
||||
@staticmethod
|
||||
def get_jwk_for_key(key: CertificateKeyPair, use: str) -> dict | None:
|
||||
"""Convert a certificate-key pair into JWK"""
|
||||
private_key = key.private_key
|
||||
key_data = None
|
||||
@ -123,12 +124,12 @@ class JWKSView(View):
|
||||
response_data = {}
|
||||
|
||||
if signing_key := provider.signing_key:
|
||||
jwk = self.get_jwk_for_key(signing_key, "sig")
|
||||
jwk = JWKSView.get_jwk_for_key(signing_key, "sig")
|
||||
if jwk:
|
||||
response_data.setdefault("keys", [])
|
||||
response_data["keys"].append(jwk)
|
||||
if encryption_key := provider.encryption_key:
|
||||
jwk = self.get_jwk_for_key(encryption_key, "enc")
|
||||
jwk = JWKSView.get_jwk_for_key(encryption_key, "enc")
|
||||
if jwk:
|
||||
response_data.setdefault("keys", [])
|
||||
response_data["keys"].append(jwk)
|
||||
|
@ -627,6 +627,7 @@ class TokenView(View):
|
||||
response = {
|
||||
"access_token": access_token.token,
|
||||
"token_type": TOKEN_TYPE,
|
||||
"scope": " ".join(access_token.scope),
|
||||
"expires_in": int(
|
||||
timedelta_from_string(self.provider.access_token_validity).total_seconds()
|
||||
),
|
||||
@ -710,6 +711,7 @@ class TokenView(View):
|
||||
"access_token": access_token.token,
|
||||
"refresh_token": refresh_token.token,
|
||||
"token_type": TOKEN_TYPE,
|
||||
"scope": " ".join(access_token.scope),
|
||||
"expires_in": int(
|
||||
timedelta_from_string(self.provider.access_token_validity).total_seconds()
|
||||
),
|
||||
@ -736,6 +738,7 @@ class TokenView(View):
|
||||
return {
|
||||
"access_token": access_token.token,
|
||||
"token_type": TOKEN_TYPE,
|
||||
"scope": " ".join(access_token.scope),
|
||||
"expires_in": int(
|
||||
timedelta_from_string(self.provider.access_token_validity).total_seconds()
|
||||
),
|
||||
@ -767,6 +770,7 @@ class TokenView(View):
|
||||
response = {
|
||||
"access_token": access_token.token,
|
||||
"token_type": TOKEN_TYPE,
|
||||
"scope": " ".join(access_token.scope),
|
||||
"expires_in": int(
|
||||
timedelta_from_string(self.provider.access_token_validity).total_seconds()
|
||||
),
|
||||
|
@ -2,7 +2,7 @@
|
||||
|
||||
from django.apps import apps
|
||||
from django.contrib.auth.models import Permission
|
||||
from django.db.models import QuerySet
|
||||
from django.db.models import Q, QuerySet
|
||||
from django_filters.filters import ModelChoiceFilter
|
||||
from django_filters.filterset import FilterSet
|
||||
from django_filters.rest_framework import DjangoFilterBackend
|
||||
@ -18,6 +18,7 @@ from rest_framework.filters import OrderingFilter, SearchFilter
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.viewsets import ReadOnlyModelViewSet
|
||||
|
||||
from authentik.blueprints.v1.importer import excluded_models
|
||||
from authentik.core.api.utils import ModelSerializer, PassiveSerializer
|
||||
from authentik.core.models import User
|
||||
from authentik.lib.validators import RequiredTogetherValidator
|
||||
@ -105,13 +106,13 @@ class RBACPermissionViewSet(ReadOnlyModelViewSet):
|
||||
]
|
||||
|
||||
def get_queryset(self) -> QuerySet:
|
||||
return (
|
||||
Permission.objects.all()
|
||||
.select_related("content_type")
|
||||
.filter(
|
||||
content_type__app_label__startswith="authentik",
|
||||
query = Q()
|
||||
for model in excluded_models():
|
||||
query |= Q(
|
||||
content_type__app_label=model._meta.app_label,
|
||||
content_type__model=model._meta.model_name,
|
||||
)
|
||||
)
|
||||
return Permission.objects.all().select_related("content_type").exclude(query)
|
||||
|
||||
|
||||
class PermissionAssignSerializer(PassiveSerializer):
|
||||
|
@ -7,7 +7,12 @@ from psycopg import connect
|
||||
|
||||
from authentik.lib.config import CONFIG
|
||||
|
||||
QUERY = """SELECT id FROM public.authentik_install_id ORDER BY id LIMIT 1;"""
|
||||
# We need to string format the query as tables and schemas can't be set by parameters
|
||||
# not a security issue as the config value is set by the person installing authentik
|
||||
# which also has postgres credentials etc
|
||||
QUERY = """SELECT id FROM {}.authentik_install_id ORDER BY id LIMIT 1;""".format( # nosec
|
||||
CONFIG.get("postgresql.default_schema")
|
||||
)
|
||||
|
||||
|
||||
@lru_cache
|
||||
|
@ -129,6 +129,7 @@ TENANT_DOMAIN_MODEL = "authentik_tenants.Domain"
|
||||
|
||||
TENANT_CREATION_FAKES_MIGRATIONS = True
|
||||
TENANT_BASE_SCHEMA = "template"
|
||||
PUBLIC_SCHEMA_NAME = CONFIG.get("postgresql.default_schema")
|
||||
|
||||
GUARDIAN_MONKEY_PATCH = False
|
||||
|
||||
|
@ -1,3 +1,4 @@
|
||||
import math
|
||||
from os import environ
|
||||
from ssl import OPENSSL_VERSION
|
||||
|
||||
@ -24,3 +25,20 @@ def pytest_report_header(*_, **__):
|
||||
f"authentik version: {get_full_version()}",
|
||||
f"OpenSSL version: {OPENSSL_VERSION}, FIPS: {backend._fips_enabled}",
|
||||
]
|
||||
|
||||
|
||||
def pytest_collection_modifyitems(config: pytest.Config, items: list[pytest.Item]) -> None:
|
||||
current_id = int(environ.get("CI_RUN_ID", 0)) - 1
|
||||
total_ids = int(environ.get("CI_TOTAL_RUNS", 0))
|
||||
|
||||
if total_ids:
|
||||
num_tests = len(items)
|
||||
matrix_size = math.ceil(num_tests / total_ids)
|
||||
|
||||
start = current_id * matrix_size
|
||||
end = (current_id + 1) * matrix_size
|
||||
|
||||
deselected_items = items[:start] + items[end:]
|
||||
config.hook.pytest_deselected(items=deselected_items)
|
||||
items[:] = items[start:end]
|
||||
print(f" Executing {start} - {end} tests")
|
||||
|
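The hook above shards the collected tests into contiguous slices per CI run; with assumed numbers, CI_TOTAL_RUNS=4 and 103 collected tests give a matrix_size of 26, so CI_RUN_ID=2 (current_id=1) keeps items[26:52] and deselects the rest:

import math

num_tests, total_ids, current_id = 103, 4, 1  # assumed: run 2 of 4, 103 tests collected
matrix_size = math.ceil(num_tests / total_ids)  # 26
start, end = current_id * matrix_size, (current_id + 1) * matrix_size  # 26, 52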
@ -66,6 +66,7 @@ class KerberosSourceViewSet(UsedByMixin, ModelViewSet):
|
||||
serializer_class = KerberosSourceSerializer
|
||||
lookup_field = "slug"
|
||||
filterset_fields = [
|
||||
"pbm_uuid",
|
||||
"name",
|
||||
"slug",
|
||||
"enabled",
|
||||
|
@ -110,6 +110,7 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
|
||||
serializer_class = LDAPSourceSerializer
|
||||
lookup_field = "slug"
|
||||
filterset_fields = [
|
||||
"pbm_uuid",
|
||||
"name",
|
||||
"slug",
|
||||
"enabled",
|
||||
|
@ -152,6 +152,7 @@ class OAuthSourceFilter(FilterSet):
|
||||
class Meta:
|
||||
model = OAuthSource
|
||||
fields = [
|
||||
"pbm_uuid",
|
||||
"name",
|
||||
"slug",
|
||||
"enabled",
|
||||
|
@ -52,6 +52,7 @@ class PlexSourceViewSet(UsedByMixin, ModelViewSet):
|
||||
serializer_class = PlexSourceSerializer
|
||||
lookup_field = "slug"
|
||||
filterset_fields = [
|
||||
"pbm_uuid",
|
||||
"name",
|
||||
"slug",
|
||||
"enabled",
|
||||
|
@ -44,6 +44,7 @@ class SAMLSourceViewSet(UsedByMixin, ModelViewSet):
|
||||
serializer_class = SAMLSourceSerializer
|
||||
lookup_field = "slug"
|
||||
filterset_fields = [
|
||||
"pbm_uuid",
|
||||
"name",
|
||||
"slug",
|
||||
"enabled",
|
||||
|
@ -53,6 +53,6 @@ class SCIMSourceViewSet(UsedByMixin, ModelViewSet):
|
||||
queryset = SCIMSource.objects.all()
|
||||
serializer_class = SCIMSourceSerializer
|
||||
lookup_field = "slug"
|
||||
filterset_fields = ["name", "slug"]
|
||||
filterset_fields = ["pbm_uuid", "name", "slug"]
|
||||
search_fields = ["name", "slug", "token__identifier", "token__user__username"]
|
||||
ordering = ["name"]
|
||||
|
@ -114,7 +114,7 @@ class SCIMView(APIView):
|
||||
|
||||
|
||||
class SCIMObjectView(SCIMView):
|
||||
"""Base SCIM View for object management"""
|
||||
"""Base SCIM View for object management"""
|
||||
|
||||
mapper: SourceMapper
|
||||
manager: PropertyMappingManager
|
||||
|
File diff suppressed because one or more lines are too long
@ -5,6 +5,7 @@ from email.policy import Policy
|
||||
from types import MethodType
|
||||
from typing import Any
|
||||
|
||||
from django.contrib.messages import INFO, add_message
|
||||
from django.db.models.query import QuerySet
|
||||
from django.http import HttpRequest, HttpResponse
|
||||
from django.http.request import QueryDict
|
||||
@ -147,6 +148,9 @@ class PromptChallengeResponse(ChallengeResponse):
|
||||
result = engine.result
|
||||
if not result.passing:
|
||||
raise ValidationError(list(result.messages))
|
||||
else:
|
||||
for msg in result.messages:
|
||||
add_message(self.request, INFO, msg)
|
||||
return attrs
|
||||
|
||||
|
||||
|
@ -20,7 +20,7 @@ from authentik.flows.planner import (
|
||||
FlowPlanner,
|
||||
)
|
||||
from authentik.flows.stage import ChallengeStageView
|
||||
from authentik.flows.views.executor import SESSION_KEY_PLAN, InvalidStageError
|
||||
from authentik.flows.views.executor import SESSION_KEY_GET, SESSION_KEY_PLAN, InvalidStageError
|
||||
from authentik.lib.utils.urls import reverse_with_qs
|
||||
from authentik.stages.redirect.models import RedirectMode, RedirectStage
|
||||
|
||||
@ -72,7 +72,9 @@ class RedirectStageView(ChallengeStageView):
|
||||
self.request.session[SESSION_KEY_PLAN] = plan
|
||||
kwargs = self.executor.kwargs
|
||||
kwargs.update({"flow_slug": flow.slug})
|
||||
return reverse_with_qs("authentik_core:if-flow", self.request.GET, kwargs=kwargs)
|
||||
return reverse_with_qs(
|
||||
"authentik_core:if-flow", self.request.session[SESSION_KEY_GET], kwargs=kwargs
|
||||
)
|
||||
|
||||
def get_challenge(self, *args, **kwargs) -> Challenge:
|
||||
"""Get the redirect target. Prioritize `redirect_stage_target` if present."""
|
||||
|
@ -1,5 +1,7 @@
|
||||
"""Test Redirect stage"""
|
||||
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from django.urls.base import reverse
|
||||
from rest_framework.exceptions import ValidationError
|
||||
|
||||
@ -58,6 +60,23 @@ class TestRedirectStage(FlowTestCase):
|
||||
response, reverse("authentik_core:if-flow", kwargs={"flow_slug": self.target_flow.slug})
|
||||
)
|
||||
|
||||
def test_flow_query(self):
|
||||
self.stage.mode = RedirectMode.FLOW
|
||||
self.stage.save()
|
||||
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug})
|
||||
+ "?"
|
||||
+ urlencode({"query": urlencode({"test": "foo"})})
|
||||
)
|
||||
|
||||
self.assertStageRedirects(
|
||||
response,
|
||||
reverse("authentik_core:if-flow", kwargs={"flow_slug": self.target_flow.slug})
|
||||
+ "?"
|
||||
+ urlencode({"test": "foo"}),
|
||||
)
|
||||
|
||||
def test_override_static(self):
|
||||
policy = ExpressionPolicy.objects.create(
|
||||
name=generate_id(),
|
||||
|
@ -2,7 +2,7 @@
|
||||
"$schema": "http://json-schema.org/draft-07/schema",
|
||||
"$id": "https://goauthentik.io/blueprints/schema.json",
|
||||
"type": "object",
|
||||
"title": "authentik 2024.12.2 Blueprint schema",
|
||||
"title": "authentik 2024.12.3 Blueprint schema",
|
||||
"required": [
|
||||
"version",
|
||||
"entries"
|
||||
@ -3601,6 +3601,46 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"required": [
|
||||
"model",
|
||||
"identifiers"
|
||||
],
|
||||
"properties": {
|
||||
"model": {
|
||||
"const": "authentik_providers_ssf.ssfprovider"
|
||||
},
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"state": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"absent",
|
||||
"present",
|
||||
"created",
|
||||
"must_created"
|
||||
],
|
||||
"default": "present"
|
||||
},
|
||||
"conditions": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"permissions": {
|
||||
"$ref": "#/$defs/model_authentik_providers_ssf.ssfprovider_permissions"
|
||||
},
|
||||
"attrs": {
|
||||
"$ref": "#/$defs/model_authentik_providers_ssf.ssfprovider"
|
||||
},
|
||||
"identifiers": {
|
||||
"$ref": "#/$defs/model_authentik_providers_ssf.ssfprovider"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"required": [
|
||||
@ -4583,6 +4623,7 @@
|
||||
"authentik.enterprise.providers.google_workspace",
|
||||
"authentik.enterprise.providers.microsoft_entra",
|
||||
"authentik.enterprise.providers.rac",
|
||||
"authentik.enterprise.providers.ssf",
|
||||
"authentik.enterprise.stages.authenticator_endpoint_gdtc",
|
||||
"authentik.enterprise.stages.source",
|
||||
"authentik.events"
|
||||
@ -4686,6 +4727,7 @@
|
||||
"authentik_providers_rac.racprovider",
|
||||
"authentik_providers_rac.endpoint",
|
||||
"authentik_providers_rac.racpropertymapping",
|
||||
"authentik_providers_ssf.ssfprovider",
|
||||
"authentik_stages_authenticator_endpoint_gdtc.authenticatorendpointgdtcstage",
|
||||
"authentik_stages_source.sourcestage",
|
||||
"authentik_events.event",
|
||||
@ -6687,6 +6729,18 @@
|
||||
"authentik_providers_scim.view_scimprovider",
|
||||
"authentik_providers_scim.view_scimprovidergroup",
|
||||
"authentik_providers_scim.view_scimprovideruser",
|
||||
"authentik_providers_ssf.add_ssfprovider",
|
||||
"authentik_providers_ssf.add_stream",
|
||||
"authentik_providers_ssf.add_streamevent",
|
||||
"authentik_providers_ssf.change_ssfprovider",
|
||||
"authentik_providers_ssf.change_stream",
|
||||
"authentik_providers_ssf.change_streamevent",
|
||||
"authentik_providers_ssf.delete_ssfprovider",
|
||||
"authentik_providers_ssf.delete_stream",
|
||||
"authentik_providers_ssf.delete_streamevent",
|
||||
"authentik_providers_ssf.view_ssfprovider",
|
||||
"authentik_providers_ssf.view_stream",
|
||||
"authentik_providers_ssf.view_streamevent",
|
||||
"authentik_rbac.access_admin_interface",
|
||||
"authentik_rbac.add_role",
|
||||
"authentik_rbac.assign_role_permissions",
|
||||
@ -12936,6 +12990,18 @@
|
||||
"authentik_providers_scim.view_scimprovider",
|
||||
"authentik_providers_scim.view_scimprovidergroup",
|
||||
"authentik_providers_scim.view_scimprovideruser",
|
||||
"authentik_providers_ssf.add_ssfprovider",
|
||||
"authentik_providers_ssf.add_stream",
|
||||
"authentik_providers_ssf.add_streamevent",
|
||||
"authentik_providers_ssf.change_ssfprovider",
|
||||
"authentik_providers_ssf.change_stream",
|
||||
"authentik_providers_ssf.change_streamevent",
|
||||
"authentik_providers_ssf.delete_ssfprovider",
|
||||
"authentik_providers_ssf.delete_stream",
|
||||
"authentik_providers_ssf.delete_streamevent",
|
||||
"authentik_providers_ssf.view_ssfprovider",
|
||||
"authentik_providers_ssf.view_stream",
|
||||
"authentik_providers_ssf.view_streamevent",
|
||||
"authentik_rbac.access_admin_interface",
|
||||
"authentik_rbac.add_role",
|
||||
"authentik_rbac.assign_role_permissions",
|
||||
@ -13988,6 +14054,62 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"model_authentik_providers_ssf.ssfprovider": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
"title": "Name"
|
||||
},
|
||||
"signing_key": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"title": "Signing Key",
|
||||
"description": "Key used to sign the SSF Events."
|
||||
},
|
||||
"oidc_auth_providers": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "integer"
|
||||
},
|
||||
"title": "Oidc auth providers"
|
||||
},
|
||||
"event_retention": {
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
"title": "Event retention"
|
||||
}
|
||||
},
|
||||
"required": []
|
||||
},
|
||||
"model_authentik_providers_ssf.ssfprovider_permissions": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"permission"
|
||||
],
|
||||
"properties": {
|
||||
"permission": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"add_stream",
|
||||
"add_ssfprovider",
|
||||
"change_ssfprovider",
|
||||
"delete_ssfprovider",
|
||||
"view_ssfprovider"
|
||||
]
|
||||
},
|
||||
"user": {
|
||||
"type": "integer"
|
||||
},
|
||||
"role": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"model_authentik_stages_authenticator_endpoint_gdtc.authenticatorendpointgdtcstage": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
|
@ -31,7 +31,7 @@ services:
|
||||
volumes:
|
||||
- redis:/data
|
||||
server:
|
||||
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.12.2}
|
||||
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.12.3}
|
||||
restart: unless-stopped
|
||||
command: server
|
||||
environment:
|
||||
@ -54,7 +54,7 @@ services:
|
||||
redis:
|
||||
condition: service_healthy
|
||||
worker:
|
||||
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.12.2}
|
||||
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.12.3}
|
||||
restart: unless-stopped
|
||||
command: worker
|
||||
environment:
|
||||
|
go.mod (8 changed lines)
@ -24,15 +24,15 @@ require (
|
||||
github.com/pires/go-proxyproto v0.8.0
|
||||
github.com/prometheus/client_golang v1.20.5
|
||||
github.com/redis/go-redis/v9 v9.7.0
|
||||
github.com/sethvargo/go-envconfig v1.1.0
|
||||
github.com/sethvargo/go-envconfig v1.1.1
|
||||
github.com/sirupsen/logrus v1.9.3
|
||||
github.com/spf13/cobra v1.8.1
|
||||
github.com/stretchr/testify v1.10.0
|
||||
github.com/wwt/guac v1.3.2
|
||||
goauthentik.io/api/v3 v3.2024122.2
|
||||
goauthentik.io/api/v3 v3.2024123.4
|
||||
golang.org/x/exp v0.0.0-20230210204819-062eb4c674ab
|
||||
golang.org/x/oauth2 v0.25.0
|
||||
golang.org/x/sync v0.10.0
|
||||
golang.org/x/oauth2 v0.26.0
|
||||
golang.org/x/sync v0.11.0
|
||||
gopkg.in/yaml.v2 v2.4.0
|
||||
layeh.com/radius v0.0.0-20210819152912-ad72663a72ab
|
||||
)
|
||||
|
go.sum (15 changed lines)
@ -254,8 +254,8 @@ github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR
|
||||
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
|
||||
github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/sethvargo/go-envconfig v1.1.0 h1:cWZiJxeTm7AlCvzGXrEXaSTCNgip5oJepekh/BOQuog=
|
||||
github.com/sethvargo/go-envconfig v1.1.0/go.mod h1:JLd0KFWQYzyENqnEPWWZ49i4vzZo/6nRidxI8YvGiHw=
|
||||
github.com/sethvargo/go-envconfig v1.1.1 h1:JDu8Q9baIzJf47NPkzhIB6aLYL0vQ+pPypoYrejS9QY=
|
||||
github.com/sethvargo/go-envconfig v1.1.1/go.mod h1:JLd0KFWQYzyENqnEPWWZ49i4vzZo/6nRidxI8YvGiHw=
|
||||
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
||||
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
||||
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||
@ -299,8 +299,8 @@ go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y
|
||||
go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
|
||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
||||
goauthentik.io/api/v3 v3.2024122.2 h1:QC+ZQ+AxlPwl9OG1X/Z62EVepmTGyfvJUxhUdFjs+4s=
|
||||
goauthentik.io/api/v3 v3.2024122.2/go.mod h1:zz+mEZg8rY/7eEjkMGWJ2DnGqk+zqxuybGCGrR2O4Kw=
|
||||
goauthentik.io/api/v3 v3.2024123.4 h1:JYLsUjkJ7kT+jHO72DyFTXFwKEGAcOOlLh36SRG9BDw=
|
||||
goauthentik.io/api/v3 v3.2024123.4/go.mod h1:zz+mEZg8rY/7eEjkMGWJ2DnGqk+zqxuybGCGrR2O4Kw=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
@ -393,8 +393,8 @@ golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4Iltr
|
||||
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
|
||||
golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70=
|
||||
golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE=
|
||||
golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
@ -408,8 +408,9 @@ golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
|
||||
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
|
||||
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w=
|
||||
golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
|
@ -29,4 +29,4 @@ func UserAgent() string {
|
||||
return fmt.Sprintf("authentik@%s", FullVersion())
|
||||
}
|
||||
|
||||
const VERSION = "2024.12.2"
|
||||
const VERSION = "2024.12.3"
|
||||
|
@ -15,7 +15,6 @@ import (
|
||||
func EnableDebugServer() {
|
||||
l := log.WithField("logger", "authentik.go_debugger")
|
||||
if !config.Get().Debug {
|
||||
l.Info("not enabling debug server, set `AUTHENTIK_DEBUG` to `true` to enable it.")
|
||||
return
|
||||
}
|
||||
h := mux.NewRouter()
|
||||
|
@ -43,6 +43,11 @@ LABEL org.opencontainers.image.source=https://github.com/goauthentik/authentik
|
||||
LABEL org.opencontainers.image.version=${VERSION}
|
||||
LABEL org.opencontainers.image.revision=${GIT_BUILD_HASH}
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get upgrade -y && \
|
||||
apt-get clean && \
|
||||
rm -rf /tmp/* /var/lib/apt/lists/*
|
||||
|
||||
COPY --from=builder /go/ldap /
|
||||
|
||||
HEALTHCHECK --interval=5s --retries=20 --start-period=3s CMD [ "/ldap", "healthcheck" ]
|
||||
|
lifecycle/ak (12 changed lines)
@ -1,4 +1,5 @@
|
||||
#!/usr/bin/env -S bash -e
|
||||
#!/usr/bin/env -S bash
|
||||
set -e -o pipefail
|
||||
MODE_FILE="${TMPDIR}/authentik-mode"
|
||||
|
||||
function log {
|
||||
@ -54,6 +55,10 @@ function cleanup {
|
||||
}
|
||||
|
||||
function prepare_debug {
|
||||
# Only attempt to install debug dependencies if we're running in a container
|
||||
if [ ! -d /ak-root ]; then
|
||||
return
|
||||
fi
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
apt-get update
|
||||
apt-get install -y --no-install-recommends krb5-kdc krb5-user krb5-admin-server libkrb5-dev gcc
|
||||
@ -62,7 +67,7 @@ function prepare_debug {
|
||||
chown authentik:authentik /unittest.xml
|
||||
}
|
||||
|
||||
if [[ "${AUTHENTIK_REMOTE_DEBUG}" == "true" ]]; then
|
||||
if [[ "$(python -m authentik.lib.config debugger 2> /dev/null)" == "True" ]]; then
|
||||
prepare_debug
|
||||
fi
|
||||
|
||||
@ -87,12 +92,11 @@ elif [[ "$1" == "bash" ]]; then
|
||||
elif [[ "$1" == "test-all" ]]; then
|
||||
prepare_debug
|
||||
chmod 777 /root
|
||||
pip install --force-reinstall /wheels/*
|
||||
check_if_root "python -m manage test authentik"
|
||||
elif [[ "$1" == "healthcheck" ]]; then
|
||||
run_authentik healthcheck $(cat $MODE_FILE)
|
||||
elif [[ "$1" == "dump_config" ]]; then
|
||||
exec python -m authentik.lib.config
|
||||
exec python -m authentik.lib.config $@
|
||||
elif [[ "$1" == "debug" ]]; then
|
||||
exec sleep infinity
|
||||
else
|
||||
|
lifecycle/aws/package-lock.json (generated; 8 changed lines)
@ -9,7 +9,7 @@
|
||||
"version": "0.0.0",
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"aws-cdk": "^2.176.0",
|
||||
"aws-cdk": "^2.178.2",
|
||||
"cross-env": "^7.0.3"
|
||||
},
|
||||
"engines": {
|
||||
@ -17,9 +17,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk": {
|
||||
"version": "2.176.0",
|
||||
"resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.176.0.tgz",
|
||||
"integrity": "sha512-yRjIXzK2ddznwuSjasWAViYBtBSQbEu6GHlylaC3GHsIUPhrK3KguqIuhdlxjMeiQ1Fvok8REDLCReZJdrSLLg==",
|
||||
"version": "2.178.2",
|
||||
"resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.178.2.tgz",
|
||||
"integrity": "sha512-ojMCMnBGinvDUD6+BOOlUOB9pjsYXoQdFVbf4bvi3dy3nwn557r0j6qDUcJMeikzPJ6YWzfAdL0fYxBZg4xcOg==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"bin": {
|
||||
|
@ -10,7 +10,7 @@
|
||||
"node": ">=20"
|
||||
},
|
||||
"devDependencies": {
|
||||
"aws-cdk": "^2.176.0",
|
||||
"aws-cdk": "^2.178.2",
|
||||
"cross-env": "^7.0.3"
|
||||
}
|
||||
}
|
||||
|
@ -26,7 +26,7 @@ Parameters:
|
||||
Description: authentik Docker image
|
||||
AuthentikVersion:
|
||||
Type: String
|
||||
Default: 2024.12.2
|
||||
Default: 2024.12.3
|
||||
Description: authentik Docker image tag
|
||||
AuthentikServerCPU:
|
||||
Type: Number
|
||||
|
@ -13,6 +13,7 @@ from prometheus_client.values import MultiProcessValue
|
||||
|
||||
from authentik import get_full_version
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.debug import start_debug_server
|
||||
from authentik.lib.logging import get_logger_config
|
||||
from authentik.lib.utils.http import get_http_session
|
||||
from authentik.lib.utils.reflection import get_env
|
||||
@ -146,9 +147,5 @@ if not CONFIG.get_bool("disable_startup_analytics", False):
|
||||
except Exception: # nosec
|
||||
pass
|
||||
|
||||
if CONFIG.get_bool("remote_debug"):
|
||||
import debugpy
|
||||
|
||||
debugpy.listen(("0.0.0.0", 6800)) # nosec
|
||||
|
||||
start_debug_server()
|
||||
run_migrations()
|
||||
|
@ -1,5 +1,6 @@
|
||||
#!/usr/bin/env python
|
||||
"""System Migration handler"""
|
||||
|
||||
from importlib.util import module_from_spec, spec_from_file_location
|
||||
from inspect import getmembers, isclass
|
||||
from os import environ, system
|
||||
@ -112,7 +113,8 @@ def run_migrations():
|
||||
"forget to activate a virtual environment?"
|
||||
) from exc
|
||||
execute_from_command_line(["", "migrate_schemas"])
|
||||
execute_from_command_line(["", "migrate_schemas", "--schema", "template", "--tenant"])
|
||||
if CONFIG.get_bool("tenants.enabled", False):
|
||||
execute_from_command_line(["", "migrate_schemas", "--schema", "template", "--tenant"])
|
||||
execute_from_command_line(
|
||||
["", "check"] + ([] if CONFIG.get_bool("debug") else ["--deploy"])
|
||||
)
|
||||
|
Binary file not shown.
@ -26,17 +26,20 @@
|
||||
# Thomas Liske, 2024
|
||||
# Michael Gottinger, 2024
|
||||
# itxworks, 2024
|
||||
# Alexander Möbius, 2024
|
||||
# Christian Wichmann <cw1981@gmx.de>, 2024
|
||||
# Stefan Werner, 2024
|
||||
# Alexander Möbius, 2025
|
||||
# Jonas, 2025
|
||||
# Niklas Kroese, 2025
|
||||
#
|
||||
#, fuzzy
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2024-11-18 00:09+0000\n"
|
||||
"POT-Creation-Date: 2025-02-14 14:49+0000\n"
|
||||
"PO-Revision-Date: 2022-09-26 16:47+0000\n"
|
||||
"Last-Translator: Christian Wichmann <cw1981@gmx.de>, 2024\n"
|
||||
"Last-Translator: Niklas Kroese, 2025\n"
|
||||
"Language-Team: German (https://app.transifex.com/authentik/teams/119923/de/)\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
@ -104,9 +107,9 @@ msgid "authentik Export - {date}"
|
||||
msgstr "authentik Export - {date}"
|
||||
|
||||
#: authentik/blueprints/v1/tasks.py authentik/crypto/tasks.py
|
||||
#, python-format
|
||||
msgid "Successfully imported %(count)d files."
|
||||
msgstr "%(count)d Dateien wurden erfolgreich importiert."
|
||||
#, python-brace-format
|
||||
msgid "Successfully imported {count} files."
|
||||
msgstr "{count} Dateien erfolgreich importiert."
|
||||
|
||||
#: authentik/brands/models.py
|
||||
msgid ""
|
||||
@ -136,6 +139,10 @@ msgstr "Marke"
|
||||
msgid "Brands"
|
||||
msgstr "Marken"
|
||||
|
||||
#: authentik/core/api/application_entitlements.py
|
||||
msgid "User does not have access to application."
|
||||
msgstr "Nutzer hat keinen Zugriff auf diese Applikation."
|
||||
|
||||
#: authentik/core/api/devices.py
|
||||
msgid "Extra description not available"
|
||||
msgstr "Eine weitergehende Beschreibung ist nicht verfügbar"
|
||||
@ -269,6 +276,14 @@ msgstr "Anwendung"
|
||||
msgid "Applications"
|
||||
msgstr "Anwendungen"
|
||||
|
||||
#: authentik/core/models.py
|
||||
msgid "Application Entitlement"
|
||||
msgstr "Anwendungsberechtigung"
|
||||
|
||||
#: authentik/core/models.py
|
||||
msgid "Application Entitlements"
|
||||
msgstr "Anwendungsberechtigungen"
|
||||
|
||||
#: authentik/core/models.py
|
||||
msgid "Use the source-specific identifier"
|
||||
msgstr "Verwenden Sie die quellenspezifische Kennung"
|
||||
@ -611,6 +626,47 @@ msgstr "Maximale Verbindungsgrenze erreicht."
|
||||
msgid "(You are already connected in another tab/window)"
|
||||
msgstr "(Sie sind bereits in einem anderen Tab/Fenster verbunden)"
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
#: authentik/providers/oauth2/models.py
|
||||
msgid "Signing Key"
|
||||
msgstr "Signaturschlüssel"
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "Key used to sign the SSF Events."
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "Shared Signals Framework Provider"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "Shared Signals Framework Providers"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "Add stream to SSF provider"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "SSF Stream"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "SSF Streams"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "SSF Stream Event"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "SSF Stream Events"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/tasks.py
|
||||
msgid "Failed to send request"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/stages/authenticator_endpoint_gdtc/models.py
|
||||
msgid "Endpoint Authenticator Google Device Trust Connector Stage"
|
||||
msgstr "Endpunkt-Authenticator für Google Gerätevertrauen Verbindungs Stage"
|
||||
@ -908,9 +964,8 @@ msgstr ""
|
||||
"Evaluierung der Richtlinien während des Planungsprozesses für den Flow."
|
||||
|
||||
#: authentik/flows/models.py
|
||||
msgid "Evaluate policies when the Stage is present to the user."
|
||||
msgid "Evaluate policies when the Stage is presented to the user."
|
||||
msgstr ""
|
||||
"Bewerten Sie die Richtlinien, wenn die Stufe für den Benutzer sichtbar ist."
|
||||
|
||||
#: authentik/flows/models.py
|
||||
msgid ""
|
||||
@ -959,14 +1014,14 @@ msgid "Starting full provider sync"
|
||||
msgstr "Starte komplette Provider Synchronisation."
|
||||
|
||||
#: authentik/lib/sync/outgoing/tasks.py
|
||||
#, python-format
|
||||
msgid "Syncing page %(page)d of users"
|
||||
msgstr "Seite %(page)d der Benutzer synchronisieren"
|
||||
#, python-brace-format
|
||||
msgid "Syncing page {page} of users"
|
||||
msgstr "Synchonisiere Benutzer Seite {page}"
|
||||
|
||||
#: authentik/lib/sync/outgoing/tasks.py
|
||||
#, python-format
|
||||
msgid "Syncing page %(page)d of groups"
|
||||
msgstr "Seite %(page)d der Gruppen synchronisieren"
|
||||
#, python-brace-format
|
||||
msgid "Syncing page {page} of groups"
|
||||
msgstr "Synchonisiere Gruppen Seite {page}"
|
||||
|
||||
#: authentik/lib/sync/outgoing/tasks.py
|
||||
#, python-brace-format
|
||||
@ -1140,10 +1195,10 @@ msgid "Event Matcher Policies"
|
||||
msgstr "Richtlinie für den Ereignisvergleich"
|
||||
|
||||
#: authentik/policies/expiry/models.py
|
||||
#, python-format
|
||||
msgid "Password expired %(days)d days ago. Please update your password."
|
||||
#, python-brace-format
|
||||
msgid "Password expired {days} days ago. Please update your password."
|
||||
msgstr ""
|
||||
"Das Passwort ist vor %(days)d Tagen abgelaufen. Bitte aktualisieren Sie Ihr "
|
||||
"Das Passwort ist vor {days} Tagen abgelaufen. Bitte aktualisieren Sie Ihr "
|
||||
"Passwort."
|
||||
|
||||
#: authentik/policies/expiry/models.py
|
||||
@ -1278,9 +1333,9 @@ msgid "Invalid password."
|
||||
msgstr "Ungültiges Passwort."
|
||||
|
||||
#: authentik/policies/password/models.py
|
||||
#, python-format
|
||||
msgid "Password exists on %(count)d online lists."
|
||||
msgstr "Passwort existiert auf %(count)d Listen."
|
||||
#, python-brace-format
|
||||
msgid "Password exists on {count} online lists."
|
||||
msgstr "Passwort online in {count} Listen gefunden."
|
||||
|
||||
#: authentik/policies/password/models.py
|
||||
msgid "Password is too weak."
|
||||
@ -1407,6 +1462,11 @@ msgstr "LDAP Anbietern"
|
||||
msgid "Search full LDAP directory"
|
||||
msgstr "Durchsuche komplettes LDAP Verzeichnis"
|
||||
|
||||
#: authentik/providers/oauth2/api/providers.py
|
||||
#, python-brace-format
|
||||
msgid "Invalid Regex Pattern: {url}"
|
||||
msgstr "Regex pattern ungültig: {url}"
|
||||
|
||||
#: authentik/providers/oauth2/id_token.py
|
||||
msgid "Based on the Hashed User ID"
|
||||
msgstr "Basierend auf der gehashten Benutzer ID"
|
||||
@ -1456,6 +1516,14 @@ msgstr ""
|
||||
"Jeder Anbieter hat einen anderen Aussteller, der auf dem Slug der Anwendung "
|
||||
"basiert."
|
||||
|
||||
#: authentik/providers/oauth2/models.py
|
||||
msgid "Strict URL comparison"
|
||||
msgstr "Strikter URL-Vergleich"
|
||||
|
||||
#: authentik/providers/oauth2/models.py
|
||||
msgid "Regular Expression URL matching"
|
||||
msgstr "Regex-URL-Vergleich"
|
||||
|
||||
#: authentik/providers/oauth2/models.py
|
||||
msgid "code (Authorization Code Flow)"
|
||||
msgstr "Code (Autorisierungsablauf)"
|
||||
@ -1536,10 +1604,6 @@ msgstr "Client Geheimnis"
|
||||
msgid "Redirect URIs"
|
||||
msgstr "URIs weiterleiten"
|
||||
|
||||
#: authentik/providers/oauth2/models.py
|
||||
msgid "Enter each URI on a new line."
|
||||
msgstr "Geben Sie jeden URI in eine neue Zeile ein."
|
||||
|
||||
#: authentik/providers/oauth2/models.py
|
||||
msgid "Include claims in id_token"
|
||||
msgstr "Ansprüche in id_token berücksichtigen"
|
||||
@ -1583,10 +1647,6 @@ msgstr ""
|
||||
"Konfigurieren Sie, wie der Flow-Executor mit einer ungültigen Antwort auf "
|
||||
"eine Abfrage umgehen soll."
|
||||
|
||||
#: authentik/providers/oauth2/models.py
|
||||
msgid "Signing Key"
|
||||
msgstr "Signaturschlüssel"
|
||||
|
||||
#: authentik/providers/oauth2/models.py
|
||||
msgid "Key used to sign the tokens."
|
||||
msgstr "Schlüssel zum Signieren der Token."
|
||||
@ -2094,6 +2154,10 @@ msgstr ""
|
||||
"Benutzerdefinierte krb5.conf zur Benutzung. Benutzt standardmäßig die "
|
||||
"systemeigene Konfiguration"
|
||||
|
||||
#: authentik/sources/kerberos/models.py
|
||||
msgid "KAdmin server type"
|
||||
msgstr "KAdmin-Servertyp"
|
||||
|
||||
#: authentik/sources/kerberos/models.py
|
||||
msgid "Sync users from Kerberos into authentik"
|
||||
msgstr "Synchronisiere Nutzer von Kerberos nach authentik"
|
||||
@ -2340,7 +2404,7 @@ msgstr "Zugriffstoken-URL"
|
||||
|
||||
#: authentik/sources/oauth/models.py
|
||||
msgid "URL used by authentik to retrieve tokens."
|
||||
msgstr "URL, die von Authentik zum Abrufen von Token verwendet wird."
|
||||
msgstr "URL, die von Authentik zum Abrufen von Tokens verwendet wird."
|
||||
|
||||
#: authentik/sources/oauth/models.py
|
||||
msgid "Profile URL"
|
||||
@ -3114,12 +3178,11 @@ msgstr ""
|
||||
#, python-format
|
||||
msgid ""
|
||||
"\n"
|
||||
" If you did not request a password change, please ignore this Email. The link above is valid for %(expires)s.\n"
|
||||
" If you did not request a password change, please ignore this email. The link above is valid for %(expires)s.\n"
|
||||
" "
|
||||
msgstr ""
|
||||
"\n"
|
||||
" Wenn Sie keine Passwortänderung beantragt haben, ignorieren Sie bitte diese E-Mail. Der obige Link ist gültig für %(expires)s.\n"
|
||||
" "
|
||||
" Wenn Sie keine Passwortänderung beantragt haben, ignorieren Sie bitte diese E-Mail. Der obige Link ist gültig für %(expires)s."
|
||||
|
||||
#: authentik/stages/email/templates/email/password_reset.txt
|
||||
#, python-format
|
||||
@ -3138,7 +3201,7 @@ msgstr ""
|
||||
#, python-format
|
||||
msgid ""
|
||||
"\n"
|
||||
"If you did not request a password change, please ignore this Email. The link above is valid for %(expires)s.\n"
|
||||
"If you did not request a password change, please ignore this email. The link above is valid for %(expires)s.\n"
|
||||
msgstr ""
|
||||
"\n"
|
||||
"Wenn Sie keine Passwortänderung beantragt haben, ignorieren Sie bitte diese E-Mail. Der obige Link ist gültig für %(expires)s.\n"
|
||||
@ -3450,6 +3513,22 @@ msgstr "Aufforderungsstufen"
|
||||
msgid "Passwords don't match."
|
||||
msgstr "Passwörter stimmen nicht überein"
|
||||
|
||||
#: authentik/stages/redirect/api.py
|
||||
msgid "Target URL should be present when mode is Static."
|
||||
msgstr "Ziel-URL sollte beim statischen Modus vorhanden sein"
|
||||
|
||||
#: authentik/stages/redirect/api.py
|
||||
msgid "Target Flow should be present when mode is Flow."
|
||||
msgstr "Ziel-Flow sollte beim Flow-Modus vorhanden sein"
|
||||
|
||||
#: authentik/stages/redirect/models.py
|
||||
msgid "Redirect Stage"
|
||||
msgstr "Umleitungphase"
|
||||
|
||||
#: authentik/stages/redirect/models.py
|
||||
msgid "Redirect Stages"
|
||||
msgstr "Umleitungphasen"
|
||||
|
||||
#: authentik/stages/user_delete/models.py
|
||||
msgid "User Delete Stage"
|
||||
msgstr "Benutzer löschen Stufe"
|
||||
|
@ -8,7 +8,7 @@ msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2024-12-20 00:08+0000\n"
|
||||
"POT-Creation-Date: 2025-02-14 14:49+0000\n"
|
||||
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
|
||||
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
|
||||
"Language-Team: LANGUAGE <LL@li.org>\n"
|
||||
@ -551,6 +551,47 @@ msgstr ""
|
||||
msgid "(You are already connected in another tab/window)"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
#: authentik/providers/oauth2/models.py
|
||||
msgid "Signing Key"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "Key used to sign the SSF Events."
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "Shared Signals Framework Provider"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "Shared Signals Framework Providers"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "Add stream to SSF provider"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "SSF Stream"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "SSF Streams"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "SSF Stream Event"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "SSF Stream Events"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/tasks.py
|
||||
msgid "Failed to send request"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/stages/authenticator_endpoint_gdtc/models.py
|
||||
msgid "Endpoint Authenticator Google Device Trust Connector Stage"
|
||||
msgstr ""
|
||||
@ -826,7 +867,7 @@ msgid "Evaluate policies during the Flow planning process."
|
||||
msgstr ""
|
||||
|
||||
#: authentik/flows/models.py
|
||||
msgid "Evaluate policies when the Stage is present to the user."
|
||||
msgid "Evaluate policies when the Stage is presented to the user."
|
||||
msgstr ""
|
||||
|
||||
#: authentik/flows/models.py
|
||||
@ -1427,10 +1468,6 @@ msgstr ""
|
||||
msgid "Configure how the issuer field of the ID Token should be filled."
|
||||
msgstr ""
|
||||
|
||||
#: authentik/providers/oauth2/models.py
|
||||
msgid "Signing Key"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/providers/oauth2/models.py
|
||||
msgid "Key used to sign the tokens."
|
||||
msgstr ""
|
||||
|
Binary file not shown.
@ -14,7 +14,7 @@ msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2024-11-26 00:09+0000\n"
|
||||
"POT-Creation-Date: 2025-02-14 14:49+0000\n"
|
||||
"PO-Revision-Date: 2022-09-26 16:47+0000\n"
|
||||
"Last-Translator: Iamanaws, 2024\n"
|
||||
"Language-Team: Spanish (https://app.transifex.com/authentik/teams/119923/es/)\n"
|
||||
@ -116,6 +116,10 @@ msgstr "Marca"
|
||||
msgid "Brands"
|
||||
msgstr "Marcas"
|
||||
|
||||
#: authentik/core/api/application_entitlements.py
|
||||
msgid "User does not have access to application."
|
||||
msgstr ""
|
||||
|
||||
#: authentik/core/api/devices.py
|
||||
msgid "Extra description not available"
|
||||
msgstr "Descripción adicional no disponible."
|
||||
@ -249,6 +253,14 @@ msgstr "Aplicación"
|
||||
msgid "Applications"
|
||||
msgstr "Aplicaciones"
|
||||
|
||||
#: authentik/core/models.py
|
||||
msgid "Application Entitlement"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/core/models.py
|
||||
msgid "Application Entitlements"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/core/models.py
|
||||
msgid "Use the source-specific identifier"
|
||||
msgstr "Usar el identificador específico de la fuente"
|
||||
@ -592,6 +604,47 @@ msgstr "Límite máximo de conexiones alcanzado."
|
||||
msgid "(You are already connected in another tab/window)"
|
||||
msgstr "(Ya estás conectado en otra pestaña/ventana)"
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
#: authentik/providers/oauth2/models.py
|
||||
msgid "Signing Key"
|
||||
msgstr "Clave de firma"
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "Key used to sign the SSF Events."
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "Shared Signals Framework Provider"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "Shared Signals Framework Providers"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "Add stream to SSF provider"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "SSF Stream"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "SSF Streams"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "SSF Stream Event"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "SSF Stream Events"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/tasks.py
|
||||
msgid "Failed to send request"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/stages/authenticator_endpoint_gdtc/models.py
|
||||
msgid "Endpoint Authenticator Google Device Trust Connector Stage"
|
||||
msgstr ""
|
||||
@ -887,9 +940,8 @@ msgid "Evaluate policies during the Flow planning process."
|
||||
msgstr "Evalúa políticas durante el proceso de planeación del Flujo."
|
||||
|
||||
#: authentik/flows/models.py
|
||||
msgid "Evaluate policies when the Stage is present to the user."
|
||||
msgid "Evaluate policies when the Stage is presented to the user."
|
||||
msgstr ""
|
||||
"Evalúe las políticas cuando el escenario esté presente para el usuario."
|
||||
|
||||
#: authentik/flows/models.py
|
||||
msgid ""
|
||||
@ -1567,10 +1619,6 @@ msgstr ""
|
||||
msgid "Configure how the issuer field of the ID Token should be filled."
|
||||
msgstr "Configure cómo se debe rellenar el campo emisor del token de ID."
|
||||
|
||||
#: authentik/providers/oauth2/models.py
|
||||
msgid "Signing Key"
|
||||
msgstr "Clave de firma"
|
||||
|
||||
#: authentik/providers/oauth2/models.py
|
||||
msgid "Key used to sign the tokens."
|
||||
msgstr "Clave utilizada para firmar los tokens."
|
||||
@ -2077,6 +2125,10 @@ msgstr "Dominio Kerberos"
|
||||
msgid "Custom krb5.conf to use. Uses the system one by default"
|
||||
msgstr "krb5.conf personalizado a usar. Usa el del sistema por defecto."
|
||||
|
||||
#: authentik/sources/kerberos/models.py
|
||||
msgid "KAdmin server type"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/sources/kerberos/models.py
|
||||
msgid "Sync users from Kerberos into authentik"
|
||||
msgstr "Sincronizar usuarios desde Kerberos hacia Authentik"
|
||||
@ -3099,12 +3151,9 @@ msgstr ""
|
||||
#, python-format
|
||||
msgid ""
|
||||
"\n"
|
||||
" If you did not request a password change, please ignore this Email. The link above is valid for %(expires)s.\n"
|
||||
" If you did not request a password change, please ignore this email. The link above is valid for %(expires)s.\n"
|
||||
" "
|
||||
msgstr ""
|
||||
"\n"
|
||||
" Si no solicitaste un cambio de contraseña, por favor ignora este correo. El enlace de arriba es valida por %(expires)s.\n"
|
||||
" "
|
||||
|
||||
#: authentik/stages/email/templates/email/password_reset.txt
|
||||
#, python-format
|
||||
@ -3123,10 +3172,8 @@ msgstr ""
|
||||
#, python-format
|
||||
msgid ""
|
||||
"\n"
|
||||
"If you did not request a password change, please ignore this Email. The link above is valid for %(expires)s.\n"
|
||||
"If you did not request a password change, please ignore this email. The link above is valid for %(expires)s.\n"
|
||||
msgstr ""
|
||||
"\n"
|
||||
"Si no solicitaste un cambio de contraseña, por favor ignora este correo. El enlace de arriba es valida por %(expires)s.\n"
|
||||
|
||||
#: authentik/stages/email/templates/email/setup.html
|
||||
msgid "authentik Test-Email"
|
||||
@ -3436,6 +3483,22 @@ msgstr "Etapas de Solicitud"
|
||||
msgid "Passwords don't match."
|
||||
msgstr "Las contraseñas no coinciden."
|
||||
|
||||
#: authentik/stages/redirect/api.py
|
||||
msgid "Target URL should be present when mode is Static."
|
||||
msgstr ""
|
||||
|
||||
#: authentik/stages/redirect/api.py
|
||||
msgid "Target Flow should be present when mode is Flow."
|
||||
msgstr ""
|
||||
|
||||
#: authentik/stages/redirect/models.py
|
||||
msgid "Redirect Stage"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/stages/redirect/models.py
|
||||
msgid "Redirect Stages"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/stages/user_delete/models.py
|
||||
msgid "User Delete Stage"
|
||||
msgstr "Etapa de eliminación del usuario"
|
||||
|
locale/fi/LC_MESSAGES/django.mo (new binary file; content not shown)
@ -8,15 +8,16 @@
|
||||
# Skyler Mäntysaari, 2024
|
||||
# Jani Hast, 2024
|
||||
# MarkoTukiainen, 2025
|
||||
# Marc Schmitt, 2025
|
||||
#
|
||||
#, fuzzy
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2024-12-20 00:08+0000\n"
|
||||
"POT-Creation-Date: 2025-02-14 14:49+0000\n"
|
||||
"PO-Revision-Date: 2022-09-26 16:47+0000\n"
|
||||
"Last-Translator: MarkoTukiainen, 2025\n"
|
||||
"Last-Translator: Marc Schmitt, 2025\n"
|
||||
"Language-Team: Finnish (https://app.transifex.com/authentik/teams/119923/fi/)\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
@ -78,7 +79,7 @@ msgstr "Suunnitelman ilmentymät"
|
||||
#: authentik/blueprints/v1/exporter.py
|
||||
#, python-brace-format
|
||||
msgid "authentik Export - {date}"
|
||||
msgstr "authentik Vienti - {data}"
|
||||
msgstr "authentik Vienti - {date}"
|
||||
|
||||
#: authentik/blueprints/v1/tasks.py authentik/crypto/tasks.py
|
||||
#, python-brace-format
|
||||
@ -594,6 +595,47 @@ msgstr "Yhteyksien enimmäismäärä saavutettu."
|
||||
msgid "(You are already connected in another tab/window)"
|
||||
msgstr "(Olet jo yhteydessä toisen selainvälilehden tai -ikkunan kautta)"
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
#: authentik/providers/oauth2/models.py
|
||||
msgid "Signing Key"
|
||||
msgstr "Allekirjoitusavain"
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "Key used to sign the SSF Events."
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "Shared Signals Framework Provider"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "Shared Signals Framework Providers"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "Add stream to SSF provider"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "SSF Stream"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "SSF Streams"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "SSF Stream Event"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "SSF Stream Events"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/providers/ssf/tasks.py
|
||||
msgid "Failed to send request"
|
||||
msgstr ""
|
||||
|
||||
#: authentik/enterprise/stages/authenticator_endpoint_gdtc/models.py
|
||||
msgid "Endpoint Authenticator Google Device Trust Connector Stage"
|
||||
msgstr "Päätepisteen todentaja Google Device Trust Connector -vaihe"
|
||||
@ -884,8 +926,8 @@ msgid "Evaluate policies during the Flow planning process."
|
||||
msgstr "Suorita käytännöt prosessin suunnitteluvaiheen aikana."
|
||||
|
||||
#: authentik/flows/models.py
|
||||
msgid "Evaluate policies when the Stage is present to the user."
|
||||
msgstr "Suorita käytännöt kun vaihe näytetään käyttäjälle."
|
||||
msgid "Evaluate policies when the Stage is presented to the user."
|
||||
msgstr ""
|
||||
|
||||
#: authentik/flows/models.py
|
||||
msgid ""
|
||||
@ -1559,10 +1601,6 @@ msgstr ""
|
||||
msgid "Configure how the issuer field of the ID Token should be filled."
|
||||
msgstr "Määritä, miten myöntäjän ID-tunnisteen kenttä täytetään."
|
||||
|
||||
#: authentik/providers/oauth2/models.py
|
||||
msgid "Signing Key"
|
||||
msgstr "Allekirjoitusavain"
|
||||
|
||||
#: authentik/providers/oauth2/models.py
|
||||
msgid "Key used to sign the tokens."
|
||||
msgstr "Avain, jota käytetään tunnisteiden allekirjoittamiseen."
|
||||
|
Binary file not shown.
@ -19,7 +19,7 @@ msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2024-12-20 00:08+0000\n"
|
||||
"POT-Creation-Date: 2025-02-14 14:49+0000\n"
|
||||
"PO-Revision-Date: 2022-09-26 16:47+0000\n"
|
||||
"Last-Translator: Marc Schmitt, 2025\n"
|
||||
"Language-Team: French (https://app.transifex.com/authentik/teams/119923/fr/)\n"
|
||||
@ -608,6 +608,47 @@ msgstr "Limite maximum de connection atteinte."
|
||||
msgid "(You are already connected in another tab/window)"
|
||||
msgstr "(Vous êtes déjà connecté dans un autre onglet/une autre fenêtre)"
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
#: authentik/providers/oauth2/models.py
|
||||
msgid "Signing Key"
|
||||
msgstr "Clé de signature"
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "Key used to sign the SSF Events."
|
||||
msgstr "Clé utilisée pour signer les évènements SSF."
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "Shared Signals Framework Provider"
|
||||
msgstr "Fournisseur Shared Signals Framework"
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "Shared Signals Framework Providers"
|
||||
msgstr "Fournisseurs Shared Signals Framework"
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "Add stream to SSF provider"
|
||||
msgstr "Ajouter un flux au fournisseur SSF"
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "SSF Stream"
|
||||
msgstr "Flux SSF"
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "SSF Streams"
|
||||
msgstr "Flux SSF"
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "SSF Stream Event"
|
||||
msgstr "Évènement du flux SSF"
|
||||
|
||||
#: authentik/enterprise/providers/ssf/models.py
|
||||
msgid "SSF Stream Events"
|
||||
msgstr "Évènements du flux SSF"
|
||||
|
||||
#: authentik/enterprise/providers/ssf/tasks.py
|
||||
msgid "Failed to send request"
|
||||
msgstr "Échec de l'envoi de la requête"
|
||||
|
||||
#: authentik/enterprise/stages/authenticator_endpoint_gdtc/models.py
|
||||
msgid "Endpoint Authenticator Google Device Trust Connector Stage"
|
||||
msgstr ""
|
||||
@ -905,7 +946,7 @@ msgid "Evaluate policies during the Flow planning process."
|
||||
msgstr "Évaluer les politiques durant la planification du flux."
|
||||
|
||||
#: authentik/flows/models.py
|
||||
msgid "Evaluate policies when the Stage is present to the user."
|
||||
msgid "Evaluate policies when the Stage is presented to the user."
|
||||
msgstr ""
|
||||
"Évaluer les politiques lorsque l'étape est présentée est l'utilisateur."
|
||||
|
||||
@ -1585,10 +1626,6 @@ msgstr ""
|
||||
msgid "Configure how the issuer field of the ID Token should be filled."
|
||||
msgstr "Configure comment le champ émetteur du jeton ID sera rempli."
|
||||
|
||||
#: authentik/providers/oauth2/models.py
|
||||
msgid "Signing Key"
|
||||
msgstr "Clé de signature"
|
||||
|
||||
#: authentik/providers/oauth2/models.py
|
||||
msgid "Key used to sign the tokens."
|
||||
msgstr "Clé utilisée pour signer les jetons."
|
||||
|
Some files were not shown because too many files have changed in this diff.