Compare commits
209 Commits
web/design
...
version/20
Author | SHA1 | Date | |
---|---|---|---|
5c5cc1c7da | |||
3dccce1095 | |||
78f997fbee | |||
ed83c2b0b1 | |||
af780deb27 | |||
a4be38567f | |||
39aafbb34a | |||
07eb5fe533 | |||
301a89dd92 | |||
cd6d0a47f3 | |||
8a23eaef1e | |||
8f285fbcc5 | |||
5d391424f7 | |||
2de11f8a69 | |||
b2dcf94aba | |||
adb532fc5d | |||
5d3b35d1ba | |||
433a94d9ee | |||
f28d622d10 | |||
50a68c22c5 | |||
13c99c8546 | |||
7243add30f | |||
6611a64a62 | |||
5262f61483 | |||
9dcbb4af9e | |||
0665bfac58 | |||
790e0c4d80 | |||
12f16241fb | |||
2c3a040e35 | |||
ec0dd8c6a0 | |||
7b8c27ad2c | |||
79b80c2ed2 | |||
28485e8a15 | |||
e86b4514bc | |||
179f5c7acf | |||
e7538b85e1 | |||
ab8f5a2ac4 | |||
67c22c1313 | |||
74e090239a | |||
e5f0fc6469 | |||
945987f10f | |||
4ba360e7af | |||
a8fd0c376f | |||
0e5d647238 | |||
306f227813 | |||
e89e592061 | |||
454bf554a6 | |||
eab6ca96a7 | |||
7746d2ab7a | |||
4fe38172e3 | |||
e6082e0f08 | |||
9402c19966 | |||
e9c944c0d5 | |||
b865e97973 | |||
24a364bd6b | |||
65579c0a2b | |||
de20897321 | |||
39f7bc8e9b | |||
4ade549ce2 | |||
a4d87ef011 | |||
b851c3daaf | |||
198af84b3b | |||
69ced3ae02 | |||
4a2f58561b | |||
8becaf3418 | |||
bcfbc46839 | |||
af287ee7b0 | |||
ebf3d12874 | |||
7fbdd0452e | |||
18298a856f | |||
ef6836207a | |||
5ad176adf2 | |||
011afc8b2f | |||
4c32c1503b | |||
774a8e6eeb | |||
297d7f100a | |||
0d3692a619 | |||
ba20748b07 | |||
3fc296ad0b | |||
0aba428787 | |||
4a88e29de6 | |||
0d6fced7d8 | |||
29c6c1e33b | |||
e2e8b7c114 | |||
bf2e854f12 | |||
3fbc059f2d | |||
e051e8ebd8 | |||
880a99efe5 | |||
27d5063d16 | |||
e130bca344 | |||
325d590679 | |||
f40a4b5076 | |||
89a19f6e4c | |||
9bc51c683e | |||
3d2bd4d8dd | |||
46a968d1dd | |||
49cc70eb96 | |||
143b02b51a | |||
5904fae80b | |||
6f9479a085 | |||
ce10dbfa4e | |||
394881dcd3 | |||
a6e322507c | |||
755e2f1507 | |||
d41c9eb442 | |||
dea48e6ac7 | |||
1614f3174f | |||
d18950f7bb | |||
4fe533a92f | |||
82d4e8aa4e | |||
98129d3e9a | |||
98f3b9ae97 | |||
bd69dbc0e1 | |||
ac4d6ae9f6 | |||
cdc0d0a857 | |||
3656c38aa0 | |||
fe4e364492 | |||
ce86cbe2a0 | |||
8f0e9ff534 | |||
ff60607851 | |||
b6cf27b421 | |||
9457c80d62 | |||
409035b692 | |||
7798d16e01 | |||
8f16a182aa | |||
50c68df0a1 | |||
556248c7c9 | |||
ed2e2380cc | |||
1f79b5acb7 | |||
6185e7cdc7 | |||
aedce2a6a1 | |||
fefa189ff4 | |||
b5bdad6804 | |||
1d03f92dee | |||
01b20153ca | |||
83a2728500 | |||
c57f17bff8 | |||
5533f7dd7a | |||
daebeb1192 | |||
26a08fcaac | |||
330fc8cee3 | |||
205c01038f | |||
23eb93c981 | |||
5679352c15 | |||
fb7d637da1 | |||
cee48909e9 | |||
6549b303d5 | |||
e2d6d3860c | |||
91155f9ce3 | |||
bdcd1059dd | |||
e4b6df3f27 | |||
7a6d7919c8 | |||
fda9b137a7 | |||
7686d12f1b | |||
34ee29227a | |||
334e2c466f | |||
7c944b954c | |||
427a8c91c8 | |||
22d6dd3098 | |||
36c81a30ad | |||
f7dc7faea5 | |||
62720e6c51 | |||
64dfe7e3c2 | |||
c803b4da51 | |||
3568cd601f | |||
8cad66536c | |||
220e79e668 | |||
316f43e6eb | |||
b7053dfffd | |||
fccdaaf210 | |||
cf530c6f31 | |||
94d84ae1dc | |||
de1bb03619 | |||
e41d86bd2a | |||
a10e6b7fd7 | |||
92d6d74c2d | |||
773c57b8d7 | |||
692a6be07f | |||
645323cd02 | |||
06d57a7574 | |||
102c7e4c5c | |||
7e7ed83dfe | |||
141ced8317 | |||
5109af0ab4 | |||
1a1912e391 | |||
6702652824 | |||
b04ff5bbee | |||
3daa39080a | |||
d3d6040e23 | |||
e08ccf4ca0 | |||
0e346c6e7c | |||
62187e60d4 | |||
467b1fcd14 | |||
9e2fccb045 | |||
39d8b41357 | |||
0a0f8433c6 | |||
3b61e08d3d | |||
921e1923b0 | |||
a666c20c40 | |||
1ed96fd5a5 | |||
f245dada2c | |||
7d8094d9c4 | |||
d63cba0a9d | |||
fdc3de8646 | |||
7163d333dc | |||
02bdf093e0 | |||
1ce3dfd17f | |||
ce7e539f59 | |||
12e6282316 |
@ -1,5 +1,5 @@
|
|||||||
[bumpversion]
|
[bumpversion]
|
||||||
current_version = 2024.12.2
|
current_version = 2025.2.0
|
||||||
tag = True
|
tag = True
|
||||||
commit = True
|
commit = True
|
||||||
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
|
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
|
||||||
|
@ -40,7 +40,7 @@ jobs:
|
|||||||
attestations: write
|
attestations: write
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- uses: docker/setup-qemu-action@v3.3.0
|
- uses: docker/setup-qemu-action@v3.4.0
|
||||||
- uses: docker/setup-buildx-action@v3
|
- uses: docker/setup-buildx-action@v3
|
||||||
- name: prepare variables
|
- name: prepare variables
|
||||||
uses: ./.github/actions/docker-push-variables
|
uses: ./.github/actions/docker-push-variables
|
||||||
@ -77,7 +77,7 @@ jobs:
|
|||||||
id: push
|
id: push
|
||||||
with:
|
with:
|
||||||
context: .
|
context: .
|
||||||
push: true
|
push: ${{ steps.ev.outputs.shouldPush == 'true' }}
|
||||||
secrets: |
|
secrets: |
|
||||||
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
|
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
|
||||||
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
|
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
|
||||||
@ -89,6 +89,7 @@ jobs:
|
|||||||
cache-to: ${{ steps.ev.outputs.cacheTo }}
|
cache-to: ${{ steps.ev.outputs.cacheTo }}
|
||||||
- uses: actions/attest-build-provenance@v2
|
- uses: actions/attest-build-provenance@v2
|
||||||
id: attest
|
id: attest
|
||||||
|
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
|
||||||
with:
|
with:
|
||||||
subject-name: ${{ steps.ev.outputs.attestImageNames }}
|
subject-name: ${{ steps.ev.outputs.attestImageNames }}
|
||||||
subject-digest: ${{ steps.push.outputs.digest }}
|
subject-digest: ${{ steps.push.outputs.digest }}
|
||||||
|
@ -46,6 +46,7 @@ jobs:
|
|||||||
- build-server-arm64
|
- build-server-arm64
|
||||||
outputs:
|
outputs:
|
||||||
tags: ${{ steps.ev.outputs.imageTagsJSON }}
|
tags: ${{ steps.ev.outputs.imageTagsJSON }}
|
||||||
|
shouldPush: ${{ steps.ev.outputs.shouldPush }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: prepare variables
|
- name: prepare variables
|
||||||
@ -57,6 +58,7 @@ jobs:
|
|||||||
image-name: ${{ inputs.image_name }}
|
image-name: ${{ inputs.image_name }}
|
||||||
merge-server:
|
merge-server:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
if: ${{ needs.get-tags.outputs.shouldPush == 'true' }}
|
||||||
needs:
|
needs:
|
||||||
- get-tags
|
- get-tags
|
||||||
- build-server-amd64
|
- build-server-amd64
|
||||||
|
28
.github/workflows/ci-main-daily.yml
vendored
Normal file
28
.github/workflows/ci-main-daily.yml
vendored
Normal file
@ -0,0 +1,28 @@
|
|||||||
|
---
|
||||||
|
name: authentik-ci-main-daily
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
schedule:
|
||||||
|
# Every night at 3am
|
||||||
|
- cron: "0 3 * * *"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
test-container:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
version:
|
||||||
|
- docs
|
||||||
|
- version-2024-12
|
||||||
|
- version-2024-10
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- run: |
|
||||||
|
current="$(pwd)"
|
||||||
|
dir="/tmp/authentik/${{ matrix.version }}"
|
||||||
|
mkdir -p $dir
|
||||||
|
cd $dir
|
||||||
|
wget https://${{ matrix.version }}.goauthentik.io/docker-compose.yml
|
||||||
|
${current}/scripts/test_docker.sh
|
33
.github/workflows/ci-main.yml
vendored
33
.github/workflows/ci-main.yml
vendored
@ -43,15 +43,26 @@ jobs:
|
|||||||
uses: ./.github/actions/setup
|
uses: ./.github/actions/setup
|
||||||
- name: run migrations
|
- name: run migrations
|
||||||
run: poetry run python -m lifecycle.migrate
|
run: poetry run python -m lifecycle.migrate
|
||||||
test-migrations-from-stable:
|
test-make-seed:
|
||||||
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }}
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- id: seed
|
||||||
|
run: |
|
||||||
|
echo "seed=$(printf "%d\n" "0x$(openssl rand -hex 4)")" >> "$GITHUB_OUTPUT"
|
||||||
|
outputs:
|
||||||
|
seed: ${{ steps.seed.outputs.seed }}
|
||||||
|
test-migrations-from-stable:
|
||||||
|
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
timeout-minutes: 20
|
||||||
|
needs: test-make-seed
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
psql:
|
psql:
|
||||||
- 15-alpine
|
- 15-alpine
|
||||||
- 16-alpine
|
- 16-alpine
|
||||||
|
run_id: [1, 2, 3, 4, 5]
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
@ -93,18 +104,23 @@ jobs:
|
|||||||
env:
|
env:
|
||||||
# Test in the main database that we just migrated from the previous stable version
|
# Test in the main database that we just migrated from the previous stable version
|
||||||
AUTHENTIK_POSTGRESQL__TEST__NAME: authentik
|
AUTHENTIK_POSTGRESQL__TEST__NAME: authentik
|
||||||
|
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
|
||||||
|
CI_RUN_ID: ${{ matrix.run_id }}
|
||||||
|
CI_TOTAL_RUNS: "5"
|
||||||
run: |
|
run: |
|
||||||
poetry run make test
|
poetry run make ci-test
|
||||||
test-unittest:
|
test-unittest:
|
||||||
name: test-unittest - PostgreSQL ${{ matrix.psql }}
|
name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 30
|
timeout-minutes: 20
|
||||||
|
needs: test-make-seed
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
psql:
|
psql:
|
||||||
- 15-alpine
|
- 15-alpine
|
||||||
- 16-alpine
|
- 16-alpine
|
||||||
|
run_id: [1, 2, 3, 4, 5]
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: Setup authentik env
|
- name: Setup authentik env
|
||||||
@ -112,9 +128,12 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
postgresql_version: ${{ matrix.psql }}
|
postgresql_version: ${{ matrix.psql }}
|
||||||
- name: run unittest
|
- name: run unittest
|
||||||
|
env:
|
||||||
|
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
|
||||||
|
CI_RUN_ID: ${{ matrix.run_id }}
|
||||||
|
CI_TOTAL_RUNS: "5"
|
||||||
run: |
|
run: |
|
||||||
poetry run make test
|
poetry run make ci-test
|
||||||
poetry run coverage xml
|
|
||||||
- if: ${{ always() }}
|
- if: ${{ always() }}
|
||||||
uses: codecov/codecov-action@v5
|
uses: codecov/codecov-action@v5
|
||||||
with:
|
with:
|
||||||
|
2
.github/workflows/ci-outpost.yml
vendored
2
.github/workflows/ci-outpost.yml
vendored
@ -82,7 +82,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
ref: ${{ github.event.pull_request.head.sha }}
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
uses: docker/setup-qemu-action@v3.3.0
|
uses: docker/setup-qemu-action@v3.4.0
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v3
|
uses: docker/setup-buildx-action@v3
|
||||||
- name: prepare variables
|
- name: prepare variables
|
||||||
|
10
.github/workflows/release-publish.yml
vendored
10
.github/workflows/release-publish.yml
vendored
@ -9,9 +9,17 @@ jobs:
|
|||||||
build-server:
|
build-server:
|
||||||
uses: ./.github/workflows/_reusable-docker-build.yaml
|
uses: ./.github/workflows/_reusable-docker-build.yaml
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
|
permissions:
|
||||||
|
# Needed to upload container images to ghcr.io
|
||||||
|
packages: write
|
||||||
|
# Needed for attestation
|
||||||
|
id-token: write
|
||||||
|
attestations: write
|
||||||
with:
|
with:
|
||||||
image_name: ghcr.io/goauthentik/server,beryju/authentik
|
image_name: ghcr.io/goauthentik/server,beryju/authentik
|
||||||
release: true
|
release: true
|
||||||
|
registry_dockerhub: true
|
||||||
|
registry_ghcr: true
|
||||||
build-outpost:
|
build-outpost:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
permissions:
|
permissions:
|
||||||
@ -34,7 +42,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
go-version-file: "go.mod"
|
go-version-file: "go.mod"
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
uses: docker/setup-qemu-action@v3.3.0
|
uses: docker/setup-qemu-action@v3.4.0
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v3
|
uses: docker/setup-buildx-action@v3
|
||||||
- name: prepare variables
|
- name: prepare variables
|
||||||
|
11
.github/workflows/release-tag.yml
vendored
11
.github/workflows/release-tag.yml
vendored
@ -14,16 +14,7 @@ jobs:
|
|||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: Pre-release test
|
- name: Pre-release test
|
||||||
run: |
|
run: |
|
||||||
echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env
|
make test-docker
|
||||||
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env
|
|
||||||
docker buildx install
|
|
||||||
mkdir -p ./gen-ts-api
|
|
||||||
docker build -t testing:latest .
|
|
||||||
echo "AUTHENTIK_IMAGE=testing" >> .env
|
|
||||||
echo "AUTHENTIK_TAG=latest" >> .env
|
|
||||||
docker compose up --no-start
|
|
||||||
docker compose start postgresql redis
|
|
||||||
docker compose run -u root server test-all
|
|
||||||
- id: generate_token
|
- id: generate_token
|
||||||
uses: tibdex/github-app-token@v2
|
uses: tibdex/github-app-token@v2
|
||||||
with:
|
with:
|
||||||
|
6
.github/workflows/repo-stale.yml
vendored
6
.github/workflows/repo-stale.yml
vendored
@ -1,8 +1,8 @@
|
|||||||
name: 'authentik-repo-stale'
|
name: "authentik-repo-stale"
|
||||||
|
|
||||||
on:
|
on:
|
||||||
schedule:
|
schedule:
|
||||||
- cron: '30 1 * * *'
|
- cron: "30 1 * * *"
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
permissions:
|
permissions:
|
||||||
@ -25,7 +25,7 @@ jobs:
|
|||||||
days-before-stale: 60
|
days-before-stale: 60
|
||||||
days-before-close: 7
|
days-before-close: 7
|
||||||
exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing
|
exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing
|
||||||
stale-issue-label: wontfix
|
stale-issue-label: status/stale
|
||||||
stale-issue-message: >
|
stale-issue-message: >
|
||||||
This issue has been automatically marked as stale because it has not had
|
This issue has been automatically marked as stale because it has not had
|
||||||
recent activity. It will be closed if no further activity occurs. Thank you
|
recent activity. It will be closed if no further activity occurs. Thank you
|
||||||
|
3
.gitignore
vendored
3
.gitignore
vendored
@ -209,3 +209,6 @@ source_docs/
|
|||||||
|
|
||||||
### Golang ###
|
### Golang ###
|
||||||
/vendor/
|
/vendor/
|
||||||
|
|
||||||
|
### Docker ###
|
||||||
|
docker-compose.override.yml
|
||||||
|
7
.vscode/extensions.json
vendored
7
.vscode/extensions.json
vendored
@ -2,6 +2,7 @@
|
|||||||
"recommendations": [
|
"recommendations": [
|
||||||
"bashmish.es6-string-css",
|
"bashmish.es6-string-css",
|
||||||
"bpruitt-goddard.mermaid-markdown-syntax-highlighting",
|
"bpruitt-goddard.mermaid-markdown-syntax-highlighting",
|
||||||
|
"charliermarsh.ruff",
|
||||||
"dbaeumer.vscode-eslint",
|
"dbaeumer.vscode-eslint",
|
||||||
"EditorConfig.EditorConfig",
|
"EditorConfig.EditorConfig",
|
||||||
"esbenp.prettier-vscode",
|
"esbenp.prettier-vscode",
|
||||||
@ -10,12 +11,12 @@
|
|||||||
"Gruntfuggly.todo-tree",
|
"Gruntfuggly.todo-tree",
|
||||||
"mechatroner.rainbow-csv",
|
"mechatroner.rainbow-csv",
|
||||||
"ms-python.black-formatter",
|
"ms-python.black-formatter",
|
||||||
"charliermarsh.ruff",
|
"ms-python.black-formatter",
|
||||||
|
"ms-python.debugpy",
|
||||||
"ms-python.python",
|
"ms-python.python",
|
||||||
"ms-python.vscode-pylance",
|
"ms-python.vscode-pylance",
|
||||||
"ms-python.black-formatter",
|
|
||||||
"redhat.vscode-yaml",
|
"redhat.vscode-yaml",
|
||||||
"Tobermory.es6-string-html",
|
"Tobermory.es6-string-html",
|
||||||
"unifiedjs.vscode-mdx"
|
"unifiedjs.vscode-mdx",
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
66
.vscode/launch.json
vendored
66
.vscode/launch.json
vendored
@ -2,26 +2,76 @@
|
|||||||
"version": "0.2.0",
|
"version": "0.2.0",
|
||||||
"configurations": [
|
"configurations": [
|
||||||
{
|
{
|
||||||
"name": "Python: PDB attach Server",
|
"name": "Debug: Attach Server Core",
|
||||||
"type": "python",
|
"type": "debugpy",
|
||||||
"request": "attach",
|
"request": "attach",
|
||||||
"connect": {
|
"connect": {
|
||||||
"host": "localhost",
|
"host": "localhost",
|
||||||
"port": 6800
|
"port": 9901
|
||||||
},
|
},
|
||||||
"justMyCode": true,
|
"pathMappings": [
|
||||||
|
{
|
||||||
|
"localRoot": "${workspaceFolder}",
|
||||||
|
"remoteRoot": "."
|
||||||
|
}
|
||||||
|
],
|
||||||
"django": true
|
"django": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "Python: PDB attach Worker",
|
"name": "Debug: Attach Worker",
|
||||||
"type": "python",
|
"type": "debugpy",
|
||||||
"request": "attach",
|
"request": "attach",
|
||||||
"connect": {
|
"connect": {
|
||||||
"host": "localhost",
|
"host": "localhost",
|
||||||
"port": 6900
|
"port": 9901
|
||||||
},
|
},
|
||||||
"justMyCode": true,
|
"pathMappings": [
|
||||||
|
{
|
||||||
|
"localRoot": "${workspaceFolder}",
|
||||||
|
"remoteRoot": "."
|
||||||
|
}
|
||||||
|
],
|
||||||
"django": true
|
"django": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Debug: Start Server Router",
|
||||||
|
"type": "go",
|
||||||
|
"request": "launch",
|
||||||
|
"mode": "auto",
|
||||||
|
"program": "${workspaceFolder}/cmd/server",
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Debug: Start LDAP Outpost",
|
||||||
|
"type": "go",
|
||||||
|
"request": "launch",
|
||||||
|
"mode": "auto",
|
||||||
|
"program": "${workspaceFolder}/cmd/ldap",
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Debug: Start Proxy Outpost",
|
||||||
|
"type": "go",
|
||||||
|
"request": "launch",
|
||||||
|
"mode": "auto",
|
||||||
|
"program": "${workspaceFolder}/cmd/proxy",
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Debug: Start RAC Outpost",
|
||||||
|
"type": "go",
|
||||||
|
"request": "launch",
|
||||||
|
"mode": "auto",
|
||||||
|
"program": "${workspaceFolder}/cmd/rac",
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Debug: Start Radius Outpost",
|
||||||
|
"type": "go",
|
||||||
|
"request": "launch",
|
||||||
|
"mode": "auto",
|
||||||
|
"program": "${workspaceFolder}/cmd/radius",
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
@ -94,7 +94,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
|
|||||||
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
|
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
|
||||||
|
|
||||||
# Stage 5: Python dependencies
|
# Stage 5: Python dependencies
|
||||||
FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips AS python-deps
|
FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS python-deps
|
||||||
|
|
||||||
ARG TARGETARCH
|
ARG TARGETARCH
|
||||||
ARG TARGETVARIANT
|
ARG TARGETVARIANT
|
||||||
@ -139,7 +139,7 @@ RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
|
|||||||
poetry install --only=main --no-ansi --no-interaction --no-root"
|
poetry install --only=main --no-ansi --no-interaction --no-root"
|
||||||
|
|
||||||
# Stage 6: Run
|
# Stage 6: Run
|
||||||
FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips AS final-image
|
FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS final-image
|
||||||
|
|
||||||
ARG VERSION
|
ARG VERSION
|
||||||
ARG GIT_BUILD_HASH
|
ARG GIT_BUILD_HASH
|
||||||
|
28
Makefile
28
Makefile
@ -6,6 +6,8 @@ UID = $(shell id -u)
|
|||||||
GID = $(shell id -g)
|
GID = $(shell id -g)
|
||||||
NPM_VERSION = $(shell python -m scripts.npm_version)
|
NPM_VERSION = $(shell python -m scripts.npm_version)
|
||||||
PY_SOURCES = authentik tests scripts lifecycle .github
|
PY_SOURCES = authentik tests scripts lifecycle .github
|
||||||
|
GO_SOURCES = cmd internal
|
||||||
|
WEB_SOURCES = web/src web/packages
|
||||||
DOCKER_IMAGE ?= "authentik:test"
|
DOCKER_IMAGE ?= "authentik:test"
|
||||||
|
|
||||||
GEN_API_TS = "gen-ts-api"
|
GEN_API_TS = "gen-ts-api"
|
||||||
@ -20,10 +22,11 @@ CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
|
|||||||
-I .github/codespell-words.txt \
|
-I .github/codespell-words.txt \
|
||||||
-S 'web/src/locales/**' \
|
-S 'web/src/locales/**' \
|
||||||
-S 'website/docs/developer-docs/api/reference/**' \
|
-S 'website/docs/developer-docs/api/reference/**' \
|
||||||
authentik \
|
-S '**/node_modules/**' \
|
||||||
internal \
|
-S '**/dist/**' \
|
||||||
cmd \
|
$(PY_SOURCES) \
|
||||||
web/src \
|
$(GO_SOURCES) \
|
||||||
|
$(WEB_SOURCES) \
|
||||||
website/src \
|
website/src \
|
||||||
website/blog \
|
website/blog \
|
||||||
website/docs \
|
website/docs \
|
||||||
@ -45,15 +48,6 @@ help: ## Show this help
|
|||||||
go-test:
|
go-test:
|
||||||
go test -timeout 0 -v -race -cover ./...
|
go test -timeout 0 -v -race -cover ./...
|
||||||
|
|
||||||
test-docker: ## Run all tests in a docker-compose
|
|
||||||
echo "PG_PASS=$(shell openssl rand 32 | base64 -w 0)" >> .env
|
|
||||||
echo "AUTHENTIK_SECRET_KEY=$(shell openssl rand 32 | base64 -w 0)" >> .env
|
|
||||||
docker compose pull -q
|
|
||||||
docker compose up --no-start
|
|
||||||
docker compose start postgresql redis
|
|
||||||
docker compose run -u root server test-all
|
|
||||||
rm -f .env
|
|
||||||
|
|
||||||
test: ## Run the server tests and produce a coverage report (locally)
|
test: ## Run the server tests and produce a coverage report (locally)
|
||||||
coverage run manage.py test --keepdb authentik
|
coverage run manage.py test --keepdb authentik
|
||||||
coverage html
|
coverage html
|
||||||
@ -263,6 +257,9 @@ docker: ## Build a docker image of the current source tree
|
|||||||
mkdir -p ${GEN_API_TS}
|
mkdir -p ${GEN_API_TS}
|
||||||
DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}
|
DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}
|
||||||
|
|
||||||
|
test-docker:
|
||||||
|
BUILD=true ./scripts/test_docker.sh
|
||||||
|
|
||||||
#########################
|
#########################
|
||||||
## CI
|
## CI
|
||||||
#########################
|
#########################
|
||||||
@ -287,3 +284,8 @@ ci-bandit: ci--meta-debug
|
|||||||
|
|
||||||
ci-pending-migrations: ci--meta-debug
|
ci-pending-migrations: ci--meta-debug
|
||||||
ak makemigrations --check
|
ak makemigrations --check
|
||||||
|
|
||||||
|
ci-test: ci--meta-debug
|
||||||
|
coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik
|
||||||
|
coverage report
|
||||||
|
coverage xml
|
||||||
|
@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
from os import environ
|
from os import environ
|
||||||
|
|
||||||
__version__ = "2024.12.2"
|
__version__ = "2025.2.0"
|
||||||
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
|
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
|
||||||
|
|
||||||
|
|
||||||
|
@ -50,7 +50,7 @@ from authentik.enterprise.providers.microsoft_entra.models import (
|
|||||||
MicrosoftEntraProviderGroup,
|
MicrosoftEntraProviderGroup,
|
||||||
MicrosoftEntraProviderUser,
|
MicrosoftEntraProviderUser,
|
||||||
)
|
)
|
||||||
from authentik.enterprise.providers.rac.models import ConnectionToken
|
from authentik.enterprise.providers.ssf.models import StreamEvent
|
||||||
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import (
|
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import (
|
||||||
EndpointDevice,
|
EndpointDevice,
|
||||||
EndpointDeviceConnection,
|
EndpointDeviceConnection,
|
||||||
@ -71,6 +71,7 @@ from authentik.providers.oauth2.models import (
|
|||||||
DeviceToken,
|
DeviceToken,
|
||||||
RefreshToken,
|
RefreshToken,
|
||||||
)
|
)
|
||||||
|
from authentik.providers.rac.models import ConnectionToken
|
||||||
from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser
|
from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser
|
||||||
from authentik.rbac.models import Role
|
from authentik.rbac.models import Role
|
||||||
from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser
|
from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser
|
||||||
@ -131,6 +132,7 @@ def excluded_models() -> list[type[Model]]:
|
|||||||
EndpointDevice,
|
EndpointDevice,
|
||||||
EndpointDeviceConnection,
|
EndpointDeviceConnection,
|
||||||
DeviceToken,
|
DeviceToken,
|
||||||
|
StreamEvent,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@ -3,6 +3,7 @@
|
|||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
from drf_spectacular.types import OpenApiTypes
|
from drf_spectacular.types import OpenApiTypes
|
||||||
from drf_spectacular.utils import OpenApiParameter, extend_schema
|
from drf_spectacular.utils import OpenApiParameter, extend_schema
|
||||||
|
from guardian.shortcuts import get_objects_for_user
|
||||||
from rest_framework.fields import (
|
from rest_framework.fields import (
|
||||||
BooleanField,
|
BooleanField,
|
||||||
CharField,
|
CharField,
|
||||||
@ -16,7 +17,6 @@ from rest_framework.viewsets import ViewSet
|
|||||||
|
|
||||||
from authentik.core.api.utils import MetaNameSerializer
|
from authentik.core.api.utils import MetaNameSerializer
|
||||||
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice
|
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice
|
||||||
from authentik.rbac.decorators import permission_required
|
|
||||||
from authentik.stages.authenticator import device_classes, devices_for_user
|
from authentik.stages.authenticator import device_classes, devices_for_user
|
||||||
from authentik.stages.authenticator.models import Device
|
from authentik.stages.authenticator.models import Device
|
||||||
from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
|
from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
|
||||||
@ -73,7 +73,9 @@ class AdminDeviceViewSet(ViewSet):
|
|||||||
def get_devices(self, **kwargs):
|
def get_devices(self, **kwargs):
|
||||||
"""Get all devices in all child classes"""
|
"""Get all devices in all child classes"""
|
||||||
for model in device_classes():
|
for model in device_classes():
|
||||||
device_set = model.objects.filter(**kwargs)
|
device_set = get_objects_for_user(
|
||||||
|
self.request.user, f"{model._meta.app_label}.view_{model._meta.model_name}", model
|
||||||
|
).filter(**kwargs)
|
||||||
yield from device_set
|
yield from device_set
|
||||||
|
|
||||||
@extend_schema(
|
@extend_schema(
|
||||||
@ -86,10 +88,6 @@ class AdminDeviceViewSet(ViewSet):
|
|||||||
],
|
],
|
||||||
responses={200: DeviceSerializer(many=True)},
|
responses={200: DeviceSerializer(many=True)},
|
||||||
)
|
)
|
||||||
@permission_required(
|
|
||||||
None,
|
|
||||||
[f"{model._meta.app_label}.view_{model._meta.model_name}" for model in device_classes()],
|
|
||||||
)
|
|
||||||
def list(self, request: Request) -> Response:
|
def list(self, request: Request) -> Response:
|
||||||
"""Get all devices for current user"""
|
"""Get all devices for current user"""
|
||||||
kwargs = {}
|
kwargs = {}
|
||||||
|
@ -4,6 +4,7 @@ from json import loads
|
|||||||
|
|
||||||
from django.db.models import Prefetch
|
from django.db.models import Prefetch
|
||||||
from django.http import Http404
|
from django.http import Http404
|
||||||
|
from django.utils.translation import gettext as _
|
||||||
from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
|
from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
|
||||||
from django_filters.filterset import FilterSet
|
from django_filters.filterset import FilterSet
|
||||||
from drf_spectacular.utils import (
|
from drf_spectacular.utils import (
|
||||||
@ -81,9 +82,37 @@ class GroupSerializer(ModelSerializer):
|
|||||||
if not self.instance or not parent:
|
if not self.instance or not parent:
|
||||||
return parent
|
return parent
|
||||||
if str(parent.group_uuid) == str(self.instance.group_uuid):
|
if str(parent.group_uuid) == str(self.instance.group_uuid):
|
||||||
raise ValidationError("Cannot set group as parent of itself.")
|
raise ValidationError(_("Cannot set group as parent of itself."))
|
||||||
return parent
|
return parent
|
||||||
|
|
||||||
|
def validate_is_superuser(self, superuser: bool):
|
||||||
|
"""Ensure that the user creating this group has permissions to set the superuser flag"""
|
||||||
|
request: Request = self.context.get("request", None)
|
||||||
|
if not request:
|
||||||
|
return superuser
|
||||||
|
# If we're updating an instance, and the state hasn't changed, we don't need to check perms
|
||||||
|
if self.instance and superuser == self.instance.is_superuser:
|
||||||
|
return superuser
|
||||||
|
user: User = request.user
|
||||||
|
perm = (
|
||||||
|
"authentik_core.enable_group_superuser"
|
||||||
|
if superuser
|
||||||
|
else "authentik_core.disable_group_superuser"
|
||||||
|
)
|
||||||
|
has_perm = user.has_perm(perm)
|
||||||
|
if self.instance and not has_perm:
|
||||||
|
has_perm = user.has_perm(perm, self.instance)
|
||||||
|
if not has_perm:
|
||||||
|
raise ValidationError(
|
||||||
|
_(
|
||||||
|
(
|
||||||
|
"User does not have permission to set "
|
||||||
|
"superuser status to {superuser_status}."
|
||||||
|
).format_map({"superuser_status": superuser})
|
||||||
|
)
|
||||||
|
)
|
||||||
|
return superuser
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Group
|
model = Group
|
||||||
fields = [
|
fields = [
|
||||||
|
@ -85,7 +85,7 @@ class SourceViewSet(
|
|||||||
serializer_class = SourceSerializer
|
serializer_class = SourceSerializer
|
||||||
lookup_field = "slug"
|
lookup_field = "slug"
|
||||||
search_fields = ["slug", "name"]
|
search_fields = ["slug", "name"]
|
||||||
filterset_fields = ["slug", "name", "managed"]
|
filterset_fields = ["slug", "name", "managed", "pbm_uuid"]
|
||||||
|
|
||||||
def get_queryset(self): # pragma: no cover
|
def get_queryset(self): # pragma: no cover
|
||||||
return Source.objects.select_subclasses()
|
return Source.objects.select_subclasses()
|
||||||
|
@ -236,9 +236,11 @@ class UserSerializer(ModelSerializer):
|
|||||||
"path",
|
"path",
|
||||||
"type",
|
"type",
|
||||||
"uuid",
|
"uuid",
|
||||||
|
"password_change_date",
|
||||||
]
|
]
|
||||||
extra_kwargs = {
|
extra_kwargs = {
|
||||||
"name": {"allow_blank": True},
|
"name": {"allow_blank": True},
|
||||||
|
"password_change_date": {"read_only": True},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -5,6 +5,7 @@ from typing import TextIO
|
|||||||
from daphne.management.commands.runserver import Command as RunServer
|
from daphne.management.commands.runserver import Command as RunServer
|
||||||
from daphne.server import Server
|
from daphne.server import Server
|
||||||
|
|
||||||
|
from authentik.lib.debug import start_debug_server
|
||||||
from authentik.root.signals import post_startup, pre_startup, startup
|
from authentik.root.signals import post_startup, pre_startup, startup
|
||||||
|
|
||||||
|
|
||||||
@ -13,6 +14,7 @@ class SignalServer(Server):
|
|||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
|
start_debug_server()
|
||||||
|
|
||||||
def ready_callable():
|
def ready_callable():
|
||||||
pre_startup.send(sender=self)
|
pre_startup.send(sender=self)
|
||||||
|
@ -9,6 +9,7 @@ from django.db import close_old_connections
|
|||||||
from structlog.stdlib import get_logger
|
from structlog.stdlib import get_logger
|
||||||
|
|
||||||
from authentik.lib.config import CONFIG
|
from authentik.lib.config import CONFIG
|
||||||
|
from authentik.lib.debug import start_debug_server
|
||||||
from authentik.root.celery import CELERY_APP
|
from authentik.root.celery import CELERY_APP
|
||||||
|
|
||||||
LOGGER = get_logger()
|
LOGGER = get_logger()
|
||||||
@ -28,10 +29,7 @@ class Command(BaseCommand):
|
|||||||
def handle(self, **options):
|
def handle(self, **options):
|
||||||
LOGGER.debug("Celery options", **options)
|
LOGGER.debug("Celery options", **options)
|
||||||
close_old_connections()
|
close_old_connections()
|
||||||
if CONFIG.get_bool("remote_debug"):
|
start_debug_server()
|
||||||
import debugpy
|
|
||||||
|
|
||||||
debugpy.listen(("0.0.0.0", 6900)) # nosec
|
|
||||||
worker: Worker = CELERY_APP.Worker(
|
worker: Worker = CELERY_APP.Worker(
|
||||||
no_color=False,
|
no_color=False,
|
||||||
quiet=True,
|
quiet=True,
|
||||||
|
26
authentik/core/migrations/0043_alter_group_options.py
Normal file
26
authentik/core/migrations/0043_alter_group_options.py
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
# Generated by Django 5.0.11 on 2025-01-30 23:55
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("authentik_core", "0042_authenticatedsession_authentik_c_expires_08251d_idx_and_more"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name="group",
|
||||||
|
options={
|
||||||
|
"permissions": [
|
||||||
|
("add_user_to_group", "Add user to group"),
|
||||||
|
("remove_user_from_group", "Remove user from group"),
|
||||||
|
("enable_group_superuser", "Enable superuser status"),
|
||||||
|
("disable_group_superuser", "Disable superuser status"),
|
||||||
|
],
|
||||||
|
"verbose_name": "Group",
|
||||||
|
"verbose_name_plural": "Groups",
|
||||||
|
},
|
||||||
|
),
|
||||||
|
]
|
@ -204,6 +204,8 @@ class Group(SerializerModel, AttributesMixin):
|
|||||||
permissions = [
|
permissions = [
|
||||||
("add_user_to_group", _("Add user to group")),
|
("add_user_to_group", _("Add user to group")),
|
||||||
("remove_user_from_group", _("Remove user from group")),
|
("remove_user_from_group", _("Remove user from group")),
|
||||||
|
("enable_group_superuser", _("Enable superuser status")),
|
||||||
|
("disable_group_superuser", _("Disable superuser status")),
|
||||||
]
|
]
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
@ -599,6 +601,14 @@ class Application(SerializerModel, PolicyBindingModel):
|
|||||||
return None
|
return None
|
||||||
return candidates[-1]
|
return candidates[-1]
|
||||||
|
|
||||||
|
def backchannel_provider_for[T: Provider](self, provider_type: type[T], **kwargs) -> T | None:
|
||||||
|
"""Get Backchannel provider for a specific type"""
|
||||||
|
providers = self.backchannel_providers.filter(
|
||||||
|
**{f"{provider_type._meta.model_name}__isnull": False},
|
||||||
|
**kwargs,
|
||||||
|
)
|
||||||
|
return getattr(providers.first(), provider_type._meta.model_name)
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return str(self.name)
|
return str(self.name)
|
||||||
|
|
||||||
|
@ -35,8 +35,7 @@ from authentik.flows.planner import (
|
|||||||
FlowPlanner,
|
FlowPlanner,
|
||||||
)
|
)
|
||||||
from authentik.flows.stage import StageView
|
from authentik.flows.stage import StageView
|
||||||
from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_GET, SESSION_KEY_PLAN
|
from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_GET
|
||||||
from authentik.lib.utils.urls import redirect_with_qs
|
|
||||||
from authentik.lib.views import bad_request_message
|
from authentik.lib.views import bad_request_message
|
||||||
from authentik.policies.denied import AccessDeniedResponse
|
from authentik.policies.denied import AccessDeniedResponse
|
||||||
from authentik.policies.utils import delete_none_values
|
from authentik.policies.utils import delete_none_values
|
||||||
@ -47,8 +46,9 @@ from authentik.stages.user_write.stage import PLAN_CONTEXT_USER_PATH
|
|||||||
|
|
||||||
LOGGER = get_logger()
|
LOGGER = get_logger()
|
||||||
|
|
||||||
SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token" # nosec
|
|
||||||
PLAN_CONTEXT_SOURCE_GROUPS = "source_groups"
|
PLAN_CONTEXT_SOURCE_GROUPS = "source_groups"
|
||||||
|
SESSION_KEY_SOURCE_FLOW_STAGES = "authentik/flows/source_flow_stages"
|
||||||
|
SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token" # nosec
|
||||||
|
|
||||||
|
|
||||||
class MessageStage(StageView):
|
class MessageStage(StageView):
|
||||||
@ -219,9 +219,17 @@ class SourceFlowManager:
|
|||||||
}
|
}
|
||||||
)
|
)
|
||||||
flow_context.update(self.policy_context)
|
flow_context.update(self.policy_context)
|
||||||
|
flow_context.setdefault(PLAN_CONTEXT_REDIRECT, final_redirect)
|
||||||
|
|
||||||
|
if not flow:
|
||||||
|
# We only check for the flow token here if we don't have a flow, otherwise we rely on
|
||||||
|
# SESSION_KEY_SOURCE_FLOW_STAGES to delegate the usage of this token and dynamically add
|
||||||
|
# stages that deal with this token to return to another flow
|
||||||
if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session:
|
if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session:
|
||||||
token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN)
|
token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN)
|
||||||
self._logger.info("Replacing source flow with overridden flow", flow=token.flow.slug)
|
self._logger.info(
|
||||||
|
"Replacing source flow with overridden flow", flow=token.flow.slug
|
||||||
|
)
|
||||||
plan = token.plan
|
plan = token.plan
|
||||||
plan.context[PLAN_CONTEXT_IS_RESTORED] = token
|
plan.context[PLAN_CONTEXT_IS_RESTORED] = token
|
||||||
plan.context.update(flow_context)
|
plan.context.update(flow_context)
|
||||||
@ -230,17 +238,9 @@ class SourceFlowManager:
|
|||||||
if stages:
|
if stages:
|
||||||
for stage in stages:
|
for stage in stages:
|
||||||
plan.append_stage(stage)
|
plan.append_stage(stage)
|
||||||
self.request.session[SESSION_KEY_PLAN] = plan
|
redirect = plan.to_redirect(self.request, token.flow)
|
||||||
flow_slug = token.flow.slug
|
|
||||||
token.delete()
|
token.delete()
|
||||||
return redirect_with_qs(
|
return redirect
|
||||||
"authentik_core:if-flow",
|
|
||||||
self.request.GET,
|
|
||||||
flow_slug=flow_slug,
|
|
||||||
)
|
|
||||||
flow_context.setdefault(PLAN_CONTEXT_REDIRECT, final_redirect)
|
|
||||||
|
|
||||||
if not flow:
|
|
||||||
return bad_request_message(
|
return bad_request_message(
|
||||||
self.request,
|
self.request,
|
||||||
_("Configured flow does not exist."),
|
_("Configured flow does not exist."),
|
||||||
@ -259,6 +259,8 @@ class SourceFlowManager:
|
|||||||
if stages:
|
if stages:
|
||||||
for stage in stages:
|
for stage in stages:
|
||||||
plan.append_stage(stage)
|
plan.append_stage(stage)
|
||||||
|
for stage in self.request.session.get(SESSION_KEY_SOURCE_FLOW_STAGES, []):
|
||||||
|
plan.append_stage(stage)
|
||||||
return plan.to_redirect(self.request, flow)
|
return plan.to_redirect(self.request, flow)
|
||||||
|
|
||||||
def handle_auth(
|
def handle_auth(
|
||||||
@ -295,6 +297,8 @@ class SourceFlowManager:
|
|||||||
# When request isn't authenticated we jump straight to auth
|
# When request isn't authenticated we jump straight to auth
|
||||||
if not self.request.user.is_authenticated:
|
if not self.request.user.is_authenticated:
|
||||||
return self.handle_auth(connection)
|
return self.handle_auth(connection)
|
||||||
|
# When an override flow token exists we actually still use a flow for link
|
||||||
|
# to continue the existing flow we came from
|
||||||
if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session:
|
if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session:
|
||||||
return self._prepare_flow(None, connection)
|
return self._prepare_flow(None, connection)
|
||||||
connection.save()
|
connection.save()
|
||||||
|
@ -67,6 +67,8 @@ def clean_expired_models(self: SystemTask):
|
|||||||
raise ImproperlyConfigured(
|
raise ImproperlyConfigured(
|
||||||
"Invalid session_storage setting, allowed values are db and cache"
|
"Invalid session_storage setting, allowed values are db and cache"
|
||||||
)
|
)
|
||||||
|
if CONFIG.get("session_storage", "cache") == "db":
|
||||||
|
DBSessionStore.clear_expired()
|
||||||
LOGGER.debug("Expired sessions", model=AuthenticatedSession, amount=amount)
|
LOGGER.debug("Expired sessions", model=AuthenticatedSession, amount=amount)
|
||||||
|
|
||||||
messages.append(f"Expired {amount} {AuthenticatedSession._meta.verbose_name_plural}")
|
messages.append(f"Expired {amount} {AuthenticatedSession._meta.verbose_name_plural}")
|
||||||
|
@ -8,6 +8,8 @@
|
|||||||
<head>
|
<head>
|
||||||
<meta charset="UTF-8">
|
<meta charset="UTF-8">
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
|
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
|
||||||
|
{# Darkreader breaks the site regardless of theme as its not compatible with webcomponents, and we default to a dark theme based on preferred colour-scheme #}
|
||||||
|
<meta name="darkreader-lock">
|
||||||
<title>{% block title %}{% trans title|default:brand.branding_title %}{% endblock %}</title>
|
<title>{% block title %}{% trans title|default:brand.branding_title %}{% endblock %}</title>
|
||||||
<link rel="icon" href="{{ brand.branding_favicon_url }}">
|
<link rel="icon" href="{{ brand.branding_favicon_url }}">
|
||||||
<link rel="shortcut icon" href="{{ brand.branding_favicon_url }}">
|
<link rel="shortcut icon" href="{{ brand.branding_favicon_url }}">
|
||||||
|
@ -4,7 +4,7 @@ from django.urls.base import reverse
|
|||||||
from guardian.shortcuts import assign_perm
|
from guardian.shortcuts import assign_perm
|
||||||
from rest_framework.test import APITestCase
|
from rest_framework.test import APITestCase
|
||||||
|
|
||||||
from authentik.core.models import Group, User
|
from authentik.core.models import Group
|
||||||
from authentik.core.tests.utils import create_test_admin_user, create_test_user
|
from authentik.core.tests.utils import create_test_admin_user, create_test_user
|
||||||
from authentik.lib.generators import generate_id
|
from authentik.lib.generators import generate_id
|
||||||
|
|
||||||
@ -14,7 +14,7 @@ class TestGroupsAPI(APITestCase):
|
|||||||
|
|
||||||
def setUp(self) -> None:
|
def setUp(self) -> None:
|
||||||
self.login_user = create_test_user()
|
self.login_user = create_test_user()
|
||||||
self.user = User.objects.create(username="test-user")
|
self.user = create_test_user()
|
||||||
|
|
||||||
def test_list_with_users(self):
|
def test_list_with_users(self):
|
||||||
"""Test listing with users"""
|
"""Test listing with users"""
|
||||||
@ -109,3 +109,57 @@ class TestGroupsAPI(APITestCase):
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
self.assertEqual(res.status_code, 400)
|
self.assertEqual(res.status_code, 400)
|
||||||
|
|
||||||
|
def test_superuser_no_perm(self):
|
||||||
|
"""Test creating a superuser group without permission"""
|
||||||
|
assign_perm("authentik_core.add_group", self.login_user)
|
||||||
|
self.client.force_login(self.login_user)
|
||||||
|
res = self.client.post(
|
||||||
|
reverse("authentik_api:group-list"),
|
||||||
|
data={"name": generate_id(), "is_superuser": True},
|
||||||
|
)
|
||||||
|
self.assertEqual(res.status_code, 400)
|
||||||
|
self.assertJSONEqual(
|
||||||
|
res.content,
|
||||||
|
{"is_superuser": ["User does not have permission to set superuser status to True."]},
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_superuser_update_no_perm(self):
|
||||||
|
"""Test updating a superuser group without permission"""
|
||||||
|
group = Group.objects.create(name=generate_id(), is_superuser=True)
|
||||||
|
assign_perm("view_group", self.login_user, group)
|
||||||
|
assign_perm("change_group", self.login_user, group)
|
||||||
|
self.client.force_login(self.login_user)
|
||||||
|
res = self.client.patch(
|
||||||
|
reverse("authentik_api:group-detail", kwargs={"pk": group.pk}),
|
||||||
|
data={"is_superuser": False},
|
||||||
|
)
|
||||||
|
self.assertEqual(res.status_code, 400)
|
||||||
|
self.assertJSONEqual(
|
||||||
|
res.content,
|
||||||
|
{"is_superuser": ["User does not have permission to set superuser status to False."]},
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_superuser_update_no_change(self):
|
||||||
|
"""Test updating a superuser group without permission
|
||||||
|
and without changing the superuser status"""
|
||||||
|
group = Group.objects.create(name=generate_id(), is_superuser=True)
|
||||||
|
assign_perm("view_group", self.login_user, group)
|
||||||
|
assign_perm("change_group", self.login_user, group)
|
||||||
|
self.client.force_login(self.login_user)
|
||||||
|
res = self.client.patch(
|
||||||
|
reverse("authentik_api:group-detail", kwargs={"pk": group.pk}),
|
||||||
|
data={"name": generate_id(), "is_superuser": True},
|
||||||
|
)
|
||||||
|
self.assertEqual(res.status_code, 200)
|
||||||
|
|
||||||
|
def test_superuser_create(self):
|
||||||
|
"""Test creating a superuser group with permission"""
|
||||||
|
assign_perm("authentik_core.add_group", self.login_user)
|
||||||
|
assign_perm("authentik_core.enable_group_superuser", self.login_user)
|
||||||
|
self.client.force_login(self.login_user)
|
||||||
|
res = self.client.post(
|
||||||
|
reverse("authentik_api:group-list"),
|
||||||
|
data={"name": generate_id(), "is_superuser": True},
|
||||||
|
)
|
||||||
|
self.assertEqual(res.status_code, 201)
|
||||||
|
@ -97,6 +97,8 @@ class EnterpriseAuditMiddleware(AuditMiddleware):
|
|||||||
thread_kwargs: dict | None = None,
|
thread_kwargs: dict | None = None,
|
||||||
**_,
|
**_,
|
||||||
):
|
):
|
||||||
|
if not self.enabled:
|
||||||
|
return super().post_save_handler(request, sender, instance, created, thread_kwargs, **_)
|
||||||
if not should_log_model(instance):
|
if not should_log_model(instance):
|
||||||
return None
|
return None
|
||||||
thread_kwargs = {}
|
thread_kwargs = {}
|
||||||
@ -122,6 +124,8 @@ class EnterpriseAuditMiddleware(AuditMiddleware):
|
|||||||
):
|
):
|
||||||
thread_kwargs = {}
|
thread_kwargs = {}
|
||||||
m2m_field = None
|
m2m_field = None
|
||||||
|
if not self.enabled:
|
||||||
|
return super().m2m_changed_handler(request, sender, instance, action, thread_kwargs)
|
||||||
# For the audit log we don't care about `pre_` or `post_` so we trim that part off
|
# For the audit log we don't care about `pre_` or `post_` so we trim that part off
|
||||||
_, _, action_direction = action.partition("_")
|
_, _, action_direction = action.partition("_")
|
||||||
# resolve the "through" model to an actual field
|
# resolve the "through" model to an actual field
|
||||||
|
@ -1,14 +0,0 @@
|
|||||||
"""RAC app config"""
|
|
||||||
|
|
||||||
from authentik.enterprise.apps import EnterpriseConfig
|
|
||||||
|
|
||||||
|
|
||||||
class AuthentikEnterpriseProviderRAC(EnterpriseConfig):
|
|
||||||
"""authentik enterprise rac app config"""
|
|
||||||
|
|
||||||
name = "authentik.enterprise.providers.rac"
|
|
||||||
label = "authentik_providers_rac"
|
|
||||||
verbose_name = "authentik Enterprise.Providers.RAC"
|
|
||||||
default = True
|
|
||||||
mountpoint = ""
|
|
||||||
ws_mountpoint = "authentik.enterprise.providers.rac.urls"
|
|
64
authentik/enterprise/providers/ssf/api/providers.py
Normal file
64
authentik/enterprise/providers/ssf/api/providers.py
Normal file
@ -0,0 +1,64 @@
|
|||||||
|
"""SSF Provider API Views"""
|
||||||
|
|
||||||
|
from django.urls import reverse
|
||||||
|
from rest_framework.fields import SerializerMethodField
|
||||||
|
from rest_framework.request import Request
|
||||||
|
from rest_framework.viewsets import ModelViewSet
|
||||||
|
|
||||||
|
from authentik.core.api.providers import ProviderSerializer
|
||||||
|
from authentik.core.api.tokens import TokenSerializer
|
||||||
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
|
from authentik.enterprise.api import EnterpriseRequiredMixin
|
||||||
|
from authentik.enterprise.providers.ssf.models import SSFProvider
|
||||||
|
|
||||||
|
|
||||||
|
class SSFProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
|
||||||
|
"""SSFProvider Serializer"""
|
||||||
|
|
||||||
|
ssf_url = SerializerMethodField()
|
||||||
|
token_obj = TokenSerializer(source="token", required=False, read_only=True)
|
||||||
|
|
||||||
|
def get_ssf_url(self, instance: SSFProvider) -> str | None:
|
||||||
|
request: Request = self._context.get("request")
|
||||||
|
if not request:
|
||||||
|
return None
|
||||||
|
if not instance.backchannel_application:
|
||||||
|
return None
|
||||||
|
return request.build_absolute_uri(
|
||||||
|
reverse(
|
||||||
|
"authentik_providers_ssf:configuration",
|
||||||
|
kwargs={
|
||||||
|
"application_slug": instance.backchannel_application.slug,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = SSFProvider
|
||||||
|
fields = [
|
||||||
|
"pk",
|
||||||
|
"name",
|
||||||
|
"component",
|
||||||
|
"verbose_name",
|
||||||
|
"verbose_name_plural",
|
||||||
|
"meta_model_name",
|
||||||
|
"signing_key",
|
||||||
|
"token_obj",
|
||||||
|
"oidc_auth_providers",
|
||||||
|
"ssf_url",
|
||||||
|
"event_retention",
|
||||||
|
]
|
||||||
|
extra_kwargs = {}
|
||||||
|
|
||||||
|
|
||||||
|
class SSFProviderViewSet(UsedByMixin, ModelViewSet):
|
||||||
|
"""SSFProvider Viewset"""
|
||||||
|
|
||||||
|
queryset = SSFProvider.objects.all()
|
||||||
|
serializer_class = SSFProviderSerializer
|
||||||
|
filterset_fields = {
|
||||||
|
"application": ["isnull"],
|
||||||
|
"name": ["iexact"],
|
||||||
|
}
|
||||||
|
search_fields = ["name"]
|
||||||
|
ordering = ["name"]
|
37
authentik/enterprise/providers/ssf/api/streams.py
Normal file
37
authentik/enterprise/providers/ssf/api/streams.py
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
"""SSF Stream API Views"""
|
||||||
|
|
||||||
|
from rest_framework.viewsets import ReadOnlyModelViewSet
|
||||||
|
|
||||||
|
from authentik.core.api.utils import ModelSerializer
|
||||||
|
from authentik.enterprise.providers.ssf.api.providers import SSFProviderSerializer
|
||||||
|
from authentik.enterprise.providers.ssf.models import Stream
|
||||||
|
|
||||||
|
|
||||||
|
class SSFStreamSerializer(ModelSerializer):
|
||||||
|
"""SSFStream Serializer"""
|
||||||
|
|
||||||
|
provider_obj = SSFProviderSerializer(source="provider", read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Stream
|
||||||
|
fields = [
|
||||||
|
"pk",
|
||||||
|
"provider",
|
||||||
|
"provider_obj",
|
||||||
|
"delivery_method",
|
||||||
|
"endpoint_url",
|
||||||
|
"events_requested",
|
||||||
|
"format",
|
||||||
|
"aud",
|
||||||
|
"iss",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class SSFStreamViewSet(ReadOnlyModelViewSet):
|
||||||
|
"""SSFStream Viewset"""
|
||||||
|
|
||||||
|
queryset = Stream.objects.all()
|
||||||
|
serializer_class = SSFStreamSerializer
|
||||||
|
filterset_fields = ["provider", "endpoint_url", "delivery_method"]
|
||||||
|
search_fields = ["provider__name", "endpoint_url"]
|
||||||
|
ordering = ["provider", "uuid"]
|
13
authentik/enterprise/providers/ssf/apps.py
Normal file
13
authentik/enterprise/providers/ssf/apps.py
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
"""SSF app config"""
|
||||||
|
|
||||||
|
from authentik.enterprise.apps import EnterpriseConfig
|
||||||
|
|
||||||
|
|
||||||
|
class AuthentikEnterpriseProviderSSF(EnterpriseConfig):
|
||||||
|
"""authentik enterprise ssf app config"""
|
||||||
|
|
||||||
|
name = "authentik.enterprise.providers.ssf"
|
||||||
|
label = "authentik_providers_ssf"
|
||||||
|
verbose_name = "authentik Enterprise.Providers.SSF"
|
||||||
|
default = True
|
||||||
|
mountpoint = ""
|
201
authentik/enterprise/providers/ssf/migrations/0001_initial.py
Normal file
201
authentik/enterprise/providers/ssf/migrations/0001_initial.py
Normal file
@ -0,0 +1,201 @@
|
|||||||
|
# Generated by Django 5.0.11 on 2025-02-05 16:20
|
||||||
|
|
||||||
|
import authentik.lib.utils.time
|
||||||
|
import django.contrib.postgres.fields
|
||||||
|
import django.db.models.deletion
|
||||||
|
import uuid
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
initial = True
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("authentik_core", "0042_authenticatedsession_authentik_c_expires_08251d_idx_and_more"),
|
||||||
|
("authentik_crypto", "0004_alter_certificatekeypair_name"),
|
||||||
|
("authentik_providers_oauth2", "0027_accesstoken_authentik_p_expires_9f24a5_idx_and_more"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="SSFProvider",
|
||||||
|
fields=[
|
||||||
|
(
|
||||||
|
"provider_ptr",
|
||||||
|
models.OneToOneField(
|
||||||
|
auto_created=True,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
parent_link=True,
|
||||||
|
primary_key=True,
|
||||||
|
serialize=False,
|
||||||
|
to="authentik_core.provider",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"event_retention",
|
||||||
|
models.TextField(
|
||||||
|
default="days=30",
|
||||||
|
validators=[authentik.lib.utils.time.timedelta_string_validator],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"oidc_auth_providers",
|
||||||
|
models.ManyToManyField(
|
||||||
|
blank=True, default=None, to="authentik_providers_oauth2.oauth2provider"
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"signing_key",
|
||||||
|
models.ForeignKey(
|
||||||
|
help_text="Key used to sign the SSF Events.",
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
to="authentik_crypto.certificatekeypair",
|
||||||
|
verbose_name="Signing Key",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"token",
|
||||||
|
models.ForeignKey(
|
||||||
|
default=None,
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
to="authentik_core.token",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"verbose_name": "Shared Signals Framework Provider",
|
||||||
|
"verbose_name_plural": "Shared Signals Framework Providers",
|
||||||
|
"permissions": [("add_stream", "Add stream to SSF provider")],
|
||||||
|
},
|
||||||
|
bases=("authentik_core.provider",),
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="Stream",
|
||||||
|
fields=[
|
||||||
|
(
|
||||||
|
"uuid",
|
||||||
|
models.UUIDField(
|
||||||
|
default=uuid.uuid4, editable=False, primary_key=True, serialize=False
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"delivery_method",
|
||||||
|
models.TextField(
|
||||||
|
choices=[
|
||||||
|
(
|
||||||
|
"https://schemas.openid.net/secevent/risc/delivery-method/push",
|
||||||
|
"Risc Push",
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"https://schemas.openid.net/secevent/risc/delivery-method/poll",
|
||||||
|
"Risc Poll",
|
||||||
|
),
|
||||||
|
]
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("endpoint_url", models.TextField(null=True)),
|
||||||
|
(
|
||||||
|
"events_requested",
|
||||||
|
django.contrib.postgres.fields.ArrayField(
|
||||||
|
base_field=models.TextField(
|
||||||
|
choices=[
|
||||||
|
(
|
||||||
|
"https://schemas.openid.net/secevent/caep/event-type/session-revoked",
|
||||||
|
"Caep Session Revoked",
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"https://schemas.openid.net/secevent/caep/event-type/credential-change",
|
||||||
|
"Caep Credential Change",
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"https://schemas.openid.net/secevent/ssf/event-type/verification",
|
||||||
|
"Set Verification",
|
||||||
|
),
|
||||||
|
]
|
||||||
|
),
|
||||||
|
default=list,
|
||||||
|
size=None,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("format", models.TextField()),
|
||||||
|
(
|
||||||
|
"aud",
|
||||||
|
django.contrib.postgres.fields.ArrayField(
|
||||||
|
base_field=models.TextField(), default=list, size=None
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("iss", models.TextField()),
|
||||||
|
(
|
||||||
|
"provider",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
to="authentik_providers_ssf.ssfprovider",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"verbose_name": "SSF Stream",
|
||||||
|
"verbose_name_plural": "SSF Streams",
|
||||||
|
"default_permissions": ["change", "delete", "view"],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="StreamEvent",
|
||||||
|
fields=[
|
||||||
|
("created", models.DateTimeField(auto_now_add=True)),
|
||||||
|
("last_updated", models.DateTimeField(auto_now=True)),
|
||||||
|
("expires", models.DateTimeField(default=None, null=True)),
|
||||||
|
("expiring", models.BooleanField(default=True)),
|
||||||
|
(
|
||||||
|
"uuid",
|
||||||
|
models.UUIDField(
|
||||||
|
default=uuid.uuid4, editable=False, primary_key=True, serialize=False
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"status",
|
||||||
|
models.TextField(
|
||||||
|
choices=[
|
||||||
|
("pending_new", "Pending New"),
|
||||||
|
("pending_failed", "Pending Failed"),
|
||||||
|
("sent", "Sent"),
|
||||||
|
]
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"type",
|
||||||
|
models.TextField(
|
||||||
|
choices=[
|
||||||
|
(
|
||||||
|
"https://schemas.openid.net/secevent/caep/event-type/session-revoked",
|
||||||
|
"Caep Session Revoked",
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"https://schemas.openid.net/secevent/caep/event-type/credential-change",
|
||||||
|
"Caep Credential Change",
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"https://schemas.openid.net/secevent/ssf/event-type/verification",
|
||||||
|
"Set Verification",
|
||||||
|
),
|
||||||
|
]
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("payload", models.JSONField(default=dict)),
|
||||||
|
(
|
||||||
|
"stream",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
to="authentik_providers_ssf.stream",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"verbose_name": "SSF Stream Event",
|
||||||
|
"verbose_name_plural": "SSF Stream Events",
|
||||||
|
"ordering": ("-created",),
|
||||||
|
},
|
||||||
|
),
|
||||||
|
]
|
178
authentik/enterprise/providers/ssf/models.py
Normal file
178
authentik/enterprise/providers/ssf/models.py
Normal file
@ -0,0 +1,178 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from functools import cached_property
|
||||||
|
from uuid import uuid4
|
||||||
|
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
|
||||||
|
from django.contrib.postgres.fields import ArrayField
|
||||||
|
from django.db import models
|
||||||
|
from django.templatetags.static import static
|
||||||
|
from django.utils.timezone import now
|
||||||
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
from jwt import encode
|
||||||
|
|
||||||
|
from authentik.core.models import BackchannelProvider, ExpiringModel, Token
|
||||||
|
from authentik.crypto.models import CertificateKeyPair
|
||||||
|
from authentik.lib.models import CreatedUpdatedModel
|
||||||
|
from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator
|
||||||
|
from authentik.providers.oauth2.models import JWTAlgorithms, OAuth2Provider
|
||||||
|
|
||||||
|
|
||||||
|
class EventTypes(models.TextChoices):
|
||||||
|
"""SSF Event types supported by authentik"""
|
||||||
|
|
||||||
|
CAEP_SESSION_REVOKED = "https://schemas.openid.net/secevent/caep/event-type/session-revoked"
|
||||||
|
CAEP_CREDENTIAL_CHANGE = "https://schemas.openid.net/secevent/caep/event-type/credential-change"
|
||||||
|
SET_VERIFICATION = "https://schemas.openid.net/secevent/ssf/event-type/verification"
|
||||||
|
|
||||||
|
|
||||||
|
class DeliveryMethods(models.TextChoices):
|
||||||
|
"""SSF Delivery methods"""
|
||||||
|
|
||||||
|
RISC_PUSH = "https://schemas.openid.net/secevent/risc/delivery-method/push"
|
||||||
|
RISC_POLL = "https://schemas.openid.net/secevent/risc/delivery-method/poll"
|
||||||
|
|
||||||
|
|
||||||
|
class SSFEventStatus(models.TextChoices):
|
||||||
|
"""SSF Event status"""
|
||||||
|
|
||||||
|
PENDING_NEW = "pending_new"
|
||||||
|
PENDING_FAILED = "pending_failed"
|
||||||
|
SENT = "sent"
|
||||||
|
|
||||||
|
|
||||||
|
class SSFProvider(BackchannelProvider):
|
||||||
|
"""Shared Signals Framework provider to allow applications to
|
||||||
|
receive user events from authentik."""
|
||||||
|
|
||||||
|
signing_key = models.ForeignKey(
|
||||||
|
CertificateKeyPair,
|
||||||
|
verbose_name=_("Signing Key"),
|
||||||
|
on_delete=models.CASCADE,
|
||||||
|
help_text=_("Key used to sign the SSF Events."),
|
||||||
|
)
|
||||||
|
|
||||||
|
oidc_auth_providers = models.ManyToManyField(OAuth2Provider, blank=True, default=None)
|
||||||
|
|
||||||
|
token = models.ForeignKey(Token, on_delete=models.CASCADE, null=True, default=None)
|
||||||
|
|
||||||
|
event_retention = models.TextField(
|
||||||
|
default="days=30",
|
||||||
|
validators=[timedelta_string_validator],
|
||||||
|
)
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def jwt_key(self) -> tuple[PrivateKeyTypes, str]:
|
||||||
|
"""Get either the configured certificate or the client secret"""
|
||||||
|
key: CertificateKeyPair = self.signing_key
|
||||||
|
private_key = key.private_key
|
||||||
|
if isinstance(private_key, RSAPrivateKey):
|
||||||
|
return private_key, JWTAlgorithms.RS256
|
||||||
|
if isinstance(private_key, EllipticCurvePrivateKey):
|
||||||
|
return private_key, JWTAlgorithms.ES256
|
||||||
|
raise ValueError(f"Invalid private key type: {type(private_key)}")
|
||||||
|
|
||||||
|
@property
|
||||||
|
def service_account_identifier(self) -> str:
|
||||||
|
return f"ak-providers-ssf-{self.pk}"
|
||||||
|
|
||||||
|
@property
|
||||||
|
def serializer(self):
|
||||||
|
from authentik.enterprise.providers.ssf.api.providers import SSFProviderSerializer
|
||||||
|
|
||||||
|
return SSFProviderSerializer
|
||||||
|
|
||||||
|
@property
|
||||||
|
def icon_url(self) -> str | None:
|
||||||
|
return static("authentik/sources/ssf.svg")
|
||||||
|
|
||||||
|
@property
|
||||||
|
def component(self) -> str:
|
||||||
|
return "ak-provider-ssf-form"
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
verbose_name = _("Shared Signals Framework Provider")
|
||||||
|
verbose_name_plural = _("Shared Signals Framework Providers")
|
||||||
|
permissions = [
|
||||||
|
# This overrides the default "add_stream" permission of the Stream object,
|
||||||
|
# as the user requesting to add a stream must have the permission on the provider
|
||||||
|
("add_stream", _("Add stream to SSF provider")),
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class Stream(models.Model):
    """SSF Stream

    A stream registered by a receiver against an SSF provider; events matching
    `events_requested` are delivered via `delivery_method`."""

    uuid = models.UUIDField(default=uuid4, primary_key=True, editable=False)
    provider = models.ForeignKey(SSFProvider, on_delete=models.CASCADE)

    delivery_method = models.TextField(choices=DeliveryMethods.choices)
    # Only used for push delivery; poll streams have no endpoint.
    endpoint_url = models.TextField(null=True)

    events_requested = ArrayField(models.TextField(choices=EventTypes.choices), default=list)
    format = models.TextField()
    aud = ArrayField(models.TextField(), default=list)

    iss = models.TextField()

    class Meta:
        verbose_name = _("SSF Stream")
        verbose_name_plural = _("SSF Streams")
        # No default "add" permission; stream creation is governed by the
        # custom "add_stream" permission on the provider instead.
        default_permissions = ["change", "delete", "view"]

    def __str__(self) -> str:
        return "SSF Stream"

    def prepare_event_payload(self, type: EventTypes, event_data: dict, **kwargs) -> dict:
        """Build the kwargs for a StreamEvent (including the SET/JWT payload)
        for an event of *type* with body *event_data*.

        Extra keyword arguments are merged into the JWT payload (e.g. sub_id, txn)."""
        jti = uuid4()
        _now = now()
        return {
            "uuid": jti,
            "stream_id": str(self.pk),
            "type": type,
            "expiring": True,
            "status": SSFEventStatus.PENDING_NEW,
            "expires": _now + timedelta_from_string(self.provider.event_retention),
            "payload": {
                "jti": jti.hex,
                "aud": self.aud,
                # Derive "iat" from the same tz-aware timestamp used for
                # "expires" above, instead of a second, naive clock read
                # via datetime.now() (which could differ slightly).
                "iat": int(_now.timestamp()),
                "iss": self.iss,
                "events": {type: event_data},
                **kwargs,
            },
        }

    def encode(self, data: dict) -> str:
        """Sign *data* as a JWT using the provider's configured signing key."""
        headers = {}
        if self.provider.signing_key:
            headers["kid"] = self.provider.signing_key.kid
        key, alg = self.provider.jwt_key
        return encode(data, key, algorithm=alg, headers=headers)
|
||||||
|
|
||||||
|
|
||||||
|
class StreamEvent(CreatedUpdatedModel, ExpiringModel):
    """Single stream event to be sent"""

    uuid = models.UUIDField(default=uuid4, primary_key=True, editable=False)

    # Event is removed together with its stream
    stream = models.ForeignKey(Stream, on_delete=models.CASCADE)
    status = models.TextField(choices=SSFEventStatus.choices)

    type = models.TextField(choices=EventTypes.choices)
    # Full SET (Security Event Token) payload as produced by
    # Stream.prepare_event_payload
    payload = models.JSONField(default=dict)

    def expire_action(self, *args, **kwargs):
        """Only allow automatic cleanup of successfully sent event"""
        # Pending/failed events are kept past expiry so delivery can be retried
        if self.status != SSFEventStatus.SENT:
            return
        return super().expire_action(*args, **kwargs)

    def __str__(self):
        return f"Stream event {self.type}"

    class Meta:
        verbose_name = _("SSF Stream Event")
        verbose_name_plural = _("SSF Stream Events")
        # Newest events first
        ordering = ("-created",)
|
193
authentik/enterprise/providers/ssf/signals.py
Normal file
193
authentik/enterprise/providers/ssf/signals.py
Normal file
@ -0,0 +1,193 @@
|
|||||||
|
from hashlib import sha256
|
||||||
|
|
||||||
|
from django.contrib.auth.signals import user_logged_out
|
||||||
|
from django.db.models import Model
|
||||||
|
from django.db.models.signals import post_delete, post_save, pre_delete
|
||||||
|
from django.dispatch import receiver
|
||||||
|
from django.http.request import HttpRequest
|
||||||
|
from guardian.shortcuts import assign_perm
|
||||||
|
|
||||||
|
from authentik.core.models import (
|
||||||
|
USER_PATH_SYSTEM_PREFIX,
|
||||||
|
AuthenticatedSession,
|
||||||
|
Token,
|
||||||
|
TokenIntents,
|
||||||
|
User,
|
||||||
|
UserTypes,
|
||||||
|
)
|
||||||
|
from authentik.core.signals import password_changed
|
||||||
|
from authentik.enterprise.providers.ssf.models import (
|
||||||
|
EventTypes,
|
||||||
|
SSFProvider,
|
||||||
|
)
|
||||||
|
from authentik.enterprise.providers.ssf.tasks import send_ssf_event
|
||||||
|
from authentik.events.middleware import audit_ignore
|
||||||
|
from authentik.stages.authenticator.models import Device
|
||||||
|
from authentik.stages.authenticator_duo.models import DuoDevice
|
||||||
|
from authentik.stages.authenticator_static.models import StaticDevice
|
||||||
|
from authentik.stages.authenticator_totp.models import TOTPDevice
|
||||||
|
from authentik.stages.authenticator_webauthn.models import (
|
||||||
|
UNKNOWN_DEVICE_TYPE_AAGUID,
|
||||||
|
WebAuthnDevice,
|
||||||
|
)
|
||||||
|
|
||||||
|
USER_PATH_PROVIDERS_SSF = USER_PATH_SYSTEM_PREFIX + "/providers/ssf"
|
||||||
|
|
||||||
|
|
||||||
|
@receiver(post_save, sender=SSFProvider)
def ssf_providers_post_save(sender: type[Model], instance: SSFProvider, created: bool, **_):
    """Create service account before provider is saved"""
    # Service account + non-expiring API token the SSF receiver uses to
    # authenticate against this provider's stream endpoints.
    identifier = instance.service_account_identifier
    user, _ = User.objects.update_or_create(
        username=identifier,
        defaults={
            "name": f"SSF Provider {instance.name} Service-Account",
            "type": UserTypes.INTERNAL_SERVICE_ACCOUNT,
            "path": USER_PATH_PROVIDERS_SSF,
        },
    )
    # Allow the service account to create streams on this provider only
    assign_perm("add_stream", user, instance)
    token, token_created = Token.objects.update_or_create(
        identifier=identifier,
        defaults={
            "user": user,
            "intent": TokenIntents.INTENT_API,
            "expiring": False,
            "managed": f"goauthentik.io/providers/ssf/{instance.pk}",
        },
    )
    if created or token_created:
        # Suppress audit events for this implicit, system-driven save
        with audit_ignore():
            instance.token = token
            instance.save()
|
||||||
|
|
||||||
|
|
||||||
|
@receiver(user_logged_out)
def ssf_user_logged_out_session_revoked(sender, request: HttpRequest, user: User, **_):
    """Session revoked trigger (user logged out)"""
    # Anonymous logouts or requests without a session carry nothing to revoke
    if not request.session or not request.session.session_key or not user:
        return
    send_ssf_event(
        EventTypes.CAEP_SESSION_REVOKED,
        {
            "initiating_entity": "user",
        },
        sub_id={
            "format": "complex",
            "session": {
                "format": "opaque",
                # Hash the session key so the raw identifier is never exposed
                "id": sha256(request.session.session_key.encode("ascii")).hexdigest(),
            },
            "user": {
                "format": "email",
                "email": user.email,
            },
        },
        request=request,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@receiver(pre_delete, sender=AuthenticatedSession)
def ssf_user_session_delete_session_revoked(sender, instance: AuthenticatedSession, **_):
    """Session revoked trigger (users' session has been deleted)

    As this signal is also triggered with a regular logout, we can't be sure
    if the session has been deleted by an admin or by the user themselves."""
    send_ssf_event(
        EventTypes.CAEP_SESSION_REVOKED,
        {
            "initiating_entity": "user",
        },
        sub_id={
            "format": "complex",
            "session": {
                "format": "opaque",
                # Hash the session key so the raw identifier is never exposed
                "id": sha256(instance.session_key.encode("ascii")).hexdigest(),
            },
            "user": {
                "format": "email",
                "email": instance.user.email,
            },
        },
    )
|
||||||
|
|
||||||
|
|
||||||
|
@receiver(password_changed)
def ssf_password_changed_cred_change(sender, user: User, password: str | None, **_):
    """Credential change trigger (password changed)"""
    send_ssf_event(
        EventTypes.CAEP_CREDENTIAL_CHANGE,
        {
            "credential_type": "password",
            # A None password means the credential was removed, not updated
            "change_type": "revoke" if password is None else "update",
        },
        sub_id={
            "format": "complex",
            "user": {
                "format": "email",
                "email": user.email,
            },
        },
    )
|
||||||
|
|
||||||
|
|
||||||
|
# Maps authenticator device classes to the CAEP "credential_type" value
# reported in credential-change events. Device classes not listed here
# produce a credential_type of None.
device_type_map = {
    StaticDevice: "pin",
    TOTPDevice: "pin",
    WebAuthnDevice: "fido-u2f",
    DuoDevice: "app",
}
|
||||||
|
|
||||||
|
|
||||||
|
@receiver(post_save)
def ssf_device_post_save(sender: type[Model], instance: Device, created: bool, **_):
    """Credential change trigger (authenticator device created/updated)"""
    # Registered without a sender filter, so skip all non-device models
    if not isinstance(instance, Device):
        return
    # Unconfirmed devices are still mid-enrollment
    if not instance.confirmed:
        return
    device_type = device_type_map.get(instance.__class__)
    data = {
        "credential_type": device_type,
        "change_type": "create" if created else "update",
        "friendly_name": instance.name,
    }
    # Only include the AAGUID when the authenticator model is actually known
    if isinstance(instance, WebAuthnDevice) and instance.aaguid != UNKNOWN_DEVICE_TYPE_AAGUID:
        data["fido2_aaguid"] = instance.aaguid
    send_ssf_event(
        EventTypes.CAEP_CREDENTIAL_CHANGE,
        data,
        sub_id={
            "format": "complex",
            "user": {
                "format": "email",
                "email": instance.user.email,
            },
        },
    )
|
||||||
|
|
||||||
|
|
||||||
|
@receiver(post_delete)
def ssf_device_post_delete(sender: type[Model], instance: Device, **_):
    """Credential change trigger (authenticator device deleted)"""
    # Registered without a sender filter, so skip all non-device models
    if not isinstance(instance, Device):
        return
    # Unconfirmed devices never counted as active credentials
    if not instance.confirmed:
        return
    device_type = device_type_map.get(instance.__class__)
    data = {
        "credential_type": device_type,
        "change_type": "delete",
        "friendly_name": instance.name,
    }
    # Only include the AAGUID when the authenticator model is actually known
    if isinstance(instance, WebAuthnDevice) and instance.aaguid != UNKNOWN_DEVICE_TYPE_AAGUID:
        data["fido2_aaguid"] = instance.aaguid
    send_ssf_event(
        EventTypes.CAEP_CREDENTIAL_CHANGE,
        data,
        sub_id={
            "format": "complex",
            "user": {
                "format": "email",
                "email": instance.user.email,
            },
        },
    )
|
136
authentik/enterprise/providers/ssf/tasks.py
Normal file
136
authentik/enterprise/providers/ssf/tasks.py
Normal file
@ -0,0 +1,136 @@
|
|||||||
|
from celery import group
|
||||||
|
from django.http import HttpRequest
|
||||||
|
from django.utils.timezone import now
|
||||||
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
from requests.exceptions import RequestException
|
||||||
|
from structlog.stdlib import get_logger
|
||||||
|
|
||||||
|
from authentik.core.models import User
|
||||||
|
from authentik.enterprise.providers.ssf.models import (
|
||||||
|
DeliveryMethods,
|
||||||
|
EventTypes,
|
||||||
|
SSFEventStatus,
|
||||||
|
Stream,
|
||||||
|
StreamEvent,
|
||||||
|
)
|
||||||
|
from authentik.events.logs import LogEvent
|
||||||
|
from authentik.events.models import TaskStatus
|
||||||
|
from authentik.events.system_tasks import SystemTask
|
||||||
|
from authentik.lib.utils.http import get_http_session
|
||||||
|
from authentik.lib.utils.time import timedelta_from_string
|
||||||
|
from authentik.policies.engine import PolicyEngine
|
||||||
|
from authentik.root.celery import CELERY_APP
|
||||||
|
|
||||||
|
session = get_http_session()
|
||||||
|
LOGGER = get_logger()
|
||||||
|
|
||||||
|
|
||||||
|
def send_ssf_event(
    event_type: EventTypes,
    data: dict,
    stream_filter: dict | None = None,
    request: HttpRequest | None = None,
    **extra_data,
):
    """Wrapper to send an SSF event to multiple streams.

    Prepares one payload per stream matching *stream_filter* (always narrowed
    to streams that requested *event_type*) and dispatches delivery to celery.
    Extra keyword arguments are merged into the SET payload.
    """
    payload = []
    # Copy the filter so the caller's dict is not mutated by the
    # events_requested constraint added below.
    stream_filter = dict(stream_filter) if stream_filter else {}
    stream_filter["events_requested__contains"] = [event_type]
    # Correlate the event with the originating HTTP request, if any
    if request and hasattr(request, "request_id"):
        extra_data.setdefault("txn", request.request_id)
    for stream in Stream.objects.filter(**stream_filter):
        event_data = stream.prepare_event_payload(event_type, data, **extra_data)
        payload.append((str(stream.uuid), event_data))
    return _send_ssf_event.delay(payload)
|
||||||
|
|
||||||
|
|
||||||
|
def _check_app_access(stream_uuid: str, event_data: dict) -> bool:
    """Check if event is related to user and if so, check
    if the user has access to the application"""
    stream = Stream.objects.filter(pk=stream_uuid).first()
    # Stream was deleted between dispatch and execution; drop the event
    if not stream:
        return False
    # `event_data` is a dict version of a StreamEvent
    sub_id = event_data.get("payload", {}).get("sub_id", {})
    email = sub_id.get("user", {}).get("email", None)
    # Events without a user subject (or an unknown user) are always delivered
    if not email:
        return True
    user = User.objects.filter(email=email).first()
    if not user:
        return True
    # Evaluate the policies bound to the backchannel application,
    # bypassing the policy cache
    engine = PolicyEngine(stream.provider.backchannel_application, user)
    engine.use_cache = False
    engine.build()
    return engine.passing
|
||||||
|
|
||||||
|
|
||||||
|
@CELERY_APP.task()
def _send_ssf_event(event_data: list[tuple[str, dict]]):
    """Persist prepared events and fan out their delivery tasks as one group.

    *event_data* is a list of (stream uuid, StreamEvent kwargs) tuples as
    produced by send_ssf_event.
    """
    tasks = []
    for stream, data in event_data:
        # Skip events whose subject user has no access to the application
        if not _check_app_access(stream, data):
            continue
        event = StreamEvent.objects.create(**data)
        # send_single_ssf_event may bail out early and yield nothing;
        # guard against a falsy/None result before extending (a bare
        # tasks.extend(None) would raise TypeError).
        signatures = send_single_ssf_event(stream, str(event.uuid))
        if signatures:
            tasks.extend(signatures)
    main_task = group(*tasks)
    main_task()
|
||||||
|
|
||||||
|
|
||||||
|
def send_single_ssf_event(stream_id: str, evt_id: str) -> list:
    """Return the celery signatures needed to deliver event *evt_id* on
    stream *stream_id*.

    Always returns a list; previously the early-exit paths returned None,
    which crashed callers doing `tasks.extend(...)` with a TypeError.
    """
    stream = Stream.objects.filter(pk=stream_id).first()
    if not stream:
        return []
    event = StreamEvent.objects.filter(pk=evt_id).first()
    if not event:
        return []
    # Already delivered; nothing to do
    if event.status == SSFEventStatus.SENT:
        return []
    if stream.delivery_method == DeliveryMethods.RISC_PUSH:
        return [ssf_push_event.si(str(event.pk))]
    # Other delivery methods (e.g. poll) are not dispatched from here
    return []
|
||||||
|
|
||||||
|
|
||||||
|
@CELERY_APP.task(bind=True, base=SystemTask)
def ssf_push_event(self: SystemTask, event_id: str):
    """Push a single StreamEvent to its stream's endpoint as a signed SET JWT.

    On failure the event is marked PENDING_FAILED and its expiry is renewed so
    it can be retried later.
    """
    self.save_on_success = False
    event = StreamEvent.objects.filter(pk=event_id).first()
    if not event:
        return
    self.set_uid(event_id)
    # Already delivered (e.g. by a concurrent/retried task)
    if event.status == SSFEventStatus.SENT:
        self.set_status(TaskStatus.SUCCESSFUL)
        return
    try:
        response = session.post(
            event.stream.endpoint_url,
            data=event.stream.encode(event.payload),
            headers={"Content-Type": "application/secevent+jwt", "Accept": "application/json"},
        )
        response.raise_for_status()
        event.status = SSFEventStatus.SENT
        event.save()
        self.set_status(TaskStatus.SUCCESSFUL)
        return
    except RequestException as exc:
        LOGGER.warning("Failed to send SSF event", exc=exc)
        self.set_status(TaskStatus.ERROR)
        attrs = {}
        # requests.Response is falsy for 4xx/5xx status codes, so a plain
        # `if exc.response:` skipped exactly the error responses we want to
        # record - compare against None instead.
        if exc.response is not None:
            attrs["response"] = {
                "content": exc.response.text,
                "status": exc.response.status_code,
            }
        self.set_error(
            exc,
            LogEvent(
                _("Failed to send request"),
                log_level="warning",
                logger=self.__name__,
                attributes=attrs,
            ),
        )
        # Re-up the expiry of the stream event
        event.expires = now() + timedelta_from_string(event.stream.provider.event_retention)
        event.status = SSFEventStatus.PENDING_FAILED
        event.save()
|
46
authentik/enterprise/providers/ssf/tests/test_config.py
Normal file
46
authentik/enterprise/providers/ssf/tests/test_config.py
Normal file
@ -0,0 +1,46 @@
|
|||||||
|
import json
|
||||||
|
|
||||||
|
from django.urls import reverse
|
||||||
|
from rest_framework.test import APITestCase
|
||||||
|
|
||||||
|
from authentik.core.models import Application
|
||||||
|
from authentik.core.tests.utils import create_test_cert
|
||||||
|
from authentik.enterprise.providers.ssf.models import (
|
||||||
|
SSFProvider,
|
||||||
|
)
|
||||||
|
from authentik.lib.generators import generate_id
|
||||||
|
|
||||||
|
|
||||||
|
class TestConfiguration(APITestCase):
    """Test the SSF transmitter metadata (well-known configuration) endpoint."""

    def setUp(self):
        self.application = Application.objects.create(name=generate_id(), slug=generate_id())
        self.provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
            backchannel_application=self.application,
        )

    def test_config_fetch(self):
        """test SSF configuration (unauthenticated)"""
        res = self.client.get(
            reverse(
                "authentik_providers_ssf:configuration",
                kwargs={"application_slug": self.application.slug},
            ),
        )
        self.assertEqual(res.status_code, 200)
        content = json.loads(res.content)
        self.assertEqual(content["spec_version"], "1_0-ID2")

    def test_config_fetch_authenticated(self):
        """test SSF configuration (authenticated)"""
        res = self.client.get(
            reverse(
                "authentik_providers_ssf:configuration",
                kwargs={"application_slug": self.application.slug},
            ),
            # Authenticate with the provider's auto-created service account token
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        self.assertEqual(res.status_code, 200)
        content = json.loads(res.content)
        self.assertEqual(content["spec_version"], "1_0-ID2")
|
51
authentik/enterprise/providers/ssf/tests/test_jwks.py
Normal file
51
authentik/enterprise/providers/ssf/tests/test_jwks.py
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
"""JWKS tests"""
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import json
|
||||||
|
|
||||||
|
from cryptography.hazmat.backends import default_backend
|
||||||
|
from cryptography.x509 import load_der_x509_certificate
|
||||||
|
from django.test import TestCase
|
||||||
|
from django.urls.base import reverse
|
||||||
|
from jwt import PyJWKSet
|
||||||
|
|
||||||
|
from authentik.core.models import Application
|
||||||
|
from authentik.core.tests.utils import create_test_cert
|
||||||
|
from authentik.enterprise.providers.ssf.models import SSFProvider
|
||||||
|
from authentik.lib.generators import generate_id
|
||||||
|
|
||||||
|
|
||||||
|
class TestJWKS(TestCase):
    """Test JWKS view"""

    def test_rs256(self):
        """Test JWKS request with RS256"""
        provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
        )
        app = Application.objects.create(name=generate_id(), slug=generate_id())
        app.backchannel_providers.add(provider)
        response = self.client.get(
            reverse("authentik_providers_ssf:jwks", kwargs={"application_slug": app.slug})
        )
        body = json.loads(response.content.decode())
        self.assertEqual(len(body["keys"]), 1)
        # Must parse as a valid JWK set
        PyJWKSet.from_dict(body)
        # The embedded x5c certificate must be a valid DER certificate
        key = body["keys"][0]
        load_der_x509_certificate(base64.b64decode(key["x5c"][0]), default_backend()).public_key()

    def test_es256(self):
        """Test JWKS request with ES256"""
        # NOTE(review): uses the same create_test_cert() as the RS256 test;
        # presumably that helper generates an EC key by default here - confirm.
        provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
        )
        app = Application.objects.create(name=generate_id(), slug=generate_id())
        app.backchannel_providers.add(provider)
        response = self.client.get(
            reverse("authentik_providers_ssf:jwks", kwargs={"application_slug": app.slug})
        )
        body = json.loads(response.content.decode())
        self.assertEqual(len(body["keys"]), 1)
        # Must parse as a valid JWK set
        PyJWKSet.from_dict(body)
|
168
authentik/enterprise/providers/ssf/tests/test_signals.py
Normal file
168
authentik/enterprise/providers/ssf/tests/test_signals.py
Normal file
@ -0,0 +1,168 @@
|
|||||||
|
from uuid import uuid4
|
||||||
|
|
||||||
|
from django.urls import reverse
|
||||||
|
from rest_framework.test import APITestCase
|
||||||
|
|
||||||
|
from authentik.core.models import Application, Group
|
||||||
|
from authentik.core.tests.utils import (
|
||||||
|
create_test_cert,
|
||||||
|
create_test_user,
|
||||||
|
)
|
||||||
|
from authentik.enterprise.providers.ssf.models import (
|
||||||
|
EventTypes,
|
||||||
|
SSFEventStatus,
|
||||||
|
SSFProvider,
|
||||||
|
Stream,
|
||||||
|
StreamEvent,
|
||||||
|
)
|
||||||
|
from authentik.lib.generators import generate_id
|
||||||
|
from authentik.policies.models import PolicyBinding
|
||||||
|
from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
|
||||||
|
|
||||||
|
|
||||||
|
class TestSignals(APITestCase):
    """Test individual SSF Signals"""

    def setUp(self):
        # Provider with a backchannel application and one push stream
        # subscribed to credential-change and session-revoked events.
        self.application = Application.objects.create(name=generate_id(), slug=generate_id())
        self.provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
            backchannel_application=self.application,
        )
        res = self.client.post(
            reverse(
                "authentik_providers_ssf:stream",
                kwargs={"application_slug": self.application.slug},
            ),
            data={
                "iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
                "aud": ["https://app.authentik.company"],
                "delivery": {
                    "method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
                    "endpoint_url": "https://app.authentik.company",
                },
                "events_requested": [
                    "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                    "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                ],
                "format": "iss_sub",
            },
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        self.assertEqual(res.status_code, 201, res.content)

    def test_signal_logout(self):
        """Test user logout"""
        user = create_test_user()
        self.client.force_login(user)
        self.client.logout()

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        # Push to the dummy endpoint fails, so the event stays PENDING_FAILED
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        event_payload = event.payload["events"][
            "https://schemas.openid.net/secevent/caep/event-type/session-revoked"
        ]
        self.assertEqual(event_payload["initiating_entity"], "user")
        self.assertEqual(event.payload["sub_id"]["format"], "complex")
        self.assertEqual(event.payload["sub_id"]["session"]["format"], "opaque")
        self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
        self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)

    def test_signal_password_change(self):
        """Test user password change"""
        user = create_test_user()
        self.client.force_login(user)
        user.set_password(generate_id())
        user.save()

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        event_payload = event.payload["events"][
            "https://schemas.openid.net/secevent/caep/event-type/credential-change"
        ]
        self.assertEqual(event_payload["change_type"], "update")
        self.assertEqual(event_payload["credential_type"], "password")
        self.assertEqual(event.payload["sub_id"]["format"], "complex")
        self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
        self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)

    def test_signal_authenticator_added(self):
        """Test authenticator creation signal"""
        user = create_test_user()
        self.client.force_login(user)
        dev = WebAuthnDevice.objects.create(
            user=user,
            name=generate_id(),
            credential_id=generate_id(),
            public_key=generate_id(),
            aaguid=str(uuid4()),
        )

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        # NOTE(review): .exclude() with no arguments is a no-op - confirm
        # whether a filter was intended here.
        event = StreamEvent.objects.filter(stream=stream).exclude().first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        event_payload = event.payload["events"][
            "https://schemas.openid.net/secevent/caep/event-type/credential-change"
        ]
        self.assertEqual(event_payload["change_type"], "create")
        self.assertEqual(event_payload["fido2_aaguid"], dev.aaguid)
        self.assertEqual(event_payload["friendly_name"], dev.name)
        self.assertEqual(event_payload["credential_type"], "fido-u2f")
        self.assertEqual(event.payload["sub_id"]["format"], "complex")
        self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
        self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)

    def test_signal_authenticator_deleted(self):
        """Test authenticator deletion signal"""
        user = create_test_user()
        self.client.force_login(user)
        dev = WebAuthnDevice.objects.create(
            user=user,
            name=generate_id(),
            credential_id=generate_id(),
            public_key=generate_id(),
            aaguid=str(uuid4()),
        )
        dev.delete()

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        # NOTE(review): .exclude() with no arguments is a no-op - confirm
        # whether a filter was intended here.
        event = StreamEvent.objects.filter(stream=stream).exclude().first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        event_payload = event.payload["events"][
            "https://schemas.openid.net/secevent/caep/event-type/credential-change"
        ]
        self.assertEqual(event_payload["change_type"], "delete")
        self.assertEqual(event_payload["fido2_aaguid"], dev.aaguid)
        self.assertEqual(event_payload["friendly_name"], dev.name)
        self.assertEqual(event_payload["credential_type"], "fido-u2f")
        self.assertEqual(event.payload["sub_id"]["format"], "complex")
        self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
        self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)

    def test_signal_policy_ignore(self):
        """Test event not being created for user that doesn't have access to the application"""
        # Bind an empty group to the application so the test user fails policies
        PolicyBinding.objects.create(
            target=self.application, group=Group.objects.create(name=generate_id()), order=0
        )
        user = create_test_user()
        self.client.force_login(user)
        user.set_password(generate_id())
        user.save()

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(
            stream=stream, type=EventTypes.CAEP_CREDENTIAL_CHANGE
        ).first()
        self.assertIsNone(event)
|
154
authentik/enterprise/providers/ssf/tests/test_stream.py
Normal file
154
authentik/enterprise/providers/ssf/tests/test_stream.py
Normal file
@ -0,0 +1,154 @@
|
|||||||
|
import json
|
||||||
|
from dataclasses import asdict
|
||||||
|
|
||||||
|
from django.urls import reverse
|
||||||
|
from django.utils import timezone
|
||||||
|
from rest_framework.test import APITestCase
|
||||||
|
|
||||||
|
from authentik.core.models import Application
|
||||||
|
from authentik.core.tests.utils import create_test_admin_user, create_test_cert, create_test_flow
|
||||||
|
from authentik.enterprise.providers.ssf.models import (
|
||||||
|
SSFEventStatus,
|
||||||
|
SSFProvider,
|
||||||
|
Stream,
|
||||||
|
StreamEvent,
|
||||||
|
)
|
||||||
|
from authentik.lib.generators import generate_id
|
||||||
|
from authentik.providers.oauth2.id_token import IDToken
|
||||||
|
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider
|
||||||
|
|
||||||
|
|
||||||
|
class TestStream(APITestCase):
    """Test SSF stream registration and deletion."""

    def setUp(self):
        self.application = Application.objects.create(name=generate_id(), slug=generate_id())
        self.provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
            backchannel_application=self.application,
        )

    def test_stream_add_token(self):
        """test stream add (token auth)"""
        res = self.client.post(
            reverse(
                "authentik_providers_ssf:stream",
                kwargs={"application_slug": self.application.slug},
            ),
            data={
                "iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
                "aud": ["https://app.authentik.company"],
                "delivery": {
                    "method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
                    "endpoint_url": "https://app.authentik.company",
                },
                "events_requested": [
                    "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                    "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                ],
                "format": "iss_sub",
            },
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        self.assertEqual(res.status_code, 201)
        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        # Creating a stream sends an initial verification event; delivery to
        # the dummy endpoint fails, leaving it PENDING_FAILED
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        self.assertEqual(
            event.payload["events"],
            {"https://schemas.openid.net/secevent/ssf/event-type/verification": {"state": None}},
        )

    def test_stream_add_poll(self):
        """test stream add - poll method"""
        res = self.client.post(
            reverse(
                "authentik_providers_ssf:stream",
                kwargs={"application_slug": self.application.slug},
            ),
            data={
                "iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
                "aud": ["https://app.authentik.company"],
                "delivery": {
                    "method": "https://schemas.openid.net/secevent/risc/delivery-method/poll",
                },
                "events_requested": [
                    "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                    "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                ],
                "format": "iss_sub",
            },
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        # Poll delivery is rejected as unsupported
        self.assertEqual(res.status_code, 400)
        self.assertJSONEqual(
            res.content,
            {"delivery": {"method": ["Polling for SSF events is not currently supported."]}},
        )

    def test_stream_add_oidc(self):
        """test stream add (oidc auth)"""
        # Authenticate with an OAuth2 access token for the application's
        # (frontchannel) provider instead of the service-account token
        provider = OAuth2Provider.objects.create(
            name=generate_id(),
            authorization_flow=create_test_flow(),
        )
        self.application.provider = provider
        self.application.save()
        user = create_test_admin_user()
        token = AccessToken.objects.create(
            provider=provider,
            user=user,
            token=generate_id(),
            auth_time=timezone.now(),
            _scope="openid user profile",
            _id_token=json.dumps(
                asdict(
                    IDToken("foo", "bar"),
                )
            ),
        )

        res = self.client.post(
            reverse(
                "authentik_providers_ssf:stream",
                kwargs={"application_slug": self.application.slug},
            ),
            data={
                "iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
                "aud": ["https://app.authentik.company"],
                "delivery": {
                    "method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
                    "endpoint_url": "https://app.authentik.company",
                },
                "events_requested": [
                    "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                    "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                ],
                "format": "iss_sub",
            },
            HTTP_AUTHORIZATION=f"Bearer {token.token}",
        )
        self.assertEqual(res.status_code, 201)
        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        self.assertEqual(
            event.payload["events"],
            {"https://schemas.openid.net/secevent/ssf/event-type/verification": {"state": None}},
        )

    def test_stream_delete(self):
        """delete stream"""
        stream = Stream.objects.create(provider=self.provider)
        res = self.client.delete(
            reverse(
                "authentik_providers_ssf:stream",
                kwargs={"application_slug": self.application.slug},
            ),
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        self.assertEqual(res.status_code, 204)
        self.assertFalse(Stream.objects.filter(pk=stream.pk).exists())
|
32
authentik/enterprise/providers/ssf/urls.py
Normal file
32
authentik/enterprise/providers/ssf/urls.py
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
"""SSF provider URLs"""
|
||||||
|
|
||||||
|
from django.urls import path
|
||||||
|
|
||||||
|
from authentik.enterprise.providers.ssf.api.providers import SSFProviderViewSet
|
||||||
|
from authentik.enterprise.providers.ssf.api.streams import SSFStreamViewSet
|
||||||
|
from authentik.enterprise.providers.ssf.views.configuration import ConfigurationView
|
||||||
|
from authentik.enterprise.providers.ssf.views.jwks import JWKSview
|
||||||
|
from authentik.enterprise.providers.ssf.views.stream import StreamView
|
||||||
|
|
||||||
|
urlpatterns = [
|
||||||
|
path(
|
||||||
|
"application/ssf/<slug:application_slug>/ssf-jwks/",
|
||||||
|
JWKSview.as_view(),
|
||||||
|
name="jwks",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
".well-known/ssf-configuration/<slug:application_slug>",
|
||||||
|
ConfigurationView.as_view(),
|
||||||
|
name="configuration",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"application/ssf/<slug:application_slug>/stream/",
|
||||||
|
StreamView.as_view(),
|
||||||
|
name="stream",
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
api_urlpatterns = [
|
||||||
|
("providers/ssf", SSFProviderViewSet),
|
||||||
|
("ssf/streams", SSFStreamViewSet),
|
||||||
|
]
|
66
authentik/enterprise/providers/ssf/views/auth.py
Normal file
66
authentik/enterprise/providers/ssf/views/auth.py
Normal file
@ -0,0 +1,66 @@
|
|||||||
|
"""SSF Token auth"""
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING, Any
|
||||||
|
|
||||||
|
from django.db.models import Q
|
||||||
|
from rest_framework.authentication import BaseAuthentication, get_authorization_header
|
||||||
|
from rest_framework.request import Request
|
||||||
|
|
||||||
|
from authentik.core.models import Token, TokenIntents, User
|
||||||
|
from authentik.enterprise.providers.ssf.models import SSFProvider
|
||||||
|
from authentik.providers.oauth2.models import AccessToken
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from authentik.enterprise.providers.ssf.views.base import SSFView
|
||||||
|
|
||||||
|
|
||||||
|
class SSFTokenAuth(BaseAuthentication):
|
||||||
|
"""SSF Token auth"""
|
||||||
|
|
||||||
|
view: "SSFView"
|
||||||
|
|
||||||
|
def __init__(self, view: "SSFView") -> None:
|
||||||
|
super().__init__()
|
||||||
|
self.view = view
|
||||||
|
|
||||||
|
def check_token(self, key: str) -> Token | None:
|
||||||
|
"""Check that a token exists, is not expired, and is assigned to the correct provider"""
|
||||||
|
token = Token.filter_not_expired(key=key, intent=TokenIntents.INTENT_API).first()
|
||||||
|
if not token:
|
||||||
|
return None
|
||||||
|
provider: SSFProvider = token.ssfprovider_set.first()
|
||||||
|
if not provider:
|
||||||
|
return None
|
||||||
|
self.view.application = provider.backchannel_application
|
||||||
|
self.view.provider = provider
|
||||||
|
return token
|
||||||
|
|
||||||
|
def check_jwt(self, jwt: str) -> AccessToken | None:
|
||||||
|
"""Check JWT-based authentication, this supports tokens issued either by providers
|
||||||
|
configured directly in the provider, and by providers assigned to the application
|
||||||
|
that the SSF provider is a backchannel provider of."""
|
||||||
|
token = AccessToken.filter_not_expired(token=jwt, revoked=False).first()
|
||||||
|
if not token:
|
||||||
|
return None
|
||||||
|
ssf_provider = SSFProvider.objects.filter(
|
||||||
|
Q(oidc_auth_providers__in=[token.provider])
|
||||||
|
| Q(backchannel_application__provider__in=[token.provider]),
|
||||||
|
).first()
|
||||||
|
if not ssf_provider:
|
||||||
|
return None
|
||||||
|
self.view.application = ssf_provider.backchannel_application
|
||||||
|
self.view.provider = ssf_provider
|
||||||
|
return token
|
||||||
|
|
||||||
|
def authenticate(self, request: Request) -> tuple[User, Any] | None:
|
||||||
|
auth = get_authorization_header(request).decode()
|
||||||
|
auth_type, _, key = auth.partition(" ")
|
||||||
|
if auth_type != "Bearer":
|
||||||
|
return None
|
||||||
|
token = self.check_token(key)
|
||||||
|
if token:
|
||||||
|
return (token.user, token)
|
||||||
|
jwt_token = self.check_jwt(key)
|
||||||
|
if jwt_token:
|
||||||
|
return (jwt_token.user, token)
|
||||||
|
return None
|
23
authentik/enterprise/providers/ssf/views/base.py
Normal file
23
authentik/enterprise/providers/ssf/views/base.py
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
from django.http import HttpRequest
|
||||||
|
from rest_framework.permissions import IsAuthenticated
|
||||||
|
from rest_framework.views import APIView
|
||||||
|
from structlog.stdlib import BoundLogger, get_logger
|
||||||
|
|
||||||
|
from authentik.core.models import Application
|
||||||
|
from authentik.enterprise.providers.ssf.models import SSFProvider
|
||||||
|
from authentik.enterprise.providers.ssf.views.auth import SSFTokenAuth
|
||||||
|
|
||||||
|
|
||||||
|
class SSFView(APIView):
|
||||||
|
application: Application
|
||||||
|
provider: SSFProvider
|
||||||
|
logger: BoundLogger
|
||||||
|
|
||||||
|
permission_classes = [IsAuthenticated]
|
||||||
|
|
||||||
|
def setup(self, request: HttpRequest, *args, **kwargs) -> None:
|
||||||
|
self.logger = get_logger().bind()
|
||||||
|
super().setup(request, *args, **kwargs)
|
||||||
|
|
||||||
|
def get_authenticators(self):
|
||||||
|
return [SSFTokenAuth(self)]
|
55
authentik/enterprise/providers/ssf/views/configuration.py
Normal file
55
authentik/enterprise/providers/ssf/views/configuration.py
Normal file
@ -0,0 +1,55 @@
|
|||||||
|
from django.http import Http404, HttpRequest, HttpResponse, JsonResponse
|
||||||
|
from django.shortcuts import get_object_or_404
|
||||||
|
from django.urls import reverse
|
||||||
|
from rest_framework.permissions import AllowAny
|
||||||
|
|
||||||
|
from authentik.core.models import Application
|
||||||
|
from authentik.enterprise.providers.ssf.models import DeliveryMethods, SSFProvider
|
||||||
|
from authentik.enterprise.providers.ssf.views.base import SSFView
|
||||||
|
|
||||||
|
|
||||||
|
class ConfigurationView(SSFView):
|
||||||
|
"""SSF configuration endpoint"""
|
||||||
|
|
||||||
|
permission_classes = [AllowAny]
|
||||||
|
|
||||||
|
def get_authenticators(self):
|
||||||
|
return []
|
||||||
|
|
||||||
|
def get(self, request: HttpRequest, application_slug: str, *args, **kwargs) -> HttpResponse:
|
||||||
|
application = get_object_or_404(Application, slug=application_slug)
|
||||||
|
provider = application.backchannel_provider_for(SSFProvider)
|
||||||
|
if not provider:
|
||||||
|
raise Http404
|
||||||
|
data = {
|
||||||
|
"spec_version": "1_0-ID2",
|
||||||
|
"issuer": self.request.build_absolute_uri(
|
||||||
|
reverse(
|
||||||
|
"authentik_providers_ssf:configuration",
|
||||||
|
kwargs={
|
||||||
|
"application_slug": application.slug,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
),
|
||||||
|
"jwks_uri": self.request.build_absolute_uri(
|
||||||
|
reverse(
|
||||||
|
"authentik_providers_ssf:jwks",
|
||||||
|
kwargs={
|
||||||
|
"application_slug": application.slug,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
),
|
||||||
|
"configuration_endpoint": self.request.build_absolute_uri(
|
||||||
|
reverse(
|
||||||
|
"authentik_providers_ssf:stream",
|
||||||
|
kwargs={
|
||||||
|
"application_slug": application.slug,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
),
|
||||||
|
"delivery_methods_supported": [
|
||||||
|
DeliveryMethods.RISC_PUSH,
|
||||||
|
],
|
||||||
|
"authorization_schemes": [{"spec_urn": "urn:ietf:rfc:6749"}],
|
||||||
|
}
|
||||||
|
return JsonResponse(data)
|
31
authentik/enterprise/providers/ssf/views/jwks.py
Normal file
31
authentik/enterprise/providers/ssf/views/jwks.py
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
from django.http import Http404, HttpRequest, HttpResponse, JsonResponse
|
||||||
|
from django.shortcuts import get_object_or_404
|
||||||
|
from django.views import View
|
||||||
|
|
||||||
|
from authentik.core.models import Application
|
||||||
|
from authentik.crypto.models import CertificateKeyPair
|
||||||
|
from authentik.enterprise.providers.ssf.models import SSFProvider
|
||||||
|
from authentik.providers.oauth2.views.jwks import JWKSView as OAuthJWKSView
|
||||||
|
|
||||||
|
|
||||||
|
class JWKSview(View):
|
||||||
|
"""SSF JWKS endpoint, similar to the OAuth2 provider's endpoint"""
|
||||||
|
|
||||||
|
def get(self, request: HttpRequest, application_slug: str) -> HttpResponse:
|
||||||
|
"""Show JWK Key data for Provider"""
|
||||||
|
application = get_object_or_404(Application, slug=application_slug)
|
||||||
|
provider = application.backchannel_provider_for(SSFProvider)
|
||||||
|
if not provider:
|
||||||
|
raise Http404
|
||||||
|
signing_key: CertificateKeyPair = provider.signing_key
|
||||||
|
|
||||||
|
response_data = {}
|
||||||
|
|
||||||
|
jwk = OAuthJWKSView.get_jwk_for_key(signing_key, "sig")
|
||||||
|
if jwk:
|
||||||
|
response_data["keys"] = [jwk]
|
||||||
|
|
||||||
|
response = JsonResponse(response_data)
|
||||||
|
response["Access-Control-Allow-Origin"] = "*"
|
||||||
|
|
||||||
|
return response
|
130
authentik/enterprise/providers/ssf/views/stream.py
Normal file
130
authentik/enterprise/providers/ssf/views/stream.py
Normal file
@ -0,0 +1,130 @@
|
|||||||
|
from django.http import HttpRequest
|
||||||
|
from django.urls import reverse
|
||||||
|
from rest_framework.exceptions import PermissionDenied, ValidationError
|
||||||
|
from rest_framework.fields import CharField, ChoiceField, ListField, SerializerMethodField
|
||||||
|
from rest_framework.request import Request
|
||||||
|
from rest_framework.response import Response
|
||||||
|
from rest_framework.serializers import ModelSerializer
|
||||||
|
from structlog.stdlib import get_logger
|
||||||
|
|
||||||
|
from authentik.core.api.utils import PassiveSerializer
|
||||||
|
from authentik.enterprise.providers.ssf.models import (
|
||||||
|
DeliveryMethods,
|
||||||
|
EventTypes,
|
||||||
|
SSFProvider,
|
||||||
|
Stream,
|
||||||
|
)
|
||||||
|
from authentik.enterprise.providers.ssf.tasks import send_ssf_event
|
||||||
|
from authentik.enterprise.providers.ssf.views.base import SSFView
|
||||||
|
|
||||||
|
LOGGER = get_logger()
|
||||||
|
|
||||||
|
|
||||||
|
class StreamDeliverySerializer(PassiveSerializer):
|
||||||
|
method = ChoiceField(choices=[(x.value, x.value) for x in DeliveryMethods])
|
||||||
|
endpoint_url = CharField(required=False)
|
||||||
|
|
||||||
|
def validate_method(self, method: DeliveryMethods):
|
||||||
|
"""Currently only push is supported"""
|
||||||
|
if method == DeliveryMethods.RISC_POLL:
|
||||||
|
raise ValidationError("Polling for SSF events is not currently supported.")
|
||||||
|
return method
|
||||||
|
|
||||||
|
def validate(self, attrs: dict) -> dict:
|
||||||
|
if attrs["method"] == DeliveryMethods.RISC_PUSH:
|
||||||
|
if not attrs.get("endpoint_url"):
|
||||||
|
raise ValidationError("Endpoint URL is required when using push.")
|
||||||
|
return attrs
|
||||||
|
|
||||||
|
|
||||||
|
class StreamSerializer(ModelSerializer):
|
||||||
|
delivery = StreamDeliverySerializer()
|
||||||
|
events_requested = ListField(
|
||||||
|
child=ChoiceField(choices=[(x.value, x.value) for x in EventTypes])
|
||||||
|
)
|
||||||
|
format = CharField()
|
||||||
|
aud = ListField(child=CharField())
|
||||||
|
|
||||||
|
def create(self, validated_data):
|
||||||
|
provider: SSFProvider = validated_data["provider"]
|
||||||
|
request: HttpRequest = self.context["request"]
|
||||||
|
iss = request.build_absolute_uri(
|
||||||
|
reverse(
|
||||||
|
"authentik_providers_ssf:configuration",
|
||||||
|
kwargs={
|
||||||
|
"application_slug": provider.backchannel_application.slug,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
)
|
||||||
|
# Ensure that streams always get SET verification events sent to them
|
||||||
|
validated_data["events_requested"].append(EventTypes.SET_VERIFICATION)
|
||||||
|
return super().create(
|
||||||
|
{
|
||||||
|
"delivery_method": validated_data["delivery"]["method"],
|
||||||
|
"endpoint_url": validated_data["delivery"].get("endpoint_url"),
|
||||||
|
"format": validated_data["format"],
|
||||||
|
"provider": validated_data["provider"],
|
||||||
|
"events_requested": validated_data["events_requested"],
|
||||||
|
"aud": validated_data["aud"],
|
||||||
|
"iss": iss,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Stream
|
||||||
|
fields = [
|
||||||
|
"delivery",
|
||||||
|
"events_requested",
|
||||||
|
"format",
|
||||||
|
"aud",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class StreamResponseSerializer(PassiveSerializer):
|
||||||
|
stream_id = CharField(source="pk")
|
||||||
|
iss = CharField()
|
||||||
|
aud = ListField(child=CharField())
|
||||||
|
delivery = SerializerMethodField()
|
||||||
|
format = CharField()
|
||||||
|
|
||||||
|
events_requested = ListField(child=CharField())
|
||||||
|
events_supported = SerializerMethodField()
|
||||||
|
events_delivered = ListField(child=CharField(), source="events_requested")
|
||||||
|
|
||||||
|
def get_delivery(self, instance: Stream) -> StreamDeliverySerializer:
|
||||||
|
return {
|
||||||
|
"method": instance.delivery_method,
|
||||||
|
"endpoint_url": instance.endpoint_url,
|
||||||
|
}
|
||||||
|
|
||||||
|
def get_events_supported(self, instance: Stream) -> list[str]:
|
||||||
|
return [x.value for x in EventTypes]
|
||||||
|
|
||||||
|
|
||||||
|
class StreamView(SSFView):
|
||||||
|
def post(self, request: Request, *args, **kwargs) -> Response:
|
||||||
|
stream = StreamSerializer(data=request.data, context={"request": request})
|
||||||
|
stream.is_valid(raise_exception=True)
|
||||||
|
if not request.user.has_perm("authentik_providers_ssf.add_stream", self.provider):
|
||||||
|
raise PermissionDenied(
|
||||||
|
"User does not have permission to create stream for this provider."
|
||||||
|
)
|
||||||
|
instance: Stream = stream.save(provider=self.provider)
|
||||||
|
send_ssf_event(
|
||||||
|
EventTypes.SET_VERIFICATION,
|
||||||
|
{
|
||||||
|
"state": None,
|
||||||
|
},
|
||||||
|
stream_filter={"pk": instance.uuid},
|
||||||
|
sub_id={"format": "opaque", "id": str(instance.uuid)},
|
||||||
|
)
|
||||||
|
response = StreamResponseSerializer(instance=instance, context={"request": request}).data
|
||||||
|
return Response(response, status=201)
|
||||||
|
|
||||||
|
def delete(self, request: Request, *args, **kwargs) -> Response:
|
||||||
|
streams = Stream.objects.filter(provider=self.provider)
|
||||||
|
# Technically this parameter is required by the spec...
|
||||||
|
if "stream_id" in request.query_params:
|
||||||
|
streams = streams.filter(stream_id=request.query_params["stream_id"])
|
||||||
|
streams.delete()
|
||||||
|
return Response(status=204)
|
@ -16,7 +16,7 @@ TENANT_APPS = [
|
|||||||
"authentik.enterprise.audit",
|
"authentik.enterprise.audit",
|
||||||
"authentik.enterprise.providers.google_workspace",
|
"authentik.enterprise.providers.google_workspace",
|
||||||
"authentik.enterprise.providers.microsoft_entra",
|
"authentik.enterprise.providers.microsoft_entra",
|
||||||
"authentik.enterprise.providers.rac",
|
"authentik.enterprise.providers.ssf",
|
||||||
"authentik.enterprise.stages.authenticator_endpoint_gdtc",
|
"authentik.enterprise.stages.authenticator_endpoint_gdtc",
|
||||||
"authentik.enterprise.stages.source",
|
"authentik.enterprise.stages.source",
|
||||||
]
|
]
|
||||||
|
@ -9,13 +9,16 @@ from django.utils.timezone import now
|
|||||||
from guardian.shortcuts import get_anonymous_user
|
from guardian.shortcuts import get_anonymous_user
|
||||||
|
|
||||||
from authentik.core.models import Source, User
|
from authentik.core.models import Source, User
|
||||||
from authentik.core.sources.flow_manager import SESSION_KEY_OVERRIDE_FLOW_TOKEN
|
from authentik.core.sources.flow_manager import (
|
||||||
|
SESSION_KEY_OVERRIDE_FLOW_TOKEN,
|
||||||
|
SESSION_KEY_SOURCE_FLOW_STAGES,
|
||||||
|
)
|
||||||
from authentik.core.types import UILoginButton
|
from authentik.core.types import UILoginButton
|
||||||
from authentik.enterprise.stages.source.models import SourceStage
|
from authentik.enterprise.stages.source.models import SourceStage
|
||||||
from authentik.flows.challenge import Challenge, ChallengeResponse
|
from authentik.flows.challenge import Challenge, ChallengeResponse
|
||||||
from authentik.flows.models import FlowToken
|
from authentik.flows.models import FlowToken, in_memory_stage
|
||||||
from authentik.flows.planner import PLAN_CONTEXT_IS_RESTORED
|
from authentik.flows.planner import PLAN_CONTEXT_IS_RESTORED
|
||||||
from authentik.flows.stage import ChallengeStageView
|
from authentik.flows.stage import ChallengeStageView, StageView
|
||||||
from authentik.lib.utils.time import timedelta_from_string
|
from authentik.lib.utils.time import timedelta_from_string
|
||||||
|
|
||||||
PLAN_CONTEXT_RESUME_TOKEN = "resume_token" # nosec
|
PLAN_CONTEXT_RESUME_TOKEN = "resume_token" # nosec
|
||||||
@ -49,6 +52,7 @@ class SourceStageView(ChallengeStageView):
|
|||||||
def get_challenge(self, *args, **kwargs) -> Challenge:
|
def get_challenge(self, *args, **kwargs) -> Challenge:
|
||||||
resume_token = self.create_flow_token()
|
resume_token = self.create_flow_token()
|
||||||
self.request.session[SESSION_KEY_OVERRIDE_FLOW_TOKEN] = resume_token
|
self.request.session[SESSION_KEY_OVERRIDE_FLOW_TOKEN] = resume_token
|
||||||
|
self.request.session[SESSION_KEY_SOURCE_FLOW_STAGES] = [in_memory_stage(SourceStageFinal)]
|
||||||
return self.login_button.challenge
|
return self.login_button.challenge
|
||||||
|
|
||||||
def create_flow_token(self) -> FlowToken:
|
def create_flow_token(self) -> FlowToken:
|
||||||
@ -77,3 +81,19 @@ class SourceStageView(ChallengeStageView):
|
|||||||
|
|
||||||
def challenge_valid(self, response: ChallengeResponse) -> HttpResponse:
|
def challenge_valid(self, response: ChallengeResponse) -> HttpResponse:
|
||||||
return self.executor.stage_ok()
|
return self.executor.stage_ok()
|
||||||
|
|
||||||
|
|
||||||
|
class SourceStageFinal(StageView):
|
||||||
|
"""Dynamic stage injected in the source flow manager. This is injected in the
|
||||||
|
flow the source flow manager picks (authentication or enrollment), and will run at the end.
|
||||||
|
This stage uses the override flow token to resume execution of the initial flow the
|
||||||
|
source stage is bound to."""
|
||||||
|
|
||||||
|
def dispatch(self):
|
||||||
|
token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN)
|
||||||
|
self._logger.info("Replacing source flow with overridden flow", flow=token.flow.slug)
|
||||||
|
plan = token.plan
|
||||||
|
plan.context[PLAN_CONTEXT_IS_RESTORED] = token
|
||||||
|
response = plan.to_redirect(self.request, token.flow)
|
||||||
|
token.delete()
|
||||||
|
return response
|
||||||
|
@ -53,12 +53,13 @@ class SystemTask(TenantTask):
|
|||||||
if not isinstance(msg, LogEvent):
|
if not isinstance(msg, LogEvent):
|
||||||
self._messages[idx] = LogEvent(msg, logger=self.__name__, log_level="info")
|
self._messages[idx] = LogEvent(msg, logger=self.__name__, log_level="info")
|
||||||
|
|
||||||
def set_error(self, exception: Exception):
|
def set_error(self, exception: Exception, *messages: LogEvent):
|
||||||
"""Set result to error and save exception"""
|
"""Set result to error and save exception"""
|
||||||
self._status = TaskStatus.ERROR
|
self._status = TaskStatus.ERROR
|
||||||
self._messages = [
|
self._messages = list(messages)
|
||||||
LogEvent(exception_to_string(exception), logger=self.__name__, log_level="error")
|
self._messages.extend(
|
||||||
]
|
[LogEvent(exception_to_string(exception), logger=self.__name__, log_level="error")]
|
||||||
|
)
|
||||||
|
|
||||||
def before_start(self, task_id, args, kwargs):
|
def before_start(self, task_id, args, kwargs):
|
||||||
self._start_precise = perf_counter()
|
self._start_precise = perf_counter()
|
||||||
|
@ -3,6 +3,7 @@
|
|||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from django.contrib.messages import INFO, add_message
|
||||||
from django.http.request import HttpRequest
|
from django.http.request import HttpRequest
|
||||||
from structlog.stdlib import get_logger
|
from structlog.stdlib import get_logger
|
||||||
|
|
||||||
@ -61,6 +62,8 @@ class ReevaluateMarker(StageMarker):
|
|||||||
engine.request.context.update(plan.context)
|
engine.request.context.update(plan.context)
|
||||||
engine.build()
|
engine.build()
|
||||||
result = engine.result
|
result = engine.result
|
||||||
|
for message in result.messages:
|
||||||
|
add_message(http_request, INFO, message)
|
||||||
if result.passing:
|
if result.passing:
|
||||||
return binding
|
return binding
|
||||||
LOGGER.warning(
|
LOGGER.warning(
|
||||||
|
@ -109,6 +109,8 @@ class FlowPlan:
|
|||||||
|
|
||||||
def pop(self):
|
def pop(self):
|
||||||
"""Pop next pending stage from bottom of list"""
|
"""Pop next pending stage from bottom of list"""
|
||||||
|
if not self.markers and not self.bindings:
|
||||||
|
return
|
||||||
self.markers.pop(0)
|
self.markers.pop(0)
|
||||||
self.bindings.pop(0)
|
self.bindings.pop(0)
|
||||||
|
|
||||||
@ -156,8 +158,13 @@ class FlowPlan:
|
|||||||
final_stage: type[StageView] = self.bindings[-1].stage.view
|
final_stage: type[StageView] = self.bindings[-1].stage.view
|
||||||
temp_exec = FlowExecutorView(flow=flow, request=request, plan=self)
|
temp_exec = FlowExecutorView(flow=flow, request=request, plan=self)
|
||||||
temp_exec.current_stage = self.bindings[-1].stage
|
temp_exec.current_stage = self.bindings[-1].stage
|
||||||
|
temp_exec.current_stage_view = final_stage
|
||||||
|
temp_exec.setup(request, flow.slug)
|
||||||
stage = final_stage(request=request, executor=temp_exec)
|
stage = final_stage(request=request, executor=temp_exec)
|
||||||
return stage.dispatch(request)
|
response = stage.dispatch(request)
|
||||||
|
# Ensure we clean the flow state we have in the session before we redirect away
|
||||||
|
temp_exec.stage_ok()
|
||||||
|
return response
|
||||||
|
|
||||||
get_qs = request.GET.copy()
|
get_qs = request.GET.copy()
|
||||||
if request.user.is_authenticated and (
|
if request.user.is_authenticated and (
|
||||||
|
@ -103,7 +103,7 @@ class FlowExecutorView(APIView):
|
|||||||
|
|
||||||
permission_classes = [AllowAny]
|
permission_classes = [AllowAny]
|
||||||
|
|
||||||
flow: Flow
|
flow: Flow = None
|
||||||
|
|
||||||
plan: FlowPlan | None = None
|
plan: FlowPlan | None = None
|
||||||
current_binding: FlowStageBinding | None = None
|
current_binding: FlowStageBinding | None = None
|
||||||
@ -114,6 +114,7 @@ class FlowExecutorView(APIView):
|
|||||||
|
|
||||||
def setup(self, request: HttpRequest, flow_slug: str):
|
def setup(self, request: HttpRequest, flow_slug: str):
|
||||||
super().setup(request, flow_slug=flow_slug)
|
super().setup(request, flow_slug=flow_slug)
|
||||||
|
if not self.flow:
|
||||||
self.flow = get_object_or_404(Flow.objects.select_related(), slug=flow_slug)
|
self.flow = get_object_or_404(Flow.objects.select_related(), slug=flow_slug)
|
||||||
self._logger = get_logger().bind(flow_slug=flow_slug)
|
self._logger = get_logger().bind(flow_slug=flow_slug)
|
||||||
set_tag("authentik.flow", self.flow.slug)
|
set_tag("authentik.flow", self.flow.slug)
|
||||||
|
@ -283,12 +283,15 @@ class ConfigLoader:
|
|||||||
def get_optional_int(self, path: str, default=None) -> int | None:
|
def get_optional_int(self, path: str, default=None) -> int | None:
|
||||||
"""Wrapper for get that converts value into int or None if set"""
|
"""Wrapper for get that converts value into int or None if set"""
|
||||||
value = self.get(path, default)
|
value = self.get(path, default)
|
||||||
|
if value is UNSET:
|
||||||
|
return default
|
||||||
try:
|
try:
|
||||||
return int(value)
|
return int(value)
|
||||||
except (ValueError, TypeError) as exc:
|
except (ValueError, TypeError) as exc:
|
||||||
if value is None or (isinstance(value, str) and value.lower() == "null"):
|
if value is None or (isinstance(value, str) and value.lower() == "null"):
|
||||||
return None
|
return default
|
||||||
|
if value is UNSET:
|
||||||
|
return default
|
||||||
self.log("warning", "Failed to parse config as int", path=path, exc=str(exc))
|
self.log("warning", "Failed to parse config as int", path=path, exc=str(exc))
|
||||||
return default
|
return default
|
||||||
|
|
||||||
@ -421,4 +424,4 @@ if __name__ == "__main__":
|
|||||||
if len(argv) < 2: # noqa: PLR2004
|
if len(argv) < 2: # noqa: PLR2004
|
||||||
print(dumps(CONFIG.raw, indent=4, cls=AttrEncoder))
|
print(dumps(CONFIG.raw, indent=4, cls=AttrEncoder))
|
||||||
else:
|
else:
|
||||||
print(CONFIG.get(argv[1]))
|
print(CONFIG.get(argv[-1]))
|
||||||
|
26
authentik/lib/debug.py
Normal file
26
authentik/lib/debug.py
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
from structlog.stdlib import get_logger
|
||||||
|
|
||||||
|
from authentik.lib.config import CONFIG
|
||||||
|
|
||||||
|
LOGGER = get_logger()
|
||||||
|
|
||||||
|
|
||||||
|
def start_debug_server(**kwargs) -> bool:
|
||||||
|
"""Attempt to start a debugpy server in the current process.
|
||||||
|
Returns true if the server was started successfully, otherwise false"""
|
||||||
|
if not CONFIG.get_bool("debug") and not CONFIG.get_bool("debugger"):
|
||||||
|
return
|
||||||
|
try:
|
||||||
|
import debugpy
|
||||||
|
except ImportError:
|
||||||
|
LOGGER.warning(
|
||||||
|
"Failed to import debugpy. debugpy is not included "
|
||||||
|
"in the default release dependencies and must be installed manually"
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
listen: str = CONFIG.get("listen.listen_debug_py", "127.0.0.1:9901")
|
||||||
|
host, _, port = listen.rpartition(":")
|
||||||
|
debugpy.listen((host, int(port)), **kwargs) # nosec
|
||||||
|
LOGGER.debug("Starting debug server", host=host, port=port)
|
||||||
|
return True
|
@ -8,6 +8,7 @@ postgresql:
|
|||||||
password: "env://POSTGRES_PASSWORD"
|
password: "env://POSTGRES_PASSWORD"
|
||||||
test:
|
test:
|
||||||
name: test_authentik
|
name: test_authentik
|
||||||
|
default_schema: public
|
||||||
read_replicas: {}
|
read_replicas: {}
|
||||||
# For example
|
# For example
|
||||||
# 0:
|
# 0:
|
||||||
@ -21,6 +22,7 @@ listen:
|
|||||||
listen_radius: 0.0.0.0:1812
|
listen_radius: 0.0.0.0:1812
|
||||||
listen_metrics: 0.0.0.0:9300
|
listen_metrics: 0.0.0.0:9300
|
||||||
listen_debug: 0.0.0.0:9900
|
listen_debug: 0.0.0.0:9900
|
||||||
|
listen_debug_py: 0.0.0.0:9901
|
||||||
trusted_proxy_cidrs:
|
trusted_proxy_cidrs:
|
||||||
- 127.0.0.0/8
|
- 127.0.0.0/8
|
||||||
- 10.0.0.0/8
|
- 10.0.0.0/8
|
||||||
@ -57,7 +59,7 @@ cache:
|
|||||||
# transport_options: ""
|
# transport_options: ""
|
||||||
|
|
||||||
debug: false
|
debug: false
|
||||||
remote_debug: false
|
debugger: false
|
||||||
|
|
||||||
log_level: info
|
log_level: info
|
||||||
|
|
||||||
|
@ -22,9 +22,9 @@ class OutgoingSyncProvider(Model):
|
|||||||
class Meta:
|
class Meta:
|
||||||
abstract = True
|
abstract = True
|
||||||
|
|
||||||
def client_for_model[
|
def client_for_model[T: User | Group](
|
||||||
T: User | Group
|
self, model: type[T]
|
||||||
](self, model: type[T]) -> BaseOutgoingSyncClient[T, Any, Any, Self]:
|
) -> BaseOutgoingSyncClient[T, Any, Any, Self]:
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
def get_object_qs[T: User | Group](self, type: type[T]) -> QuerySet[T]:
|
def get_object_qs[T: User | Group](self, type: type[T]) -> QuerySet[T]:
|
||||||
|
54
authentik/lib/utils/email.py
Normal file
54
authentik/lib/utils/email.py
Normal file
@ -0,0 +1,54 @@
|
|||||||
|
"""Email utility functions"""
|
||||||
|
|
||||||
|
|
||||||
|
def mask_email(email: str | None) -> str | None:
|
||||||
|
"""Mask email address for privacy
|
||||||
|
|
||||||
|
Args:
|
||||||
|
email: Email address to mask
|
||||||
|
Returns:
|
||||||
|
Masked email address or None if input is None
|
||||||
|
Example:
|
||||||
|
mask_email("myname@company.org")
|
||||||
|
'm*****@c******.org'
|
||||||
|
"""
|
||||||
|
if not email:
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Basic email format validation
|
||||||
|
if email.count("@") != 1:
|
||||||
|
raise ValueError("Invalid email format: Must contain exactly one '@' symbol")
|
||||||
|
|
||||||
|
local, domain = email.split("@")
|
||||||
|
if not local or not domain:
|
||||||
|
raise ValueError("Invalid email format: Local and domain parts cannot be empty")
|
||||||
|
|
||||||
|
domain_parts = domain.split(".")
|
||||||
|
if len(domain_parts) < 2: # noqa: PLR2004
|
||||||
|
raise ValueError("Invalid email format: Domain must contain at least one dot")
|
||||||
|
|
||||||
|
limit = 2
|
||||||
|
|
||||||
|
# Mask local part (keep first char)
|
||||||
|
if len(local) <= limit:
|
||||||
|
masked_local = "*" * len(local)
|
||||||
|
else:
|
||||||
|
masked_local = local[0] + "*" * (len(local) - 1)
|
||||||
|
|
||||||
|
# Mask each domain part except the last one (TLD)
|
||||||
|
masked_domain_parts = []
|
||||||
|
for _i, part in enumerate(domain_parts[:-1]): # Process all parts except TLD
|
||||||
|
if not part: # Check for empty parts (consecutive dots)
|
||||||
|
raise ValueError("Invalid email format: Domain parts cannot be empty")
|
||||||
|
if len(part) <= limit:
|
||||||
|
masked_part = "*" * len(part)
|
||||||
|
else:
|
||||||
|
masked_part = part[0] + "*" * (len(part) - 1)
|
||||||
|
masked_domain_parts.append(masked_part)
|
||||||
|
|
||||||
|
# Add TLD unchanged
|
||||||
|
if not domain_parts[-1]: # Check if TLD is empty
|
||||||
|
raise ValueError("Invalid email format: TLD cannot be empty")
|
||||||
|
masked_domain_parts.append(domain_parts[-1])
|
||||||
|
|
||||||
|
return f"{masked_local}@{'.'.join(masked_domain_parts)}"
|
@ -42,6 +42,8 @@ class DebugSession(Session):
|
|||||||
|
|
||||||
def get_http_session() -> Session:
|
def get_http_session() -> Session:
|
||||||
"""Get a requests session with common headers"""
|
"""Get a requests session with common headers"""
|
||||||
session = DebugSession() if CONFIG.get_bool("debug") else Session()
|
session = Session()
|
||||||
|
if CONFIG.get_bool("debug") or CONFIG.get("log_level") == "trace":
|
||||||
|
session = DebugSession()
|
||||||
session.headers["User-Agent"] = authentik_user_agent()
|
session.headers["User-Agent"] = authentik_user_agent()
|
||||||
return session
|
return session
|
||||||
|
@ -19,7 +19,6 @@ from authentik.core.api.used_by import UsedByMixin
|
|||||||
from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer
|
from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer
|
||||||
from authentik.core.models import Provider
|
from authentik.core.models import Provider
|
||||||
from authentik.enterprise.license import LicenseKey
|
from authentik.enterprise.license import LicenseKey
|
||||||
from authentik.enterprise.providers.rac.models import RACProvider
|
|
||||||
from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator
|
from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator
|
||||||
from authentik.outposts.api.service_connections import ServiceConnectionSerializer
|
from authentik.outposts.api.service_connections import ServiceConnectionSerializer
|
||||||
from authentik.outposts.apps import MANAGED_OUTPOST, MANAGED_OUTPOST_NAME
|
from authentik.outposts.apps import MANAGED_OUTPOST, MANAGED_OUTPOST_NAME
|
||||||
@ -31,6 +30,7 @@ from authentik.outposts.models import (
|
|||||||
)
|
)
|
||||||
from authentik.providers.ldap.models import LDAPProvider
|
from authentik.providers.ldap.models import LDAPProvider
|
||||||
from authentik.providers.proxy.models import ProxyProvider
|
from authentik.providers.proxy.models import ProxyProvider
|
||||||
|
from authentik.providers.rac.models import RACProvider
|
||||||
from authentik.providers.radius.models import RadiusProvider
|
from authentik.providers.radius.models import RadiusProvider
|
||||||
|
|
||||||
|
|
||||||
|
@ -18,8 +18,6 @@ from kubernetes.config.kube_config import KUBE_CONFIG_DEFAULT_LOCATION
|
|||||||
from structlog.stdlib import get_logger
|
from structlog.stdlib import get_logger
|
||||||
from yaml import safe_load
|
from yaml import safe_load
|
||||||
|
|
||||||
from authentik.enterprise.providers.rac.controllers.docker import RACDockerController
|
|
||||||
from authentik.enterprise.providers.rac.controllers.kubernetes import RACKubernetesController
|
|
||||||
from authentik.events.models import TaskStatus
|
from authentik.events.models import TaskStatus
|
||||||
from authentik.events.system_tasks import SystemTask, prefill_task
|
from authentik.events.system_tasks import SystemTask, prefill_task
|
||||||
from authentik.lib.config import CONFIG
|
from authentik.lib.config import CONFIG
|
||||||
@ -41,6 +39,8 @@ from authentik.providers.ldap.controllers.docker import LDAPDockerController
|
|||||||
from authentik.providers.ldap.controllers.kubernetes import LDAPKubernetesController
|
from authentik.providers.ldap.controllers.kubernetes import LDAPKubernetesController
|
||||||
from authentik.providers.proxy.controllers.docker import ProxyDockerController
|
from authentik.providers.proxy.controllers.docker import ProxyDockerController
|
||||||
from authentik.providers.proxy.controllers.kubernetes import ProxyKubernetesController
|
from authentik.providers.proxy.controllers.kubernetes import ProxyKubernetesController
|
||||||
|
from authentik.providers.rac.controllers.docker import RACDockerController
|
||||||
|
from authentik.providers.rac.controllers.kubernetes import RACKubernetesController
|
||||||
from authentik.providers.radius.controllers.docker import RadiusDockerController
|
from authentik.providers.radius.controllers.docker import RadiusDockerController
|
||||||
from authentik.providers.radius.controllers.kubernetes import RadiusKubernetesController
|
from authentik.providers.radius.controllers.kubernetes import RadiusKubernetesController
|
||||||
from authentik.root.celery import CELERY_APP
|
from authentik.root.celery import CELERY_APP
|
||||||
|
@ -42,6 +42,12 @@ class GeoIPPolicySerializer(CountryFieldMixin, PolicySerializer):
|
|||||||
"asns",
|
"asns",
|
||||||
"countries",
|
"countries",
|
||||||
"countries_obj",
|
"countries_obj",
|
||||||
|
"check_history_distance",
|
||||||
|
"history_max_distance_km",
|
||||||
|
"distance_tolerance_km",
|
||||||
|
"history_login_count",
|
||||||
|
"check_impossible_travel",
|
||||||
|
"impossible_tolerance_km",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
@ -0,0 +1,43 @@
|
|||||||
|
# Generated by Django 5.0.10 on 2025-01-02 20:40
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("authentik_policies_geoip", "0001_initial"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="geoippolicy",
|
||||||
|
name="check_history_distance",
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="geoippolicy",
|
||||||
|
name="check_impossible_travel",
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="geoippolicy",
|
||||||
|
name="distance_tolerance_km",
|
||||||
|
field=models.PositiveIntegerField(default=50),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="geoippolicy",
|
||||||
|
name="history_login_count",
|
||||||
|
field=models.PositiveIntegerField(default=5),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="geoippolicy",
|
||||||
|
name="history_max_distance_km",
|
||||||
|
field=models.PositiveBigIntegerField(default=100),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="geoippolicy",
|
||||||
|
name="impossible_tolerance_km",
|
||||||
|
field=models.PositiveIntegerField(default=100),
|
||||||
|
),
|
||||||
|
]
|
@ -4,15 +4,21 @@ from itertools import chain
|
|||||||
|
|
||||||
from django.contrib.postgres.fields import ArrayField
|
from django.contrib.postgres.fields import ArrayField
|
||||||
from django.db import models
|
from django.db import models
|
||||||
|
from django.utils.timezone import now
|
||||||
from django.utils.translation import gettext as _
|
from django.utils.translation import gettext as _
|
||||||
from django_countries.fields import CountryField
|
from django_countries.fields import CountryField
|
||||||
|
from geopy import distance
|
||||||
from rest_framework.serializers import BaseSerializer
|
from rest_framework.serializers import BaseSerializer
|
||||||
|
|
||||||
|
from authentik.events.context_processors.geoip import GeoIPDict
|
||||||
|
from authentik.events.models import Event, EventAction
|
||||||
from authentik.policies.exceptions import PolicyException
|
from authentik.policies.exceptions import PolicyException
|
||||||
from authentik.policies.geoip.exceptions import GeoIPNotFoundException
|
from authentik.policies.geoip.exceptions import GeoIPNotFoundException
|
||||||
from authentik.policies.models import Policy
|
from authentik.policies.models import Policy
|
||||||
from authentik.policies.types import PolicyRequest, PolicyResult
|
from authentik.policies.types import PolicyRequest, PolicyResult
|
||||||
|
|
||||||
|
MAX_DISTANCE_HOUR_KM = 1000
|
||||||
|
|
||||||
|
|
||||||
class GeoIPPolicy(Policy):
|
class GeoIPPolicy(Policy):
|
||||||
"""Ensure the user satisfies requirements of geography or network topology, based on IP
|
"""Ensure the user satisfies requirements of geography or network topology, based on IP
|
||||||
@ -21,6 +27,15 @@ class GeoIPPolicy(Policy):
|
|||||||
asns = ArrayField(models.IntegerField(), blank=True, default=list)
|
asns = ArrayField(models.IntegerField(), blank=True, default=list)
|
||||||
countries = CountryField(multiple=True, blank=True)
|
countries = CountryField(multiple=True, blank=True)
|
||||||
|
|
||||||
|
distance_tolerance_km = models.PositiveIntegerField(default=50)
|
||||||
|
|
||||||
|
check_history_distance = models.BooleanField(default=False)
|
||||||
|
history_max_distance_km = models.PositiveBigIntegerField(default=100)
|
||||||
|
history_login_count = models.PositiveIntegerField(default=5)
|
||||||
|
|
||||||
|
check_impossible_travel = models.BooleanField(default=False)
|
||||||
|
impossible_tolerance_km = models.PositiveIntegerField(default=100)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def serializer(self) -> type[BaseSerializer]:
|
def serializer(self) -> type[BaseSerializer]:
|
||||||
from authentik.policies.geoip.api import GeoIPPolicySerializer
|
from authentik.policies.geoip.api import GeoIPPolicySerializer
|
||||||
@ -37,21 +52,27 @@ class GeoIPPolicy(Policy):
|
|||||||
- the client IP is advertised by an autonomous system with ASN in the `asns`
|
- the client IP is advertised by an autonomous system with ASN in the `asns`
|
||||||
- the client IP is geolocated in a country of `countries`
|
- the client IP is geolocated in a country of `countries`
|
||||||
"""
|
"""
|
||||||
results: list[PolicyResult] = []
|
static_results: list[PolicyResult] = []
|
||||||
|
dynamic_results: list[PolicyResult] = []
|
||||||
|
|
||||||
if self.asns:
|
if self.asns:
|
||||||
results.append(self.passes_asn(request))
|
static_results.append(self.passes_asn(request))
|
||||||
if self.countries:
|
if self.countries:
|
||||||
results.append(self.passes_country(request))
|
static_results.append(self.passes_country(request))
|
||||||
|
|
||||||
if not results:
|
if self.check_history_distance or self.check_impossible_travel:
|
||||||
|
dynamic_results.append(self.passes_distance(request))
|
||||||
|
|
||||||
|
if not static_results and not dynamic_results:
|
||||||
return PolicyResult(True)
|
return PolicyResult(True)
|
||||||
|
|
||||||
passing = any(r.passing for r in results)
|
passing = any(r.passing for r in static_results) and all(r.passing for r in dynamic_results)
|
||||||
messages = chain(*[r.messages for r in results])
|
messages = chain(
|
||||||
|
*[r.messages for r in static_results], *[r.messages for r in dynamic_results]
|
||||||
|
)
|
||||||
|
|
||||||
result = PolicyResult(passing, *messages)
|
result = PolicyResult(passing, *messages)
|
||||||
result.source_results = results
|
result.source_results = list(chain(static_results, dynamic_results))
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
@ -73,7 +94,7 @@ class GeoIPPolicy(Policy):
|
|||||||
|
|
||||||
def passes_country(self, request: PolicyRequest) -> PolicyResult:
|
def passes_country(self, request: PolicyRequest) -> PolicyResult:
|
||||||
# This is not a single get chain because `request.context` can contain `{ "geoip": None }`.
|
# This is not a single get chain because `request.context` can contain `{ "geoip": None }`.
|
||||||
geoip_data = request.context.get("geoip")
|
geoip_data: GeoIPDict | None = request.context.get("geoip")
|
||||||
country = geoip_data.get("country") if geoip_data else None
|
country = geoip_data.get("country") if geoip_data else None
|
||||||
|
|
||||||
if not country:
|
if not country:
|
||||||
@ -87,6 +108,42 @@ class GeoIPPolicy(Policy):
|
|||||||
|
|
||||||
return PolicyResult(True)
|
return PolicyResult(True)
|
||||||
|
|
||||||
|
def passes_distance(self, request: PolicyRequest) -> PolicyResult:
|
||||||
|
"""Check if current policy execution is out of distance range compared
|
||||||
|
to previous authentication requests"""
|
||||||
|
# Get previous login event and GeoIP data
|
||||||
|
previous_logins = Event.objects.filter(
|
||||||
|
action=EventAction.LOGIN, user__pk=request.user.pk, context__geo__isnull=False
|
||||||
|
).order_by("-created")[: self.history_login_count]
|
||||||
|
_now = now()
|
||||||
|
geoip_data: GeoIPDict | None = request.context.get("geoip")
|
||||||
|
if not geoip_data:
|
||||||
|
return PolicyResult(False)
|
||||||
|
for previous_login in previous_logins:
|
||||||
|
previous_login_geoip: GeoIPDict = previous_login.context["geo"]
|
||||||
|
|
||||||
|
# Figure out distance
|
||||||
|
dist = distance.geodesic(
|
||||||
|
(previous_login_geoip["lat"], previous_login_geoip["long"]),
|
||||||
|
(geoip_data["lat"], geoip_data["long"]),
|
||||||
|
)
|
||||||
|
if self.check_history_distance and dist.km >= (
|
||||||
|
self.history_max_distance_km + self.distance_tolerance_km
|
||||||
|
):
|
||||||
|
return PolicyResult(
|
||||||
|
False, _("Distance from previous authentication is larger than threshold.")
|
||||||
|
)
|
||||||
|
# Check if distance between `previous_login` and now is more
|
||||||
|
# than max distance per hour times the amount of hours since the previous login
|
||||||
|
# (round down to the lowest closest time of hours)
|
||||||
|
# clamped to be at least 1 hour
|
||||||
|
rel_time_hours = max(int((_now - previous_login.created).total_seconds() / 3600), 1)
|
||||||
|
if self.check_impossible_travel and dist.km >= (
|
||||||
|
(MAX_DISTANCE_HOUR_KM * rel_time_hours) + self.distance_tolerance_km
|
||||||
|
):
|
||||||
|
return PolicyResult(False, _("Distance is further than possible."))
|
||||||
|
return PolicyResult(True)
|
||||||
|
|
||||||
class Meta(Policy.PolicyMeta):
|
class Meta(Policy.PolicyMeta):
|
||||||
verbose_name = _("GeoIP Policy")
|
verbose_name = _("GeoIP Policy")
|
||||||
verbose_name_plural = _("GeoIP Policies")
|
verbose_name_plural = _("GeoIP Policies")
|
||||||
|
@ -1,8 +1,10 @@
|
|||||||
"""geoip policy tests"""
|
"""geoip policy tests"""
|
||||||
|
|
||||||
from django.test import TestCase
|
from django.test import TestCase
|
||||||
from guardian.shortcuts import get_anonymous_user
|
|
||||||
|
|
||||||
|
from authentik.core.tests.utils import create_test_user
|
||||||
|
from authentik.events.models import Event, EventAction
|
||||||
|
from authentik.events.utils import get_user
|
||||||
from authentik.policies.engine import PolicyRequest, PolicyResult
|
from authentik.policies.engine import PolicyRequest, PolicyResult
|
||||||
from authentik.policies.exceptions import PolicyException
|
from authentik.policies.exceptions import PolicyException
|
||||||
from authentik.policies.geoip.exceptions import GeoIPNotFoundException
|
from authentik.policies.geoip.exceptions import GeoIPNotFoundException
|
||||||
@ -14,8 +16,8 @@ class TestGeoIPPolicy(TestCase):
|
|||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
super().setUp()
|
super().setUp()
|
||||||
|
self.user = create_test_user()
|
||||||
self.request = PolicyRequest(get_anonymous_user())
|
self.request = PolicyRequest(self.user)
|
||||||
|
|
||||||
self.context_disabled_geoip = {}
|
self.context_disabled_geoip = {}
|
||||||
self.context_unknown_ip = {"asn": None, "geoip": None}
|
self.context_unknown_ip = {"asn": None, "geoip": None}
|
||||||
@ -126,3 +128,70 @@ class TestGeoIPPolicy(TestCase):
|
|||||||
result: PolicyResult = policy.passes(self.request)
|
result: PolicyResult = policy.passes(self.request)
|
||||||
|
|
||||||
self.assertTrue(result.passing)
|
self.assertTrue(result.passing)
|
||||||
|
|
||||||
|
def test_history(self):
|
||||||
|
"""Test history checks"""
|
||||||
|
Event.objects.create(
|
||||||
|
action=EventAction.LOGIN,
|
||||||
|
user=get_user(self.user),
|
||||||
|
context={
|
||||||
|
# Random location in Canada
|
||||||
|
"geo": {"lat": 55.868351, "long": -104.441011},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
# Random location in Poland
|
||||||
|
self.request.context["geoip"] = {"lat": 50.950613, "long": 20.363679}
|
||||||
|
|
||||||
|
policy = GeoIPPolicy.objects.create(check_history_distance=True)
|
||||||
|
|
||||||
|
result: PolicyResult = policy.passes(self.request)
|
||||||
|
self.assertFalse(result.passing)
|
||||||
|
|
||||||
|
def test_history_no_data(self):
|
||||||
|
"""Test history checks (with no geoip data in context)"""
|
||||||
|
Event.objects.create(
|
||||||
|
action=EventAction.LOGIN,
|
||||||
|
user=get_user(self.user),
|
||||||
|
context={
|
||||||
|
# Random location in Canada
|
||||||
|
"geo": {"lat": 55.868351, "long": -104.441011},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
policy = GeoIPPolicy.objects.create(check_history_distance=True)
|
||||||
|
|
||||||
|
result: PolicyResult = policy.passes(self.request)
|
||||||
|
self.assertFalse(result.passing)
|
||||||
|
|
||||||
|
def test_history_impossible_travel(self):
|
||||||
|
"""Test history checks"""
|
||||||
|
Event.objects.create(
|
||||||
|
action=EventAction.LOGIN,
|
||||||
|
user=get_user(self.user),
|
||||||
|
context={
|
||||||
|
# Random location in Canada
|
||||||
|
"geo": {"lat": 55.868351, "long": -104.441011},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
# Random location in Poland
|
||||||
|
self.request.context["geoip"] = {"lat": 50.950613, "long": 20.363679}
|
||||||
|
|
||||||
|
policy = GeoIPPolicy.objects.create(check_impossible_travel=True)
|
||||||
|
|
||||||
|
result: PolicyResult = policy.passes(self.request)
|
||||||
|
self.assertFalse(result.passing)
|
||||||
|
|
||||||
|
def test_history_no_geoip(self):
|
||||||
|
"""Test history checks (previous login with no geoip data)"""
|
||||||
|
Event.objects.create(
|
||||||
|
action=EventAction.LOGIN,
|
||||||
|
user=get_user(self.user),
|
||||||
|
context={},
|
||||||
|
)
|
||||||
|
# Random location in Poland
|
||||||
|
self.request.context["geoip"] = {"lat": 50.950613, "long": 20.363679}
|
||||||
|
|
||||||
|
policy = GeoIPPolicy.objects.create(check_history_distance=True)
|
||||||
|
|
||||||
|
result: PolicyResult = policy.passes(self.request)
|
||||||
|
self.assertFalse(result.passing)
|
||||||
|
@ -148,10 +148,10 @@ class PasswordPolicy(Policy):
|
|||||||
user_inputs.append(request.user.email)
|
user_inputs.append(request.user.email)
|
||||||
if request.http_request:
|
if request.http_request:
|
||||||
user_inputs.append(request.http_request.brand.branding_title)
|
user_inputs.append(request.http_request.brand.branding_title)
|
||||||
# Only calculate result for the first 100 characters, as with over 100 char
|
# Only calculate result for the first 72 characters, as with over 100 char
|
||||||
# long passwords we can be reasonably sure that they'll surpass the score anyways
|
# long passwords we can be reasonably sure that they'll surpass the score anyways
|
||||||
# See https://github.com/dropbox/zxcvbn#runtime-latency
|
# See https://github.com/dropbox/zxcvbn#runtime-latency
|
||||||
results = zxcvbn(password[:100], user_inputs)
|
results = zxcvbn(password[:72], user_inputs)
|
||||||
LOGGER.debug("password failed", check="zxcvbn", score=results["score"])
|
LOGGER.debug("password failed", check="zxcvbn", score=results["score"])
|
||||||
result = PolicyResult(results["score"] > self.zxcvbn_score_threshold)
|
result = PolicyResult(results["score"] > self.zxcvbn_score_threshold)
|
||||||
if not result.passing:
|
if not result.passing:
|
||||||
|
@ -281,7 +281,6 @@ class OAuth2Provider(WebfingerProvider, Provider):
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
return request.build_absolute_uri(url)
|
return request.build_absolute_uri(url)
|
||||||
|
|
||||||
except Provider.application.RelatedObjectDoesNotExist:
|
except Provider.application.RelatedObjectDoesNotExist:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
@ -1,9 +1,10 @@
|
|||||||
from django.contrib.auth.signals import user_logged_out
|
from django.contrib.auth.signals import user_logged_out
|
||||||
|
from django.db.models.signals import post_save
|
||||||
from django.dispatch import receiver
|
from django.dispatch import receiver
|
||||||
from django.http import HttpRequest
|
from django.http import HttpRequest
|
||||||
|
|
||||||
from authentik.core.models import User
|
from authentik.core.models import User
|
||||||
from authentik.providers.oauth2.models import AccessToken
|
from authentik.providers.oauth2.models import AccessToken, DeviceToken, RefreshToken
|
||||||
|
|
||||||
|
|
||||||
@receiver(user_logged_out)
|
@receiver(user_logged_out)
|
||||||
@ -12,3 +13,13 @@ def user_logged_out_oauth_access_token(sender, request: HttpRequest, user: User,
|
|||||||
if not request.session or not request.session.session_key:
|
if not request.session or not request.session.session_key:
|
||||||
return
|
return
|
||||||
AccessToken.objects.filter(user=user, session__session_key=request.session.session_key).delete()
|
AccessToken.objects.filter(user=user, session__session_key=request.session.session_key).delete()
|
||||||
|
|
||||||
|
|
||||||
|
@receiver(post_save, sender=User)
|
||||||
|
def user_deactivated(sender, instance: User, **_):
|
||||||
|
"""Remove user tokens when deactivated"""
|
||||||
|
if instance.is_active:
|
||||||
|
return
|
||||||
|
AccessToken.objects.filter(session__user=instance).delete()
|
||||||
|
RefreshToken.objects.filter(session__user=instance).delete()
|
||||||
|
DeviceToken.objects.filter(session__user=instance).delete()
|
||||||
|
@ -150,6 +150,7 @@ class TestToken(OAuthTestCase):
|
|||||||
"id_token": provider.encode(
|
"id_token": provider.encode(
|
||||||
access.id_token.to_dict(),
|
access.id_token.to_dict(),
|
||||||
),
|
),
|
||||||
|
"scope": "",
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
self.validate_jwt(access, provider)
|
self.validate_jwt(access, provider)
|
||||||
@ -242,6 +243,7 @@ class TestToken(OAuthTestCase):
|
|||||||
"id_token": provider.encode(
|
"id_token": provider.encode(
|
||||||
access.id_token.to_dict(),
|
access.id_token.to_dict(),
|
||||||
),
|
),
|
||||||
|
"scope": "offline_access",
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
self.validate_jwt(access, provider)
|
self.validate_jwt(access, provider)
|
||||||
@ -301,6 +303,7 @@ class TestToken(OAuthTestCase):
|
|||||||
"id_token": provider.encode(
|
"id_token": provider.encode(
|
||||||
access.id_token.to_dict(),
|
access.id_token.to_dict(),
|
||||||
),
|
),
|
||||||
|
"scope": "offline_access",
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -499,11 +499,11 @@ class OAuthFulfillmentStage(StageView):
|
|||||||
)
|
)
|
||||||
|
|
||||||
challenge.is_valid()
|
challenge.is_valid()
|
||||||
|
self.executor.stage_ok()
|
||||||
return HttpChallengeResponse(
|
return HttpChallengeResponse(
|
||||||
challenge=challenge,
|
challenge=challenge,
|
||||||
)
|
)
|
||||||
|
self.executor.stage_ok()
|
||||||
return HttpResponseRedirectScheme(uri, allowed_schemes=[parsed.scheme])
|
return HttpResponseRedirectScheme(uri, allowed_schemes=[parsed.scheme])
|
||||||
|
|
||||||
def post(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
|
def post(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
|
||||||
|
@ -64,7 +64,8 @@ def to_base64url_uint(val: int, min_length: int = 0) -> bytes:
|
|||||||
class JWKSView(View):
|
class JWKSView(View):
|
||||||
"""Show RSA Key data for Provider"""
|
"""Show RSA Key data for Provider"""
|
||||||
|
|
||||||
def get_jwk_for_key(self, key: CertificateKeyPair, use: str) -> dict | None:
|
@staticmethod
|
||||||
|
def get_jwk_for_key(key: CertificateKeyPair, use: str) -> dict | None:
|
||||||
"""Convert a certificate-key pair into JWK"""
|
"""Convert a certificate-key pair into JWK"""
|
||||||
private_key = key.private_key
|
private_key = key.private_key
|
||||||
key_data = None
|
key_data = None
|
||||||
@ -123,12 +124,12 @@ class JWKSView(View):
|
|||||||
response_data = {}
|
response_data = {}
|
||||||
|
|
||||||
if signing_key := provider.signing_key:
|
if signing_key := provider.signing_key:
|
||||||
jwk = self.get_jwk_for_key(signing_key, "sig")
|
jwk = JWKSView.get_jwk_for_key(signing_key, "sig")
|
||||||
if jwk:
|
if jwk:
|
||||||
response_data.setdefault("keys", [])
|
response_data.setdefault("keys", [])
|
||||||
response_data["keys"].append(jwk)
|
response_data["keys"].append(jwk)
|
||||||
if encryption_key := provider.encryption_key:
|
if encryption_key := provider.encryption_key:
|
||||||
jwk = self.get_jwk_for_key(encryption_key, "enc")
|
jwk = JWKSView.get_jwk_for_key(encryption_key, "enc")
|
||||||
if jwk:
|
if jwk:
|
||||||
response_data.setdefault("keys", [])
|
response_data.setdefault("keys", [])
|
||||||
response_data["keys"].append(jwk)
|
response_data["keys"].append(jwk)
|
||||||
|
@ -627,6 +627,7 @@ class TokenView(View):
|
|||||||
response = {
|
response = {
|
||||||
"access_token": access_token.token,
|
"access_token": access_token.token,
|
||||||
"token_type": TOKEN_TYPE,
|
"token_type": TOKEN_TYPE,
|
||||||
|
"scope": " ".join(access_token.scope),
|
||||||
"expires_in": int(
|
"expires_in": int(
|
||||||
timedelta_from_string(self.provider.access_token_validity).total_seconds()
|
timedelta_from_string(self.provider.access_token_validity).total_seconds()
|
||||||
),
|
),
|
||||||
@ -710,6 +711,7 @@ class TokenView(View):
|
|||||||
"access_token": access_token.token,
|
"access_token": access_token.token,
|
||||||
"refresh_token": refresh_token.token,
|
"refresh_token": refresh_token.token,
|
||||||
"token_type": TOKEN_TYPE,
|
"token_type": TOKEN_TYPE,
|
||||||
|
"scope": " ".join(access_token.scope),
|
||||||
"expires_in": int(
|
"expires_in": int(
|
||||||
timedelta_from_string(self.provider.access_token_validity).total_seconds()
|
timedelta_from_string(self.provider.access_token_validity).total_seconds()
|
||||||
),
|
),
|
||||||
@ -736,6 +738,7 @@ class TokenView(View):
|
|||||||
return {
|
return {
|
||||||
"access_token": access_token.token,
|
"access_token": access_token.token,
|
||||||
"token_type": TOKEN_TYPE,
|
"token_type": TOKEN_TYPE,
|
||||||
|
"scope": " ".join(access_token.scope),
|
||||||
"expires_in": int(
|
"expires_in": int(
|
||||||
timedelta_from_string(self.provider.access_token_validity).total_seconds()
|
timedelta_from_string(self.provider.access_token_validity).total_seconds()
|
||||||
),
|
),
|
||||||
@ -767,6 +770,7 @@ class TokenView(View):
|
|||||||
response = {
|
response = {
|
||||||
"access_token": access_token.token,
|
"access_token": access_token.token,
|
||||||
"token_type": TOKEN_TYPE,
|
"token_type": TOKEN_TYPE,
|
||||||
|
"scope": " ".join(access_token.scope),
|
||||||
"expires_in": int(
|
"expires_in": int(
|
||||||
timedelta_from_string(self.provider.access_token_validity).total_seconds()
|
timedelta_from_string(self.provider.access_token_validity).total_seconds()
|
||||||
),
|
),
|
||||||
|
0
authentik/providers/rac/__init__.py
Normal file
0
authentik/providers/rac/__init__.py
Normal file
0
authentik/providers/rac/api/__init__.py
Normal file
0
authentik/providers/rac/api/__init__.py
Normal file
@ -6,13 +6,12 @@ from rest_framework.viewsets import GenericViewSet
|
|||||||
from authentik.core.api.groups import GroupMemberSerializer
|
from authentik.core.api.groups import GroupMemberSerializer
|
||||||
from authentik.core.api.used_by import UsedByMixin
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
from authentik.core.api.utils import ModelSerializer
|
from authentik.core.api.utils import ModelSerializer
|
||||||
from authentik.enterprise.api import EnterpriseRequiredMixin
|
from authentik.providers.rac.api.endpoints import EndpointSerializer
|
||||||
from authentik.enterprise.providers.rac.api.endpoints import EndpointSerializer
|
from authentik.providers.rac.api.providers import RACProviderSerializer
|
||||||
from authentik.enterprise.providers.rac.api.providers import RACProviderSerializer
|
from authentik.providers.rac.models import ConnectionToken
|
||||||
from authentik.enterprise.providers.rac.models import ConnectionToken
|
|
||||||
|
|
||||||
|
|
||||||
class ConnectionTokenSerializer(EnterpriseRequiredMixin, ModelSerializer):
|
class ConnectionTokenSerializer(ModelSerializer):
|
||||||
"""ConnectionToken Serializer"""
|
"""ConnectionToken Serializer"""
|
||||||
|
|
||||||
provider_obj = RACProviderSerializer(source="provider", read_only=True)
|
provider_obj = RACProviderSerializer(source="provider", read_only=True)
|
@ -14,10 +14,9 @@ from structlog.stdlib import get_logger
|
|||||||
from authentik.core.api.used_by import UsedByMixin
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
from authentik.core.api.utils import ModelSerializer
|
from authentik.core.api.utils import ModelSerializer
|
||||||
from authentik.core.models import Provider
|
from authentik.core.models import Provider
|
||||||
from authentik.enterprise.api import EnterpriseRequiredMixin
|
|
||||||
from authentik.enterprise.providers.rac.api.providers import RACProviderSerializer
|
|
||||||
from authentik.enterprise.providers.rac.models import Endpoint
|
|
||||||
from authentik.policies.engine import PolicyEngine
|
from authentik.policies.engine import PolicyEngine
|
||||||
|
from authentik.providers.rac.api.providers import RACProviderSerializer
|
||||||
|
from authentik.providers.rac.models import Endpoint
|
||||||
from authentik.rbac.filters import ObjectFilter
|
from authentik.rbac.filters import ObjectFilter
|
||||||
|
|
||||||
LOGGER = get_logger()
|
LOGGER = get_logger()
|
||||||
@ -28,7 +27,7 @@ def user_endpoint_cache_key(user_pk: str) -> str:
|
|||||||
return f"goauthentik.io/providers/rac/endpoint_access/{user_pk}"
|
return f"goauthentik.io/providers/rac/endpoint_access/{user_pk}"
|
||||||
|
|
||||||
|
|
||||||
class EndpointSerializer(EnterpriseRequiredMixin, ModelSerializer):
|
class EndpointSerializer(ModelSerializer):
|
||||||
"""Endpoint Serializer"""
|
"""Endpoint Serializer"""
|
||||||
|
|
||||||
provider_obj = RACProviderSerializer(source="provider", read_only=True)
|
provider_obj = RACProviderSerializer(source="provider", read_only=True)
|
@ -10,7 +10,7 @@ from rest_framework.viewsets import ModelViewSet
|
|||||||
from authentik.core.api.property_mappings import PropertyMappingSerializer
|
from authentik.core.api.property_mappings import PropertyMappingSerializer
|
||||||
from authentik.core.api.used_by import UsedByMixin
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
from authentik.core.api.utils import JSONDictField
|
from authentik.core.api.utils import JSONDictField
|
||||||
from authentik.enterprise.providers.rac.models import RACPropertyMapping
|
from authentik.providers.rac.models import RACPropertyMapping
|
||||||
|
|
||||||
|
|
||||||
class RACPropertyMappingSerializer(PropertyMappingSerializer):
|
class RACPropertyMappingSerializer(PropertyMappingSerializer):
|
@ -5,11 +5,10 @@ from rest_framework.viewsets import ModelViewSet
|
|||||||
|
|
||||||
from authentik.core.api.providers import ProviderSerializer
|
from authentik.core.api.providers import ProviderSerializer
|
||||||
from authentik.core.api.used_by import UsedByMixin
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
from authentik.enterprise.api import EnterpriseRequiredMixin
|
from authentik.providers.rac.models import RACProvider
|
||||||
from authentik.enterprise.providers.rac.models import RACProvider
|
|
||||||
|
|
||||||
|
|
||||||
class RACProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
|
class RACProviderSerializer(ProviderSerializer):
|
||||||
"""RACProvider Serializer"""
|
"""RACProvider Serializer"""
|
||||||
|
|
||||||
outpost_set = ListField(child=CharField(), read_only=True, source="outpost_set.all")
|
outpost_set = ListField(child=CharField(), read_only=True, source="outpost_set.all")
|
14
authentik/providers/rac/apps.py
Normal file
14
authentik/providers/rac/apps.py
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
"""RAC app config"""
|
||||||
|
|
||||||
|
from django.apps import AppConfig
|
||||||
|
|
||||||
|
|
||||||
|
class AuthentikProviderRAC(AppConfig):
|
||||||
|
"""authentik rac app config"""
|
||||||
|
|
||||||
|
name = "authentik.providers.rac"
|
||||||
|
label = "authentik_providers_rac"
|
||||||
|
verbose_name = "authentik Providers.RAC"
|
||||||
|
default = True
|
||||||
|
mountpoint = ""
|
||||||
|
ws_mountpoint = "authentik.providers.rac.urls"
|
@ -7,22 +7,22 @@ from channels.generic.websocket import AsyncWebsocketConsumer
|
|||||||
from django.http.request import QueryDict
|
from django.http.request import QueryDict
|
||||||
from structlog.stdlib import BoundLogger, get_logger
|
from structlog.stdlib import BoundLogger, get_logger
|
||||||
|
|
||||||
from authentik.enterprise.providers.rac.models import ConnectionToken, RACProvider
|
|
||||||
from authentik.outposts.consumer import OUTPOST_GROUP_INSTANCE
|
from authentik.outposts.consumer import OUTPOST_GROUP_INSTANCE
|
||||||
from authentik.outposts.models import Outpost, OutpostState, OutpostType
|
from authentik.outposts.models import Outpost, OutpostState, OutpostType
|
||||||
|
from authentik.providers.rac.models import ConnectionToken, RACProvider
|
||||||
|
|
||||||
# Global broadcast group, which messages are sent to when the outpost connects back
|
# Global broadcast group, which messages are sent to when the outpost connects back
|
||||||
# to authentik for a specific connection
|
# to authentik for a specific connection
|
||||||
# The `RACClientConsumer` consumer adds itself to this group on connection,
|
# The `RACClientConsumer` consumer adds itself to this group on connection,
|
||||||
# and removes itself once it has been assigned a specific outpost channel
|
# and removes itself once it has been assigned a specific outpost channel
|
||||||
RAC_CLIENT_GROUP = "group_enterprise_rac_client"
|
RAC_CLIENT_GROUP = "group_rac_client"
|
||||||
# A group for all connections in a given authentik session ID
|
# A group for all connections in a given authentik session ID
|
||||||
# A disconnect message is sent to this group when the session expires/is deleted
|
# A disconnect message is sent to this group when the session expires/is deleted
|
||||||
RAC_CLIENT_GROUP_SESSION = "group_enterprise_rac_client_%(session)s"
|
RAC_CLIENT_GROUP_SESSION = "group_rac_client_%(session)s"
|
||||||
# A group for all connections with a specific token, which in almost all cases
|
# A group for all connections with a specific token, which in almost all cases
|
||||||
# is just one connection, however this is used to disconnect the connection
|
# is just one connection, however this is used to disconnect the connection
|
||||||
# when the token is deleted
|
# when the token is deleted
|
||||||
RAC_CLIENT_GROUP_TOKEN = "group_enterprise_rac_token_%(token)s" # nosec
|
RAC_CLIENT_GROUP_TOKEN = "group_rac_token_%(token)s" # nosec
|
||||||
|
|
||||||
# Step 1: Client connects to this websocket endpoint
|
# Step 1: Client connects to this websocket endpoint
|
||||||
# Step 2: We prepare all the connection args for Guac
|
# Step 2: We prepare all the connection args for Guac
|
@ -3,7 +3,7 @@
|
|||||||
from channels.exceptions import ChannelFull
|
from channels.exceptions import ChannelFull
|
||||||
from channels.generic.websocket import AsyncWebsocketConsumer
|
from channels.generic.websocket import AsyncWebsocketConsumer
|
||||||
|
|
||||||
from authentik.enterprise.providers.rac.consumer_client import RAC_CLIENT_GROUP
|
from authentik.providers.rac.consumer_client import RAC_CLIENT_GROUP
|
||||||
|
|
||||||
|
|
||||||
class RACOutpostConsumer(AsyncWebsocketConsumer):
|
class RACOutpostConsumer(AsyncWebsocketConsumer):
|
0
authentik/providers/rac/controllers/__init__.py
Normal file
0
authentik/providers/rac/controllers/__init__.py
Normal file
0
authentik/providers/rac/migrations/__init__.py
Normal file
0
authentik/providers/rac/migrations/__init__.py
Normal file
@ -74,7 +74,7 @@ class RACProvider(Provider):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def serializer(self) -> type[Serializer]:
|
def serializer(self) -> type[Serializer]:
|
||||||
from authentik.enterprise.providers.rac.api.providers import RACProviderSerializer
|
from authentik.providers.rac.api.providers import RACProviderSerializer
|
||||||
|
|
||||||
return RACProviderSerializer
|
return RACProviderSerializer
|
||||||
|
|
||||||
@ -100,7 +100,7 @@ class Endpoint(SerializerModel, PolicyBindingModel):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def serializer(self) -> type[Serializer]:
|
def serializer(self) -> type[Serializer]:
|
||||||
from authentik.enterprise.providers.rac.api.endpoints import EndpointSerializer
|
from authentik.providers.rac.api.endpoints import EndpointSerializer
|
||||||
|
|
||||||
return EndpointSerializer
|
return EndpointSerializer
|
||||||
|
|
||||||
@ -129,7 +129,7 @@ class RACPropertyMapping(PropertyMapping):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def serializer(self) -> type[Serializer]:
|
def serializer(self) -> type[Serializer]:
|
||||||
from authentik.enterprise.providers.rac.api.property_mappings import (
|
from authentik.providers.rac.api.property_mappings import (
|
||||||
RACPropertyMappingSerializer,
|
RACPropertyMappingSerializer,
|
||||||
)
|
)
|
||||||
|
|
@ -10,12 +10,12 @@ from django.dispatch import receiver
|
|||||||
from django.http import HttpRequest
|
from django.http import HttpRequest
|
||||||
|
|
||||||
from authentik.core.models import User
|
from authentik.core.models import User
|
||||||
from authentik.enterprise.providers.rac.api.endpoints import user_endpoint_cache_key
|
from authentik.providers.rac.api.endpoints import user_endpoint_cache_key
|
||||||
from authentik.enterprise.providers.rac.consumer_client import (
|
from authentik.providers.rac.consumer_client import (
|
||||||
RAC_CLIENT_GROUP_SESSION,
|
RAC_CLIENT_GROUP_SESSION,
|
||||||
RAC_CLIENT_GROUP_TOKEN,
|
RAC_CLIENT_GROUP_TOKEN,
|
||||||
)
|
)
|
||||||
from authentik.enterprise.providers.rac.models import ConnectionToken, Endpoint
|
from authentik.providers.rac.models import ConnectionToken, Endpoint
|
||||||
|
|
||||||
|
|
||||||
@receiver(user_logged_out)
|
@receiver(user_logged_out)
|
@ -3,7 +3,7 @@
|
|||||||
{% load authentik_core %}
|
{% load authentik_core %}
|
||||||
|
|
||||||
{% block head %}
|
{% block head %}
|
||||||
<script src="{% versioned_script 'dist/enterprise/rac/index-%v.js' %}" type="module"></script>
|
<script src="{% versioned_script 'dist/rac/index-%v.js' %}" type="module"></script>
|
||||||
<meta name="theme-color" content="#18191a" media="(prefers-color-scheme: dark)">
|
<meta name="theme-color" content="#18191a" media="(prefers-color-scheme: dark)">
|
||||||
<meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)">
|
<meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)">
|
||||||
<link rel="icon" href="{{ tenant.branding_favicon_url }}">
|
<link rel="icon" href="{{ tenant.branding_favicon_url }}">
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user