Compare commits
453 commits: version/20...version/20

Author | SHA1 | Date
---|---|---
3041a30193 | |||
1e28a1e311 | |||
5a1b912b76 | |||
464c27ef17 | |||
a745022f06 | |||
0b34f70205 | |||
a4b051fcc1 | |||
5ff3e9b418 | |||
8ae7403abc | |||
f6e1bfdfc8 | |||
aca3a5c458 | |||
d16c24fd53 | |||
6a8be0dc71 | |||
81b9b37e5e | |||
22b01962fb | |||
86cc99be35 | |||
416f917c4a | |||
f77bece790 | |||
a8dd846437 | |||
4c50769040 | |||
34189fcc06 | |||
fb5c8f3d7f | |||
049a55a761 | |||
4cd53f3d11 | |||
0d0dcf8de0 | |||
8cd1223081 | |||
1b4654bb1d | |||
0a3fade1fd | |||
ff64814f40 | |||
cbeb6e58ac | |||
285a9b8b1d | |||
66bfa6879d | |||
c05240afbf | |||
7370dd5f3f | |||
477c8b099e | |||
2c761da883 | |||
75070232b1 | |||
690b35e1a3 | |||
bd67f2362f | |||
896e5adce2 | |||
7f25b6311d | |||
253f345fc4 | |||
a3abbcec6a | |||
70e000d327 | |||
a7467e6740 | |||
b3da94bbb8 | |||
e62f5a75e4 | |||
39ad9d7c9d | |||
20d09c14b2 | |||
3a4d514bae | |||
4932846e14 | |||
bb62aa7c7f | |||
907b837301 | |||
b60a3d45dc | |||
3f5585ca84 | |||
ba9a4efc9b | |||
902378af53 | |||
2352a7f4d6 | |||
d89266a9d2 | |||
d678d33756 | |||
49d0ccd9c7 | |||
ea082ed9ef | |||
d62fc9766c | |||
983747b13b | |||
de4710ea71 | |||
d55b31dd82 | |||
d87871f806 | |||
148194e12b | |||
a2c587be43 | |||
673da2a96e | |||
a9a7b26264 | |||
83d2c442a5 | |||
4029e19b72 | |||
538a466090 | |||
322a343c81 | |||
6ddd6bfa72 | |||
36de302250 | |||
9eb13c50e9 | |||
cffc6a1b88 | |||
ba437beacc | |||
da32b05eba | |||
45b7e7565d | |||
a0b63f50bf | |||
dc5d571c99 | |||
05161db458 | |||
311ffa9f79 | |||
7cbe33d65d | |||
be9ca48de0 | |||
b3159a74e5 | |||
89fafff0af | |||
ae77c872a0 | |||
5f13563e03 | |||
e17c9040bb | |||
280ef3d265 | |||
a5bb583268 | |||
212ff11b6d | |||
1fa9d70945 | |||
eeeaa9317b | |||
09b932100f | |||
aa701c5725 | |||
6f98833150 | |||
30aa24ce6e | |||
a426a1a0b6 | |||
061c549a40 | |||
efa09d5e1d | |||
4fe0bd4b6c | |||
7c2decf5ec | |||
7f39399c32 | |||
7fd78a591d | |||
bdb84b7a8f | |||
84e9748340 | |||
7dfc621ae4 | |||
cd0a6f2d7c | |||
b7835a751b | |||
fd197ceee7 | |||
be5c8341d2 | |||
2036827f04 | |||
35665d248e | |||
bc30b41157 | |||
2af7fab42c | |||
4de205809b | |||
e8433472fd | |||
3896299312 | |||
5cfbb0993a | |||
a62e3557ac | |||
626936636a | |||
85ec713213 | |||
406bbdcfc9 | |||
02f87032cc | |||
b7a929d304 | |||
3c0cc27ea1 | |||
ec254d5927 | |||
92ba77e9e5 | |||
7ddb459030 | |||
076e89b600 | |||
ba5fa2a04f | |||
90fe1c2ce8 | |||
85f88e785f | |||
a7c4f81275 | |||
396fbc4a76 | |||
2dcd0128aa | |||
e5aa9e0774 | |||
53d78d561b | |||
93001d1329 | |||
40428f5a82 | |||
007838fcf2 | |||
5e03b27348 | |||
7c51afa36c | |||
38fd5c5614 | |||
7e3148fab5 | |||
948db46406 | |||
cccddd8c69 | |||
adc4cd9c0d | |||
abed254ca1 | |||
edfab0995f | |||
528dedf99d | |||
5d7eec3049 | |||
ad44567ebe | |||
ac82002339 | |||
df92111296 | |||
da8417a141 | |||
7f32355e3e | |||
5afe88a605 | |||
320dab3425 | |||
ca44f8bd60 | |||
5fd408ca82 | |||
becb9e34b5 | |||
4917ab9985 | |||
bd92505bc2 | |||
30033d1f90 | |||
3e5dfcbd0f | |||
bf0141acc6 | |||
0c8d513567 | |||
d07704fdf1 | |||
086a8753c0 | |||
ae7a6e2fd6 | |||
6a4ddcaba7 | |||
2c9b596f01 | |||
7257108091 | |||
91f7b289cc | |||
77a507d2f8 | |||
3e60e956f4 | |||
84ec70c2a2 | |||
72846f0ae1 | |||
dd53e7e9b1 | |||
9df16a9ae0 | |||
3dc9e247d5 | |||
02dd44eeec | |||
2f78e14381 | |||
ef6f692526 | |||
2dd575874b | |||
84c2ebabaa | |||
3e26170f4b | |||
4709dca33c | |||
6064a481fb | |||
3979b0bde7 | |||
4280847bcc | |||
ade8644da6 | |||
3c3fd53999 | |||
7b823f23ae | |||
a67bea95d4 | |||
775e0ef2fa | |||
d102c59654 | |||
03448a9169 | |||
1e6c081e5c | |||
8b9ce4a745 | |||
2a0bd50e23 | |||
014d93d485 | |||
ff42663d3c | |||
ce49d7ea5b | |||
8429dd19b2 | |||
680b182d95 | |||
b2a832175e | |||
b3ce8331f5 | |||
ef0f618234 | |||
b8a7186a55 | |||
b39530f873 | |||
7937c84f2b | |||
621843c60c | |||
c19da839b1 | |||
fea1f3be6f | |||
6f5ec7838f | |||
94300492e7 | |||
5d3931c128 | |||
262a8b5ae8 | |||
fe069c5e55 | |||
c6e60c0ebc | |||
90b457c5ee | |||
5e724e4299 | |||
b4c8dd6b91 | |||
63d163cc65 | |||
2b1356bb91 | |||
ba9edd6c44 | |||
3b2b3262d7 | |||
5431e7fe9d | |||
7d9c74ce04 | |||
60c3cf890a | |||
4ec5df6b12 | |||
0403f6d373 | |||
b7f4d15a94 | |||
56450887ca | |||
9bd613a31d | |||
3fe0483dbf | |||
63a28ca1e9 | |||
2543b075be | |||
b8bdf7a035 | |||
a3ff7cea23 | |||
bb776c2710 | |||
c9ad87d419 | |||
0d81eaffff | |||
6930c84425 | |||
eaaeaccf5d | |||
efbbd0adcf | |||
c8d9771640 | |||
1554dc9feb | |||
1005f341e4 | |||
2b98637ca5 | |||
e3f7185564 | |||
d1198fc6c1 | |||
8cb5f8fbee | |||
31a58e2c25 | |||
229715acb2 | |||
fad5b09aee | |||
2a670afd02 | |||
b69248dd55 | |||
5ff5edf769 | |||
939889e0ec | |||
19ae6585dc | |||
a81c847392 | |||
c6ede78fba | |||
cea1289186 | |||
c297f28552 | |||
35b25bd76e | |||
64d7610b13 | |||
2c8fcff832 | |||
054e76d02a | |||
80fa132dd9 | |||
4c59c3abef | |||
22d319c0e7 | |||
89edd77484 | |||
04e52d8ba6 | |||
9b5e3921cb | |||
2bbad64dc3 | |||
f6026fdb13 | |||
49def45ca3 | |||
a4856969f4 | |||
2aa7266688 | |||
25817cae6b | |||
5383ae2c19 | |||
c0c246edab | |||
831b32c279 | |||
70ccc63702 | |||
de954250e5 | |||
f268bd4c69 | |||
57a48b6350 | |||
9aac114115 | |||
66e3cbdc46 | |||
2d76d23f7b | |||
4327b35bc3 | |||
f7047df40e | |||
ef77a4b64e | |||
5d7d21076f | |||
ede072889e | |||
9cb7e6c606 | |||
e7d36c095d | |||
b88eb430c1 | |||
641872a33a | |||
405c690193 | |||
932cf48d2b | |||
402819107d | |||
41f135126b | |||
591a339302 | |||
35f2c5d96a | |||
fe6963c428 | |||
19cac4bf43 | |||
4ca564490e | |||
fcb795c273 | |||
14c70b3e4a | |||
ac880c28d7 | |||
f3c6b9a4f6 | |||
cba0cf0d76 | |||
73b67cf0f0 | |||
23a8052cc8 | |||
57c49c3865 | |||
cbea51ae5b | |||
8962081d92 | |||
e743f13f81 | |||
b20a8b7c17 | |||
b53c94d76a | |||
d4419d66c1 | |||
79044368d2 | |||
426686957d | |||
28cb803fd9 | |||
b98895ac2c | |||
85c3a36b62 | |||
6dc38b0132 | |||
e154e28611 | |||
690b7be1d8 | |||
9ba8a715b1 | |||
358750f66e | |||
b9918529b8 | |||
a5673b4ec8 | |||
d9287d0c0e | |||
d9c2b64116 | |||
2b150d3077 | |||
dec7a9cfb9 | |||
e0f48a30b7 | |||
973f14d911 | |||
e8978adc1b | |||
3ca8d9c968 | |||
42636142fa | |||
57c459348f | |||
493b34cf0d | |||
f0493f418b | |||
d45a292652 | |||
b21ea360db | |||
6816f8b851 | |||
de714f0390 | |||
800df332b5 | |||
16c194d2dc | |||
53100a72fe | |||
ec4c3f44cb | |||
f10bd432b3 | |||
4de927ba5b | |||
74e578c2bf | |||
e584fd1344 | |||
0e02925a3d | |||
5b837c3ccc | |||
2580371f94 | |||
4e9be85353 | |||
79508e1965 | |||
3a88dde545 | |||
31fc4d1cb9 | |||
09cd8f8f63 | |||
d824b09365 | |||
cabbd18880 | |||
c9dda17c68 | |||
bb8559ee18 | |||
5ae32e525c | |||
0832145a01 | |||
4167276c8f | |||
afb84c7bc5 | |||
82b2c7e3f0 | |||
fc8004db2b | |||
ddfc943bba | |||
8c0c12292e | |||
803490d98b | |||
16835ab478 | |||
572b8d87b5 | |||
31d2ea65fd | |||
f4ac2f50e2 | |||
969a3f0ddd | |||
4e18f47f28 | |||
f10286edf8 | |||
d789dcc28f | |||
715a71427e | |||
84c21d16cf | |||
2e4e17adb7 | |||
00cbaaf672 | |||
74e4e8f6aa | |||
d78fda990a | |||
10d949f7a9 | |||
6661af032d | |||
fb5e4a3af8 | |||
1dfad83a34 | |||
70025c648c | |||
676b77aa7c | |||
e35e096266 | |||
7af12d4fec | |||
8d6db0fabf | |||
8ddcf99bf7 | |||
e25f6aea8c | |||
b1a9eda1d3 | |||
2c15ab9995 | |||
b3c51e426d | |||
71578af47f | |||
6c985acb36 | |||
d878d2140e | |||
4766d6ff3d | |||
3a64d97040 | |||
2275ba3add | |||
9f7c941426 | |||
34ae9e6dab | |||
bf683514ee | |||
9b58bdb447 | |||
4237f20ccd | |||
2408719a47 | |||
b33fef7929 | |||
73b9847e7d | |||
a7e4eb021d | |||
11306770ad | |||
5235e00d3c | |||
7834146efc | |||
d4379ecd31 | |||
7492608ace | |||
7eef501446 | |||
b73de96aa6 | |||
a7adeb917e | |||
4ee2f951da | |||
01c5235e82 | |||
0ce4f9fe12 | |||
2f4f951818 | |||
a6c214e8fa | |||
57f8b108c4 | |||
7c1fe1243f | |||
3f69dd34ba | |||
c81431895a | |||
560c979d26 | |||
c5cc8842ec | |||
2a881d241d | |||
6291834573 | |||
eeea36acea | |||
e95b9da586 |
.bumpversion.cfg

@@ -1,5 +1,5 @@
[bumpversion]
current_version = 2021.6.1-rc1
current_version = 2021.7.1-rc2
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)
@@ -21,14 +21,14 @@ values =

[bumpversion:file:docker-compose.yml]

[bumpversion:file:schema.yml]

[bumpversion:file:.github/workflows/release.yml]

[bumpversion:file:authentik/__init__.py]

[bumpversion:file:internal/constants/constants.go]

[bumpversion:file:outpost/pkg/version.go]

[bumpversion:file:web/src/constants.ts]

[bumpversion:file:website/docs/outposts/manual-deploy-docker-compose.md]

@@ -1,6 +1,8 @@
env
helm
static
htmlcov
*.env.yml
**/node_modules
dist/**
build/**
build_docs/**

.github/dependabot.yml (10 changes, vendored)

@@ -9,7 +9,7 @@ updates:
assignees:
- BeryJu
- package-ecosystem: gomod
directory: "/outpost"
directory: "/"
schedule:
interval: daily
time: "04:00"
@@ -48,11 +48,3 @@ updates:
open-pull-requests-limit: 10
assignees:
- BeryJu
- package-ecosystem: docker
directory: "/outpost"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
assignees:
- BeryJu

.github/stale.yml (1 change, vendored)

@@ -6,6 +6,7 @@ daysUntilClose: 7
exemptLabels:
- pinned
- security
- pr_wanted
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had

.github/workflows/release.yml (62 changes, vendored)

@@ -33,12 +33,21 @@ jobs:
with:
push: ${{ github.event_name == 'release' }}
tags: |
beryju/authentik:2021.6.1-rc1,
beryju/authentik:2021.7.1-rc2,
beryju/authentik:latest,
ghcr.io/goauthentik/server:2021.6.1-rc1,
ghcr.io/goauthentik/server:2021.7.1-rc2,
ghcr.io/goauthentik/server:latest
platforms: linux/amd64,linux/arm64
context: .
- name: Building Docker Image (stable)
if: ${{ github.event_name == 'release' && !contains('2021.7.1-rc2', 'rc') }}
run: |
docker pull beryju/authentik:latest
docker tag beryju/authentik:latest beryju/authentik:stable
docker push beryju/authentik:stable
docker pull ghcr.io/goauthentik/server:latest
docker tag ghcr.io/goauthentik/server:latest ghcr.io/goauthentik/server:stable
docker push ghcr.io/goauthentik/server:stable
build-proxy:
runs-on: ubuntu-latest
steps:
@@ -66,12 +75,21 @@ jobs:
with:
push: ${{ github.event_name == 'release' }}
tags: |
beryju/authentik-proxy:2021.6.1-rc1,
beryju/authentik-proxy:2021.7.1-rc2,
beryju/authentik-proxy:latest,
ghcr.io/goauthentik/proxy:2021.6.1-rc1,
ghcr.io/goauthentik/proxy:2021.7.1-rc2,
ghcr.io/goauthentik/proxy:latest
file: outpost/proxy.Dockerfile
file: proxy.Dockerfile
platforms: linux/amd64,linux/arm64
- name: Building Docker Image (stable)
if: ${{ github.event_name == 'release' && !contains('2021.7.1-rc2', 'rc') }}
run: |
docker pull beryju/authentik-proxy:latest
docker tag beryju/authentik-proxy:latest beryju/authentik-proxy:stable
docker push beryju/authentik-proxy:stable
docker pull ghcr.io/goauthentik/proxy:latest
docker tag ghcr.io/goauthentik/proxy:latest ghcr.io/goauthentik/proxy:stable
docker push ghcr.io/goauthentik/proxy:stable
build-ldap:
runs-on: ubuntu-latest
steps:
@@ -99,14 +117,22 @@ jobs:
with:
push: ${{ github.event_name == 'release' }}
tags: |
beryju/authentik-ldap:2021.6.1-rc1,
beryju/authentik-ldap:2021.7.1-rc2,
beryju/authentik-ldap:latest,
ghcr.io/goauthentik/ldap:2021.6.1-rc1,
ghcr.io/goauthentik/ldap:2021.7.1-rc2,
ghcr.io/goauthentik/ldap:latest
file: outpost/ldap.Dockerfile
file: ldap.Dockerfile
platforms: linux/amd64,linux/arm64
- name: Building Docker Image (stable)
if: ${{ github.event_name == 'release' && !contains('2021.7.1-rc2', 'rc') }}
run: |
docker pull beryju/authentik-ldap:latest
docker tag beryju/authentik-ldap:latest beryju/authentik-ldap:stable
docker push beryju/authentik-ldap:stable
docker pull ghcr.io/goauthentik/ldap:latest
docker tag ghcr.io/goauthentik/ldap:latest ghcr.io/goauthentik/ldap:stable
docker push ghcr.io/goauthentik/ldap:stable
test-release:
if: ${{ github.event_name == 'release' }}
needs:
- build-server
- build-proxy
@@ -122,7 +148,7 @@ jobs:
docker-compose pull -q
docker-compose up --no-start
docker-compose start postgresql redis
docker-compose run -u root --entrypoint /bin/bash server -c "apt-get update && apt-get install -y --no-install-recommends git && pip install --no-cache -r requirements-dev.txt && ./manage.py test authentik"
docker-compose run -u root server test
sentry-release:
if: ${{ github.event_name == 'release' }}
needs:
@@ -130,13 +156,27 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2.3.0
with:
node-version: 12.x
- name: Build web api client and web ui
run: |
export NODE_ENV=production
make gen-web
cd web
npm i
npm run build
- name: Create a Sentry.io release
uses: getsentry/action-release@v1
if: ${{ github.event_name == 'release' }}
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: beryjuorg
SENTRY_PROJECT: authentik
SENTRY_URL: https://sentry.beryju.org
with:
version: authentik@2021.6.1-rc1
version: authentik@2021.7.1-rc2
environment: beryjuorg-prod
sourcemaps: './web/dist'
url_prefix: /static/dist

.github/workflows/tag.yml (4 changes, vendored)

@@ -20,11 +20,11 @@ jobs:
docker-compose pull -q
docker build \
--no-cache \
-t beryju/authentik:latest \
-t ghcr.io/goauthentik/server:latest \
-f Dockerfile .
docker-compose up --no-start
docker-compose start postgresql redis
docker-compose run -u root --entrypoint /bin/bash server -c "apt-get update && apt-get install -y --no-install-recommends git && pip install --no-cache -r requirements-dev.txt && ./manage.py test authentik"
docker-compose run -u root server test
- name: Extract version number
id: get_version
uses: actions/github-script@v4.0.2

.gitignore (5 changes, vendored)

@@ -193,10 +193,6 @@ pip-selfcheck.json
local.env.yml
.vscode/

### Helm ###
# Chart dependencies
**/charts/*.tgz

# Selenium Screenshots
selenium_screenshots/
backups/
@@ -204,3 +200,4 @@ media/
*mmdb

.idea/
api/

Dockerfile (55 changes)

@@ -8,10 +8,18 @@ WORKDIR /app/

RUN pip install pipenv && \
pipenv lock -r > requirements.txt && \
pipenv lock -rd > requirements-dev.txt
pipenv lock -r --dev-only > requirements-dev.txt

# Stage 2: Build web API
FROM openapitools/openapi-generator-cli as api-builder
# Stage 2: Build website
FROM node as website-builder

COPY ./website /static/

ENV NODE_ENV=production
RUN cd /static && npm i && npm run build-docs-only

# Stage 3: Build web API
FROM openapitools/openapi-generator-cli as web-api-builder

COPY ./schema.yml /local/schema.yml

@@ -21,34 +29,52 @@ RUN docker-entrypoint.sh generate \
-o /local/web/api \
--additional-properties=typescriptThreePlus=true,supportsES6=true,npmName=authentik-api,npmVersion=1.0.0

# Stage 3: Build webui
FROM node as npm-builder
# Stage 3: Generate API Client
FROM openapitools/openapi-generator-cli as go-api-builder

COPY ./schema.yml /local/schema.yml

RUN docker-entrypoint.sh generate \
--git-host goauthentik.io \
--git-repo-id outpost \
--git-user-id api \
-i /local/schema.yml \
-g go \
-o /local/api \
--additional-properties=packageName=api,enumClassPrefix=true,useOneOfDiscriminatorLookup=true && \
rm -f /local/api/go.mod /local/api/go.sum

# Stage 4: Build webui
FROM node as web-builder

COPY ./web /static/
COPY --from=api-builder /local/web/api /static/api
COPY --from=web-api-builder /local/web/api /static/api

ENV NODE_ENV=production
RUN cd /static && npm i --production=false && npm run build
RUN cd /static && npm i && npm run build

# Stage 4: Build go proxy
FROM golang:1.16.5 AS builder
# Stage 5: Build go proxy
FROM golang:1.16.6 AS builder

WORKDIR /work

COPY --from=npm-builder /static/robots.txt /work/web/robots.txt
COPY --from=npm-builder /static/security.txt /work/web/security.txt
COPY --from=npm-builder /static/dist/ /work/web/dist/
COPY --from=npm-builder /static/authentik/ /work/web/authentik/
COPY --from=web-builder /static/robots.txt /work/web/robots.txt
COPY --from=web-builder /static/security.txt /work/web/security.txt
COPY --from=web-builder /static/dist/ /work/web/dist/
COPY --from=web-builder /static/authentik/ /work/web/authentik/
COPY --from=website-builder /static/help/ /work/website/help/

COPY --from=go-api-builder /local/api api
COPY ./cmd /work/cmd
COPY ./web/static.go /work/web/static.go
COPY ./website/static.go /work/website/static.go
COPY ./internal /work/internal
COPY ./go.mod /work/go.mod
COPY ./go.sum /work/go.sum

RUN go build -o /work/authentik ./cmd/server/main.go

# Stage 5: Run
# Stage 6: Run
FROM python:3.9-slim-buster

WORKDIR /
@@ -76,6 +102,7 @@ RUN apt-get update && \
COPY ./authentik/ /authentik
COPY ./pyproject.toml /
COPY ./xml /xml
COPY ./tests /tests
COPY ./manage.py /
COPY ./lifecycle/ /lifecycle
COPY --from=builder /work/authentik /authentik-proxy

Makefile (9 changes)

@@ -32,7 +32,7 @@ gen-build:

gen-clean:
rm -rf web/api/src/
rm -rf outpost/api/
rm -rf api/

gen-web:
docker run \
@@ -55,11 +55,14 @@ gen-outpost:
--git-user-id api \
-i /local/schema.yml \
-g go \
-o /local/outpost/api \
-o /local/api \
--additional-properties=packageName=api,enumClassPrefix=true,useOneOfDiscriminatorLookup=true
rm -f outpost/api/go.mod outpost/api/go.sum
rm -f api/go.mod api/go.sum

gen: gen-build gen-clean gen-web gen-outpost

migrate:
python -m lifecycle.migrate

run:
go run -v cmd/server/main.go

Pipfile (2 changes)

@@ -46,6 +46,8 @@ webauthn = "*"
xmlsec = "*"
duo-client = "*"
ua-parser = "*"
deepmerge = "*"
colorama = "*"

[requires]
python_version = "3.9"

Pipfile.lock (597 changes, generated)
File diff suppressed because it is too large

README.md

@@ -21,7 +21,7 @@ authentik is an open-source Identity Provider focused on flexibility and versatility

For small/test setups it is recommended to use docker-compose, see the [documentation](https://goauthentik.io/docs/installation/docker-compose/)

For bigger setups, there is a Helm Chart in the `helm/` directory. This is documented [here](https://goauthentik.io/docs/installation/kubernetes/)
For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/helm). This is documented [here](https://goauthentik.io/docs/installation/kubernetes/)

## Screenshots

authentik/__init__.py

@@ -1,3 +1,3 @@
"""authentik"""
__version__ = "2021.6.1-rc1"
__version__ = "2021.7.1-rc2"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

@ -10,3 +10,25 @@ class AuthentikAPIConfig(AppConfig):
|
||||
label = "authentik_api"
|
||||
mountpoint = "api/"
|
||||
verbose_name = "authentik API"
|
||||
|
||||
def ready(self) -> None:
|
||||
from drf_spectacular.extensions import OpenApiAuthenticationExtension
|
||||
|
||||
from authentik.api.authentication import TokenAuthentication
|
||||
|
||||
# Class is defined here as it needs to be created early enough that drf-spectacular will
|
||||
# find it, but also won't cause any import issues
|
||||
# pylint: disable=unused-variable
|
||||
class TokenSchema(OpenApiAuthenticationExtension):
|
||||
"""Auth schema"""
|
||||
|
||||
target_class = TokenAuthentication
|
||||
name = "authentik"
|
||||
|
||||
def get_security_definition(self, auto_schema):
|
||||
"""Auth schema"""
|
||||
return {
|
||||
"type": "apiKey",
|
||||
"in": "header",
|
||||
"name": "Authorization",
|
||||
}
|
||||
|
@ -3,7 +3,6 @@ from base64 import b64decode
|
||||
from binascii import Error
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
from drf_spectacular.authentication import OpenApiAuthenticationExtension
|
||||
from rest_framework.authentication import BaseAuthentication, get_authorization_header
|
||||
from rest_framework.exceptions import AuthenticationFailed
|
||||
from rest_framework.request import Request
|
||||
@ -20,7 +19,7 @@ def token_from_header(raw_header: bytes) -> Optional[Token]:
|
||||
auth_credentials = raw_header.decode()
|
||||
if auth_credentials == "" or " " not in auth_credentials:
|
||||
return None
|
||||
auth_type, auth_credentials = auth_credentials.split()
|
||||
auth_type, _, auth_credentials = auth_credentials.partition(" ")
|
||||
if auth_type.lower() not in ["basic", "bearer"]:
|
||||
LOGGER.debug("Unsupported authentication type, denying", type=auth_type.lower())
|
||||
raise AuthenticationFailed("Unsupported authentication type")
|
||||
@ -56,18 +55,3 @@ class TokenAuthentication(BaseAuthentication):
|
||||
return None
|
||||
|
||||
return (token.user, None) # pragma: no cover
|
||||
|
||||
|
||||
class TokenSchema(OpenApiAuthenticationExtension):
|
||||
"""Auth schema"""
|
||||
|
||||
target_class = TokenAuthentication
|
||||
name = "authentik"
|
||||
|
||||
def get_security_definition(self, auto_schema):
|
||||
"""Auth schema"""
|
||||
return {
|
||||
"type": "apiKey",
|
||||
"in": "header",
|
||||
"name": "Authorization",
|
||||
}
|
||||
|
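Note on the `token_from_header` change above: switching from `split()` to `partition(" ")` means credentials that themselves contain a space no longer break parsing. A minimal standalone sketch of the difference (the header value is made up for illustration):

```python
# Minimal sketch of the parsing change; the header value below is illustrative only.
raw_header = "Bearer part-one part-two"

# Previous behaviour: split() yields three items, so unpacking into two raises ValueError.
try:
    auth_type, credentials = raw_header.split()
except ValueError:
    credentials = None

# New behaviour: partition(" ") only cuts at the first space and keeps the rest intact.
auth_type, _, credentials = raw_header.partition(" ")
assert auth_type == "Bearer"
assert credentials == "part-one part-two"
```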
@ -49,7 +49,7 @@ class ConfigView(APIView):
|
||||
caps.append(Capabilities.CAN_GEO_IP)
|
||||
if SERVICE_HOST_ENV_NAME in environ:
|
||||
# Running in k8s, only s3 backup is supported
|
||||
if CONFIG.y("postgresql.s3_backup"):
|
||||
if CONFIG.y_bool("postgresql.s3_backup"):
|
||||
caps.append(Capabilities.CAN_BACKUP)
|
||||
else:
|
||||
# Running in compose, backup is always supported
|
||||
|
authentik/api/v2/sentry.py (new file, 38 additions)

@@ -0,0 +1,38 @@
"""Sentry tunnel"""
from json import loads

from django.conf import settings
from django.http.request import HttpRequest
from django.http.response import HttpResponse
from django.views.generic.base import View
from requests import post
from requests.exceptions import RequestException

from authentik.lib.config import CONFIG


class SentryTunnelView(View):
    """Sentry tunnel, to prevent ad blockers from blocking sentry"""

    def post(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
        """Sentry tunnel, to prevent ad blockers from blocking sentry"""
        # Only allow usage of this endpoint when error reporting is enabled
        if not CONFIG.y_bool("error_reporting.enabled", False):
            return HttpResponse(status=400)
        # Body is 2 json objects separated by \n
        full_body = request.body
        header = loads(full_body.splitlines()[0])
        # Check that the DSN is what we expect
        dsn = header.get("dsn", "")
        if dsn != settings.SENTRY_DSN:
            return HttpResponse(status=400)
        response = post(
            "https://sentry.beryju.org/api/8/envelope/",
            data=full_body,
            headers={"Content-Type": "application/octet-stream"},
        )
        try:
            response.raise_for_status()
        except RequestException:
            return HttpResponse(status=500)
        return HttpResponse(status=response.status_code)

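The new `SentryTunnelView` simply relays a Sentry envelope (two newline-separated JSON documents) after checking that error reporting is enabled and that the DSN in the envelope header matches. A hedged sketch of what a client request to the tunnel could look like; the host, API path and DSN are placeholders, not taken from this diff:

```python
# Illustrative client call; host, path and DSN below are placeholders.
from json import dumps

import requests

envelope_header = {"dsn": "https://publicKey@sentry.beryju.org/8"}
event_payload = {"message": "hello from the tunnel"}
body = dumps(envelope_header) + "\n" + dumps(event_payload)

response = requests.post(
    "https://authentik.example.com/api/v2beta/sentry/",
    data=body,
    headers={"Content-Type": "application/octet-stream"},
)
# Returns 400 unless error_reporting.enabled is true and the DSN matches settings.SENTRY_DSN.
print(response.status_code)
```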
@ -1,5 +1,6 @@
|
||||
"""api v2 urls"""
|
||||
from django.urls import path
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
from drf_spectacular.views import SpectacularAPIView
|
||||
from rest_framework import routers
|
||||
|
||||
@ -10,6 +11,7 @@ from authentik.admin.api.tasks import TaskViewSet
|
||||
from authentik.admin.api.version import VersionView
|
||||
from authentik.admin.api.workers import WorkerView
|
||||
from authentik.api.v2.config import ConfigView
|
||||
from authentik.api.v2.sentry import SentryTunnelView
|
||||
from authentik.api.views import APIBrowserView
|
||||
from authentik.core.api.applications import ApplicationViewSet
|
||||
from authentik.core.api.authenticated_sessions import AuthenticatedSessionViewSet
|
||||
@ -235,6 +237,7 @@ urlpatterns = (
|
||||
FlowExecutorView.as_view(),
|
||||
name="flow-executor",
|
||||
),
|
||||
path("sentry/", csrf_exempt(SentryTunnelView.as_view()), name="sentry"),
|
||||
path("schema/", SpectacularAPIView.as_view(), name="schema"),
|
||||
]
|
||||
)
|
||||
|
@ -11,13 +11,7 @@ from drf_spectacular.utils import (
|
||||
inline_serializer,
|
||||
)
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.fields import (
|
||||
BooleanField,
|
||||
CharField,
|
||||
FileField,
|
||||
IntegerField,
|
||||
ReadOnlyField,
|
||||
)
|
||||
from rest_framework.fields import BooleanField, CharField, FileField, ReadOnlyField
|
||||
from rest_framework.parsers import MultiPartParser
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
@ -29,6 +23,7 @@ from structlog.stdlib import get_logger
|
||||
from authentik.admin.api.metrics import CoordinateSerializer, get_events_per_1h
|
||||
from authentik.api.decorators import permission_required
|
||||
from authentik.core.api.providers import ProviderSerializer
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.models import Application, User
|
||||
from authentik.events.models import EventAction
|
||||
from authentik.policies.api.exec import PolicyTestResultSerializer
|
||||
@ -73,7 +68,7 @@ class ApplicationSerializer(ModelSerializer):
|
||||
}
|
||||
|
||||
|
||||
class ApplicationViewSet(ModelViewSet):
|
||||
class ApplicationViewSet(UsedByMixin, ModelViewSet):
|
||||
"""Application Viewset"""
|
||||
|
||||
queryset = Application.objects.all()
|
||||
@ -106,15 +101,19 @@ class ApplicationViewSet(ModelViewSet):
|
||||
return applications
|
||||
|
||||
@extend_schema(
|
||||
request=inline_serializer(
|
||||
"CheckAccessRequest", fields={"for_user": IntegerField(required=False)}
|
||||
),
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name="for_user",
|
||||
location=OpenApiParameter.QUERY,
|
||||
type=OpenApiTypes.INT,
|
||||
)
|
||||
],
|
||||
responses={
|
||||
200: PolicyTestResultSerializer(),
|
||||
404: OpenApiResponse(description="for_user user not found"),
|
||||
},
|
||||
)
|
||||
@action(detail=True, methods=["POST"])
|
||||
@action(detail=True, methods=["GET"])
|
||||
# pylint: disable=unused-argument
|
||||
def check_access(self, request: Request, slug: str) -> Response:
|
||||
"""Check access to a single application by slug"""
|
||||
@ -203,7 +202,7 @@ class ApplicationViewSet(ModelViewSet):
|
||||
"""Set application icon"""
|
||||
app: Application = self.get_object()
|
||||
icon = request.FILES.get("file", None)
|
||||
clear = request.data.get("clear", False)
|
||||
clear = request.data.get("clear", "false").lower() == "true"
|
||||
if clear:
|
||||
# .delete() saves the model by default
|
||||
app.meta_icon.delete()
|
||||
|
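`check_access` above now responds to GET instead of POST, with `for_user` moved from the request body to a query parameter. A hypothetical call against the new endpoint; the host, slug and token are placeholders:

```python
# Hypothetical example; URL, slug and token are placeholders, not taken from the diff.
import requests

response = requests.get(
    "https://authentik.example.com/api/v2beta/core/applications/my-app/check_access/",
    params={"for_user": 42},  # optional: evaluate access for another user by primary key
    headers={"Authorization": "Bearer <api-token>"},
)
print(response.json())  # e.g. {"messages": [], "passing": true}
```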
@ -11,6 +11,7 @@ from rest_framework.serializers import ModelSerializer
|
||||
from rest_framework.viewsets import GenericViewSet
|
||||
from ua_parser import user_agent_parser
|
||||
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.models import AuthenticatedSession
|
||||
from authentik.events.geo import GEOIP_READER, GeoIPDict
|
||||
|
||||
@ -92,6 +93,7 @@ class AuthenticatedSessionSerializer(ModelSerializer):
|
||||
class AuthenticatedSessionViewSet(
|
||||
mixins.RetrieveModelMixin,
|
||||
mixins.DestroyModelMixin,
|
||||
UsedByMixin,
|
||||
mixins.ListModelMixin,
|
||||
GenericViewSet,
|
||||
):
|
||||
|
@ -1,32 +1,90 @@
|
||||
"""Groups API Viewset"""
|
||||
from django.db.models.query import QuerySet
|
||||
from rest_framework.fields import JSONField
|
||||
from rest_framework.serializers import ModelSerializer
|
||||
from django_filters.filters import ModelMultipleChoiceFilter
|
||||
from django_filters.filterset import FilterSet
|
||||
from rest_framework.fields import BooleanField, CharField, JSONField
|
||||
from rest_framework.serializers import ListSerializer, ModelSerializer
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
from rest_framework_guardian.filters import ObjectPermissionsFilter
|
||||
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import is_dict
|
||||
from authentik.core.models import Group
|
||||
from authentik.core.models import Group, User
|
||||
|
||||
|
||||
class GroupMemberSerializer(ModelSerializer):
|
||||
"""Stripped down user serializer to show relevant users for groups"""
|
||||
|
||||
is_superuser = BooleanField(read_only=True)
|
||||
avatar = CharField(read_only=True)
|
||||
attributes = JSONField(validators=[is_dict], required=False)
|
||||
uid = CharField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
|
||||
model = User
|
||||
fields = [
|
||||
"pk",
|
||||
"username",
|
||||
"name",
|
||||
"is_active",
|
||||
"last_login",
|
||||
"is_superuser",
|
||||
"email",
|
||||
"avatar",
|
||||
"attributes",
|
||||
"uid",
|
||||
]
|
||||
|
||||
|
||||
class GroupSerializer(ModelSerializer):
|
||||
"""Group Serializer"""
|
||||
|
||||
attributes = JSONField(validators=[is_dict], required=False)
|
||||
users_obj = ListSerializer(
|
||||
child=GroupMemberSerializer(), read_only=True, source="users", required=False
|
||||
)
|
||||
|
||||
class Meta:
|
||||
|
||||
model = Group
|
||||
fields = ["pk", "name", "is_superuser", "parent", "users", "attributes"]
|
||||
fields = [
|
||||
"pk",
|
||||
"name",
|
||||
"is_superuser",
|
||||
"parent",
|
||||
"users",
|
||||
"attributes",
|
||||
"users_obj",
|
||||
]
|
||||
|
||||
|
||||
class GroupViewSet(ModelViewSet):
|
||||
class GroupFilter(FilterSet):
|
||||
"""Filter for groups"""
|
||||
|
||||
members_by_username = ModelMultipleChoiceFilter(
|
||||
field_name="users__username",
|
||||
to_field_name="username",
|
||||
queryset=User.objects.all(),
|
||||
)
|
||||
members_by_pk = ModelMultipleChoiceFilter(
|
||||
field_name="users",
|
||||
queryset=User.objects.all(),
|
||||
)
|
||||
|
||||
class Meta:
|
||||
|
||||
model = Group
|
||||
fields = ["name", "is_superuser", "members_by_pk", "members_by_username"]
|
||||
|
||||
|
||||
class GroupViewSet(UsedByMixin, ModelViewSet):
|
||||
"""Group Viewset"""
|
||||
|
||||
queryset = Group.objects.all()
|
||||
serializer_class = GroupSerializer
|
||||
search_fields = ["name", "is_superuser"]
|
||||
filterset_fields = ["name", "is_superuser"]
|
||||
filterset_class = GroupFilter
|
||||
ordering = ["name"]
|
||||
|
||||
def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet:
|
||||
|
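The group API above gains a `GroupFilter` (filter groups by member primary key or username) and a read-only `users_obj` field that embeds a stripped-down serializer per member. A hypothetical query using the new filter; the host and token are placeholders:

```python
# Hypothetical example; host and token are placeholders.
import requests

response = requests.get(
    "https://authentik.example.com/api/v2beta/core/groups/",
    params={"members_by_username": "akadmin"},
    headers={"Authorization": "Bearer <api-token>"},
)
for group in response.json()["results"]:
    members = [user["username"] for user in group.get("users_obj", [])]
    print(group["name"], members)
```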
@ -14,6 +14,7 @@ from rest_framework.serializers import ModelSerializer, SerializerMethodField
|
||||
from rest_framework.viewsets import GenericViewSet
|
||||
|
||||
from authentik.api.decorators import permission_required
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import (
|
||||
MetaNameSerializer,
|
||||
PassiveSerializer,
|
||||
@ -65,6 +66,7 @@ class PropertyMappingSerializer(ManagedSerializer, ModelSerializer, MetaNameSeri
|
||||
class PropertyMappingViewSet(
|
||||
mixins.RetrieveModelMixin,
|
||||
mixins.DestroyModelMixin,
|
||||
UsedByMixin,
|
||||
mixins.ListModelMixin,
|
||||
GenericViewSet,
|
||||
):
|
||||
|
@ -9,6 +9,7 @@ from rest_framework.response import Response
|
||||
from rest_framework.serializers import ModelSerializer, SerializerMethodField
|
||||
from rest_framework.viewsets import GenericViewSet
|
||||
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import MetaNameSerializer, TypeCreateSerializer
|
||||
from authentik.core.models import Provider
|
||||
from authentik.lib.utils.reflection import all_subclasses
|
||||
@ -48,6 +49,7 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
|
||||
class ProviderViewSet(
|
||||
mixins.RetrieveModelMixin,
|
||||
mixins.DestroyModelMixin,
|
||||
UsedByMixin,
|
||||
mixins.ListModelMixin,
|
||||
GenericViewSet,
|
||||
):
|
||||
|
@ -10,6 +10,7 @@ from rest_framework.serializers import ModelSerializer, SerializerMethodField
|
||||
from rest_framework.viewsets import GenericViewSet
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import MetaNameSerializer, TypeCreateSerializer
|
||||
from authentik.core.models import Source
|
||||
from authentik.core.types import UserSettingSerializer
|
||||
@ -52,6 +53,7 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):
|
||||
class SourceViewSet(
|
||||
mixins.RetrieveModelMixin,
|
||||
mixins.DestroyModelMixin,
|
||||
UsedByMixin,
|
||||
mixins.ListModelMixin,
|
||||
GenericViewSet,
|
||||
):
|
||||
|
@ -9,9 +9,10 @@ from rest_framework.serializers import ModelSerializer
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.api.decorators import permission_required
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.users import UserSerializer
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
from authentik.core.models import Token, TokenIntents
|
||||
from authentik.core.models import USER_ATTRIBUTE_TOKEN_EXPIRING, Token, TokenIntents
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.managed.api import ManagedSerializer
|
||||
|
||||
@ -43,7 +44,7 @@ class TokenViewSerializer(PassiveSerializer):
|
||||
key = CharField(read_only=True)
|
||||
|
||||
|
||||
class TokenViewSet(ModelViewSet):
|
||||
class TokenViewSet(UsedByMixin, ModelViewSet):
|
||||
"""Token Viewset"""
|
||||
|
||||
lookup_field = "identifier"
|
||||
@ -60,11 +61,19 @@ class TokenViewSet(ModelViewSet):
|
||||
"intent",
|
||||
"user__username",
|
||||
"description",
|
||||
"expires",
|
||||
"expiring",
|
||||
]
|
||||
ordering = ["expires"]
|
||||
|
||||
def perform_create(self, serializer: TokenSerializer):
|
||||
serializer.save(user=self.request.user, intent=TokenIntents.INTENT_API)
|
||||
serializer.save(
|
||||
user=self.request.user,
|
||||
intent=TokenIntents.INTENT_API,
|
||||
expiring=self.request.user.attributes.get(
|
||||
USER_ATTRIBUTE_TOKEN_EXPIRING, True
|
||||
),
|
||||
)
|
||||
|
||||
@permission_required("authentik_core.view_token_key")
|
||||
@extend_schema(
|
||||
|
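With the token changes above, whether newly created API tokens expire is controlled per user through the `goauthentik.io/user/token-expires` attribute. A sketch of opting a service account out of token expiry, assuming an authentik Django shell context; the username is a placeholder:

```python
# Sketch, assuming this runs inside an authentik Django shell; the username is a placeholder.
from authentik.core.models import USER_ATTRIBUTE_TOKEN_EXPIRING, User

user = User.objects.get(username="my-service-account")
user.attributes[USER_ATTRIBUTE_TOKEN_EXPIRING] = False
user.save()
# TokenViewSet.perform_create() now reads this attribute, so new API tokens
# created for this user get expiring=False.
```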
102
authentik/core/api/used_by.py
Normal file
102
authentik/core/api/used_by.py
Normal file
@ -0,0 +1,102 @@
|
||||
"""used_by mixin"""
|
||||
from enum import Enum
|
||||
from inspect import getmembers
|
||||
|
||||
from django.db.models.base import Model
|
||||
from django.db.models.deletion import SET_DEFAULT, SET_NULL
|
||||
from django.db.models.manager import Manager
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from guardian.shortcuts import get_objects_for_user
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.fields import CharField, ChoiceField
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
|
||||
|
||||
class DeleteAction(Enum):
|
||||
"""Which action a delete will have on a used object"""
|
||||
|
||||
CASCADE = "cascade"
|
||||
CASCADE_MANY = "cascade_many"
|
||||
SET_NULL = "set_null"
|
||||
SET_DEFAULT = "set_default"
|
||||
|
||||
|
||||
class UsedBySerializer(PassiveSerializer):
|
||||
"""A list of all objects referencing the queried object"""
|
||||
|
||||
app = CharField()
|
||||
model_name = CharField()
|
||||
pk = CharField()
|
||||
name = CharField()
|
||||
action = ChoiceField(choices=[(x.name, x.name) for x in DeleteAction])
|
||||
|
||||
|
||||
def get_delete_action(manager: Manager) -> str:
|
||||
"""Get the delete action from the Foreign key, falls back to cascade"""
|
||||
if hasattr(manager, "field"):
|
||||
if manager.field.remote_field.on_delete.__name__ == SET_NULL.__name__:
|
||||
return DeleteAction.SET_NULL.name
|
||||
if manager.field.remote_field.on_delete.__name__ == SET_DEFAULT.__name__:
|
||||
return DeleteAction.SET_DEFAULT.name
|
||||
if hasattr(manager, "source_field"):
|
||||
return DeleteAction.CASCADE_MANY.name
|
||||
return DeleteAction.CASCADE.name
|
||||
|
||||
|
||||
class UsedByMixin:
|
||||
"""Mixin to add a used_by endpoint to return a list of all objects using this object"""
|
||||
|
||||
@extend_schema(
|
||||
responses={200: UsedBySerializer(many=True)},
|
||||
)
|
||||
@action(detail=True, pagination_class=None, filter_backends=[])
|
||||
# pylint: disable=invalid-name, unused-argument, too-many-locals
|
||||
def used_by(self, request: Request, *args, **kwargs) -> Response:
|
||||
"""Get a list of all objects that use this object"""
|
||||
# pyright: reportGeneralTypeIssues=false
|
||||
model: Model = self.get_object()
|
||||
used_by = []
|
||||
shadows = []
|
||||
for attr_name, manager in getmembers(model, lambda x: isinstance(x, Manager)):
|
||||
if attr_name == "objects": # pragma: no cover
|
||||
continue
|
||||
manager: Manager
|
||||
if manager.model._meta.abstract:
|
||||
continue
|
||||
app = manager.model._meta.app_label
|
||||
model_name = manager.model._meta.model_name
|
||||
delete_action = get_delete_action(manager)
|
||||
|
||||
# To make sure we only apply shadows when there are any objects,
|
||||
# but so we only apply them once, have a simple flag for the first object
|
||||
first_object = True
|
||||
|
||||
for obj in get_objects_for_user(
|
||||
request.user, f"{app}.view_{model_name}", manager
|
||||
).all():
|
||||
# Only merge shadows on first object
|
||||
if first_object:
|
||||
shadows += getattr(
|
||||
manager.model._meta, "authentik_used_by_shadows", []
|
||||
)
|
||||
first_object = False
|
||||
serializer = UsedBySerializer(
|
||||
data={
|
||||
"app": app,
|
||||
"model_name": model_name,
|
||||
"pk": str(obj.pk),
|
||||
"name": str(obj),
|
||||
"action": delete_action,
|
||||
}
|
||||
)
|
||||
serializer.is_valid()
|
||||
used_by.append(serializer.data)
|
||||
# Check the shadows map and remove anything that should be shadowed
|
||||
for idx, user in enumerate(used_by):
|
||||
full_model_name = f"{user['app']}.{user['model_name']}"
|
||||
if full_model_name in shadows:
|
||||
del used_by[idx]
|
||||
return Response(used_by)
|
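The new `UsedByMixin` adds a `used_by/` action to every viewset that includes it; each entry names a referencing object and what deleting the queried object would do to it (CASCADE, SET_NULL, and so on). A hypothetical call; the host, application slug and token are placeholders:

```python
# Hypothetical example; host, application slug and token are placeholders.
import requests

response = requests.get(
    "https://authentik.example.com/api/v2beta/core/applications/my-app/used_by/",
    headers={"Authorization": "Bearer <api-token>"},
)
for reference in response.json():
    # e.g. authentik_providers_oauth2 oauth2provider "My provider" SET_NULL
    print(reference["app"], reference["model_name"], reference["name"], reference["action"])
```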
@ -2,12 +2,11 @@
|
||||
from json import loads
|
||||
|
||||
from django.db.models.query import QuerySet
|
||||
from django.http.response import Http404
|
||||
from django.urls import reverse_lazy
|
||||
from django.utils.http import urlencode
|
||||
from django_filters.filters import BooleanFilter, CharFilter
|
||||
from django_filters.filterset import FilterSet
|
||||
from drf_spectacular.utils import OpenApiResponse, extend_schema, extend_schema_field
|
||||
from drf_spectacular.utils import extend_schema, extend_schema_field
|
||||
from guardian.utils import get_anonymous_user
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.fields import CharField, JSONField, SerializerMethodField
|
||||
@ -25,6 +24,7 @@ from rest_framework_guardian.filters import ObjectPermissionsFilter
|
||||
from authentik.admin.api.metrics import CoordinateSerializer, get_events_per_1h
|
||||
from authentik.api.decorators import permission_required
|
||||
from authentik.core.api.groups import GroupSerializer
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import LinkSerializer, PassiveSerializer, is_dict
|
||||
from authentik.core.middleware import (
|
||||
SESSION_IMPERSONATE_ORIGINAL_USER,
|
||||
@ -128,15 +128,22 @@ class UsersFilter(FilterSet):
|
||||
|
||||
class Meta:
|
||||
model = User
|
||||
fields = ["username", "name", "is_active", "is_superuser", "attributes"]
|
||||
fields = [
|
||||
"username",
|
||||
"email",
|
||||
"name",
|
||||
"is_active",
|
||||
"is_superuser",
|
||||
"attributes",
|
||||
]
|
||||
|
||||
|
||||
class UserViewSet(ModelViewSet):
|
||||
class UserViewSet(UsedByMixin, ModelViewSet):
|
||||
"""User Viewset"""
|
||||
|
||||
queryset = User.objects.none()
|
||||
serializer_class = UserSerializer
|
||||
search_fields = ["username", "name", "is_active"]
|
||||
search_fields = ["username", "name", "is_active", "email"]
|
||||
filterset_class = UsersFilter
|
||||
|
||||
def get_queryset(self): # pragma: no cover
|
||||
@ -172,7 +179,7 @@ class UserViewSet(ModelViewSet):
|
||||
@extend_schema(
|
||||
responses={
|
||||
"200": LinkSerializer(many=False),
|
||||
"404": OpenApiResponse(description="No recovery flow found."),
|
||||
"404": LinkSerializer(many=False),
|
||||
},
|
||||
)
|
||||
@action(detail=True, pagination_class=None, filter_backends=[])
|
||||
@ -183,7 +190,7 @@ class UserViewSet(ModelViewSet):
|
||||
# Check that there is a recovery flow, if not return an error
|
||||
flow = tenant.flow_recovery
|
||||
if not flow:
|
||||
raise Http404
|
||||
return Response({"link": ""}, status=404)
|
||||
user: User = self.get_object()
|
||||
token, __ = Token.objects.get_or_create(
|
||||
identifier=f"{user.uid}-password-reset",
|
||||
@ -206,6 +213,6 @@ class UserViewSet(ModelViewSet):
|
||||
return queryset
|
||||
|
||||
def filter_queryset(self, queryset):
|
||||
if self.request.user.has_perm("authentik_core.view_group"):
|
||||
if self.request.user.has_perm("authentik_core.view_user"):
|
||||
return self._filter_queryset_for_list(queryset)
|
||||
return super().filter_queryset(queryset)
|
||||
|
@ -14,7 +14,9 @@ def is_dict(value: Any):
|
||||
"""Ensure a value is a dictionary, useful for JSONFields"""
|
||||
if isinstance(value, dict):
|
||||
return
|
||||
raise ValidationError("Value must be a dictionary.")
|
||||
raise ValidationError(
|
||||
"Value must be a dictionary, and not have any duplicate keys."
|
||||
)
|
||||
|
||||
|
||||
class PassiveSerializer(Serializer):
|
||||
|
@ -3,23 +3,33 @@ from traceback import format_tb
|
||||
from typing import Optional
|
||||
|
||||
from django.http import HttpRequest
|
||||
from guardian.utils import get_anonymous_user
|
||||
|
||||
from authentik.core.models import User
|
||||
from authentik.core.models import PropertyMapping, User
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.lib.expression.evaluator import BaseEvaluator
|
||||
from authentik.policies.types import PolicyRequest
|
||||
|
||||
|
||||
class PropertyMappingEvaluator(BaseEvaluator):
|
||||
"""Custom Evalautor that adds some different context variables."""
|
||||
|
||||
def set_context(
|
||||
self, user: Optional[User], request: Optional[HttpRequest], **kwargs
|
||||
self,
|
||||
user: Optional[User],
|
||||
request: Optional[HttpRequest],
|
||||
mapping: PropertyMapping,
|
||||
**kwargs,
|
||||
):
|
||||
"""Update context with context from PropertyMapping's evaluate"""
|
||||
req = PolicyRequest(user=get_anonymous_user())
|
||||
req.obj = mapping
|
||||
if user:
|
||||
req.user = user
|
||||
self._context["user"] = user
|
||||
if request:
|
||||
self._context["request"] = request
|
||||
req.http_request = request
|
||||
self._context["request"] = req
|
||||
self._context.update(**kwargs)
|
||||
|
||||
def handle_error(self, exc: Exception, expression_source: str):
|
||||
@ -30,9 +40,8 @@ class PropertyMappingEvaluator(BaseEvaluator):
|
||||
expression=expression_source,
|
||||
message=error_string,
|
||||
)
|
||||
if "user" in self._context:
|
||||
event.set_user(self._context["user"])
|
||||
if "request" in self._context:
|
||||
event.from_http(self._context["request"])
|
||||
req: PolicyRequest = self._context["request"]
|
||||
event.from_http(req.http_request, req.user)
|
||||
return
|
||||
event.save()
|
||||
|
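After the evaluator change above, `request` inside a property-mapping expression is a `PolicyRequest`, so the Django request moves to `request.http_request` and the mapping being evaluated is reachable as `request.obj`. A sketch of an expression body written against the new context (evaluated by authentik, not a standalone script; the returned keys are illustrative):

```python
# Sketch of a property-mapping expression under the new context; keys are illustrative.
return {
    "remote-ip": request.http_request.META.get("REMOTE_ADDR") if request.http_request else "",
    "mapping-name": request.obj.name,
}
```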
@ -26,6 +26,8 @@ class ImpersonateMiddleware:
|
||||
|
||||
if SESSION_IMPERSONATE_USER in request.session:
|
||||
request.user = request.session[SESSION_IMPERSONATE_USER]
|
||||
# Ensure that the user is active, otherwise nothing will work
|
||||
request.user.is_active = True
|
||||
|
||||
return self.get_response(request)
|
||||
|
||||
|
@ -0,0 +1,20 @@
|
||||
# Generated by Django 3.2.5 on 2021-07-09 17:27
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_core", "0025_alter_application_meta_icon"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="application",
|
||||
name="meta_icon",
|
||||
field=models.FileField(
|
||||
default=None, max_length=500, null=True, upload_to="application-icons/"
|
||||
),
|
||||
),
|
||||
]
|
@ -5,12 +5,13 @@ from typing import Any, Optional, Type
|
||||
from urllib.parse import urlencode
|
||||
from uuid import uuid4
|
||||
|
||||
from deepmerge import always_merger
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import AbstractUser
|
||||
from django.contrib.auth.models import UserManager as DjangoUserManager
|
||||
from django.core import validators
|
||||
from django.db import models
|
||||
from django.db.models import Q, QuerySet
|
||||
from django.db.models import Q, QuerySet, options
|
||||
from django.http import HttpRequest
|
||||
from django.templatetags.static import static
|
||||
from django.utils.functional import cached_property
|
||||
@ -36,11 +37,16 @@ LOGGER = get_logger()
|
||||
USER_ATTRIBUTE_DEBUG = "goauthentik.io/user/debug"
|
||||
USER_ATTRIBUTE_SA = "goauthentik.io/user/service-account"
|
||||
USER_ATTRIBUTE_SOURCES = "goauthentik.io/user/sources"
|
||||
USER_ATTRIBUTE_TOKEN_EXPIRING = "goauthentik.io/user/token-expires" # nosec
|
||||
USER_ATTRIBUTE_CAN_OVERRIDE_IP = "goauthentik.io/user/override-ips"
|
||||
|
||||
GRAVATAR_URL = "https://secure.gravatar.com"
|
||||
DEFAULT_AVATAR = static("dist/assets/images/user_default.png")
|
||||
|
||||
|
||||
options.DEFAULT_NAMES = options.DEFAULT_NAMES + ("authentik_used_by_shadows",)
|
||||
|
||||
|
||||
def default_token_duration():
|
||||
"""Default duration a Token is valid"""
|
||||
return now() + timedelta(minutes=30)
|
||||
@ -110,8 +116,8 @@ class User(GuardianUserMixin, AbstractUser):
|
||||
including the users attributes"""
|
||||
final_attributes = {}
|
||||
for group in self.ak_groups.all().order_by("name"):
|
||||
final_attributes.update(group.attributes)
|
||||
final_attributes.update(self.attributes)
|
||||
always_merger.merge(final_attributes, group.attributes)
|
||||
always_merger.merge(final_attributes, self.attributes)
|
||||
return final_attributes
|
||||
|
||||
@cached_property
|
||||
@ -138,21 +144,25 @@ class User(GuardianUserMixin, AbstractUser):
|
||||
@property
|
||||
def avatar(self) -> str:
|
||||
"""Get avatar, depending on authentik.avatar setting"""
|
||||
mode = CONFIG.raw.get("authentik").get("avatars")
|
||||
mode: str = CONFIG.y("avatars", "none")
|
||||
if mode == "none":
|
||||
return DEFAULT_AVATAR
|
||||
# gravatar uses md5 for their URLs, so md5 can't be avoided
|
||||
mail_hash = md5(self.email.encode("utf-8")).hexdigest() # nosec
|
||||
if mode == "gravatar":
|
||||
parameters = [
|
||||
("s", "158"),
|
||||
("r", "g"),
|
||||
]
|
||||
# gravatar uses md5 for their URLs, so md5 can't be avoided
|
||||
mail_hash = md5(self.email.encode("utf-8")).hexdigest() # nosec
|
||||
gravatar_url = (
|
||||
f"{GRAVATAR_URL}/avatar/{mail_hash}?{urlencode(parameters, doseq=True)}"
|
||||
)
|
||||
return escape(gravatar_url)
|
||||
raise ValueError(f"Invalid avatar mode {mode}")
|
||||
return mode % {
|
||||
"username": self.username,
|
||||
"mail_hash": mail_hash,
|
||||
"upn": self.attributes.get("upn", ""),
|
||||
}
|
||||
|
||||
class Meta:
|
||||
|
||||
@ -220,7 +230,10 @@ class Application(PolicyBindingModel):
|
||||
)
|
||||
# For template applications, this can be set to /static/authentik/applications/*
|
||||
meta_icon = models.FileField(
|
||||
upload_to="application-icons/", default=None, null=True
|
||||
upload_to="application-icons/",
|
||||
default=None,
|
||||
null=True,
|
||||
max_length=500,
|
||||
)
|
||||
meta_description = models.TextField(default="", blank=True)
|
||||
meta_publisher = models.TextField(default="", blank=True)
|
||||
@ -369,6 +382,13 @@ class ExpiringModel(models.Model):
|
||||
expires = models.DateTimeField(default=default_token_duration)
|
||||
expiring = models.BooleanField(default=True)
|
||||
|
||||
def expire_action(self, *args, **kwargs):
|
||||
"""Handler which is called when this object is expired. By
|
||||
default the object is deleted. This is less efficient compared
|
||||
to bulk deleting objects, but classes like Token() need to change
|
||||
values instead of being deleted."""
|
||||
return self.delete(*args, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def filter_not_expired(cls, **kwargs) -> QuerySet:
|
||||
"""Filer for tokens which are not expired yet or are not expiring,
|
||||
@ -413,6 +433,18 @@ class Token(ManagedModel, ExpiringModel):
|
||||
user = models.ForeignKey("User", on_delete=models.CASCADE, related_name="+")
|
||||
description = models.TextField(default="", blank=True)
|
||||
|
||||
def expire_action(self, *args, **kwargs):
|
||||
"""Handler which is called when this object is expired."""
|
||||
from authentik.events.models import Event, EventAction
|
||||
|
||||
self.key = default_token_key()
|
||||
self.save(*args, **kwargs)
|
||||
Event.new(
|
||||
action=EventAction.SECRET_ROTATE,
|
||||
token=self,
|
||||
message=f"Token {self.identifier}'s secret was rotated.",
|
||||
).save()
|
||||
|
||||
def __str__(self):
|
||||
description = f"{self.identifier}"
|
||||
if self.expiring:
|
||||
@ -456,11 +488,11 @@ class PropertyMapping(SerializerModel, ManagedModel):
|
||||
from authentik.core.expression import PropertyMappingEvaluator
|
||||
|
||||
evaluator = PropertyMappingEvaluator()
|
||||
evaluator.set_context(user, request, **kwargs)
|
||||
evaluator.set_context(user, request, self, **kwargs)
|
||||
try:
|
||||
return evaluator.evaluate(self.expression)
|
||||
except (ValueError, SyntaxError) as exc:
|
||||
raise PropertyMappingExpressionException from exc
|
||||
except Exception as exc:
|
||||
raise PropertyMappingExpressionException(str(exc)) from exc
|
||||
|
||||
def __str__(self):
|
||||
return f"Property Mapping {self.name}"
|
||||
@ -490,8 +522,12 @@ class AuthenticatedSession(ExpiringModel):
|
||||
last_used = models.DateTimeField(auto_now=True)
|
||||
|
||||
@staticmethod
|
||||
def from_request(request: HttpRequest, user: User) -> "AuthenticatedSession":
|
||||
def from_request(
|
||||
request: HttpRequest, user: User
|
||||
) -> Optional["AuthenticatedSession"]:
|
||||
"""Create a new session from a http request"""
|
||||
if not hasattr(request, "session") or not request.session.session_key:
|
||||
return None
|
||||
return AuthenticatedSession(
|
||||
session_key=request.session.session_key,
|
||||
user=user,
|
||||
|
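Among the model changes above, avatar resolution now reads `CONFIG.y("avatars", "none")` and treats any value other than `none` or `gravatar` as a format string fed with `username`, `mail_hash` and `upn`. A sketch of what that interpolation does; the URL template below is illustrative, not a shipped default:

```python
# Illustrative only: the avatar template below is a made-up configuration value.
mode = "https://avatars.example.com/%(username)s.png"
avatar_url = mode % {
    "username": "akadmin",
    "mail_hash": "0123456789abcdef0123456789abcdef",  # md5 of the user's email
    "upn": "",
}
print(avatar_url)  # -> https://avatars.example.com/akadmin.png
```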
@ -1,11 +1,12 @@
|
||||
"""authentik core signals"""
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import TYPE_CHECKING, Type
|
||||
|
||||
from django.contrib.auth.signals import user_logged_in, user_logged_out
|
||||
from django.contrib.sessions.backends.cache import KEY_PREFIX
|
||||
from django.core.cache import cache
|
||||
from django.core.signals import Signal
|
||||
from django.db.models import Model
|
||||
from django.db.models.signals import post_save
|
||||
from django.db.models.signals import post_save, pre_delete
|
||||
from django.dispatch import receiver
|
||||
from django.http.request import HttpRequest
|
||||
from prometheus_client import Gauge
|
||||
@ -18,7 +19,7 @@ GAUGE_MODELS = Gauge(
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from authentik.core.models import User
|
||||
from authentik.core.models import AuthenticatedSession, User
|
||||
|
||||
|
||||
@receiver(post_save)
|
||||
@ -48,7 +49,9 @@ def user_logged_in_session(sender, request: HttpRequest, user: "User", **_):
|
||||
"""Create an AuthenticatedSession from request"""
|
||||
from authentik.core.models import AuthenticatedSession
|
||||
|
||||
AuthenticatedSession.from_request(request, user).save()
|
||||
session = AuthenticatedSession.from_request(request, user)
|
||||
if session:
|
||||
session.save()
|
||||
|
||||
|
||||
@receiver(user_logged_out)
|
||||
@ -60,3 +63,17 @@ def user_logged_out_session(sender, request: HttpRequest, user: "User", **_):
|
||||
AuthenticatedSession.objects.filter(
|
||||
session_key=request.session.session_key
|
||||
).delete()
|
||||
|
||||
|
||||
@receiver(pre_delete)
|
||||
def authenticated_session_delete(
|
||||
sender: Type[Model], instance: "AuthenticatedSession", **_
|
||||
):
|
||||
"""Delete session when authenticated session is deleted"""
|
||||
from authentik.core.models import AuthenticatedSession
|
||||
|
||||
if sender != AuthenticatedSession:
|
||||
return
|
||||
|
||||
cache_key = f"{KEY_PREFIX}{instance.session_key}"
|
||||
cache.delete(cache_key)
|
||||
|
@ -33,6 +33,7 @@ from authentik.flows.planner import (
|
||||
from authentik.flows.views import NEXT_ARG_NAME, SESSION_KEY_GET, SESSION_KEY_PLAN
|
||||
from authentik.lib.utils.urls import redirect_with_qs
|
||||
from authentik.policies.utils import delete_none_keys
|
||||
from authentik.stages.password import BACKEND_DJANGO
|
||||
from authentik.stages.password.stage import PLAN_CONTEXT_AUTHENTICATION_BACKEND
|
||||
from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT
|
||||
|
||||
@ -182,6 +183,8 @@ class SourceFlowManager:
|
||||
# pylint: disable=unused-argument
|
||||
def get_stages_to_append(self, flow: Flow) -> list[Stage]:
|
||||
"""Hook to override stages which are appended to the flow"""
|
||||
if not self.source.enrollment_flow:
|
||||
return []
|
||||
if flow.slug == self.source.enrollment_flow.slug:
|
||||
return [
|
||||
in_memory_stage(PostUserEnrollmentStage),
|
||||
@ -198,7 +201,7 @@ class SourceFlowManager:
|
||||
kwargs.update(
|
||||
{
|
||||
# Since we authenticate the user by their token, they have no backend set
|
||||
PLAN_CONTEXT_AUTHENTICATION_BACKEND: "django.contrib.auth.backends.ModelBackend",
|
||||
PLAN_CONTEXT_AUTHENTICATION_BACKEND: BACKEND_DJANGO,
|
||||
PLAN_CONTEXT_SSO: True,
|
||||
PLAN_CONTEXT_SOURCE: self.source,
|
||||
PLAN_CONTEXT_REDIRECT: final_redirect,
|
||||
@ -210,7 +213,7 @@ class SourceFlowManager:
|
||||
planner = FlowPlanner(flow)
|
||||
plan = planner.plan(self.request, kwargs)
|
||||
for stage in self.get_stages_to_append(flow):
|
||||
plan.append(stage)
|
||||
plan.append_stage(stage=stage)
|
||||
self.request.session[SESSION_KEY_PLAN] = plan
|
||||
return redirect_with_qs(
|
||||
"authentik_core:if-flow",
|
||||
|
@@ -26,14 +26,16 @@ def clean_expired_models(self: MonitoredTask):
messages = []
for cls in ExpiringModel.__subclasses__():
cls: ExpiringModel
amount, _ = (
objects = (
cls.objects.all()
.exclude(expiring=False)
.exclude(expiring=True, expires__gt=now())
.delete()
)
LOGGER.debug("Deleted expired models", model=cls, amount=amount)
messages.append(f"Deleted {amount} expired {cls._meta.verbose_name_plural}")
for obj in objects:
obj.expire_action()
amount = objects.count()
LOGGER.debug("Expired models", model=cls, amount=amount)
messages.append(f"Expired {amount} {cls._meta.verbose_name_plural}")
self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL, messages))

@@ -3,16 +3,6 @@
{% load static %}
{% load i18n %}

{% block head %}
{{ block.super }}
<style>
.pf-c-background-image::before {
background-image: url("{% static 'dist/assets/images/flow_background.jpg' %}");
background-position: center;
}
</style>
{% endblock %}

{% block title %}
{% trans 'End session' %} - {{ tenant.branding_title }}
{% endblock %}

@ -11,6 +11,11 @@
|
||||
|
||||
{% block head %}
|
||||
<script src="{% static 'dist/FlowInterface.js' %}?v={{ ak_version }}" type="module"></script>
|
||||
<style>
|
||||
.pf-c-background-image::before {
|
||||
--ak-flow-background: url("{{ flow.background_url }}");
|
||||
}
|
||||
</style>
|
||||
{% endblock %}
|
||||
|
||||
{% block body %}
|
||||
|
@ -7,6 +7,14 @@
|
||||
<link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}?v={{ ak_version }}">
|
||||
{% endblock %}
|
||||
|
||||
{% block head %}
|
||||
<style>
|
||||
.pf-c-background-image::before {
|
||||
--ak-flow-background: url("/static/dist/assets/images/flow_background.jpg");
|
||||
}
|
||||
</style>
|
||||
{% endblock %}
|
||||
|
||||
{% block body %}
|
||||
<div class="pf-c-background-image">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" class="pf-c-background-image__filter" width="0" height="0">
|
||||
|
@@ -26,7 +26,7 @@ class TestApplicationsAPI(APITestCase):
def test_check_access(self):
"""Test check_access operation"""
self.client.force_login(self.user)
response = self.client.post(
response = self.client.get(
reverse(
"authentik_api:application-check-access",
kwargs={"slug": self.allowed.slug},
@@ -36,7 +36,7 @@ class TestApplicationsAPI(APITestCase):
self.assertJSONEqual(
force_str(response.content), {"messages": [], "passing": True}
)
response = self.client.post(
response = self.client.get(
reverse(
"authentik_api:application-check-access",
kwargs={"slug": self.denied.slug},

@@ -17,6 +17,9 @@ class TestImpersonation(TestCase):

def test_impersonate_simple(self):
"""test simple impersonation and un-impersonation"""
# test with an inactive user to ensure that still works
self.other_user.is_active = False
self.other_user.save()
self.client.force_login(self.akadmin)

self.client.get(

@@ -31,7 +31,7 @@ class TestPropertyMappings(TestCase):
"""Test expression error"""
expr = "return aaa"
mapping = PropertyMapping.objects.create(name="test", expression=expr)
with self.assertRaises(NameError):
with self.assertRaises(PropertyMappingExpressionException):
mapping.evaluate(None, None)
events = Event.objects.filter(
action=EventAction.PROPERTY_MAPPING_EXCEPTION, context__expression=expr
@@ -44,7 +44,7 @@ class TestPropertyMappings(TestCase):
expr = "return aaa"
request = self.factory.get("/")
mapping = PropertyMapping.objects.create(name="test", expression=expr)
with self.assertRaises(NameError):
with self.assertRaises(PropertyMappingExpressionException):
mapping.evaluate(get_anonymous_user(), request)
events = Event.objects.filter(
action=EventAction.PROPERTY_MAPPING_EXCEPTION, context__expression=expr

@@ -1,18 +0,0 @@
"""authentik core task tests"""
from django.test import TestCase
from django.utils.timezone import now
from guardian.shortcuts import get_anonymous_user

from authentik.core.models import Token
from authentik.core.tasks import clean_expired_models


class TestTasks(TestCase):
"""Test Tasks"""

def test_token_cleanup(self):
"""Test Token cleanup task"""
Token.objects.create(expires=now(), user=get_anonymous_user())
self.assertEqual(Token.objects.all().count(), 1)
clean_expired_models.delay().get()
self.assertEqual(Token.objects.all().count(), 0)

@@ -1,8 +1,16 @@
"""Test token API"""
from django.urls.base import reverse
from django.utils.timezone import now
from guardian.shortcuts import get_anonymous_user
from rest_framework.test import APITestCase

from authentik.core.models import Token, TokenIntents, User
from authentik.core.models import (
USER_ATTRIBUTE_TOKEN_EXPIRING,
Token,
TokenIntents,
User,
)
from authentik.core.tasks import clean_expired_models


class TestTokenAPI(APITestCase):
@@ -22,3 +30,25 @@ class TestTokenAPI(APITestCase):
token = Token.objects.get(identifier="test-token")
self.assertEqual(token.user, self.user)
self.assertEqual(token.intent, TokenIntents.INTENT_API)
self.assertEqual(token.expiring, True)

def test_token_create_non_expiring(self):
"""Test token creation endpoint"""
self.user.attributes[USER_ATTRIBUTE_TOKEN_EXPIRING] = False
self.user.save()
response = self.client.post(
reverse("authentik_api:token-list"), {"identifier": "test-token"}
)
self.assertEqual(response.status_code, 201)
token = Token.objects.get(identifier="test-token")
self.assertEqual(token.user, self.user)
self.assertEqual(token.intent, TokenIntents.INTENT_API)
self.assertEqual(token.expiring, False)

def test_token_expire(self):
"""Test Token expire task"""
token: Token = Token.objects.create(expires=now(), user=get_anonymous_user())
key = token.key
clean_expired_models.delay().get()
token.refresh_from_db()
self.assertNotEqual(key, token.key)

@@ -2,7 +2,7 @@
from dataclasses import dataclass
from typing import Optional

from rest_framework.fields import CharField, DictField
from rest_framework.fields import CharField

from authentik.core.api.utils import PassiveSerializer
from authentik.flows.challenge import Challenge
@@ -22,18 +22,10 @@ class UILoginButton:
icon_url: Optional[str] = None


class UILoginButtonSerializer(PassiveSerializer):
"""Serializer for Login buttons of sources"""

name = CharField()
challenge = DictField()
icon_url = CharField(required=False, allow_null=True)


class UserSettingSerializer(PassiveSerializer):
"""Serializer for User settings for stages and sources"""

object_uid = CharField()
component = CharField()
title = CharField()
configure_url = CharField()
configure_url = CharField(required=False)

@ -1,10 +1,12 @@
|
||||
"""Crypto API Views"""
|
||||
import django_filters
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives.serialization import load_pem_private_key
|
||||
from cryptography.x509 import load_pem_x509_certificate
|
||||
from django.http.response import HttpResponse
|
||||
from django.urls import reverse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django_filters import FilterSet
|
||||
from django_filters.filters import BooleanFilter
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
|
||||
from rest_framework.decorators import action
|
||||
@ -20,6 +22,7 @@ from rest_framework.serializers import ModelSerializer, ValidationError
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.api.decorators import permission_required
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
from authentik.crypto.builder import CertificateBuilder
|
||||
from authentik.crypto.models import CertificateKeyPair
|
||||
@ -33,6 +36,9 @@ class CertificateKeyPairSerializer(ModelSerializer):
|
||||
cert_subject = SerializerMethodField()
|
||||
private_key_available = SerializerMethodField()
|
||||
|
||||
certificate_download_url = SerializerMethodField()
|
||||
private_key_download_url = SerializerMethodField()
|
||||
|
||||
def get_cert_subject(self, instance: CertificateKeyPair) -> str:
|
||||
"""Get certificate subject as full rfc4514"""
|
||||
return instance.certificate.subject.rfc4514_string()
|
||||
@ -41,6 +47,26 @@ class CertificateKeyPairSerializer(ModelSerializer):
|
||||
"""Show if this keypair has a private key configured or not"""
|
||||
return instance.key_data != "" and instance.key_data is not None
|
||||
|
||||
def get_certificate_download_url(self, instance: CertificateKeyPair) -> str:
|
||||
"""Get URL to download certificate"""
|
||||
return (
|
||||
reverse(
|
||||
"authentik_api:certificatekeypair-view-certificate",
|
||||
kwargs={"pk": instance.pk},
|
||||
)
|
||||
+ "?download"
|
||||
)
|
||||
|
||||
def get_private_key_download_url(self, instance: CertificateKeyPair) -> str:
|
||||
"""Get URL to download private key"""
|
||||
return (
|
||||
reverse(
|
||||
"authentik_api:certificatekeypair-view-private-key",
|
||||
kwargs={"pk": instance.pk},
|
||||
)
|
||||
+ "?download"
|
||||
)
|
||||
|
||||
def validate_certificate_data(self, value: str) -> str:
|
||||
"""Verify that input is a valid PEM x509 Certificate"""
|
||||
try:
|
||||
@ -71,12 +97,15 @@ class CertificateKeyPairSerializer(ModelSerializer):
|
||||
fields = [
|
||||
"pk",
|
||||
"name",
|
||||
"fingerprint",
|
||||
"fingerprint_sha256",
|
||||
"fingerprint_sha1",
|
||||
"certificate_data",
|
||||
"key_data",
|
||||
"cert_expiry",
|
||||
"cert_subject",
|
||||
"private_key_available",
|
||||
"certificate_download_url",
|
||||
"private_key_download_url",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"key_data": {"write_only": True},
|
||||
@ -100,10 +129,10 @@ class CertificateGenerationSerializer(PassiveSerializer):
|
||||
validity_days = IntegerField(initial=365)
|
||||
|
||||
|
||||
class CertificateKeyPairFilter(django_filters.FilterSet):
|
||||
class CertificateKeyPairFilter(FilterSet):
|
||||
"""Filter for certificates"""
|
||||
|
||||
has_key = django_filters.BooleanFilter(
|
||||
has_key = BooleanFilter(
|
||||
label="Only return certificate-key pairs with keys", method="filter_has_key"
|
||||
)
|
||||
|
||||
@ -117,7 +146,7 @@ class CertificateKeyPairFilter(django_filters.FilterSet):
|
||||
fields = ["name"]
|
||||
|
||||
|
||||
class CertificateKeyPairViewSet(ModelViewSet):
|
||||
class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
|
||||
"""CertificateKeyPair Viewset"""
|
||||
|
||||
queryset = CertificateKeyPair.objects.all()
|
||||
|
@ -16,11 +16,6 @@ from authentik.crypto.models import CertificateKeyPair
|
||||
class CertificateBuilder:
|
||||
"""Build self-signed certificates"""
|
||||
|
||||
__public_key = None
|
||||
__private_key = None
|
||||
__builder = None
|
||||
__certificate = None
|
||||
|
||||
common_name: str
|
||||
|
||||
def __init__(self):
|
||||
|
@@ -55,20 +55,32 @@ class CertificateKeyPair(CreatedUpdatedModel):
def private_key(self) -> Optional[RSAPrivateKey]:
"""Get python cryptography PrivateKey instance"""
if not self._private_key and self._private_key != "":
self._private_key = load_pem_private_key(
str.encode("\n".join([x.strip() for x in self.key_data.split("\n")])),
password=None,
backend=default_backend(),
)
try:
self._private_key = load_pem_private_key(
str.encode(
"\n".join([x.strip() for x in self.key_data.split("\n")])
),
password=None,
backend=default_backend(),
)
except ValueError:
return None
return self._private_key

@property
def fingerprint(self) -> str:
def fingerprint_sha256(self) -> str:
"""Get SHA256 Fingerprint of certificate_data"""
return hexlify(self.certificate.fingerprint(hashes.SHA256()), ":").decode(
"utf-8"
)

@property
def fingerprint_sha1(self) -> str:
"""Get SHA1 Fingerprint of certificate_data"""
return hexlify(
self.certificate.fingerprint(hashes.SHA1()), ":"  # nosec
).decode("utf-8")

@property
def kid(self):
"""Get Key ID used for JWKS"""

@ -4,10 +4,14 @@ import datetime
|
||||
from django.test import TestCase
|
||||
from django.urls import reverse
|
||||
|
||||
from authentik.core.api.used_by import DeleteAction
|
||||
from authentik.core.models import User
|
||||
from authentik.crypto.api import CertificateKeyPairSerializer
|
||||
from authentik.crypto.builder import CertificateBuilder
|
||||
from authentik.crypto.models import CertificateKeyPair
|
||||
from authentik.flows.models import Flow
|
||||
from authentik.providers.oauth2.generators import generate_client_secret
|
||||
from authentik.providers.oauth2.models import OAuth2Provider
|
||||
|
||||
|
||||
class TestCrypto(TestCase):
|
||||
@ -91,3 +95,35 @@ class TestCrypto(TestCase):
|
||||
)
|
||||
self.assertEqual(200, response.status_code)
|
||||
self.assertIn("Content-Disposition", response)
|
||||
|
||||
def test_used_by(self):
|
||||
"""Test used_by endpoint"""
|
||||
self.client.force_login(User.objects.get(username="akadmin"))
|
||||
keypair = CertificateKeyPair.objects.first()
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name="test",
|
||||
client_id="test",
|
||||
client_secret=generate_client_secret(),
|
||||
authorization_flow=Flow.objects.first(),
|
||||
redirect_uris="http://localhost",
|
||||
rsa_key=CertificateKeyPair.objects.first(),
|
||||
)
|
||||
response = self.client.get(
|
||||
reverse(
|
||||
"authentik_api:certificatekeypair-used-by",
|
||||
kwargs={"pk": keypair.pk},
|
||||
)
|
||||
)
|
||||
self.assertEqual(200, response.status_code)
|
||||
self.assertJSONEqual(
|
||||
response.content.decode(),
|
||||
[
|
||||
{
|
||||
"app": "authentik_providers_oauth2",
|
||||
"model_name": "oauth2provider",
|
||||
"pk": str(provider.pk),
|
||||
"name": str(provider),
|
||||
"action": DeleteAction.SET_NULL.name,
|
||||
}
|
||||
],
|
||||
)
|
||||
|
@ -6,11 +6,11 @@ from drf_spectacular.types import OpenApiTypes
|
||||
from drf_spectacular.utils import OpenApiParameter, extend_schema
|
||||
from guardian.shortcuts import get_objects_for_user
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.fields import CharField, DictField, IntegerField
|
||||
from rest_framework.fields import DictField, IntegerField
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import ModelSerializer
|
||||
from rest_framework.viewsets import ReadOnlyModelViewSet
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.core.api.utils import PassiveSerializer, TypeCreateSerializer
|
||||
from authentik.events.models import Event, EventAction
|
||||
@ -19,11 +19,6 @@ from authentik.events.models import Event, EventAction
|
||||
class EventSerializer(ModelSerializer):
|
||||
"""Event Serializer"""
|
||||
|
||||
# Since we only use this serializer for read-only operations,
|
||||
# no checking of the action is done here.
|
||||
# This allows clients to check wildcards, prefixes and custom types
|
||||
action = CharField()
|
||||
|
||||
class Meta:
|
||||
|
||||
model = Event
|
||||
@ -36,6 +31,7 @@ class EventSerializer(ModelSerializer):
|
||||
"client_ip",
|
||||
"created",
|
||||
"expires",
|
||||
"tenant",
|
||||
]
|
||||
|
||||
|
||||
@ -76,6 +72,11 @@ class EventsFilter(django_filters.FilterSet):
|
||||
field_name="action",
|
||||
lookup_expr="icontains",
|
||||
)
|
||||
tenant_name = django_filters.CharFilter(
|
||||
field_name="tenant",
|
||||
lookup_expr="name",
|
||||
label="Tenant name",
|
||||
)
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def filter_context_model_pk(self, queryset, name, value):
|
||||
@ -90,7 +91,7 @@ class EventsFilter(django_filters.FilterSet):
|
||||
fields = ["action", "client_ip", "username"]
|
||||
|
||||
|
||||
class EventViewSet(ReadOnlyModelViewSet):
|
||||
class EventViewSet(ModelViewSet):
|
||||
"""Event Read-Only Viewset"""
|
||||
|
||||
queryset = Event.objects.all()
|
||||
|
@ -7,6 +7,7 @@ from rest_framework.serializers import ModelSerializer
|
||||
from rest_framework.viewsets import GenericViewSet
|
||||
|
||||
from authentik.api.authorization import OwnerFilter, OwnerPermissions
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.events.api.event import EventSerializer
|
||||
from authentik.events.models import Notification
|
||||
|
||||
@ -35,6 +36,7 @@ class NotificationViewSet(
|
||||
mixins.RetrieveModelMixin,
|
||||
mixins.UpdateModelMixin,
|
||||
mixins.DestroyModelMixin,
|
||||
UsedByMixin,
|
||||
mixins.ListModelMixin,
|
||||
GenericViewSet,
|
||||
):
|
||||
|
@ -3,6 +3,7 @@ from rest_framework.serializers import ModelSerializer
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.core.api.groups import GroupSerializer
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.events.models import NotificationRule
|
||||
|
||||
|
||||
@ -24,7 +25,7 @@ class NotificationRuleSerializer(ModelSerializer):
|
||||
]
|
||||
|
||||
|
||||
class NotificationRuleViewSet(ModelViewSet):
|
||||
class NotificationRuleViewSet(UsedByMixin, ModelViewSet):
|
||||
"""NotificationRule Viewset"""
|
||||
|
||||
queryset = NotificationRule.objects.all()
|
||||
|
@ -9,6 +9,7 @@ from rest_framework.serializers import ModelSerializer, Serializer
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.api.decorators import permission_required
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.events.models import (
|
||||
Notification,
|
||||
NotificationSeverity,
|
||||
@ -45,14 +46,14 @@ class NotificationTransportTestSerializer(Serializer):
|
||||
|
||||
messages = ListField(child=CharField())
|
||||
|
||||
def create(self, request: Request) -> Response:
|
||||
def create(self, validated_data: Request) -> Response:
|
||||
raise NotImplementedError
|
||||
|
||||
def update(self, request: Request) -> Response:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class NotificationTransportViewSet(ModelViewSet):
|
||||
class NotificationTransportViewSet(UsedByMixin, ModelViewSet):
|
||||
"""NotificationTransport Viewset"""
|
||||
|
||||
queryset = NotificationTransport.objects.all()
|
||||
|
@@ -27,10 +27,9 @@ class GeoIPDict(TypedDict):
class GeoIPReader:
"""Slim wrapper around GeoIP API"""

__reader: Optional[Reader] = None
__last_mtime: float = 0.0

def __init__(self):
self.__reader: Optional[Reader] = None
self.__last_mtime: float = 0.0
self.__open()

def __open(self):
@@ -40,9 +39,9 @@ class GeoIPReader:
return
try:
reader = Reader(path)
LOGGER.info("Loaded GeoIP database")
self.__reader = reader
self.__last_mtime = stat(path).st_mtime
LOGGER.info("Loaded GeoIP database", last_write=self.__last_mtime)
except OSError as exc:
LOGGER.warning("Failed to load GeoIP database", exc=exc)

@ -2,6 +2,8 @@
|
||||
from functools import partial
|
||||
from typing import Callable
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import SuspiciousOperation
|
||||
from django.db.models import Model
|
||||
from django.db.models.signals import post_save, pre_delete
|
||||
from django.http import HttpRequest, HttpResponse
|
||||
@ -12,6 +14,8 @@ from authentik.core.models import User
|
||||
from authentik.events.models import Event, EventAction, Notification
|
||||
from authentik.events.signals import EventNewThread
|
||||
from authentik.events.utils import model_to_dict
|
||||
from authentik.lib.sentry import before_send
|
||||
from authentik.lib.utils.errors import exception_to_string
|
||||
|
||||
|
||||
class AuditMiddleware:
|
||||
@ -54,10 +58,28 @@ class AuditMiddleware:
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def process_exception(self, request: HttpRequest, exception: Exception):
|
||||
"""Unregister handlers in case of exception"""
|
||||
"""Disconnect handlers in case of exception"""
|
||||
post_save.disconnect(dispatch_uid=LOCAL.authentik["request_id"])
|
||||
pre_delete.disconnect(dispatch_uid=LOCAL.authentik["request_id"])
|
||||
|
||||
if settings.DEBUG:
|
||||
return
|
||||
# Special case for SuspiciousOperation, we have a special event action for that
|
||||
if isinstance(exception, SuspiciousOperation):
|
||||
thread = EventNewThread(
|
||||
EventAction.SUSPICIOUS_REQUEST,
|
||||
request,
|
||||
message=str(exception),
|
||||
)
|
||||
thread.run()
|
||||
elif before_send({}, {"exc_info": (None, exception, None)}) is not None:
|
||||
thread = EventNewThread(
|
||||
EventAction.SYSTEM_EXCEPTION,
|
||||
request,
|
||||
message=exception_to_string(exception),
|
||||
)
|
||||
thread.run()
|
||||
|
||||
@staticmethod
|
||||
# pylint: disable=unused-argument
|
||||
def post_save_handler(
|
||||
|
authentik/events/migrations/0016_add_tenant.py (new file, 55 lines)
@@ -0,0 +1,55 @@
|
||||
# Generated by Django 3.2.4 on 2021-06-14 15:33
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
import authentik.events.models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_events", "0015_alter_event_action"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="event",
|
||||
name="tenant",
|
||||
field=models.JSONField(
|
||||
blank=True, default=authentik.events.models.default_tenant
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="event",
|
||||
name="action",
|
||||
field=models.TextField(
|
||||
choices=[
|
||||
("login", "Login"),
|
||||
("login_failed", "Login Failed"),
|
||||
("logout", "Logout"),
|
||||
("user_write", "User Write"),
|
||||
("suspicious_request", "Suspicious Request"),
|
||||
("password_set", "Password Set"),
|
||||
("secret_view", "Secret View"),
|
||||
("invitation_used", "Invite Used"),
|
||||
("authorize_application", "Authorize Application"),
|
||||
("source_linked", "Source Linked"),
|
||||
("impersonation_started", "Impersonation Started"),
|
||||
("impersonation_ended", "Impersonation Ended"),
|
||||
("policy_execution", "Policy Execution"),
|
||||
("policy_exception", "Policy Exception"),
|
||||
("property_mapping_exception", "Property Mapping Exception"),
|
||||
("system_task_execution", "System Task Execution"),
|
||||
("system_task_exception", "System Task Exception"),
|
||||
("system_exception", "System Exception"),
|
||||
("configuration_error", "Configuration Error"),
|
||||
("model_created", "Model Created"),
|
||||
("model_updated", "Model Updated"),
|
||||
("model_deleted", "Model Deleted"),
|
||||
("email_sent", "Email Sent"),
|
||||
("update_available", "Update Available"),
|
||||
("custom_", "Custom Prefix"),
|
||||
]
|
||||
),
|
||||
),
|
||||
]
|
authentik/events/migrations/0017_alter_event_action.py (new file, 47 lines)
@@ -0,0 +1,47 @@
|
||||
# Generated by Django 3.2.5 on 2021-07-14 19:15
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_events", "0016_add_tenant"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="event",
|
||||
name="action",
|
||||
field=models.TextField(
|
||||
choices=[
|
||||
("login", "Login"),
|
||||
("login_failed", "Login Failed"),
|
||||
("logout", "Logout"),
|
||||
("user_write", "User Write"),
|
||||
("suspicious_request", "Suspicious Request"),
|
||||
("password_set", "Password Set"),
|
||||
("secret_view", "Secret View"),
|
||||
("secret_rotate", "Secret Rotate"),
|
||||
("invitation_used", "Invite Used"),
|
||||
("authorize_application", "Authorize Application"),
|
||||
("source_linked", "Source Linked"),
|
||||
("impersonation_started", "Impersonation Started"),
|
||||
("impersonation_ended", "Impersonation Ended"),
|
||||
("policy_execution", "Policy Execution"),
|
||||
("policy_exception", "Policy Exception"),
|
||||
("property_mapping_exception", "Property Mapping Exception"),
|
||||
("system_task_execution", "System Task Execution"),
|
||||
("system_task_exception", "System Task Exception"),
|
||||
("system_exception", "System Exception"),
|
||||
("configuration_error", "Configuration Error"),
|
||||
("model_created", "Model Created"),
|
||||
("model_updated", "Model Updated"),
|
||||
("model_deleted", "Model Deleted"),
|
||||
("email_sent", "Email Sent"),
|
||||
("update_available", "Update Available"),
|
||||
("custom_", "Custom Prefix"),
|
||||
]
|
||||
),
|
||||
),
|
||||
]
|
@ -21,11 +21,12 @@ from authentik.core.middleware import (
|
||||
)
|
||||
from authentik.core.models import ExpiringModel, Group, User
|
||||
from authentik.events.geo import GEOIP_READER
|
||||
from authentik.events.utils import cleanse_dict, get_user, sanitize_dict
|
||||
from authentik.events.utils import cleanse_dict, get_user, model_to_dict, sanitize_dict
|
||||
from authentik.lib.sentry import SentryIgnoredException
|
||||
from authentik.lib.utils.http import get_client_ip
|
||||
from authentik.policies.models import PolicyBindingModel
|
||||
from authentik.stages.email.utils import TemplateEmailMessage
|
||||
from authentik.tenants.utils import DEFAULT_TENANT
|
||||
|
||||
LOGGER = get_logger("authentik.events")
|
||||
GAUGE_EVENTS = Gauge(
|
||||
@ -40,6 +41,11 @@ def default_event_duration():
|
||||
return now() + timedelta(days=365)
|
||||
|
||||
|
||||
def default_tenant():
|
||||
"""Get a default value for tenant"""
|
||||
return sanitize_dict(model_to_dict(DEFAULT_TENANT))
|
||||
|
||||
|
||||
class NotificationTransportError(SentryIgnoredException):
|
||||
"""Error raised when a notification fails to be delivered"""
|
||||
|
||||
@ -56,6 +62,7 @@ class EventAction(models.TextChoices):
|
||||
PASSWORD_SET = "password_set" # noqa # nosec
|
||||
|
||||
SECRET_VIEW = "secret_view" # noqa # nosec
|
||||
SECRET_ROTATE = "secret_rotate" # noqa # nosec
|
||||
|
||||
INVITE_USED = "invitation_used"
|
||||
|
||||
@ -71,6 +78,7 @@ class EventAction(models.TextChoices):
|
||||
|
||||
SYSTEM_TASK_EXECUTION = "system_task_execution"
|
||||
SYSTEM_TASK_EXCEPTION = "system_task_exception"
|
||||
SYSTEM_EXCEPTION = "system_exception"
|
||||
|
||||
CONFIGURATION_ERROR = "configuration_error"
|
||||
|
||||
@ -94,6 +102,7 @@ class Event(ExpiringModel):
|
||||
context = models.JSONField(default=dict, blank=True)
|
||||
client_ip = models.GenericIPAddressField(null=True)
|
||||
created = models.DateTimeField(auto_now_add=True)
|
||||
tenant = models.JSONField(default=default_tenant, blank=True)
|
||||
|
||||
# Shadow the expires attribute from ExpiringModel to override the default duration
|
||||
expires = models.DateTimeField(default=default_event_duration)
|
||||
@ -132,6 +141,13 @@ class Event(ExpiringModel):
|
||||
"""Add data from a Django-HttpRequest, allowing the creation of
|
||||
Events independently from requests.
|
||||
`user` arguments optionally overrides user from requests."""
|
||||
if request:
|
||||
self.context["http_request"] = {
|
||||
"path": request.get_full_path(),
|
||||
"method": request.method,
|
||||
}
|
||||
if hasattr(request, "tenant"):
|
||||
self.tenant = sanitize_dict(model_to_dict(request.tenant))
|
||||
if hasattr(request, "user"):
|
||||
original_user = None
|
||||
if hasattr(request, "session"):
|
||||
@ -298,7 +314,8 @@ class NotificationTransport(models.Model):
|
||||
response = post(self.webhook_url, json=body)
|
||||
response.raise_for_status()
|
||||
except RequestException as exc:
|
||||
raise NotificationTransportError(exc.response.text) from exc
|
||||
text = exc.response.text if exc.response else str(exc)
|
||||
raise NotificationTransportError(text) from exc
|
||||
return [
|
||||
response.status_code,
|
||||
response.text,
|
||||
|
@@ -105,7 +105,11 @@ def notification_transport(
"""Send notification over specified transport"""
self.save_on_success = False
try:
notification: Notification = Notification.objects.get(pk=notification_pk)
notification: Notification = Notification.objects.filter(
pk=notification_pk
).first()
if not notification:
return
transport: NotificationTransport = NotificationTransport.objects.get(
pk=transport_pk
)

@ -2,6 +2,7 @@
|
||||
from rest_framework.serializers import ModelSerializer
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.flows.api.stages import StageSerializer
|
||||
from authentik.flows.models import FlowStageBinding
|
||||
|
||||
@ -24,10 +25,11 @@ class FlowStageBindingSerializer(ModelSerializer):
|
||||
"re_evaluate_policies",
|
||||
"order",
|
||||
"policy_engine_mode",
|
||||
"invalid_response_action",
|
||||
]
|
||||
|
||||
|
||||
class FlowStageBindingViewSet(ModelViewSet):
|
||||
class FlowStageBindingViewSet(UsedByMixin, ModelViewSet):
|
||||
"""FlowStageBinding Viewset"""
|
||||
|
||||
queryset = FlowStageBinding.objects.all()
|
||||
|
@ -24,6 +24,7 @@ from rest_framework.viewsets import ModelViewSet
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.api.decorators import permission_required
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import CacheSerializer, LinkSerializer
|
||||
from authentik.flows.exceptions import FlowNonApplicableException
|
||||
from authentik.flows.models import Flow
|
||||
@ -44,10 +45,16 @@ class FlowSerializer(ModelSerializer):
|
||||
|
||||
background = ReadOnlyField(source="background_url")
|
||||
|
||||
export_url = SerializerMethodField()
|
||||
|
||||
def get_cache_count(self, flow: Flow) -> int:
|
||||
"""Get count of cached flows"""
|
||||
return len(cache.keys(f"{cache_key(flow)}*"))
|
||||
|
||||
def get_export_url(self, flow: Flow) -> str:
|
||||
"""Get export URL for flow"""
|
||||
return reverse("authentik_api:flow-export", kwargs={"slug": flow.slug})
|
||||
|
||||
class Meta:
|
||||
|
||||
model = Flow
|
||||
@ -64,6 +71,7 @@ class FlowSerializer(ModelSerializer):
|
||||
"cache_count",
|
||||
"policy_engine_mode",
|
||||
"compatibility_mode",
|
||||
"export_url",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"background": {"read_only": True},
|
||||
@ -94,7 +102,7 @@ class DiagramElement:
|
||||
return f"{self.identifier}=>{self.type}: {self.rest}"
|
||||
|
||||
|
||||
class FlowViewSet(ModelViewSet):
|
||||
class FlowViewSet(UsedByMixin, ModelViewSet):
|
||||
"""Flow Viewset"""
|
||||
|
||||
queryset = Flow.objects.all()
|
||||
@ -293,10 +301,14 @@ class FlowViewSet(ModelViewSet):
|
||||
"""Set Flow background"""
|
||||
flow: Flow = self.get_object()
|
||||
background = request.FILES.get("file", None)
|
||||
clear = request.data.get("clear", False)
|
||||
clear = request.data.get("clear", "false").lower() == "true"
|
||||
if clear:
|
||||
# .delete() saves the model by default
|
||||
flow.background.delete()
|
||||
if flow.background_url.startswith("/media"):
|
||||
# .delete() saves the model by default
|
||||
flow.background.delete()
|
||||
else:
|
||||
flow.background = None
|
||||
flow.save()
|
||||
return Response({})
|
||||
if background:
|
||||
flow.background = background
|
||||
|
@ -11,6 +11,7 @@ from rest_framework.serializers import ModelSerializer, SerializerMethodField
|
||||
from rest_framework.viewsets import GenericViewSet
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import MetaNameSerializer, TypeCreateSerializer
|
||||
from authentik.core.types import UserSettingSerializer
|
||||
from authentik.flows.api.flows import FlowSerializer
|
||||
@ -49,6 +50,7 @@ class StageSerializer(ModelSerializer, MetaNameSerializer):
|
||||
class StageViewSet(
|
||||
mixins.RetrieveModelMixin,
|
||||
mixins.DestroyModelMixin,
|
||||
UsedByMixin,
|
||||
mixins.ListModelMixin,
|
||||
GenericViewSet,
|
||||
):
|
||||
@ -91,10 +93,10 @@ class StageViewSet(
|
||||
if not user_settings:
|
||||
continue
|
||||
user_settings.initial_data["object_uid"] = str(stage.pk)
|
||||
if hasattr(stage, "configure_url"):
|
||||
if hasattr(stage, "configure_flow") and stage.configure_flow:
|
||||
user_settings.initial_data["configure_url"] = reverse(
|
||||
"authentik_flows:configure",
|
||||
kwargs={"stage_uuid": stage.uuid.hex},
|
||||
kwargs={"stage_uuid": stage.pk},
|
||||
)
|
||||
if not user_settings.is_valid():
|
||||
LOGGER.warning(user_settings.errors)
|
||||
|
@ -5,8 +5,7 @@ from typing import TYPE_CHECKING, Optional
|
||||
from django.http.request import HttpRequest
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.models import User
|
||||
from authentik.flows.models import Stage
|
||||
from authentik.flows.models import FlowStageBinding
|
||||
from authentik.policies.engine import PolicyEngine
|
||||
from authentik.policies.models import PolicyBinding
|
||||
|
||||
@ -22,11 +21,14 @@ class StageMarker:
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def process(
|
||||
self, plan: "FlowPlan", stage: Stage, http_request: Optional[HttpRequest]
|
||||
) -> Optional[Stage]:
|
||||
self,
|
||||
plan: "FlowPlan",
|
||||
binding: FlowStageBinding,
|
||||
http_request: HttpRequest,
|
||||
) -> Optional[FlowStageBinding]:
|
||||
"""Process callback for this marker. This should be overridden by sub-classes.
|
||||
If a stage should be removed, return None."""
|
||||
return stage
|
||||
return binding
|
||||
|
||||
|
||||
@dataclass
|
||||
@ -34,24 +36,34 @@ class ReevaluateMarker(StageMarker):
|
||||
"""Reevaluate Marker, forces stage's policies to be evaluated again."""
|
||||
|
||||
binding: PolicyBinding
|
||||
user: User
|
||||
|
||||
def process(
|
||||
self, plan: "FlowPlan", stage: Stage, http_request: Optional[HttpRequest]
|
||||
) -> Optional[Stage]:
|
||||
self,
|
||||
plan: "FlowPlan",
|
||||
binding: FlowStageBinding,
|
||||
http_request: HttpRequest,
|
||||
) -> Optional[FlowStageBinding]:
|
||||
"""Re-evaluate policies bound to stage, and if they fail, remove from plan"""
|
||||
engine = PolicyEngine(self.binding, self.user)
|
||||
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
|
||||
|
||||
LOGGER.debug(
|
||||
"f(plan_inst)[re-eval marker]: running re-evaluation",
|
||||
binding=binding,
|
||||
policy_binding=self.binding,
|
||||
)
|
||||
engine = PolicyEngine(
|
||||
self.binding, plan.context.get(PLAN_CONTEXT_PENDING_USER, http_request.user)
|
||||
)
|
||||
engine.use_cache = False
|
||||
if http_request:
|
||||
engine.request.set_http_request(http_request)
|
||||
engine.request.set_http_request(http_request)
|
||||
engine.request.context = plan.context
|
||||
engine.build()
|
||||
result = engine.result
|
||||
if result.passing:
|
||||
return stage
|
||||
return binding
|
||||
LOGGER.warning(
|
||||
"f(plan_inst)[re-eval marker]: stage failed re-evaluation",
|
||||
stage=stage,
|
||||
"f(plan_inst)[re-eval marker]: binding failed re-evaluation",
|
||||
binding=binding,
|
||||
messages=result.messages,
|
||||
)
|
||||
return None
|
||||
|
@@ -6,6 +6,7 @@ from django.db.backends.base.schema import BaseDatabaseSchemaEditor

from authentik.flows.models import FlowDesignation
from authentik.stages.identification.models import UserFields
from authentik.stages.password import BACKEND_DJANGO, BACKEND_LDAP


def create_default_authentication_flow(
@@ -31,7 +32,7 @@ def create_default_authentication_flow(

password_stage, _ = PasswordStage.objects.using(db_alias).update_or_create(
name="default-authentication-password",
defaults={"backends": ["django.contrib.auth.backends.ModelBackend"]},
defaults={"backends": [BACKEND_DJANGO, BACKEND_LDAP]},
)

login_stage, _ = UserLoginStage.objects.using(db_alias).update_or_create(

@@ -15,9 +15,6 @@ PREFILL_POLICY_EXPRESSION = """# This policy sets the user for the currently run
# by injecting "pending_user"
akadmin = ak_user_by(username="akadmin")
context["pending_user"] = akadmin
# We're also setting the backend for the user, so we can
# directly login without having to identify again
context["user_backend"] = "django.contrib.auth.backends.ModelBackend"
return True"""


@@ -138,7 +135,7 @@ class Migration(migrations.Migration):

dependencies = [
("authentik_flows", "0017_auto_20210329_1334"),
("authentik_stages_user_write", "__latest__"),
("authentik_stages_user_write", "0002_auto_20200918_1653"),
("authentik_stages_user_login", "__latest__"),
("authentik_stages_password", "0002_passwordstage_change_flow"),
("authentik_policies", "0001_initial"),

@ -0,0 +1,22 @@
|
||||
# Generated by Django 3.2.4 on 2021-06-27 16:20
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_flows", "0020_flow_compatibility_mode"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="flowstagebinding",
|
||||
name="invalid_response_action",
|
||||
field=models.TextField(
|
||||
choices=[("retry", "Retry"), ("continue", "Continue")],
|
||||
default="retry",
|
||||
help_text="Configure how the flow executor should handle an invalid response to a challenge. RETRY returns the error message and a similar challenge to the executor while CONTINUE continues with the next stage.",
|
||||
),
|
||||
),
|
||||
]
|
@ -0,0 +1,26 @@
|
||||
# Generated by Django 3.2.4 on 2021-07-03 13:13
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_flows", "0021_flowstagebinding_invalid_response_action"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="flowstagebinding",
|
||||
name="invalid_response_action",
|
||||
field=models.TextField(
|
||||
choices=[
|
||||
("retry", "Retry"),
|
||||
("restart", "Restart"),
|
||||
("restart_with_context", "Restart With Context"),
|
||||
],
|
||||
default="retry",
|
||||
help_text="Configure how the flow executor should handle an invalid response to a challenge. RETRY returns the error message and a similar challenge to the executor. RESTART restarts the flow from the beginning, and RESTART_WITH_CONTEXT restarts the flow while keeping the current context.",
|
||||
),
|
||||
),
|
||||
]
|
authentik/flows/migrations/0023_alter_flow_background.py (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
# Generated by Django 3.2.5 on 2021-07-09 17:27
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_flows", "0022_alter_flowstagebinding_invalid_response_action"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="flow",
|
||||
name="background",
|
||||
field=models.FileField(
|
||||
default=None,
|
||||
help_text="Background shown during execution",
|
||||
max_length=500,
|
||||
null=True,
|
||||
upload_to="flow-backgrounds/",
|
||||
),
|
||||
),
|
||||
]
|
@ -27,6 +27,14 @@ class NotConfiguredAction(models.TextChoices):
|
||||
CONFIGURE = "configure"
|
||||
|
||||
|
||||
class InvalidResponseAction(models.TextChoices):
|
||||
"""Configure how the flow executor should handle invalid responses to challenges"""
|
||||
|
||||
RETRY = "retry"
|
||||
RESTART = "restart"
|
||||
RESTART_WITH_CONTEXT = "restart_with_context"
|
||||
|
||||
|
||||
class FlowDesignation(models.TextChoices):
|
||||
"""Designation of what a Flow should be used for. At a later point, this
|
||||
should be replaced by a database entry."""
|
||||
@ -72,7 +80,7 @@ class Stage(SerializerModel):
|
||||
def __str__(self):
|
||||
if hasattr(self, "__in_memory_type"):
|
||||
return f"In-memory Stage {getattr(self, '__in_memory_type')}"
|
||||
return self.name
|
||||
return f"Stage {self.name}"
|
||||
|
||||
|
||||
def in_memory_stage(view: Type["StageView"]) -> Stage:
|
||||
@ -113,6 +121,7 @@ class Flow(SerializerModel, PolicyBindingModel):
|
||||
default=None,
|
||||
null=True,
|
||||
help_text=_("Background shown during execution"),
|
||||
max_length=500,
|
||||
)
|
||||
|
||||
compatibility_mode = models.BooleanField(
|
||||
@ -201,6 +210,17 @@ class FlowStageBinding(SerializerModel, PolicyBindingModel):
|
||||
help_text=_("Evaluate policies when the Stage is present to the user."),
|
||||
)
|
||||
|
||||
invalid_response_action = models.TextField(
|
||||
choices=InvalidResponseAction.choices,
|
||||
default=InvalidResponseAction.RETRY,
|
||||
help_text=_(
|
||||
"Configure how the flow executor should handle an invalid response to a "
|
||||
"challenge. RETRY returns the error message and a similar challenge to the "
|
||||
"executor. RESTART restarts the flow from the beginning, and RESTART_WITH_CONTEXT "
|
||||
"restarts the flow while keeping the current context."
|
||||
),
|
||||
)
|
||||
|
||||
order = models.IntegerField()
|
||||
|
||||
objects = InheritanceManager()
|
||||
@ -212,7 +232,7 @@ class FlowStageBinding(SerializerModel, PolicyBindingModel):
|
||||
return FlowStageBindingSerializer
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"{self.target} #{self.order}"
|
||||
return f"Flow-stage binding #{self.order} to {self.target}"
|
||||
|
||||
class Meta:
|
||||
|
||||
|
@ -14,6 +14,7 @@ from authentik.events.models import cleanse_dict
|
||||
from authentik.flows.exceptions import EmptyFlowException, FlowNonApplicableException
|
||||
from authentik.flows.markers import ReevaluateMarker, StageMarker
|
||||
from authentik.flows.models import Flow, FlowStageBinding, Stage
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.policies.engine import PolicyEngine
|
||||
from authentik.root.monitoring import UpdatingGauge
|
||||
|
||||
@ -33,6 +34,7 @@ HIST_FLOWS_PLAN_TIME = Histogram(
|
||||
"Duration to build a plan for a flow",
|
||||
["flow_slug"],
|
||||
)
|
||||
CACHE_TIMEOUT = int(CONFIG.y("redis.cache_timeout_flows"))
|
||||
|
||||
|
||||
def cache_key(flow: Flow, user: Optional[User] = None) -> str:
|
||||
@ -50,33 +52,41 @@ class FlowPlan:
|
||||
|
||||
flow_pk: str
|
||||
|
||||
stages: list[Stage] = field(default_factory=list)
|
||||
bindings: list[FlowStageBinding] = field(default_factory=list)
|
||||
context: dict[str, Any] = field(default_factory=dict)
|
||||
markers: list[StageMarker] = field(default_factory=list)
|
||||
|
||||
def append(self, stage: Stage, marker: Optional[StageMarker] = None):
|
||||
def append_stage(self, stage: Stage, marker: Optional[StageMarker] = None):
|
||||
"""Append `stage` to all stages, optionall with stage marker"""
|
||||
self.stages.append(stage)
|
||||
return self.append(FlowStageBinding(stage=stage), marker)
|
||||
|
||||
def append(self, binding: FlowStageBinding, marker: Optional[StageMarker] = None):
|
||||
"""Append `stage` to all stages, optionall with stage marker"""
|
||||
self.bindings.append(binding)
|
||||
self.markers.append(marker or StageMarker())
|
||||
|
||||
def insert(self, stage: Stage, marker: Optional[StageMarker] = None):
|
||||
def insert_stage(self, stage: Stage, marker: Optional[StageMarker] = None):
|
||||
"""Insert stage into plan, as immediate next stage"""
|
||||
self.stages.insert(1, stage)
|
||||
self.bindings.insert(1, FlowStageBinding(stage=stage, order=0))
|
||||
self.markers.insert(1, marker or StageMarker())
|
||||
|
||||
def next(self, http_request: Optional[HttpRequest]) -> Optional[Stage]:
|
||||
def next(self, http_request: Optional[HttpRequest]) -> Optional[FlowStageBinding]:
|
||||
"""Return next pending stage from the bottom of the list"""
|
||||
if not self.has_stages:
|
||||
return None
|
||||
stage = self.stages[0]
|
||||
binding = self.bindings[0]
|
||||
marker = self.markers[0]
|
||||
|
||||
if marker.__class__ is not StageMarker:
|
||||
LOGGER.debug("f(plan_inst): stage has marker", stage=stage, marker=marker)
|
||||
marked_stage = marker.process(self, stage, http_request)
|
||||
LOGGER.debug(
|
||||
"f(plan_inst): stage has marker", binding=binding, marker=marker
|
||||
)
|
||||
marked_stage = marker.process(self, binding, http_request)
|
||||
if not marked_stage:
|
||||
LOGGER.debug("f(plan_inst): marker returned none, next stage", stage=stage)
|
||||
self.stages.remove(stage)
|
||||
LOGGER.debug(
|
||||
"f(plan_inst): marker returned none, next stage", binding=binding
|
||||
)
|
||||
self.bindings.remove(binding)
|
||||
self.markers.remove(marker)
|
||||
if not self.has_stages:
|
||||
return None
|
||||
@ -87,12 +97,12 @@ class FlowPlan:
|
||||
def pop(self):
|
||||
"""Pop next pending stage from bottom of list"""
|
||||
self.markers.pop(0)
|
||||
self.stages.pop(0)
|
||||
self.bindings.pop(0)
|
||||
|
||||
@property
|
||||
def has_stages(self) -> bool:
|
||||
"""Check if there are any stages left in this plan"""
|
||||
return len(self.markers) + len(self.stages) > 0
|
||||
return len(self.markers) + len(self.bindings) > 0
|
||||
|
||||
|
||||
class FlowPlanner:
|
||||
@ -157,9 +167,9 @@ class FlowPlanner:
|
||||
"f(plan): building plan",
|
||||
)
|
||||
plan = self._build_plan(user, request, default_context)
|
||||
cache.set(cache_key(self.flow, user), plan)
|
||||
cache.set(cache_key(self.flow, user), plan, CACHE_TIMEOUT)
|
||||
GAUGE_FLOWS_CACHED.update()
|
||||
if not plan.stages and not self.allow_empty_flows:
|
||||
if not plan.bindings and not self.allow_empty_flows:
|
||||
raise EmptyFlowException()
|
||||
return plan
|
||||
|
||||
@ -214,9 +224,9 @@ class FlowPlanner:
|
||||
"f(plan): stage has re-evaluate marker",
|
||||
stage=binding.stage,
|
||||
)
|
||||
marker = ReevaluateMarker(binding=binding, user=user)
|
||||
marker = ReevaluateMarker(binding=binding)
|
||||
if stage:
|
||||
plan.append(stage, marker)
|
||||
plan.append(binding, marker)
|
||||
HIST_FLOWS_PLAN_TIME.labels(flow_slug=self.flow.slug)
|
||||
self._logger.debug(
|
||||
"f(plan): finished building",
|
||||
|
@ -16,29 +16,14 @@ from authentik.flows.challenge import (
|
||||
HttpChallengeResponse,
|
||||
WithUserInfoChallenge,
|
||||
)
|
||||
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
|
||||
from authentik.flows.models import InvalidResponseAction
|
||||
from authentik.flows.planner import PLAN_CONTEXT_APPLICATION, PLAN_CONTEXT_PENDING_USER
|
||||
from authentik.flows.views import FlowExecutorView
|
||||
from authentik.lib.sentry import SentryIgnoredException
|
||||
|
||||
PLAN_CONTEXT_PENDING_USER_IDENTIFIER = "pending_user_identifier"
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class InvalidChallengeError(SentryIgnoredException):
|
||||
"""Error raised when a challenge from a stage is not valid"""
|
||||
|
||||
def __init__(self, errors, stage_view: View, challenge: Challenge) -> None:
|
||||
super().__init__()
|
||||
self.errors = errors
|
||||
self.stage_view = stage_view
|
||||
self.challenge = challenge
|
||||
|
||||
def __str__(self) -> str:
|
||||
return (
|
||||
f"Invalid challenge from {self.stage_view}: {self.errors}\n{self.challenge}"
|
||||
)
|
||||
|
||||
|
||||
class StageView(View):
|
||||
"""Abstract Stage, inherits TemplateView but can be combined with FormView"""
|
||||
|
||||
@ -50,14 +35,17 @@ class StageView(View):
|
||||
self.executor = executor
|
||||
super().__init__(**kwargs)
|
||||
|
||||
def get_pending_user(self) -> User:
|
||||
def get_pending_user(self, for_display=False) -> User:
|
||||
"""Either show the matched User object or show what the user entered,
|
||||
based on what the earlier stage (mostly IdentificationStage) set.
|
||||
_USER_IDENTIFIER overrides the first User, as PENDING_USER is used for
|
||||
other things besides the form display.
|
||||
|
||||
If no user is pending, returns request.user"""
|
||||
if PLAN_CONTEXT_PENDING_USER_IDENTIFIER in self.executor.plan.context:
|
||||
if (
|
||||
PLAN_CONTEXT_PENDING_USER_IDENTIFIER in self.executor.plan.context
|
||||
and for_display
|
||||
):
|
||||
return User(
|
||||
username=self.executor.plan.context.get(
|
||||
PLAN_CONTEXT_PENDING_USER_IDENTIFIER
|
||||
@ -82,7 +70,13 @@ class ChallengeStageView(StageView):
|
||||
"""Return a challenge for the frontend to solve"""
|
||||
challenge = self._get_challenge(*args, **kwargs)
|
||||
if not challenge.is_valid():
|
||||
LOGGER.warning(challenge.errors, stage_view=self, challenge=challenge)
|
||||
LOGGER.warning(
|
||||
"f(ch): Invalid challenge",
|
||||
binding=self.executor.current_binding,
|
||||
errors=challenge.errors,
|
||||
stage_view=self,
|
||||
challenge=challenge,
|
||||
)
|
||||
return HttpChallengeResponse(challenge)
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
@ -90,15 +84,36 @@ class ChallengeStageView(StageView):
|
||||
"""Handle challenge response"""
|
||||
challenge: ChallengeResponse = self.get_response_instance(data=request.data)
|
||||
if not challenge.is_valid():
|
||||
if self.executor.current_binding.invalid_response_action in [
|
||||
InvalidResponseAction.RESTART,
|
||||
InvalidResponseAction.RESTART_WITH_CONTEXT,
|
||||
]:
|
||||
keep_context = (
|
||||
self.executor.current_binding.invalid_response_action
|
||||
== InvalidResponseAction.RESTART_WITH_CONTEXT
|
||||
)
|
||||
LOGGER.debug(
|
||||
"f(ch): Invalid response, restarting flow",
|
||||
binding=self.executor.current_binding,
|
||||
stage_view=self,
|
||||
keep_context=keep_context,
|
||||
)
|
||||
return self.executor.restart_flow(keep_context)
|
||||
return self.challenge_invalid(challenge)
|
||||
return self.challenge_valid(challenge)
|
||||
|
||||
def format_title(self) -> str:
|
||||
"""Allow usage of placeholder in flow title."""
|
||||
return self.executor.flow.title % {
|
||||
"app": self.executor.plan.context.get(PLAN_CONTEXT_APPLICATION, "")
|
||||
}
|
||||
|
||||
def _get_challenge(self, *args, **kwargs) -> Challenge:
|
||||
challenge = self.get_challenge(*args, **kwargs)
|
||||
if "flow_info" not in challenge.initial_data:
|
||||
flow_info = ContextualFlowInfo(
|
||||
data={
|
||||
"title": self.executor.flow.title,
|
||||
"title": self.format_title(),
|
||||
"background": self.executor.flow.background_url,
|
||||
"cancel_url": reverse("authentik_flows:cancel"),
|
||||
}
|
||||
@ -109,7 +124,7 @@ class ChallengeStageView(StageView):
|
||||
# If there's a pending user, update the `username` field
|
||||
# this field is only used by password managers.
|
||||
# If there's no user set, an error is raised later.
|
||||
if user := self.get_pending_user():
|
||||
if user := self.get_pending_user(for_display=True):
|
||||
challenge.initial_data["pending_user"] = user.username
|
||||
challenge.initial_data["pending_user_avatar"] = DEFAULT_AVATAR
|
||||
if not isinstance(user, AnonymousUser):
|
||||
@ -139,5 +154,10 @@ class ChallengeStageView(StageView):
|
||||
)
|
||||
challenge_response.initial_data["response_errors"] = full_errors
|
||||
if not challenge_response.is_valid():
|
||||
LOGGER.warning(challenge_response.errors)
|
||||
LOGGER.warning(
|
||||
"f(ch): invalid challenge response",
|
||||
binding=self.executor.current_binding,
|
||||
errors=challenge_response.errors,
|
||||
stage_view=self,
|
||||
)
|
||||
return HttpChallengeResponse(challenge_response)
|
||||
|
@@ -182,8 +182,8 @@ class TestFlowPlanner(TestCase):
planner = FlowPlanner(flow)
plan = planner.plan(request)

self.assertEqual(plan.stages[0], binding.stage)
self.assertEqual(plan.stages[1], binding2.stage)
self.assertEqual(plan.bindings[0], binding)
self.assertEqual(plan.bindings[1], binding2)

self.assertIsInstance(plan.markers[0], StageMarker)
self.assertIsInstance(plan.markers[1], ReevaluateMarker)

@ -11,15 +11,23 @@ from authentik.core.models import User
|
||||
from authentik.flows.challenge import ChallengeTypes
|
||||
from authentik.flows.exceptions import FlowNonApplicableException
|
||||
from authentik.flows.markers import ReevaluateMarker, StageMarker
|
||||
from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding
|
||||
from authentik.flows.models import (
|
||||
Flow,
|
||||
FlowDesignation,
|
||||
FlowStageBinding,
|
||||
InvalidResponseAction,
|
||||
)
|
||||
from authentik.flows.planner import FlowPlan, FlowPlanner
|
||||
from authentik.flows.stage import PLAN_CONTEXT_PENDING_USER_IDENTIFIER, StageView
|
||||
from authentik.flows.views import NEXT_ARG_NAME, SESSION_KEY_PLAN, FlowExecutorView
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.policies.dummy.models import DummyPolicy
|
||||
from authentik.policies.models import PolicyBinding
|
||||
from authentik.policies.reputation.models import ReputationPolicy
|
||||
from authentik.policies.types import PolicyResult
|
||||
from authentik.stages.deny.models import DenyStage
|
||||
from authentik.stages.dummy.models import DummyStage
|
||||
from authentik.stages.identification.models import IdentificationStage, UserFields
|
||||
|
||||
POLICY_RETURN_FALSE = PropertyMock(return_value=PolicyResult(False))
|
||||
POLICY_RETURN_TRUE = MagicMock(return_value=PolicyResult(True))
|
||||
@ -52,8 +60,9 @@ class TestFlowExecutor(TestCase):
|
||||
designation=FlowDesignation.AUTHENTICATION,
|
||||
)
|
||||
stage = DummyStage.objects.create(name="dummy")
|
||||
binding = FlowStageBinding(target=flow, stage=stage, order=0)
|
||||
plan = FlowPlan(
|
||||
flow_pk=flow.pk.hex + "a", stages=[stage], markers=[StageMarker()]
|
||||
flow_pk=flow.pk.hex + "a", bindings=[binding], markers=[StageMarker()]
|
||||
)
|
||||
session = self.client.session
|
||||
session[SESSION_KEY_PLAN] = plan
|
||||
@ -163,7 +172,7 @@ class TestFlowExecutor(TestCase):
|
||||
# Check that two stages are in plan
|
||||
session = self.client.session
|
||||
plan: FlowPlan = session[SESSION_KEY_PLAN]
|
||||
self.assertEqual(len(plan.stages), 2)
|
||||
self.assertEqual(len(plan.bindings), 2)
|
||||
# Second request, submit form, one stage left
|
||||
response = self.client.post(exec_url)
|
||||
# Second request redirects to the same URL
|
||||
@ -172,7 +181,7 @@ class TestFlowExecutor(TestCase):
|
||||
# Check that two stages are in plan
|
||||
session = self.client.session
|
||||
plan: FlowPlan = session[SESSION_KEY_PLAN]
|
||||
self.assertEqual(len(plan.stages), 1)
|
||||
self.assertEqual(len(plan.bindings), 1)
|
||||
|
||||
@patch(
|
||||
"authentik.flows.views.to_stage_response",
|
||||
@ -213,8 +222,8 @@ class TestFlowExecutor(TestCase):
|
||||
|
||||
plan: FlowPlan = self.client.session[SESSION_KEY_PLAN]
|
||||
|
||||
self.assertEqual(plan.stages[0], binding.stage)
|
||||
self.assertEqual(plan.stages[1], binding2.stage)
|
||||
self.assertEqual(plan.bindings[0], binding)
|
||||
self.assertEqual(plan.bindings[1], binding2)
|
||||
|
||||
self.assertIsInstance(plan.markers[0], StageMarker)
|
||||
self.assertIsInstance(plan.markers[1], ReevaluateMarker)
|
||||
@ -267,9 +276,9 @@ class TestFlowExecutor(TestCase):
|
||||
self.assertEqual(response.status_code, 200)
|
||||
plan: FlowPlan = self.client.session[SESSION_KEY_PLAN]
|
||||
|
||||
self.assertEqual(plan.stages[0], binding.stage)
|
||||
self.assertEqual(plan.stages[1], binding2.stage)
|
||||
self.assertEqual(plan.stages[2], binding3.stage)
|
||||
self.assertEqual(plan.bindings[0], binding)
|
||||
self.assertEqual(plan.bindings[1], binding2)
|
||||
self.assertEqual(plan.bindings[2], binding3)
|
||||
|
||||
self.assertIsInstance(plan.markers[0], StageMarker)
|
||||
self.assertIsInstance(plan.markers[1], ReevaluateMarker)
|
||||
@ -281,8 +290,8 @@ class TestFlowExecutor(TestCase):
|
||||
|
||||
plan: FlowPlan = self.client.session[SESSION_KEY_PLAN]
|
||||
|
||||
self.assertEqual(plan.stages[0], binding2.stage)
|
||||
self.assertEqual(plan.stages[1], binding3.stage)
|
||||
self.assertEqual(plan.bindings[0], binding2)
|
||||
self.assertEqual(plan.bindings[1], binding3)
|
||||
|
||||
self.assertIsInstance(plan.markers[0], StageMarker)
|
||||
self.assertIsInstance(plan.markers[1], StageMarker)
|
||||
@ -338,9 +347,9 @@ class TestFlowExecutor(TestCase):
|
||||
self.assertEqual(response.status_code, 200)
|
||||
plan: FlowPlan = self.client.session[SESSION_KEY_PLAN]
|
||||
|
||||
self.assertEqual(plan.stages[0], binding.stage)
|
||||
self.assertEqual(plan.stages[1], binding2.stage)
|
||||
self.assertEqual(plan.stages[2], binding3.stage)
|
||||
self.assertEqual(plan.bindings[0], binding)
|
||||
self.assertEqual(plan.bindings[1], binding2)
|
||||
self.assertEqual(plan.bindings[2], binding3)
|
||||
|
||||
self.assertIsInstance(plan.markers[0], StageMarker)
|
||||
self.assertIsInstance(plan.markers[1], ReevaluateMarker)
|
||||
@ -352,8 +361,8 @@ class TestFlowExecutor(TestCase):
|
||||
|
||||
plan: FlowPlan = self.client.session[SESSION_KEY_PLAN]
|
||||
|
||||
self.assertEqual(plan.stages[0], binding2.stage)
|
||||
self.assertEqual(plan.stages[1], binding3.stage)
|
||||
self.assertEqual(plan.bindings[0], binding2)
|
||||
self.assertEqual(plan.bindings[1], binding3)
|
||||
|
||||
self.assertIsInstance(plan.markers[0], StageMarker)
|
||||
self.assertIsInstance(plan.markers[1], StageMarker)
|
||||
@ -364,7 +373,7 @@ class TestFlowExecutor(TestCase):
|
||||
|
||||
plan: FlowPlan = self.client.session[SESSION_KEY_PLAN]
|
||||
|
||||
self.assertEqual(plan.stages[0], binding3.stage)
|
||||
self.assertEqual(plan.bindings[0], binding3)
|
||||
|
||||
self.assertIsInstance(plan.markers[0], StageMarker)
|
||||
|
||||
@ -438,10 +447,10 @@ class TestFlowExecutor(TestCase):
|
||||
|
||||
plan: FlowPlan = self.client.session[SESSION_KEY_PLAN]
|
||||
|
||||
self.assertEqual(plan.stages[0], binding.stage)
|
||||
self.assertEqual(plan.stages[1], binding2.stage)
|
||||
self.assertEqual(plan.stages[2], binding3.stage)
|
||||
self.assertEqual(plan.stages[3], binding4.stage)
|
||||
self.assertEqual(plan.bindings[0], binding)
|
||||
self.assertEqual(plan.bindings[1], binding2)
|
||||
self.assertEqual(plan.bindings[2], binding3)
|
||||
self.assertEqual(plan.bindings[3], binding4)
|
||||
|
||||
self.assertIsInstance(plan.markers[0], StageMarker)
|
||||
self.assertIsInstance(plan.markers[1], ReevaluateMarker)
|
||||
@ -511,4 +520,79 @@ class TestFlowExecutor(TestCase):
|
||||
executor.flow = flow
|
||||
|
||||
stage_view = StageView(executor)
|
||||
self.assertEqual(ident, stage_view.get_pending_user().username)
|
||||
self.assertEqual(ident, stage_view.get_pending_user(for_display=True).username)
|
||||
|
||||
def test_invalid_restart(self):
|
||||
"""Test flow that restarts on invalid entry"""
|
||||
flow = Flow.objects.create(
|
||||
name="restart-on-invalid",
|
||||
slug="restart-on-invalid",
|
||||
designation=FlowDesignation.AUTHENTICATION,
|
||||
)
|
||||
# Stage 0 is a deny stage that is added dynamically
|
||||
# when the reputation policy says so
|
||||
deny_stage = DenyStage.objects.create(name="deny")
|
||||
reputation_policy = ReputationPolicy.objects.create(
|
||||
name="reputation", threshold=-1, check_ip=False
|
||||
)
|
||||
deny_binding = FlowStageBinding.objects.create(
|
||||
target=flow,
|
||||
stage=deny_stage,
|
||||
order=0,
|
||||
evaluate_on_plan=False,
|
||||
re_evaluate_policies=True,
|
||||
)
|
||||
PolicyBinding.objects.create(
|
||||
policy=reputation_policy, target=deny_binding, order=0
|
||||
)
|
||||
|
||||
# Stage 1 is an identification stage
|
||||
ident_stage = IdentificationStage.objects.create(
|
||||
name="ident",
|
||||
user_fields=[UserFields.E_MAIL],
|
||||
)
|
||||
FlowStageBinding.objects.create(
|
||||
target=flow,
|
||||
stage=ident_stage,
|
||||
order=1,
|
||||
invalid_response_action=InvalidResponseAction.RESTART_WITH_CONTEXT,
|
||||
)
|
||||
exec_url = reverse(
|
||||
"authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}
|
||||
)
|
||||
# First request, run the planner
|
||||
response = self.client.get(exec_url)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertJSONEqual(
|
||||
force_str(response.content),
|
||||
{
|
||||
"type": ChallengeTypes.NATIVE.value,
|
||||
"component": "ak-stage-identification",
|
||||
"flow_info": {
|
||||
"background": flow.background_url,
|
||||
"cancel_url": reverse("authentik_flows:cancel"),
|
||||
"title": "",
|
||||
},
|
||||
"password_fields": False,
|
||||
"primary_action": "Log in",
|
||||
"sources": [],
|
||||
"user_fields": [UserFields.E_MAIL],
|
||||
},
|
||||
)
|
||||
response = self.client.post(
|
||||
exec_url, {"uid_field": "invalid-string"}, follow=True
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertJSONEqual(
|
||||
force_str(response.content),
|
||||
{
|
||||
"component": "ak-stage-access-denied",
|
||||
"error_message": None,
|
||||
"flow_info": {
|
||||
"background": flow.background_url,
|
||||
"cancel_url": reverse("authentik_flows:cancel"),
|
||||
"title": "",
|
||||
},
|
||||
"type": ChallengeTypes.NATIVE.value,
|
||||
},
|
||||
)
|
||||
|
@ -40,15 +40,11 @@ def transaction_rollback():
|
||||
class FlowImporter:
|
||||
"""Import Flow from json"""
|
||||
|
||||
__import: FlowBundle
|
||||
|
||||
__pk_map: dict[Any, Model]
|
||||
|
||||
logger: BoundLogger
|
||||
|
||||
def __init__(self, json_input: str):
|
||||
self.__pk_map: dict[Any, Model] = {}
|
||||
self.logger = get_logger()
|
||||
self.__pk_map = {}
|
||||
import_dict = loads(json_input)
|
||||
try:
|
||||
self.__import = from_dict(FlowBundle, import_dict)
|
||||
|
@ -4,6 +4,7 @@ from typing import Any, Optional
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.mixins import LoginRequiredMixin
|
||||
from django.core.cache import cache
|
||||
from django.http import Http404, HttpRequest, HttpResponse, HttpResponseRedirect
|
||||
from django.http.request import QueryDict
|
||||
from django.shortcuts import get_object_or_404, redirect
|
||||
@ -37,13 +38,20 @@ from authentik.flows.challenge import (
|
||||
WithUserInfoChallenge,
|
||||
)
|
||||
from authentik.flows.exceptions import EmptyFlowException, FlowNonApplicableException
|
||||
from authentik.flows.models import ConfigurableStage, Flow, FlowDesignation, Stage
|
||||
from authentik.flows.models import (
|
||||
ConfigurableStage,
|
||||
Flow,
|
||||
FlowDesignation,
|
||||
FlowStageBinding,
|
||||
Stage,
|
||||
)
|
||||
from authentik.flows.planner import (
|
||||
PLAN_CONTEXT_PENDING_USER,
|
||||
PLAN_CONTEXT_REDIRECT,
|
||||
FlowPlan,
|
||||
FlowPlanner,
|
||||
)
|
||||
from authentik.lib.sentry import SentryIgnoredException
|
||||
from authentik.lib.utils.reflection import all_subclasses, class_to_path
|
||||
from authentik.lib.utils.urls import is_url_absolute, redirect_with_qs
|
||||
from authentik.tenants.models import Tenant
|
||||
@ -93,6 +101,10 @@ def challenge_response_types():
|
||||
return Inner()
|
||||
|
||||
|
||||
class InvalidStageError(SentryIgnoredException):
|
||||
"""Error raised when a challenge from a stage is not valid"""
|
||||
|
||||
|
||||
@method_decorator(xframe_options_sameorigin, name="dispatch")
|
||||
class FlowExecutorView(APIView):
|
||||
"""Stage 1 Flow executor, passing requests to Stage Views"""
|
||||
@ -102,6 +114,7 @@ class FlowExecutorView(APIView):
|
||||
flow: Flow
|
||||
|
||||
plan: Optional[FlowPlan] = None
|
||||
current_binding: FlowStageBinding
|
||||
current_stage: Stage
|
||||
current_stage_view: View
|
||||
|
||||
@ -121,7 +134,7 @@ class FlowExecutorView(APIView):
|
||||
message = exc.__doc__ if exc.__doc__ else str(exc)
|
||||
return self.stage_invalid(error_message=message)
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
# pylint: disable=unused-argument, too-many-return-statements
|
||||
def dispatch(self, request: HttpRequest, flow_slug: str) -> HttpResponse:
|
||||
# Early check if there's an active Plan for the current session
|
||||
if SESSION_KEY_PLAN in self.request.session:
|
||||
@ -154,27 +167,46 @@ class FlowExecutorView(APIView):
|
||||
request.session[SESSION_KEY_GET] = QueryDict(request.GET.get("query", ""))
|
||||
# We don't save the Plan after getting the next stage
|
||||
# as it hasn't been successfully passed yet
|
||||
next_stage = self.plan.next(self.request)
|
||||
if not next_stage:
|
||||
try:
|
||||
# This is the first time we actually access any attribute on the selected plan
|
||||
# if the cached plan is from an older version, it might have different attributes
|
||||
# in which case we just delete the plan and invalidate everything
|
||||
next_binding = self.plan.next(self.request)
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
self._logger.warning(
|
||||
"f(exec): found incompatible flow plan, invalidating run", exc=exc
|
||||
)
|
||||
keys = cache.keys("flow_*")
|
||||
cache.delete_many(keys)
|
||||
return self.stage_invalid()
|
||||
if not next_binding:
|
||||
self._logger.debug("f(exec): no more stages, flow is done.")
|
||||
return self._flow_done()
|
||||
self.current_stage = next_stage
|
||||
self.current_binding = next_binding
|
||||
self.current_stage = next_binding.stage
|
||||
self._logger.debug(
|
||||
"f(exec): Current stage",
|
||||
current_stage=self.current_stage,
|
||||
flow_slug=self.flow.slug,
|
||||
)
|
||||
stage_cls = self.current_stage.type
|
||||
try:
|
||||
stage_cls = self.current_stage.type
|
||||
except NotImplementedError as exc:
|
||||
self._logger.debug("Error getting stage type", exc=exc)
|
||||
return self.stage_invalid()
|
||||
self.current_stage_view = stage_cls(self)
|
||||
self.current_stage_view.args = self.args
|
||||
self.current_stage_view.kwargs = self.kwargs
|
||||
self.current_stage_view.request = request
|
||||
return super().dispatch(request)
|
||||
try:
|
||||
return super().dispatch(request)
|
||||
except InvalidStageError as exc:
|
||||
return self.stage_invalid(str(exc))
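For context, a hedged sketch of how a stage could surface a broken challenge through the new InvalidStageError: any such exception raised while the stage view handles the request is caught by dispatch() above and converted into stage_invalid(). The stage class, the failure condition and the executor attribute access below are illustrative assumptions, not taken from this diff.

from django.http import HttpRequest, HttpResponse

from authentik.flows.stage import StageView
from authentik.flows.views import InvalidStageError


class ExampleStageView(StageView):
    """Hypothetical stage view, used only to illustrate the error handling."""

    def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
        # current_binding is assigned by FlowExecutorView.dispatch() before the
        # request reaches the stage view (attribute name assumed from the
        # StageView(executor) constructor seen in the tests above).
        binding = self.executor.current_binding
        if binding.stage is None:  # illustrative failure condition
            raise InvalidStageError("Challenge for this stage could not be built")
        return HttpResponse("OK")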
|
||||
|
||||
@extend_schema(
|
||||
responses={
|
||||
200: PolymorphicProxySerializer(
|
||||
component_name="FlowChallengeRequest",
|
||||
component_name="ChallengeTypes",
|
||||
serializers=challenge_types(),
|
||||
resource_type_field_name="component",
|
||||
),
|
||||
@ -214,7 +246,7 @@ class FlowExecutorView(APIView):
|
||||
@extend_schema(
|
||||
responses={
|
||||
200: PolymorphicProxySerializer(
|
||||
component_name="FlowChallengeRequest",
|
||||
component_name="ChallengeTypes",
|
||||
serializers=challenge_types(),
|
||||
resource_type_field_name="component",
|
||||
),
|
||||
@ -256,8 +288,31 @@ class FlowExecutorView(APIView):
|
||||
planner = FlowPlanner(self.flow)
|
||||
plan = planner.plan(self.request)
|
||||
self.request.session[SESSION_KEY_PLAN] = plan
|
||||
try:
|
||||
# Call the has_stages getter to check that
|
||||
# there are no issues with the class we might've gotten
|
||||
# from the cache. If there are errors, just delete all cached flows
|
||||
_ = plan.has_stages
|
||||
except Exception: # pylint: disable=broad-except
|
||||
keys = cache.keys("flow_*")
|
||||
cache.delete_many(keys)
|
||||
return self._initiate_plan()
|
||||
return plan
|
||||
|
||||
def restart_flow(self, keep_context=False) -> HttpResponse:
|
||||
"""Restart the currently active flow, optionally keeping the current context"""
|
||||
planner = FlowPlanner(self.flow)
|
||||
default_context = None
|
||||
if keep_context:
|
||||
default_context = self.plan.context
|
||||
plan = planner.plan(self.request, default_context)
|
||||
self.request.session[SESSION_KEY_PLAN] = plan
|
||||
kwargs = self.kwargs
|
||||
kwargs.update({"flow_slug": self.flow.slug})
|
||||
return redirect_with_qs(
|
||||
"authentik_api:flow-executor", self.request.GET, **kwargs
|
||||
)
|
||||
|
||||
def _flow_done(self) -> HttpResponse:
|
||||
"""User Successfully passed all stages"""
|
||||
# Since this is wrapped by the ExecutorShell, the next argument is saved in the session
|
||||
@ -281,10 +336,10 @@ class FlowExecutorView(APIView):
|
||||
)
|
||||
self.plan.pop()
|
||||
self.request.session[SESSION_KEY_PLAN] = self.plan
|
||||
if self.plan.stages:
|
||||
if self.plan.bindings:
|
||||
self._logger.debug(
|
||||
"f(exec): Continuing with next stage",
|
||||
remaining=len(self.plan.stages),
|
||||
remaining=len(self.plan.bindings),
|
||||
)
|
||||
kwargs = self.kwargs
|
||||
kwargs.update({"flow_slug": self.flow.slug})
|
||||
@ -353,8 +408,11 @@ class FlowErrorResponse(TemplateResponse):
|
||||
context = {}
|
||||
context["error"] = self.error
|
||||
if self._request.user and self._request.user.is_authenticated:
|
||||
if self._request.user.is_superuser or self._request.user.attributes.get(
|
||||
USER_ATTRIBUTE_DEBUG, False
|
||||
if (
|
||||
self._request.user.is_superuser
|
||||
or self._request.user.group_attributes().get(
|
||||
USER_ATTRIBUTE_DEBUG, False
|
||||
)
|
||||
):
|
||||
context["tb"] = "".join(format_tb(self.error.__traceback__))
|
||||
return context
|
||||
|
@ -26,10 +26,9 @@ class ConfigLoader:
|
||||
|
||||
loaded_file = []
|
||||
|
||||
__config = {}
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.__config = {}
|
||||
base_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), "../.."))
|
||||
for path in SEARCH_PATHS:
|
||||
# Check if path is relative, and if so join with base_dir
|
||||
@ -62,7 +61,7 @@ class ConfigLoader:
|
||||
output.update(kwargs)
|
||||
print(dumps(output))
|
||||
|
||||
def update(self, root, updatee):
|
||||
def update(self, root: dict[str, Any], updatee: dict[str, Any]) -> dict[str, Any]:
|
||||
"""Recursively update dictionary"""
|
||||
for key, value in updatee.items():
|
||||
if isinstance(value, Mapping):
|
||||
@ -73,7 +72,7 @@ class ConfigLoader:
|
||||
root[key] = value
|
||||
return root
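A minimal worked example of what the now type-annotated update() does, with made-up dictionaries: nested mappings are merged key by key rather than replaced wholesale.

from authentik.lib.config import ConfigLoader

root = {"redis": {"host": "localhost", "port": 6379}}
updatee = {"redis": {"host": "redis.internal"}}
merged = ConfigLoader().update(root, updatee)
# Only the leaf named in the override changes; sibling keys are kept.
assert merged == {"redis": {"host": "redis.internal", "port": 6379}}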
|
||||
|
||||
def parse_uri(self, value):
|
||||
def parse_uri(self, value: str) -> str:
|
||||
"""Parse string values which start with a URI"""
|
||||
url = urlparse(value)
|
||||
if url.scheme == "env":
|
||||
@ -99,7 +98,10 @@ class ConfigLoader:
|
||||
raise ImproperlyConfigured from exc
|
||||
except PermissionError as exc:
|
||||
self._log(
|
||||
"warning", "Permission denied while reading file", path=path, error=exc
|
||||
"warning",
|
||||
"Permission denied while reading file",
|
||||
path=path,
|
||||
error=str(exc),
|
||||
)
|
||||
|
||||
def update_from_dict(self, update: dict):
|
||||
|
@ -9,13 +9,21 @@ postgresql:
|
||||
web:
|
||||
listen: 0.0.0.0:9000
|
||||
listen_tls: 0.0.0.0:9443
|
||||
load_local_files: false
|
||||
|
||||
redis:
|
||||
host: localhost
|
||||
port: 6379
|
||||
password: ''
|
||||
tls: false
|
||||
tls_reqs: "none"
|
||||
cache_db: 0
|
||||
message_queue_db: 1
|
||||
ws_db: 2
|
||||
cache_timeout: 300
|
||||
cache_timeout_flows: 300
|
||||
cache_timeout_policies: 300
|
||||
cache_timeout_reputation: 300
|
||||
|
||||
debug: false
|
||||
|
||||
@ -45,12 +53,12 @@ outposts:
|
||||
# %(build_hash)s: Build hash if you're running a beta version
|
||||
docker_image_base: "ghcr.io/goauthentik/%(type)s:%(version)s"
|
||||
|
||||
authentik:
|
||||
avatars: gravatar # gravatar or none
|
||||
geoip: "./GeoLite2-City.mmdb"
|
||||
# Optionally add links to the footer on the login page
|
||||
footer_links:
|
||||
- name: Documentation
|
||||
href: https://goauthentik.io/docs/
|
||||
- name: authentik Website
|
||||
href: https://goauthentik.io/
|
||||
avatars: env://AUTHENTIK_AUTHENTIK__AVATARS?gravatar
|
||||
geoip: "./GeoLite2-City.mmdb"
|
||||
|
||||
# Can't currently be configured via environment variables, only yaml
|
||||
footer_links:
|
||||
- name: Documentation
|
||||
href: https://goauthentik.io/docs/
|
||||
- name: authentik Website
|
||||
href: https://goauthentik.io/
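The env://NAME?default values above are resolved through ConfigLoader.parse_uri() (typed earlier in this diff): the scheme selects the source and the query string acts as the fallback, as exercised by the new authentik/lib/tests/test_config.py further down. A hedged example of overriding the avatar mode through the environment:

from os import environ

from authentik.lib.config import ConfigLoader

config = ConfigLoader()
# Variable unset: the "?gravatar" fallback from default.yml applies.
print(config.parse_uri("env://AUTHENTIK_AUTHENTIK__AVATARS?gravatar"))  # gravatar
environ["AUTHENTIK_AUTHENTIK__AVATARS"] = "none"
print(config.parse_uri("env://AUTHENTIK_AUTHENTIK__AVATARS?gravatar"))  # none
# file://<path>?<default> behaves analogously, falling back to the default
# when the file cannot be read (see test_uri_file in the new tests).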
|
||||
|
@ -3,6 +3,7 @@ import re
|
||||
from textwrap import indent
|
||||
from typing import Any, Iterable, Optional
|
||||
|
||||
from django.core.exceptions import FieldError
|
||||
from requests import Session
|
||||
from rest_framework.serializers import ValidationError
|
||||
from sentry_sdk.hub import Hub
|
||||
@ -29,10 +30,10 @@ class BaseEvaluator:
|
||||
# update website/docs/expressions/_objects.md
|
||||
# update website/docs/expressions/_functions.md
|
||||
self._globals = {
|
||||
"regex_match": BaseEvaluator.expr_filter_regex_match,
|
||||
"regex_replace": BaseEvaluator.expr_filter_regex_replace,
|
||||
"ak_is_group_member": BaseEvaluator.expr_func_is_group_member,
|
||||
"ak_user_by": BaseEvaluator.expr_func_user_by,
|
||||
"regex_match": BaseEvaluator.expr_regex_match,
|
||||
"regex_replace": BaseEvaluator.expr_regex_replace,
|
||||
"ak_is_group_member": BaseEvaluator.expr_is_group_member,
|
||||
"ak_user_by": BaseEvaluator.expr_user_by,
|
||||
"ak_logger": get_logger(),
|
||||
"requests": Session(),
|
||||
}
|
||||
@ -40,25 +41,28 @@ class BaseEvaluator:
|
||||
self._filename = "BaseEvalautor"
|
||||
|
||||
@staticmethod
|
||||
def expr_filter_regex_match(value: Any, regex: str) -> bool:
|
||||
def expr_regex_match(value: Any, regex: str) -> bool:
|
||||
"""Expression Filter to run re.search"""
|
||||
return re.search(regex, value) is None
|
||||
return re.search(regex, value) is not None
|
||||
|
||||
@staticmethod
|
||||
def expr_filter_regex_replace(value: Any, regex: str, repl: str) -> str:
|
||||
def expr_regex_replace(value: Any, regex: str, repl: str) -> str:
|
||||
"""Expression Filter to run re.sub"""
|
||||
return re.sub(regex, repl, value)
|
||||
|
||||
@staticmethod
|
||||
def expr_func_user_by(**filters) -> Optional[User]:
|
||||
def expr_user_by(**filters) -> Optional[User]:
|
||||
"""Get user by filters"""
|
||||
users = User.objects.filter(**filters)
|
||||
if users:
|
||||
return users.first()
|
||||
return None
|
||||
try:
|
||||
users = User.objects.filter(**filters)
|
||||
if users:
|
||||
return users.first()
|
||||
return None
|
||||
except FieldError:
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def expr_func_is_group_member(user: User, **group_filters) -> bool:
|
||||
def expr_is_group_member(user: User, **group_filters) -> bool:
|
||||
"""Check if `user` is member of group with name `group_name`"""
|
||||
return user.ak_groups.filter(**group_filters).exists()
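Only the Python-level static methods are renamed here; the names exposed to expression policies through self._globals (regex_match, regex_replace, ak_user_by, ak_is_group_member) stay the same. A small usage sketch with illustrative values, matching what the new evaluator tests below assert:

from authentik.lib.expression.evaluator import BaseEvaluator

# Inside an expression policy these stay available as regex_match(),
# regex_replace(), ak_user_by() and ak_is_group_member(); called directly,
# the renamed static methods behave the same way.
assert BaseEvaluator.expr_regex_match("foo@example.org", r"@example\.org$")
assert BaseEvaluator.expr_regex_replace("foo", "o", "a") == "faa"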
|
||||
|
||||
|
authentik/lib/tests/test_config.py (new file, 61 lines)
@ -0,0 +1,61 @@
|
||||
"""Test config loader"""
|
||||
from os import chmod, environ, unlink, write
|
||||
from tempfile import mkstemp
|
||||
|
||||
from django.conf import ImproperlyConfigured
|
||||
from django.test import TestCase
|
||||
|
||||
from authentik.lib.config import ENV_PREFIX, ConfigLoader
|
||||
|
||||
|
||||
class TestConfig(TestCase):
|
||||
"""Test config loader"""
|
||||
|
||||
def test_env(self):
|
||||
"""Test simple instance"""
|
||||
config = ConfigLoader()
|
||||
environ[ENV_PREFIX + "_test__test"] = "bar"
|
||||
config.update_from_env()
|
||||
self.assertEqual(config.y("test.test"), "bar")
|
||||
|
||||
def test_patch(self):
|
||||
"""Test patch decorator"""
|
||||
config = ConfigLoader()
|
||||
config.y_set("foo.bar", "bar")
|
||||
self.assertEqual(config.y("foo.bar"), "bar")
|
||||
with config.patch("foo.bar", "baz"):
|
||||
self.assertEqual(config.y("foo.bar"), "baz")
|
||||
self.assertEqual(config.y("foo.bar"), "bar")
|
||||
|
||||
def test_uri_env(self):
|
||||
"""Test URI parsing (environment)"""
|
||||
config = ConfigLoader()
|
||||
environ["foo"] = "bar"
|
||||
self.assertEqual(config.parse_uri("env://foo"), "bar")
|
||||
self.assertEqual(config.parse_uri("env://fo?bar"), "bar")
|
||||
|
||||
def test_uri_file(self):
|
||||
"""Test URI parsing (file load)"""
|
||||
config = ConfigLoader()
|
||||
file, file_name = mkstemp()
|
||||
write(file, "foo".encode())
|
||||
_, file2_name = mkstemp()
|
||||
chmod(file2_name, 0o000) # Remove all permissions so we can't read the file
|
||||
self.assertEqual(config.parse_uri(f"file://{file_name}"), "foo")
|
||||
self.assertEqual(config.parse_uri(f"file://{file2_name}?def"), "def")
|
||||
unlink(file_name)
|
||||
unlink(file2_name)
|
||||
|
||||
def test_file_update(self):
|
||||
"""Test update_from_file"""
|
||||
config = ConfigLoader()
|
||||
file, file_name = mkstemp()
|
||||
write(file, "{".encode())
|
||||
file2, file2_name = mkstemp()
|
||||
write(file2, "{".encode())
|
||||
chmod(file2_name, 0o000) # Remove all permissions so we can't read the file
|
||||
with self.assertRaises(ImproperlyConfigured):
|
||||
config.update_from_file(file_name)
|
||||
config.update_from_file(file2_name)
|
||||
unlink(file_name)
|
||||
unlink(file2_name)
|
authentik/lib/tests/test_evaluator.py (new file, 32 lines)
@ -0,0 +1,32 @@
|
||||
"""Test Evaluator base functions"""
|
||||
from django.test import TestCase
|
||||
|
||||
from authentik.core.models import User
|
||||
from authentik.lib.expression.evaluator import BaseEvaluator
|
||||
|
||||
|
||||
class TestEvaluator(TestCase):
|
||||
"""Test Evaluator base functions"""
|
||||
|
||||
def test_regex_match(self):
|
||||
"""Test expr_regex_match"""
|
||||
self.assertFalse(BaseEvaluator.expr_regex_match("foo", "bar"))
|
||||
self.assertTrue(BaseEvaluator.expr_regex_match("foo", "foo"))
|
||||
|
||||
def test_regex_replace(self):
|
||||
"""Test expr_regex_replace"""
|
||||
self.assertEqual(BaseEvaluator.expr_regex_replace("foo", "o", "a"), "faa")
|
||||
|
||||
def test_user_by(self):
|
||||
"""Test expr_user_by"""
|
||||
self.assertIsNotNone(BaseEvaluator.expr_user_by(username="akadmin"))
|
||||
self.assertIsNone(BaseEvaluator.expr_user_by(username="bar"))
|
||||
self.assertIsNone(BaseEvaluator.expr_user_by(foo="bar"))
|
||||
|
||||
def test_is_group_member(self):
|
||||
"""Test expr_is_group_member"""
|
||||
self.assertFalse(
|
||||
BaseEvaluator.expr_is_group_member(
|
||||
User.objects.get(username="akadmin"), name="test"
|
||||
)
|
||||
)
|
authentik/lib/tests/test_http.py (new file, 67 lines)
@ -0,0 +1,67 @@
|
||||
"""Test HTTP Helpers"""
|
||||
from django.test import RequestFactory, TestCase
|
||||
|
||||
from authentik.core.models import (
|
||||
USER_ATTRIBUTE_CAN_OVERRIDE_IP,
|
||||
Token,
|
||||
TokenIntents,
|
||||
User,
|
||||
)
|
||||
from authentik.lib.utils.http import (
|
||||
OUTPOST_REMOTE_IP_HEADER,
|
||||
OUTPOST_TOKEN_HEADER,
|
||||
get_client_ip,
|
||||
)
|
||||
|
||||
|
||||
class TestHTTP(TestCase):
|
||||
"""Test HTTP Helpers"""
|
||||
|
||||
def setUp(self) -> None:
|
||||
self.user = User.objects.get(username="akadmin")
|
||||
self.factory = RequestFactory()
|
||||
|
||||
def test_normal(self):
|
||||
"""Test normal request"""
|
||||
request = self.factory.get("/")
|
||||
self.assertEqual(get_client_ip(request), "127.0.0.1")
|
||||
|
||||
def test_forward_for(self):
|
||||
"""Test x-forwarded-for request"""
|
||||
request = self.factory.get("/", HTTP_X_FORWARDED_FOR="127.0.0.2")
|
||||
self.assertEqual(get_client_ip(request), "127.0.0.2")
|
||||
|
||||
def test_fake_outpost(self):
|
||||
"""Test faked IP which is overridden by an outpost"""
|
||||
token = Token.objects.create(
|
||||
identifier="test", user=self.user, intent=TokenIntents.INTENT_API
|
||||
)
|
||||
# Invalid, non-existent token
|
||||
request = self.factory.get(
|
||||
"/",
|
||||
**{
|
||||
OUTPOST_REMOTE_IP_HEADER: "1.2.3.4",
|
||||
OUTPOST_TOKEN_HEADER: "abc",
|
||||
},
|
||||
)
|
||||
self.assertEqual(get_client_ip(request), "127.0.0.1")
|
||||
# Invalid, user doesn't have permissions
|
||||
request = self.factory.get(
|
||||
"/",
|
||||
**{
|
||||
OUTPOST_REMOTE_IP_HEADER: "1.2.3.4",
|
||||
OUTPOST_TOKEN_HEADER: token.key,
|
||||
},
|
||||
)
|
||||
self.assertEqual(get_client_ip(request), "127.0.0.1")
|
||||
# Valid
|
||||
self.user.attributes[USER_ATTRIBUTE_CAN_OVERRIDE_IP] = True
|
||||
self.user.save()
|
||||
request = self.factory.get(
|
||||
"/",
|
||||
**{
|
||||
OUTPOST_REMOTE_IP_HEADER: "1.2.3.4",
|
||||
OUTPOST_TOKEN_HEADER: token.key,
|
||||
},
|
||||
)
|
||||
self.assertEqual(get_client_ip(request), "1.2.3.4")
|
authentik/lib/utils/errors.py (new file, 10 lines)
@ -0,0 +1,10 @@
|
||||
"""error utils"""
|
||||
from traceback import format_tb
|
||||
|
||||
TRACEBACK_HEADER = "Traceback (most recent call last):\n"
|
||||
|
||||
|
||||
def exception_to_string(exc: Exception) -> str:
|
||||
"""Convert exception to string stackrace"""
|
||||
# Either use passed original exception or whatever we have
|
||||
return TRACEBACK_HEADER + "".join(format_tb(exc.__traceback__)) + str(exc)
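A quick usage sketch for the new helper:

from authentik.lib.utils.errors import exception_to_string

try:
    1 / 0
except ZeroDivisionError as exc:
    # Produces "Traceback (most recent call last):\n" followed by the
    # formatted frames and the exception text, ready for logging.
    print(exception_to_string(exc))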
|
@ -2,10 +2,12 @@
|
||||
from typing import Any, Optional
|
||||
|
||||
from django.http import HttpRequest
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
OUTPOST_REMOTE_IP_HEADER = "HTTP_X_AUTHENTIK_REMOTE_IP"
|
||||
USER_ATTRIBUTE_CAN_OVERRIDE_IP = "goauthentik.io/user/override-ips"
|
||||
OUTPOST_TOKEN_HEADER = "HTTP_X_AUTHENTIK_OUTPOST_TOKEN" # nosec
|
||||
DEFAULT_IP = "255.255.255.255"
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
def _get_client_ip_from_meta(meta: dict[str, Any]) -> str:
|
||||
@ -27,23 +29,41 @@ def _get_outpost_override_ip(request: HttpRequest) -> Optional[str]:
|
||||
"""Get the actual remote IP when set by an outpost. Only
|
||||
allowed when the request is authenticated, by a user with USER_ATTRIBUTE_CAN_OVERRIDE_IP set
|
||||
to outpost"""
|
||||
if not hasattr(request, "user"):
|
||||
from authentik.core.models import (
|
||||
USER_ATTRIBUTE_CAN_OVERRIDE_IP,
|
||||
Token,
|
||||
TokenIntents,
|
||||
)
|
||||
|
||||
if (
|
||||
OUTPOST_REMOTE_IP_HEADER not in request.META
|
||||
or OUTPOST_TOKEN_HEADER not in request.META
|
||||
):
|
||||
return None
|
||||
if not request.user.is_authenticated:
|
||||
fake_ip = request.META[OUTPOST_REMOTE_IP_HEADER]
|
||||
tokens = Token.filter_not_expired(
|
||||
key=request.META.get(OUTPOST_TOKEN_HEADER), intent=TokenIntents.INTENT_API
|
||||
)
|
||||
if not tokens.exists():
|
||||
LOGGER.warning("Attempted remote-ip override without token", fake_ip=fake_ip)
|
||||
return None
|
||||
if OUTPOST_REMOTE_IP_HEADER not in request.META:
|
||||
user = tokens.first().user
|
||||
if not user.group_attributes().get(USER_ATTRIBUTE_CAN_OVERRIDE_IP, False):
|
||||
LOGGER.warning(
|
||||
"Remote-IP override: user doesn't have permission",
|
||||
user=user,
|
||||
fake_ip=fake_ip,
|
||||
)
|
||||
return None
|
||||
if request.user.attributes.get(USER_ATTRIBUTE_CAN_OVERRIDE_IP, False):
|
||||
return None
|
||||
return request.META[OUTPOST_REMOTE_IP_HEADER]
|
||||
return fake_ip
|
||||
|
||||
|
||||
def get_client_ip(request: Optional[HttpRequest]) -> str:
|
||||
"""Attempt to get the client's IP by checking common HTTP Headers.
|
||||
Returns none if no IP Could be found"""
|
||||
if request:
|
||||
override = _get_outpost_override_ip(request)
|
||||
if override:
|
||||
return override
|
||||
return _get_client_ip_from_meta(request.META)
|
||||
return DEFAULT_IP
|
||||
if not request:
|
||||
return DEFAULT_IP
|
||||
override = _get_outpost_override_ip(request)
|
||||
if override:
|
||||
return override
|
||||
return _get_client_ip_from_meta(request.META)
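A hedged sketch of consuming the refactored helper from a view (the view itself is illustrative, not part of this diff):

from django.http import HttpRequest, JsonResponse

from authentik.lib.utils.http import get_client_ip


def whoami(request: HttpRequest) -> JsonResponse:
    # Falls back to DEFAULT_IP when called without a request; the outpost
    # override headers are only honoured when the supplied token's user has
    # USER_ATTRIBUTE_CAN_OVERRIDE_IP set (see test_http.py above).
    return JsonResponse({"client_ip": get_client_ip(request)})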
|
||||
|
@ -11,6 +11,7 @@ from rest_framework.serializers import JSONField, ModelSerializer, ValidationErr
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.core.api.providers import ProviderSerializer
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import PassiveSerializer, is_dict
|
||||
from authentik.core.models import Provider
|
||||
from authentik.outposts.api.service_connections import ServiceConnectionSerializer
|
||||
@ -50,7 +51,7 @@ class OutpostSerializer(ModelSerializer):
|
||||
raise ValidationError(
|
||||
(
|
||||
f"Outpost type {self.initial_data['type']} can't be used with "
|
||||
f"{type(provider)} providers."
|
||||
f"{provider.__class__.__name__} providers."
|
||||
)
|
||||
)
|
||||
return providers
|
||||
@ -76,6 +77,7 @@ class OutpostSerializer(ModelSerializer):
|
||||
"service_connection_obj",
|
||||
"token_identifier",
|
||||
"config",
|
||||
"managed",
|
||||
]
|
||||
extra_kwargs = {"type": {"required": True}}
|
||||
|
||||
@ -95,7 +97,7 @@ class OutpostHealthSerializer(PassiveSerializer):
|
||||
version_outdated = BooleanField(read_only=True)
|
||||
|
||||
|
||||
class OutpostViewSet(ModelViewSet):
|
||||
class OutpostViewSet(UsedByMixin, ModelViewSet):
|
||||
"""Outpost Viewset"""
|
||||
|
||||
queryset = Outpost.objects.all()
|
||||
|
@ -14,6 +14,7 @@ from rest_framework.response import Response
|
||||
from rest_framework.serializers import ModelSerializer
|
||||
from rest_framework.viewsets import GenericViewSet, ModelViewSet
|
||||
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import (
|
||||
MetaNameSerializer,
|
||||
PassiveSerializer,
|
||||
@ -32,6 +33,13 @@ class ServiceConnectionSerializer(ModelSerializer, MetaNameSerializer):
|
||||
|
||||
component = ReadOnlyField()
|
||||
|
||||
def get_component(self, obj: OutpostServiceConnection) -> str:
|
||||
"""Get object type so that we know how to edit the object"""
|
||||
# pyright: reportGeneralTypeIssues=false
|
||||
if obj.__class__ == OutpostServiceConnection:
|
||||
return ""
|
||||
return obj.component
|
||||
|
||||
class Meta:
|
||||
|
||||
model = OutpostServiceConnection
|
||||
@ -55,6 +63,7 @@ class ServiceConnectionStateSerializer(PassiveSerializer):
|
||||
class ServiceConnectionViewSet(
|
||||
mixins.RetrieveModelMixin,
|
||||
mixins.DestroyModelMixin,
|
||||
UsedByMixin,
|
||||
mixins.ListModelMixin,
|
||||
GenericViewSet,
|
||||
):
|
||||
@ -105,7 +114,7 @@ class DockerServiceConnectionSerializer(ServiceConnectionSerializer):
|
||||
]
|
||||
|
||||
|
||||
class DockerServiceConnectionViewSet(ModelViewSet):
|
||||
class DockerServiceConnectionViewSet(UsedByMixin, ModelViewSet):
|
||||
"""DockerServiceConnection Viewset"""
|
||||
|
||||
queryset = DockerServiceConnection.objects.all()
|
||||
@ -139,7 +148,7 @@ class KubernetesServiceConnectionSerializer(ServiceConnectionSerializer):
|
||||
fields = ServiceConnectionSerializer.Meta.fields + ["kubeconfig"]
|
||||
|
||||
|
||||
class KubernetesServiceConnectionViewSet(ModelViewSet):
|
||||
class KubernetesServiceConnectionViewSet(UsedByMixin, ModelViewSet):
|
||||
"""KubernetesServiceConnection Viewset"""
|
||||
|
||||
queryset = KubernetesServiceConnection.objects.all()
|
||||
|
@ -67,14 +67,9 @@ class OutpostConsumer(AuthJsonConsumer):
|
||||
self.accept()
|
||||
self.outpost = outpost.first()
|
||||
self.last_uid = self.channel_name
|
||||
LOGGER.debug(
|
||||
"added outpost instace to cache",
|
||||
outpost=self.outpost,
|
||||
channel_name=self.channel_name,
|
||||
)
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def disconnect(self, close_code):
|
||||
def disconnect(self, code):
|
||||
if self.outpost and self.last_uid:
|
||||
state = OutpostState.for_instance_uid(self.outpost, self.last_uid)
|
||||
if self.channel_name in state.channel_ids:
|
||||
@ -108,6 +103,11 @@ class OutpostConsumer(AuthJsonConsumer):
|
||||
outpost=self.outpost.name,
|
||||
uid=self.last_uid,
|
||||
).inc()
|
||||
LOGGER.debug(
|
||||
"added outpost instace to cache",
|
||||
outpost=self.outpost,
|
||||
instance_uuid=self.last_uid,
|
||||
)
|
||||
self.first_msg = True
|
||||
|
||||
if msg.instruction == WebsocketMessageInstruction.HELLO:
|
||||
|
@ -36,8 +36,10 @@ class DockerController(BaseController):
|
||||
|
||||
def _get_env(self) -> dict[str, str]:
|
||||
return {
|
||||
"AUTHENTIK_HOST": self.outpost.config.authentik_host,
|
||||
"AUTHENTIK_INSECURE": str(self.outpost.config.authentik_host_insecure),
|
||||
"AUTHENTIK_HOST": self.outpost.config.authentik_host.lower(),
|
||||
"AUTHENTIK_INSECURE": str(
|
||||
self.outpost.config.authentik_host_insecure
|
||||
).lower(),
|
||||
"AUTHENTIK_TOKEN": self.outpost.token.key,
|
||||
}
|
||||
|
||||
@ -45,11 +47,34 @@ class DockerController(BaseController):
|
||||
"""Check if container's env is equal to what we would set. Return true if container needs
|
||||
to be rebuilt."""
|
||||
should_be = self._get_env()
|
||||
container_env = container.attrs.get("Config", {}).get("Env", {})
|
||||
container_env = container.attrs.get("Config", {}).get("Env", [])
|
||||
for key, expected_value in should_be.items():
|
||||
if key not in container_env:
|
||||
continue
|
||||
if container_env[key] != expected_value:
|
||||
entry = f"{key.upper()}={expected_value}"
|
||||
if entry not in container_env:
|
||||
return True
|
||||
return False
|
||||
|
||||
def _comp_ports(self, container: Container) -> bool:
|
||||
"""Check that the container has the correct ports exposed. Return true if container needs
|
||||
to be rebuilt."""
|
||||
# with TEST enabled, we use host-network
|
||||
if settings.TEST:
|
||||
return False
|
||||
# When the container isn't running, the API doesn't report any port mappings
|
||||
if container.status != "running":
|
||||
return False
|
||||
# {'3389/tcp': [
|
||||
# {'HostIp': '0.0.0.0', 'HostPort': '389'},
|
||||
# {'HostIp': '::', 'HostPort': '389'}
|
||||
# ]}
|
||||
for port in self.deployment_ports:
|
||||
key = f"{port.inner_port or port.port}/{port.protocol.lower()}"
|
||||
if key not in container.ports:
|
||||
return True
|
||||
host_matching = False
|
||||
for host_port in container.ports[key]:
|
||||
host_matching = host_port.get("HostPort") == str(port.port)
|
||||
if not host_matching:
|
||||
return True
|
||||
return False
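To make the port comparison concrete, here is a hedged, simplified paraphrase of the check (numbers taken from the inline comment above; the any() shortcut stands in for the loop in the diff):

# Keys the controller now builds and looks up: "<inner_port>/<protocol>"
inner_port, external_port, protocol = 3389, 389, "tcp"
key = f"{inner_port or external_port}/{protocol.lower()}"  # "3389/tcp"

# What container.ports reports for a running container (per the comment above):
reported = {"3389/tcp": [
    {"HostIp": "0.0.0.0", "HostPort": "389"},
    {"HostIp": "::", "HostPort": "389"},
]}
# A rebuild is flagged when the key is missing or the reported HostPort
# entries don't match the configured external port.
needs_rebuild = key not in reported or not any(
    entry.get("HostPort") == str(external_port) for entry in reported[key]
)
assert needs_rebuild is False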
|
||||
|
||||
@ -58,15 +83,15 @@ class DockerController(BaseController):
|
||||
try:
|
||||
return self.client.containers.get(container_name), False
|
||||
except NotFound:
|
||||
self.logger.info("Container does not exist, creating")
|
||||
self.logger.info("(Re-)creating container...")
|
||||
image_name = self.get_container_image()
|
||||
self.client.images.pull(image_name)
|
||||
container_args = {
|
||||
"image": image_name,
|
||||
"name": f"authentik-proxy-{self.outpost.uuid.hex}",
|
||||
"name": container_name,
|
||||
"detach": True,
|
||||
"ports": {
|
||||
f"{port.port}/{port.protocol.lower()}": port.inner_port or port.port
|
||||
f"{port.inner_port or port.port}/{port.protocol.lower()}": port.port
|
||||
for port in self.deployment_ports
|
||||
},
|
||||
"environment": self._get_env(),
|
||||
@ -86,6 +111,7 @@ class DockerController(BaseController):
|
||||
try:
|
||||
container, has_been_created = self._get_container()
|
||||
if has_been_created:
|
||||
container.start()
|
||||
return None
|
||||
# Check if the container is out of date, delete it and retry
|
||||
if len(container.image.tags) > 0:
|
||||
@ -98,6 +124,11 @@ class DockerController(BaseController):
|
||||
)
|
||||
self.down()
|
||||
return self.up()
|
||||
# Check container's ports
|
||||
if self._comp_ports(container):
|
||||
self.logger.info("Container has mis-matched ports, re-creating...")
|
||||
self.down()
|
||||
return self.up()
|
||||
# Check that container values match our values
|
||||
if self._comp_env(container):
|
||||
self.logger.info("Container has outdated config, re-creating...")
|
||||
@ -138,6 +169,7 @@ class DockerController(BaseController):
|
||||
self.logger.info("Container is not running, restarting...")
|
||||
container.start()
|
||||
return None
|
||||
self.logger.info("Container is running")
|
||||
return None
|
||||
except DockerException as exc:
|
||||
raise ControllerException(str(exc)) from exc
|
||||
|
authentik/outposts/managed.py (new file, 18 lines)
@ -0,0 +1,18 @@
|
||||
"""Outpost managed objects"""
|
||||
from authentik.managed.manager import EnsureExists, ObjectManager
|
||||
from authentik.outposts.models import Outpost, OutpostType
|
||||
|
||||
|
||||
class OutpostManager(ObjectManager):
|
||||
"""Outpost managed objects"""
|
||||
|
||||
def reconcile(self):
|
||||
return [
|
||||
EnsureExists(
|
||||
Outpost,
|
||||
"goauthentik.io/outposts/inbuilt",
|
||||
name="authentik Bundeled Outpost",
|
||||
object_field="name",
|
||||
type=OutpostType.PROXY,
|
||||
),
|
||||
]
|
authentik/outposts/migrations/0017_outpost_managed.py (new file, 24 lines)
@ -0,0 +1,24 @@
|
||||
# Generated by Django 3.2.4 on 2021-06-23 19:25
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_outposts", "0016_alter_outpost_type"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="outpost",
|
||||
name="managed",
|
||||
field=models.TextField(
|
||||
default=None,
|
||||
help_text="Objects which are managed by authentik. These objects are created and updated automatically. This is flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
|
||||
null=True,
|
||||
unique=True,
|
||||
verbose_name="Managed by authentik",
|
||||
),
|
||||
),
|
||||
]
|
@ -8,7 +8,7 @@ from uuid import uuid4
|
||||
from dacite import from_dict
|
||||
from django.contrib.auth.models import Permission
|
||||
from django.core.cache import cache
|
||||
from django.db import models, transaction
|
||||
from django.db import IntegrityError, models, transaction
|
||||
from django.db.models.base import Model
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from docker.client import DockerClient
|
||||
@ -28,12 +28,19 @@ from structlog.stdlib import get_logger
|
||||
from urllib3.exceptions import HTTPError
|
||||
|
||||
from authentik import ENV_GIT_HASH_KEY, __version__
|
||||
from authentik.core.models import USER_ATTRIBUTE_SA, Provider, Token, TokenIntents, User
|
||||
from authentik.core.models import (
|
||||
USER_ATTRIBUTE_CAN_OVERRIDE_IP,
|
||||
USER_ATTRIBUTE_SA,
|
||||
Provider,
|
||||
Token,
|
||||
TokenIntents,
|
||||
User,
|
||||
)
|
||||
from authentik.crypto.models import CertificateKeyPair
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.models import InheritanceForeignKey
|
||||
from authentik.lib.sentry import SentryIgnoredException
|
||||
from authentik.lib.utils.http import USER_ATTRIBUTE_CAN_OVERRIDE_IP
|
||||
from authentik.managed.models import ManagedModel
|
||||
from authentik.outposts.controllers.k8s.utils import get_namespace
|
||||
from authentik.outposts.docker_tls import DockerInlineTLS
|
||||
|
||||
@ -50,6 +57,8 @@ class ServiceConnectionInvalid(SentryIgnoredException):
|
||||
class OutpostConfig:
|
||||
"""Configuration an outpost uses to configure it self"""
|
||||
|
||||
# update website/docs/outposts/outposts.md
|
||||
|
||||
authentik_host: str
|
||||
authentik_host_insecure: bool = False
|
||||
|
||||
@ -141,7 +150,9 @@ class OutpostServiceConnection(models.Model):
|
||||
@property
|
||||
def component(self) -> str:
|
||||
"""Return component used to edit this object"""
|
||||
raise NotImplementedError
|
||||
# This is called when creating an outpost with a service connection
|
||||
# since the response doesn't use the correct inheritance
|
||||
return ""
|
||||
|
||||
class Meta:
|
||||
|
||||
@ -277,7 +288,7 @@ class KubernetesServiceConnection(OutpostServiceConnection):
|
||||
verbose_name_plural = _("Kubernetes Service-Connections")
|
||||
|
||||
|
||||
class Outpost(models.Model):
|
||||
class Outpost(ManagedModel):
|
||||
"""Outpost instance which manages a service user and token"""
|
||||
|
||||
uuid = models.UUIDField(default=uuid4, editable=False, primary_key=True)
|
||||
@ -333,12 +344,13 @@ class Outpost(models.Model):
|
||||
users = User.objects.filter(username=self.user_identifier)
|
||||
if not users.exists():
|
||||
user: User = User.objects.create(username=self.user_identifier)
|
||||
user.attributes[USER_ATTRIBUTE_SA] = True
|
||||
user.attributes[USER_ATTRIBUTE_CAN_OVERRIDE_IP] = True
|
||||
user.set_unusable_password()
|
||||
user.save()
|
||||
else:
|
||||
user = users.first()
|
||||
user.attributes[USER_ATTRIBUTE_SA] = True
|
||||
user.attributes[USER_ATTRIBUTE_CAN_OVERRIDE_IP] = True
|
||||
user.save()
|
||||
# To ensure the user only has the correct permissions, we delete all of them and re-add
|
||||
# the ones the user needs
|
||||
with transaction.atomic():
|
||||
@ -380,25 +392,31 @@ class Outpost(models.Model):
|
||||
tokens = Token.filter_not_expired(
|
||||
identifier=self.token_identifier,
|
||||
intent=TokenIntents.INTENT_API,
|
||||
)
|
||||
if tokens.exists():
|
||||
token = tokens.first()
|
||||
if not token.managed:
|
||||
token.managed = managed
|
||||
token.save()
|
||||
return token
|
||||
return Token.objects.create(
|
||||
user=self.user,
|
||||
identifier=self.token_identifier,
|
||||
intent=TokenIntents.INTENT_API,
|
||||
description=f"Autogenerated by authentik for Outpost {self.name}",
|
||||
expiring=False,
|
||||
managed=managed,
|
||||
)
|
||||
if tokens.exists():
|
||||
return tokens.first()
|
||||
try:
|
||||
return Token.objects.create(
|
||||
user=self.user,
|
||||
identifier=self.token_identifier,
|
||||
intent=TokenIntents.INTENT_API,
|
||||
description=f"Autogenerated by authentik for Outpost {self.name}",
|
||||
expiring=False,
|
||||
managed=managed,
|
||||
)
|
||||
except IntegrityError:
|
||||
# Integrity error happens mostly when managed is re-used
|
||||
Token.objects.filter(managed=managed).delete()
|
||||
Token.objects.filter(identifier=self.token_identifier).delete()
|
||||
return self.token
|
||||
|
||||
def get_required_objects(self) -> Iterable[Union[models.Model, str]]:
|
||||
"""Get an iterator of all objects the user needs read access to"""
|
||||
objects: list[Union[models.Model, str]] = [self]
|
||||
objects: list[Union[models.Model, str]] = [
|
||||
self,
|
||||
"authentik_events.add_event",
|
||||
]
|
||||
for provider in (
|
||||
Provider.objects.filter(outpost=self).select_related().select_subclasses()
|
||||
):
|
||||
|
@ -9,7 +9,7 @@ CELERY_BEAT_SCHEDULE = {
|
||||
},
|
||||
"outposts_service_connection_check": {
|
||||
"task": "authentik.outposts.tasks.outpost_service_connection_monitor",
|
||||
"schedule": crontab(minute="*/60"),
|
||||
"schedule": crontab(minute="*/5"),
|
||||
"options": {"queue": "authentik_scheduled"},
|
||||
},
|
||||
"outpost_token_ensurer": {
|
||||
|
@ -1,7 +1,7 @@
|
||||
"""authentik outpost signals"""
|
||||
from django.core.cache import cache
|
||||
from django.db.models import Model
|
||||
from django.db.models.signals import post_save, pre_delete, pre_save
|
||||
from django.db.models.signals import m2m_changed, post_save, pre_delete, pre_save
|
||||
from django.dispatch import receiver
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
@ -46,6 +46,14 @@ def pre_save_outpost(sender, instance: Outpost, **_):
|
||||
outpost_controller.delay(instance.pk.hex, action="down", from_cache=True)
|
||||
|
||||
|
||||
@receiver(m2m_changed, sender=Outpost.providers.through)
|
||||
# pylint: disable=unused-argument
|
||||
def m2m_changed_update(sender, instance: Model, action: str, **_):
|
||||
"""Update outpost on m2m change, when providers are added or removed"""
|
||||
if action in ["post_add", "post_remove", "post_clear"]:
|
||||
outpost_post_save.delay(class_to_path(instance.__class__), instance.pk)
|
||||
|
||||
|
||||
@receiver(post_save)
|
||||
# pylint: disable=unused-argument
|
||||
def post_save_update(sender, instance: Model, **_):
|
||||
|
@ -11,6 +11,7 @@ from rest_framework.viewsets import ModelViewSet
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.api.groups import GroupSerializer
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.users import UserSerializer
|
||||
from authentik.policies.api.policies import PolicySerializer
|
||||
from authentik.policies.models import PolicyBinding, PolicyBindingModel
|
||||
@ -81,13 +82,13 @@ class PolicyBindingSerializer(ModelSerializer):
|
||||
"timeout",
|
||||
]
|
||||
|
||||
def validate(self, data: OrderedDict) -> OrderedDict:
|
||||
def validate(self, attrs: OrderedDict) -> OrderedDict:
|
||||
"""Check that either policy, group or user is set."""
|
||||
count = sum(
|
||||
[
|
||||
bool(data.get("policy", None)),
|
||||
bool(data.get("group", None)),
|
||||
bool(data.get("user", None)),
|
||||
bool(attrs.get("policy", None)),
|
||||
bool(attrs.get("group", None)),
|
||||
bool(attrs.get("user", None)),
|
||||
]
|
||||
)
|
||||
invalid = count > 1
|
||||
@ -96,10 +97,10 @@ class PolicyBindingSerializer(ModelSerializer):
|
||||
raise ValidationError("Only one of 'policy', 'group' or 'user' can be set.")
|
||||
if empty:
|
||||
raise ValidationError("One of 'policy', 'group' or 'user' must be set.")
|
||||
return data
|
||||
return attrs
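The sum-of-bools above enforces "exactly one of policy, group or user is set". A minimal standalone sketch of the same rule (the data dicts are made up):

def exactly_one_target(attrs: dict) -> bool:
    count = sum(
        [
            bool(attrs.get("policy")),
            bool(attrs.get("group")),
            bool(attrs.get("user")),
        ]
    )
    return count == 1


assert exactly_one_target({"policy": "policy-pk"})
assert not exactly_one_target({})                           # empty -> ValidationError
assert not exactly_one_target({"group": "g", "user": "u"})  # more than one -> ValidationError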
|
||||
|
||||
|
||||
class PolicyBindingViewSet(ModelViewSet):
|
||||
class PolicyBindingViewSet(UsedByMixin, ModelViewSet):
|
||||
"""PolicyBinding Viewset"""
|
||||
|
||||
queryset = (
|
||||
|
@ -14,6 +14,7 @@ from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.api.decorators import permission_required
|
||||
from authentik.core.api.applications import user_app_cache_key
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import (
|
||||
CacheSerializer,
|
||||
MetaNameSerializer,
|
||||
@ -79,6 +80,7 @@ class PolicySerializer(ModelSerializer, MetaNameSerializer):
|
||||
class PolicyViewSet(
|
||||
mixins.RetrieveModelMixin,
|
||||
mixins.DestroyModelMixin,
|
||||
UsedByMixin,
|
||||
mixins.ListModelMixin,
|
||||
GenericViewSet,
|
||||
):
|
||||
|
@ -37,7 +37,9 @@ class AccessDeniedResponse(TemplateResponse):
|
||||
if self._request.user and self._request.user.is_authenticated:
|
||||
if (
|
||||
self._request.user.is_superuser
|
||||
or self._request.user.attributes.get(USER_ATTRIBUTE_DEBUG, False)
|
||||
or self._request.user.group_attributes().get(
|
||||
USER_ATTRIBUTE_DEBUG, False
|
||||
)
|
||||
):
|
||||
context["policy_result"] = self.policy_result
|
||||
return context
|
||||
|
@ -1,6 +1,7 @@
|
||||
"""Dummy Policy API Views"""
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.policies.api.policies import PolicySerializer
|
||||
from authentik.policies.dummy.models import DummyPolicy
|
||||
|
||||
@ -13,7 +14,7 @@ class DummyPolicySerializer(PolicySerializer):
|
||||
fields = PolicySerializer.Meta.fields + ["result", "wait_min", "wait_max"]
|
||||
|
||||
|
||||
class DummyPolicyViewSet(ModelViewSet):
|
||||
class DummyPolicyViewSet(UsedByMixin, ModelViewSet):
|
||||
"""Dummy Viewset"""
|
||||
|
||||
queryset = DummyPolicy.objects.all()
|
||||
|
@ -62,12 +62,6 @@ class PolicyEngine:
|
||||
# Allow objects with no policies attached to pass
|
||||
empty_result: bool
|
||||
|
||||
__pbm: PolicyBindingModel
|
||||
__cached_policies: list[PolicyResult]
|
||||
__processes: list[PolicyProcessInfo]
|
||||
|
||||
__expected_result_count: int
|
||||
|
||||
def __init__(
|
||||
self, pbm: PolicyBindingModel, user: User, request: HttpRequest = None
|
||||
):
|
||||
@ -83,8 +77,8 @@ class PolicyEngine:
|
||||
self.request.obj = pbm
|
||||
if request:
|
||||
self.request.set_http_request(request)
|
||||
self.__cached_policies = []
|
||||
self.__processes = []
|
||||
self.__cached_policies: list[PolicyResult] = []
|
||||
self.__processes: list[PolicyProcessInfo] = []
|
||||
self.use_cache = True
|
||||
self.__expected_result_count = 0
|
||||
|
||||
|
@ -1,6 +1,7 @@
|
||||
"""Event Matcher Policy API"""
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.policies.api.policies import PolicySerializer
|
||||
from authentik.policies.event_matcher.models import EventMatcherPolicy
|
||||
|
||||
@ -17,7 +18,7 @@ class EventMatcherPolicySerializer(PolicySerializer):
|
||||
]
|
||||
|
||||
|
||||
class EventMatcherPolicyViewSet(ModelViewSet):
|
||||
class EventMatcherPolicyViewSet(UsedByMixin, ModelViewSet):
|
||||
"""Event Matcher Policy Viewset"""
|
||||
|
||||
queryset = EventMatcherPolicy.objects.all()
|
||||
|
@ -0,0 +1,48 @@
|
||||
# Generated by Django 3.2.4 on 2021-06-14 15:32
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_policies_event_matcher", "0016_alter_eventmatcherpolicy_action"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="eventmatcherpolicy",
|
||||
name="action",
|
||||
field=models.TextField(
|
||||
blank=True,
|
||||
choices=[
|
||||
("login", "Login"),
|
||||
("login_failed", "Login Failed"),
|
||||
("logout", "Logout"),
|
||||
("user_write", "User Write"),
|
||||
("suspicious_request", "Suspicious Request"),
|
||||
("password_set", "Password Set"),
|
||||
("secret_view", "Secret View"),
|
||||
("invitation_used", "Invite Used"),
|
||||
("authorize_application", "Authorize Application"),
|
||||
("source_linked", "Source Linked"),
|
||||
("impersonation_started", "Impersonation Started"),
|
||||
("impersonation_ended", "Impersonation Ended"),
|
||||
("policy_execution", "Policy Execution"),
|
||||
("policy_exception", "Policy Exception"),
|
||||
("property_mapping_exception", "Property Mapping Exception"),
|
||||
("system_task_execution", "System Task Execution"),
|
||||
("system_task_exception", "System Task Exception"),
|
||||
("system_exception", "System Exception"),
|
||||
("configuration_error", "Configuration Error"),
|
||||
("model_created", "Model Created"),
|
||||
("model_updated", "Model Updated"),
|
||||
("model_deleted", "Model Deleted"),
|
||||
("email_sent", "Email Sent"),
|
||||
("update_available", "Update Available"),
|
||||
("custom_", "Custom Prefix"),
|
||||
],
|
||||
help_text="Match created events with this action type. When left empty, all action types will be matched.",
|
||||
),
|
||||
),
|
||||
]
|
@ -0,0 +1,49 @@
|
||||
# Generated by Django 3.2.5 on 2021-07-14 19:15
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_policies_event_matcher", "0017_alter_eventmatcherpolicy_action"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="eventmatcherpolicy",
|
||||
name="action",
|
||||
field=models.TextField(
|
||||
blank=True,
|
||||
choices=[
|
||||
("login", "Login"),
|
||||
("login_failed", "Login Failed"),
|
||||
("logout", "Logout"),
|
||||
("user_write", "User Write"),
|
||||
("suspicious_request", "Suspicious Request"),
|
||||
("password_set", "Password Set"),
|
||||
("secret_view", "Secret View"),
|
||||
("secret_rotate", "Secret Rotate"),
|
||||
("invitation_used", "Invite Used"),
|
||||
("authorize_application", "Authorize Application"),
|
||||
("source_linked", "Source Linked"),
|
||||
("impersonation_started", "Impersonation Started"),
|
||||
("impersonation_ended", "Impersonation Ended"),
|
||||
("policy_execution", "Policy Execution"),
|
||||
("policy_exception", "Policy Exception"),
|
||||
("property_mapping_exception", "Property Mapping Exception"),
|
||||
("system_task_execution", "System Task Execution"),
|
||||
("system_task_exception", "System Task Exception"),
|
||||
("system_exception", "System Exception"),
|
||||
("configuration_error", "Configuration Error"),
|
||||
("model_created", "Model Created"),
|
||||
("model_updated", "Model Updated"),
|
||||
("model_deleted", "Model Deleted"),
|
||||
("email_sent", "Email Sent"),
|
||||
("update_available", "Update Available"),
|
||||
("custom_", "Custom Prefix"),
|
||||
],
|
||||
help_text="Match created events with this action type. When left empty, all action types will be matched.",
|
||||
),
|
||||
),
|
||||
]
|
@ -1,6 +1,7 @@
|
||||
"""Password Expiry Policy API Views"""
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.policies.api.policies import PolicySerializer
|
||||
from authentik.policies.expiry.models import PasswordExpiryPolicy
|
||||
|
||||
@ -13,7 +14,7 @@ class PasswordExpiryPolicySerializer(PolicySerializer):
|
||||
fields = PolicySerializer.Meta.fields + ["days", "deny_only"]
|
||||
|
||||
|
||||
class PasswordExpiryPolicyViewSet(ModelViewSet):
|
||||
class PasswordExpiryPolicyViewSet(UsedByMixin, ModelViewSet):
|
||||
"""Password Expiry Viewset"""
|
||||
|
||||
queryset = PasswordExpiryPolicy.objects.all()
|
||||
|
Some files were not shown because too many files have changed in this diff.