Compare commits
183 Commits: version/20...version/20
Commit SHA1s:

adc4cd9c0d, abed254ca1, edfab0995f, 528dedf99d, 5d7eec3049, ad44567ebe, ac82002339, df92111296, da8417a141, 7f32355e3e,
5afe88a605, 320dab3425, ca44f8bd60, 5fd408ca82, becb9e34b5, 4917ab9985, bd92505bc2, 30033d1f90, 3e5dfcbd0f, bf0141acc6,
0c8d513567, d07704fdf1, 086a8753c0, ae7a6e2fd6, 6a4ddcaba7, 2c9b596f01, 7257108091, 91f7b289cc, 77a507d2f8, 3e60e956f4,
84ec70c2a2, 72846f0ae1, dd53e7e9b1, 9df16a9ae0, 02dd44eeec, 2f78e14381, ef6f692526, 2dd575874b, 84c2ebabaa, 3e26170f4b,
4709dca33c, 6064a481fb, 3979b0bde7, 4280847bcc, ade8644da6, 3c3fd53999, 7b823f23ae, a67bea95d4, 775e0ef2fa, d102c59654,
03448a9169, 1e6c081e5c, 8b9ce4a745, 014d93d485, 680b182d95, b2a832175e, b3ce8331f5, ef0f618234, b8a7186a55, b39530f873,
7937c84f2b, 621843c60c, c19da839b1, fea1f3be6f, 6f5ec7838f, 94300492e7, 5d3931c128, 262a8b5ae8, fe069c5e55, c6e60c0ebc,
90b457c5ee, 5e724e4299, b4c8dd6b91, 63d163cc65, 2b1356bb91, ba9edd6c44, 3b2b3262d7, 5431e7fe9d, 7d9c74ce04, 60c3cf890a,
4ec5df6b12, 0403f6d373, b7f4d15a94, 56450887ca, 9bd613a31d, 3fe0483dbf, 63a28ca1e9, 2543b075be, b8bdf7a035, a3ff7cea23,
bb776c2710, c9ad87d419, 0d81eaffff, 6930c84425, eaaeaccf5d, efbbd0adcf, c8d9771640, 2b98637ca5, e3f7185564, d1198fc6c1,
8cb5f8fbee, 31a58e2c25, 229715acb2, fad5b09aee, 2a670afd02, b69248dd55, 5ff5edf769, 939889e0ec, 19ae6585dc, a81c847392,
c6ede78fba, cea1289186, c297f28552, 35b25bd76e, 64d7610b13, 2c8fcff832, 054e76d02a, 80fa132dd9, 4c59c3abef, 22d319c0e7,
89edd77484, 04e52d8ba6, 9b5e3921cb, 2bbad64dc3, f6026fdb13, 49def45ca3, a4856969f4, 2aa7266688, 25817cae6b, 5383ae2c19,
c0c246edab, 831b32c279, 70ccc63702, de954250e5, f268bd4c69, 57a48b6350, 9aac114115, 66e3cbdc46, 2d76d23f7b, 4327b35bc3,
f7047df40e, ef77a4b64e, 5d7d21076f, ede072889e, 9cb7e6c606, e7d36c095d, b88eb430c1, 641872a33a, 405c690193, 932cf48d2b,
402819107d, 41f135126b, 591a339302, 35f2c5d96a, fe6963c428, 19cac4bf43, 4ca564490e, fcb795c273, 14c70b3e4a, ac880c28d7,
f3c6b9a4f6, cba0cf0d76, 73b67cf0f0, 23a8052cc8, 57c49c3865, cbea51ae5b, 8962081d92, e743f13f81, b20a8b7c17, b53c94d76a,
d4419d66c1, 79044368d2, 426686957d, 28cb803fd9, 85c3a36b62, 9ba8a715b1, 358750f66e, b9918529b8, a5673b4ec8, d9287d0c0e,
d9c2b64116, 2b150d3077, dec7a9cfb9
```diff
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2021.6.1-rc6
+current_version = 2021.6.4
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)
@@ -21,6 +21,8 @@ values =
 
 [bumpversion:file:docker-compose.yml]
 
+[bumpversion:file:schema.yml]
+
 [bumpversion:file:.github/workflows/release.yml]
 
 [bumpversion:file:authentik/__init__.py]
```
.github/stale.yml (vendored, 1 changed line)

```diff
@@ -6,6 +6,7 @@ daysUntilClose: 7
 exemptLabels:
   - pinned
   - security
+  - pr_wanted
 # Comment to post when marking an issue as stale. Set to `false` to disable
 markComment: >
   This issue has been automatically marked as stale because it has not had
```
.github/workflows/release.yml (vendored, 80 changed lines)

```diff
@@ -33,22 +33,21 @@ jobs:
         with:
           push: ${{ github.event_name == 'release' }}
           tags: |
-            beryju/authentik:2021.6.1-rc6,
+            beryju/authentik:2021.6.4,
             beryju/authentik:latest,
-            ghcr.io/goauthentik/server:2021.6.1-rc6,
+            ghcr.io/goauthentik/server:2021.6.4,
             ghcr.io/goauthentik/server:latest
           platforms: linux/amd64,linux/arm64
           context: .
       - name: Building Docker Image (stable)
-        uses: docker/build-push-action@v2
-        if: ${{ github.event_name == 'release' && !contains('2021.6.1-rc6', 'rc') }}
-        with:
-          push: true
-          tags: |
-            beryju/authentik:stable,
-            ghcr.io/goauthentik/server:stable
-          platforms: linux/amd64,linux/arm64
-          context: .
+        if: ${{ github.event_name == 'release' && !contains('2021.6.4', 'rc') }}
+        run: |
+          docker pull beryju/authentik:latest
+          docker tag beryju/authentik:latest beryju/authentik:stable
+          docker push beryju/authentik:stable
+          docker pull ghcr.io/goauthentik/server:latest
+          docker tag ghcr.io/goauthentik/server:latest ghcr.io/goauthentik/server:stable
+          docker push ghcr.io/goauthentik/server:stable
   build-proxy:
     runs-on: ubuntu-latest
     steps:
@@ -76,22 +75,21 @@ jobs:
         with:
           push: ${{ github.event_name == 'release' }}
           tags: |
-            beryju/authentik-proxy:2021.6.1-rc6,
+            beryju/authentik-proxy:2021.6.4,
             beryju/authentik-proxy:latest,
-            ghcr.io/goauthentik/proxy:2021.6.1-rc6,
+            ghcr.io/goauthentik/proxy:2021.6.4,
             ghcr.io/goauthentik/proxy:latest
           file: outpost/proxy.Dockerfile
           platforms: linux/amd64,linux/arm64
       - name: Building Docker Image (stable)
-        uses: docker/build-push-action@v2
-        if: ${{ github.event_name == 'release' && !contains('2021.6.1-rc6', 'rc') }}
-        with:
-          push: true
-          tags: |
-            beryju/authentik-proxy:stable,
-            ghcr.io/goauthentik/proxy:stable
-          platforms: linux/amd64,linux/arm64
-          context: .
+        if: ${{ github.event_name == 'release' && !contains('2021.6.4', 'rc') }}
+        run: |
+          docker pull beryju/authentik-proxy:latest
+          docker tag beryju/authentik-proxy:latest beryju/authentik-proxy:stable
+          docker push beryju/authentik-proxy:stable
+          docker pull ghcr.io/goauthentik/proxy:latest
+          docker tag ghcr.io/goauthentik/proxy:latest ghcr.io/goauthentik/proxy:stable
+          docker push ghcr.io/goauthentik/proxy:stable
   build-ldap:
     runs-on: ubuntu-latest
     steps:
@@ -119,24 +117,22 @@ jobs:
         with:
           push: ${{ github.event_name == 'release' }}
           tags: |
-            beryju/authentik-ldap:2021.6.1-rc6,
+            beryju/authentik-ldap:2021.6.4,
             beryju/authentik-ldap:latest,
-            ghcr.io/goauthentik/ldap:2021.6.1-rc6,
+            ghcr.io/goauthentik/ldap:2021.6.4,
             ghcr.io/goauthentik/ldap:latest
           file: outpost/ldap.Dockerfile
           platforms: linux/amd64,linux/arm64
       - name: Building Docker Image (stable)
-        uses: docker/build-push-action@v2
-        if: ${{ github.event_name == 'release' && !contains('2021.6.1-rc6', 'rc') }}
-        with:
-          push: true
-          tags: |
-            beryju/authentik-ldap:stable,
-            ghcr.io/goauthentik/ldap:stable
-          platforms: linux/amd64,linux/arm64
-          context: .
+        if: ${{ github.event_name == 'release' && !contains('2021.6.4', 'rc') }}
+        run: |
+          docker pull beryju/authentik-ldap:latest
+          docker tag beryju/authentik-ldap:latest beryju/authentik-ldap:stable
+          docker push beryju/authentik-ldap:stable
+          docker pull ghcr.io/goauthentik/ldap:latest
+          docker tag ghcr.io/goauthentik/ldap:latest ghcr.io/goauthentik/ldap:stable
+          docker push ghcr.io/goauthentik/ldap:stable
   test-release:
-    if: ${{ github.event_name == 'release' }}
     needs:
       - build-server
       - build-proxy
@@ -160,13 +156,27 @@
     runs-on: ubuntu-latest
     steps:
      - uses: actions/checkout@v2
+      - name: Setup Node.js environment
+        uses: actions/setup-node@v2.2.0
+        with:
+          node-version: 12.x
+      - name: Build web api client and web ui
+        run: |
+          export NODE_ENV=production
+          make gen-web
+          cd web
+          npm i
+          npm run build
       - name: Create a Sentry.io release
         uses: getsentry/action-release@v1
+        if: ${{ github.event_name == 'release' }}
         env:
           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
           SENTRY_ORG: beryjuorg
           SENTRY_PROJECT: authentik
           SENTRY_URL: https://sentry.beryju.org
         with:
-          version: authentik@2021.6.1-rc6
+          version: authentik@2021.6.4
           environment: beryjuorg-prod
+          sourcemaps: './web/dist'
+          finalize: false
```
Pipfile (1 changed line)

```diff
@@ -46,6 +46,7 @@ webauthn = "*"
 xmlsec = "*"
 duo-client = "*"
 ua-parser = "*"
+deepmerge = "*"
 
 [requires]
 python_version = "3.9"
```
Pipfile.lock (generated, 294 changed lines)

The lockfile `_meta` hash changed from
4fa1ad681762c867a95410074f31ac5d00119e187e0f38982cd59fdf301cccf5 to
f90d9fb4713eaf9c5ffe6a3858e64843670f79ab5007e7debf914c1f094c8d63, and the per-package
sha256 hash lists were regenerated to match. The dependency pins changed as follows
(the environment markers for gitpython, isort and pyrsistent were also tightened):

| Package | Old version | New version |
|---|---|---|
| asgiref | 3.3.4 | 3.4.1 |
| boto3 | 1.17.94 | 1.17.105 |
| botocore | 1.20.94 | 1.20.105 |
| celery | 5.1.0 | 5.1.2 |
| channels-redis | 3.2.0 | 3.3.0 |
| deepmerge | (new) | 0.3.0 |
| django | 3.2.4 | 3.2.5 |
| drf-spectacular | 0.17.1 | 0.17.2 |
| google-auth | 1.31.0 | 1.32.1 |
| packaging (default and develop) | 20.9 | 21.0 |
| prompt-toolkit | 3.0.18 | 3.0.19 |
| psycopg2-binary | 2.8.6 | 2.9.1 |
| pyrsistent | 0.17.3 | 0.18.0 |
| python-dotenv | 0.17.1 | 0.18.0 |
| urllib3 (default and develop) | 1.26.5 | 1.26.6 |
| astroid (develop) | 2.5.6 | 2.6.2 |
| gitpython (develop) | 3.1.17 | 3.1.18 |
| isort (develop) | 5.8.0 | 5.9.1 |
| pylint (develop) | 2.8.3 | 2.9.3 |
| regex (develop) | 2021.4.4 | 2021.7.1 |
```diff
@@ -1,3 +1,3 @@
 """authentik"""
-__version__ = "2021.6.1-rc6"
+__version__ = "2021.6.4"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
```
```diff
@@ -19,7 +19,7 @@ def token_from_header(raw_header: bytes) -> Optional[Token]:
     auth_credentials = raw_header.decode()
     if auth_credentials == "" or " " not in auth_credentials:
         return None
-    auth_type, auth_credentials = auth_credentials.split()
+    auth_type, _, auth_credentials = auth_credentials.partition(" ")
     if auth_type.lower() not in ["basic", "bearer"]:
         LOGGER.debug("Unsupported authentication type, denying", type=auth_type.lower())
         raise AuthenticationFailed("Unsupported authentication type")
```
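The switch from `split()` to `partition(" ")` is what lets credentials that themselves contain spaces survive parsing: `split()` breaks on every whitespace run, while `partition` splits only at the first space and always yields exactly three parts. A quick standalone illustration (not authentik code):

```python
raw = "Bearer token with spaces"

# str.split() breaks on every whitespace run, so unpacking into two names fails
# for any credential that itself contains a space.
try:
    auth_type, credentials = raw.split()
except ValueError as exc:
    print(f"split() failed: {exc}")

# str.partition(" ") splits only on the first space and always returns 3 parts,
# so everything after the scheme is kept intact.
auth_type, _, credentials = raw.partition(" ")
print(auth_type)    # Bearer
print(credentials)  # token with spaces
```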
```diff
@@ -2,12 +2,11 @@
 from json import loads
 
 from django.db.models.query import QuerySet
-from django.http.response import Http404
 from django.urls import reverse_lazy
 from django.utils.http import urlencode
 from django_filters.filters import BooleanFilter, CharFilter
 from django_filters.filterset import FilterSet
-from drf_spectacular.utils import OpenApiResponse, extend_schema, extend_schema_field
+from drf_spectacular.utils import extend_schema, extend_schema_field
 from guardian.utils import get_anonymous_user
 from rest_framework.decorators import action
 from rest_framework.fields import CharField, JSONField, SerializerMethodField
@@ -173,7 +172,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
     @extend_schema(
         responses={
             "200": LinkSerializer(many=False),
-            "404": OpenApiResponse(description="No recovery flow found."),
+            "404": LinkSerializer(many=False),
         },
     )
     @action(detail=True, pagination_class=None, filter_backends=[])
@@ -184,7 +183,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
         # Check that there is a recovery flow, if not return an error
         flow = tenant.flow_recovery
         if not flow:
-            raise Http404
+            return Response({"link": ""}, status=404)
         user: User = self.get_object()
         token, __ = Token.objects.get_or_create(
             identifier=f"{user.uid}-password-reset",
```
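Returning `Response({"link": ""}, status=404)` instead of raising `Http404` keeps the error body in the same `{"link": ...}` shape that the `LinkSerializer` schema above now documents for 404, rather than DRF's generic `{"detail": "Not found."}` body. A rough sketch of that pattern in an arbitrary DRF viewset (class and field names here are illustrative, not authentik's):

```python
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet


class ExampleViewSet(GenericViewSet):
    """Illustrative only: contrast the two 404 styles for a link-returning action."""

    @action(detail=True, pagination_class=None, filter_backends=[])
    def recovery(self, request, pk=None):
        link = None  # stand-in for the recovery-flow lookup
        if not link:
            # raise Http404 would yield DRF's generic {"detail": "Not found."};
            # returning a Response keeps the documented {"link": ...} shape.
            return Response({"link": ""}, status=404)
        return Response({"link": link})
```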
```diff
@@ -14,7 +14,9 @@ def is_dict(value: Any):
     """Ensure a value is a dictionary, useful for JSONFields"""
     if isinstance(value, dict):
         return
-    raise ValidationError("Value must be a dictionary.")
+    raise ValidationError(
+        "Value must be a dictionary, and not have any duplicate keys."
+    )
 
 
 class PassiveSerializer(Serializer):
```
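The `is_dict` helper above is a plain validator, so the kind of usage it implies is attaching it to a JSONField and rejecting any non-dict payload at validation time. A minimal, self-contained sketch of that idea (not a copy of authentik's code):

```python
from django.core.exceptions import ValidationError


def is_dict(value):
    """Ensure a value is a dictionary, useful for JSONFields."""
    if isinstance(value, dict):
        return
    raise ValidationError(
        "Value must be a dictionary, and not have any duplicate keys."
    )


# Attached to a JSONField inside any Django model, the validator runs during
# full_clean()/form validation, e.g.:
#
#     attributes = models.JSONField(default=dict, validators=[is_dict])

is_dict({"key": "value"})  # passes silently
try:
    is_dict(["not", "a", "dict"])
except ValidationError as exc:
    print(exc.messages)  # ['Value must be a dictionary, and not have any duplicate keys.']
```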
```diff
@@ -3,23 +3,33 @@ from traceback import format_tb
 from typing import Optional
 
 from django.http import HttpRequest
+from guardian.utils import get_anonymous_user
 
-from authentik.core.models import User
+from authentik.core.models import PropertyMapping, User
 from authentik.events.models import Event, EventAction
 from authentik.lib.expression.evaluator import BaseEvaluator
+from authentik.policies.types import PolicyRequest
 
 
 class PropertyMappingEvaluator(BaseEvaluator):
     """Custom Evalautor that adds some different context variables."""
 
     def set_context(
-        self, user: Optional[User], request: Optional[HttpRequest], **kwargs
+        self,
+        user: Optional[User],
+        request: Optional[HttpRequest],
+        mapping: PropertyMapping,
+        **kwargs,
     ):
         """Update context with context from PropertyMapping's evaluate"""
+        req = PolicyRequest(user=get_anonymous_user())
+        req.obj = mapping
         if user:
+            req.user = user
             self._context["user"] = user
         if request:
-            self._context["request"] = request
+            req.http_request = request
+            self._context["request"] = req
         self._context.update(**kwargs)
 
     def handle_error(self, exc: Exception, expression_source: str):
@@ -30,9 +40,8 @@ class PropertyMappingEvaluator(BaseEvaluator):
             expression=expression_source,
             message=error_string,
         )
-        if "user" in self._context:
-            event.set_user(self._context["user"])
         if "request" in self._context:
-            event.from_http(self._context["request"])
+            req: PolicyRequest = self._context["request"]
+            event.from_http(req.http_request, req.user)
             return
         event.save()
```
```diff
@@ -5,13 +5,13 @@ from typing import Any, Optional, Type
 from urllib.parse import urlencode
 from uuid import uuid4
 
-import django.db.models.options as options
+from deepmerge import always_merger
 from django.conf import settings
 from django.contrib.auth.models import AbstractUser
 from django.contrib.auth.models import UserManager as DjangoUserManager
 from django.core import validators
 from django.db import models
-from django.db.models import Q, QuerySet
+from django.db.models import Q, QuerySet, options
 from django.http import HttpRequest
 from django.templatetags.static import static
 from django.utils.functional import cached_property
@@ -114,8 +114,8 @@ class User(GuardianUserMixin, AbstractUser):
         including the users attributes"""
         final_attributes = {}
         for group in self.ak_groups.all().order_by("name"):
-            final_attributes.update(group.attributes)
-        final_attributes.update(self.attributes)
+            always_merger.merge(final_attributes, group.attributes)
+        always_merger.merge(final_attributes, self.attributes)
         return final_attributes
 
     @cached_property
@@ -142,21 +142,25 @@ class User(GuardianUserMixin, AbstractUser):
     @property
     def avatar(self) -> str:
         """Get avatar, depending on authentik.avatar setting"""
-        mode = CONFIG.raw.get("authentik").get("avatars")
+        mode: str = CONFIG.y("avatars", "none")
         if mode == "none":
             return DEFAULT_AVATAR
+        # gravatar uses md5 for their URLs, so md5 can't be avoided
+        mail_hash = md5(self.email.encode("utf-8")).hexdigest()  # nosec
         if mode == "gravatar":
             parameters = [
                 ("s", "158"),
                 ("r", "g"),
             ]
-            # gravatar uses md5 for their URLs, so md5 can't be avoided
-            mail_hash = md5(self.email.encode("utf-8")).hexdigest()  # nosec
             gravatar_url = (
                 f"{GRAVATAR_URL}/avatar/{mail_hash}?{urlencode(parameters, doseq=True)}"
             )
             return escape(gravatar_url)
-        raise ValueError(f"Invalid avatar mode {mode}")
+        return mode % {
+            "username": self.username,
+            "mail_hash": mail_hash,
+            "upn": self.attributes.get("upn", ""),
+        }
 
     class Meta:
 
@@ -460,7 +464,7 @@ class PropertyMapping(SerializerModel, ManagedModel):
         from authentik.core.expression import PropertyMappingEvaluator
 
         evaluator = PropertyMappingEvaluator()
-        evaluator.set_context(user, request, **kwargs)
+        evaluator.set_context(user, request, self, **kwargs)
         try:
             return evaluator.evaluate(self.expression)
         except (ValueError, SyntaxError) as exc:
@@ -494,8 +498,12 @@ class AuthenticatedSession(ExpiringModel):
     last_used = models.DateTimeField(auto_now=True)
 
     @staticmethod
-    def from_request(request: HttpRequest, user: User) -> "AuthenticatedSession":
+    def from_request(
+        request: HttpRequest, user: User
+    ) -> Optional["AuthenticatedSession"]:
         """Create a new session from a http request"""
+        if not hasattr(request, "session") or not request.session.session_key:
+            return None
         return AuthenticatedSession(
             session_key=request.session.session_key,
             user=user,
```
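The new `deepmerge` dependency backs the attribute change above: `dict.update()` replaces nested dictionaries wholesale, while `always_merger.merge()` recurses into them, so a user's attributes can override individual nested keys without wiping out the rest of a group's attributes. A small standalone illustration (example data only):

```python
from deepmerge import always_merger

group_attrs = {"notifications": {"email": True, "slack": False}}
user_attrs = {"notifications": {"slack": True}}

# dict.update() replaces the whole nested dict: the user's value for
# "notifications" wins and the group's "email" key is lost.
flat = {}
flat.update(group_attrs)
flat.update(user_attrs)
print(flat)  # {'notifications': {'slack': True}}

# always_merger.merge() merges recursively into the base dict, so nested keys
# from the group survive unless the user explicitly overrides them.
merged = {}
always_merger.merge(merged, group_attrs)
always_merger.merge(merged, user_attrs)
print(merged)  # {'notifications': {'email': True, 'slack': True}}
```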
```diff
@@ -49,7 +49,9 @@ def user_logged_in_session(sender, request: HttpRequest, user: "User", **_):
     """Create an AuthenticatedSession from request"""
     from authentik.core.models import AuthenticatedSession
 
-    AuthenticatedSession.from_request(request, user).save()
+    session = AuthenticatedSession.from_request(request, user)
+    if session:
+        session.save()
 
 
 @receiver(user_logged_out)
```
```diff
@@ -183,6 +183,8 @@ class SourceFlowManager:
     # pylint: disable=unused-argument
     def get_stages_to_append(self, flow: Flow) -> list[Stage]:
         """Hook to override stages which are appended to the flow"""
+        if not self.source.enrollment_flow:
+            return []
         if flow.slug == self.source.enrollment_flow.slug:
             return [
                 in_memory_stage(PostUserEnrollmentStage),
@@ -211,7 +213,7 @@ class SourceFlowManager:
         planner = FlowPlanner(flow)
         plan = planner.plan(self.request, kwargs)
         for stage in self.get_stages_to_append(flow):
-            plan.append(stage)
+            plan.append_stage(stage=stage)
         self.request.session[SESSION_KEY_PLAN] = plan
         return redirect_with_qs(
             "authentik_core:if-flow",
```
```diff
@@ -11,6 +11,11 @@
 
 {% block head %}
 <script src="{% static 'dist/FlowInterface.js' %}?v={{ ak_version }}" type="module"></script>
+<style>
+    .pf-c-background-image::before {
+        --ak-flow-background: url("{{ flow.background_url }}");
+    }
+</style>
 {% endblock %}
 
 {% block body %}
```
```diff
@@ -7,6 +7,14 @@
 <link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}?v={{ ak_version }}">
 {% endblock %}
 
+{% block head %}
+<style>
+    .pf-c-background-image::before {
+        --ak-flow-background: url("/static/dist/assets/images/flow_background.jpg");
+    }
+</style>
+{% endblock %}
+
 {% block body %}
 <div class="pf-c-background-image">
     <svg xmlns="http://www.w3.org/2000/svg" class="pf-c-background-image__filter" width="0" height="0">
```
```diff
@@ -97,7 +97,8 @@ class CertificateKeyPairSerializer(ModelSerializer):
         fields = [
             "pk",
             "name",
-            "fingerprint",
+            "fingerprint_sha256",
+            "fingerprint_sha1",
             "certificate_data",
             "key_data",
             "cert_expiry",
```
```diff
@@ -16,11 +16,6 @@ from authentik.crypto.models import CertificateKeyPair
 class CertificateBuilder:
     """Build self-signed certificates"""
 
-    __public_key = None
-    __private_key = None
-    __builder = None
-    __certificate = None
-
     common_name: str
 
     def __init__(self):
```
```diff
@@ -55,20 +55,32 @@ class CertificateKeyPair(CreatedUpdatedModel):
     def private_key(self) -> Optional[RSAPrivateKey]:
         """Get python cryptography PrivateKey instance"""
         if not self._private_key and self._private_key != "":
-            self._private_key = load_pem_private_key(
-                str.encode("\n".join([x.strip() for x in self.key_data.split("\n")])),
-                password=None,
-                backend=default_backend(),
-            )
+            try:
+                self._private_key = load_pem_private_key(
+                    str.encode(
+                        "\n".join([x.strip() for x in self.key_data.split("\n")])
+                    ),
+                    password=None,
+                    backend=default_backend(),
+                )
+            except ValueError:
+                return None
         return self._private_key
 
     @property
-    def fingerprint(self) -> str:
+    def fingerprint_sha256(self) -> str:
         """Get SHA256 Fingerprint of certificate_data"""
         return hexlify(self.certificate.fingerprint(hashes.SHA256()), ":").decode(
             "utf-8"
         )
 
+    @property
+    def fingerprint_sha1(self) -> str:
+        """Get SHA1 Fingerprint of certificate_data"""
+        return hexlify(
+            self.certificate.fingerprint(hashes.SHA1()), ":"  # nosec
+        ).decode("utf-8")
+
     @property
     def kid(self):
         """Get Key ID used for JWKS"""
```
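Both fingerprint properties follow the same `cryptography` idiom: hash the certificate's DER encoding and render the digest as colon-separated hex. A minimal sketch of that idiom outside the model (the standalone helper is illustrative, not part of authentik):

```python
from binascii import hexlify

from cryptography import x509
from cryptography.hazmat.primitives import hashes


def fingerprint(certificate: x509.Certificate, algorithm: hashes.HashAlgorithm) -> str:
    """Colon-separated hex fingerprint of a certificate, e.g. 'ab:cd:...'."""
    # Certificate.fingerprint() hashes the DER encoding with the given algorithm;
    # hexlify's separator argument inserts the colons (Python 3.8+).
    return hexlify(certificate.fingerprint(algorithm), ":").decode("utf-8")


# Usage with any PEM-encoded certificate:
# cert = x509.load_pem_x509_certificate(pem_bytes)
# print(fingerprint(cert, hashes.SHA256()))
# print(fingerprint(cert, hashes.SHA1()))  # for consumers that still expect SHA1
```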
```diff
@@ -6,11 +6,11 @@ from drf_spectacular.types import OpenApiTypes
 from drf_spectacular.utils import OpenApiParameter, extend_schema
 from guardian.shortcuts import get_objects_for_user
 from rest_framework.decorators import action
-from rest_framework.fields import CharField, DictField, IntegerField
+from rest_framework.fields import DictField, IntegerField
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.serializers import ModelSerializer
-from rest_framework.viewsets import ReadOnlyModelViewSet
+from rest_framework.viewsets import ModelViewSet
 
 from authentik.core.api.utils import PassiveSerializer, TypeCreateSerializer
 from authentik.events.models import Event, EventAction
@@ -19,11 +19,6 @@ from authentik.events.models import Event, EventAction
 class EventSerializer(ModelSerializer):
     """Event Serializer"""
 
-    # Since we only use this serializer for read-only operations,
-    # no checking of the action is done here.
-    # This allows clients to check wildcards, prefixes and custom types
-    action = CharField()
-
     class Meta:
 
         model = Event
@@ -96,7 +91,7 @@ class EventsFilter(django_filters.FilterSet):
         fields = ["action", "client_ip", "username"]
 
 
-class EventViewSet(ReadOnlyModelViewSet):
+class EventViewSet(ModelViewSet):
     """Event Read-Only Viewset"""
 
     queryset = Event.objects.all()
```
|
|||||||
@@ -46,7 +46,7 @@ class NotificationTransportTestSerializer(Serializer):

     messages = ListField(child=CharField())

-    def create(self, request: Request) -> Response:
+    def create(self, validated_data: Request) -> Response:
         raise NotImplementedError

     def update(self, request: Request) -> Response:
@@ -27,10 +27,9 @@ class GeoIPDict(TypedDict):
 class GeoIPReader:
     """Slim wrapper around GeoIP API"""

-    __reader: Optional[Reader] = None
-    __last_mtime: float = 0.0
-
     def __init__(self):
+        self.__reader: Optional[Reader] = None
+        self.__last_mtime: float = 0.0
         self.__open()

     def __open(self):
@@ -3,6 +3,7 @@ from functools import partial
 from typing import Callable

 from django.conf import settings
+from django.core.exceptions import SuspiciousOperation
 from django.db.models import Model
 from django.db.models.signals import post_save, pre_delete
 from django.http import HttpRequest, HttpResponse
@@ -13,6 +14,7 @@ from authentik.core.models import User
 from authentik.events.models import Event, EventAction, Notification
 from authentik.events.signals import EventNewThread
 from authentik.events.utils import model_to_dict
+from authentik.lib.sentry import before_send
 from authentik.lib.utils.errors import exception_to_string


@@ -62,12 +64,21 @@ class AuditMiddleware:

         if settings.DEBUG:
             return
-        thread = EventNewThread(
-            EventAction.SYSTEM_EXCEPTION,
-            request,
-            message=exception_to_string(exception),
-        )
-        thread.run()
+        # Special case for SuspiciousOperation, we have a special event action for that
+        if isinstance(exception, SuspiciousOperation):
+            thread = EventNewThread(
+                EventAction.SUSPICIOUS_REQUEST,
+                request,
+                message=str(exception),
+            )
+            thread.run()
+        elif before_send({}, {"exc_info": (None, exception, None)}) is not None:
+            thread = EventNewThread(
+                EventAction.SYSTEM_EXCEPTION,
+                request,
+                message=exception_to_string(exception),
+            )
+            thread.run()

     @staticmethod
     # pylint: disable=unused-argument
@@ -105,7 +105,11 @@ def notification_transport(
     """Send notification over specified transport"""
     self.save_on_success = False
     try:
-        notification: Notification = Notification.objects.get(pk=notification_pk)
+        notification: Notification = Notification.objects.filter(
+            pk=notification_pk
+        ).first()
+        if not notification:
+            return
         transport: NotificationTransport = NotificationTransport.objects.get(
             pk=transport_pk
         )
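Note: a hedged sketch (the helper name is illustrative, not from the patch) of why the lookup above switched from .get() to .filter().first(): a notification deleted before the task runs no longer raises DoesNotExist, the task just ends.

    from typing import Optional

    from authentik.events.models import Notification

    def load_notification(notification_pk: int) -> Optional[Notification]:
        # .filter().first() returns None instead of raising Notification.DoesNotExist
        notification = Notification.objects.filter(pk=notification_pk).first()
        if not notification:
            return None  # nothing left to send; the task exits without retrying
        return notification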
@@ -25,6 +25,7 @@ class FlowStageBindingSerializer(ModelSerializer):
             "re_evaluate_policies",
             "order",
             "policy_engine_mode",
+            "invalid_response_action",
         ]


@@ -5,8 +5,7 @@ from typing import TYPE_CHECKING, Optional
 from django.http.request import HttpRequest
 from structlog.stdlib import get_logger

-from authentik.core.models import User
-from authentik.flows.models import Stage
+from authentik.flows.models import FlowStageBinding
 from authentik.policies.engine import PolicyEngine
 from authentik.policies.models import PolicyBinding

@@ -22,11 +21,14 @@ class StageMarker:

     # pylint: disable=unused-argument
     def process(
-        self, plan: "FlowPlan", stage: Stage, http_request: Optional[HttpRequest]
-    ) -> Optional[Stage]:
+        self,
+        plan: "FlowPlan",
+        binding: FlowStageBinding,
+        http_request: HttpRequest,
+    ) -> Optional[FlowStageBinding]:
         """Process callback for this marker. This should be overridden by sub-classes.
         If a stage should be removed, return None."""
-        return stage
+        return binding


 @dataclass
@@ -34,24 +36,34 @@ class ReevaluateMarker(StageMarker):
     """Reevaluate Marker, forces stage's policies to be evaluated again."""

     binding: PolicyBinding
-    user: User

     def process(
-        self, plan: "FlowPlan", stage: Stage, http_request: Optional[HttpRequest]
-    ) -> Optional[Stage]:
+        self,
+        plan: "FlowPlan",
+        binding: FlowStageBinding,
+        http_request: HttpRequest,
+    ) -> Optional[FlowStageBinding]:
         """Re-evaluate policies bound to stage, and if they fail, remove from plan"""
-        engine = PolicyEngine(self.binding, self.user)
+        from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
+
+        LOGGER.debug(
+            "f(plan_inst)[re-eval marker]: running re-evaluation",
+            binding=binding,
+            policy_binding=self.binding,
+        )
+        engine = PolicyEngine(
+            self.binding, plan.context.get(PLAN_CONTEXT_PENDING_USER, http_request.user)
+        )
         engine.use_cache = False
-        if http_request:
-            engine.request.set_http_request(http_request)
+        engine.request.set_http_request(http_request)
         engine.request.context = plan.context
         engine.build()
         result = engine.result
         if result.passing:
-            return stage
+            return binding
         LOGGER.warning(
-            "f(plan_inst)[re-eval marker]: stage failed re-evaluation",
-            stage=stage,
+            "f(plan_inst)[re-eval marker]: binding failed re-evaluation",
+            binding=binding,
             messages=result.messages,
         )
         return None
@@ -135,7 +135,7 @@ class Migration(migrations.Migration):

    dependencies = [
        ("authentik_flows", "0017_auto_20210329_1334"),
-        ("authentik_stages_user_write", "__latest__"),
+        ("authentik_stages_user_write", "0002_auto_20200918_1653"),
        ("authentik_stages_user_login", "__latest__"),
        ("authentik_stages_password", "0002_passwordstage_change_flow"),
        ("authentik_policies", "0001_initial"),
@@ -0,0 +1,22 @@
+# Generated by Django 3.2.4 on 2021-06-27 16:20
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_flows", "0020_flow_compatibility_mode"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="flowstagebinding",
+            name="invalid_response_action",
+            field=models.TextField(
+                choices=[("retry", "Retry"), ("continue", "Continue")],
+                default="retry",
+                help_text="Configure how the flow executor should handle an invalid response to a challenge. RETRY returns the error message and a similar challenge to the executor while CONTINUE continues with the next stage.",
+            ),
+        ),
+    ]
@@ -0,0 +1,26 @@
+# Generated by Django 3.2.4 on 2021-07-03 13:13
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_flows", "0021_flowstagebinding_invalid_response_action"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="flowstagebinding",
+            name="invalid_response_action",
+            field=models.TextField(
+                choices=[
+                    ("retry", "Retry"),
+                    ("restart", "Restart"),
+                    ("restart_with_context", "Restart With Context"),
+                ],
+                default="retry",
+                help_text="Configure how the flow executor should handle an invalid response to a challenge. RETRY returns the error message and a similar challenge to the executor. RESTART restarts the flow from the beginning, and RESTART_WITH_CONTEXT restarts the flow while keeping the current context.",
+            ),
+        ),
+    ]
@@ -27,6 +27,14 @@ class NotConfiguredAction(models.TextChoices):
     CONFIGURE = "configure"


+class InvalidResponseAction(models.TextChoices):
+    """Configure how the flow executor should handle invalid responses to challenges"""
+
+    RETRY = "retry"
+    RESTART = "restart"
+    RESTART_WITH_CONTEXT = "restart_with_context"
+
+
 class FlowDesignation(models.TextChoices):
     """Designation of what a Flow should be used for. At a later point, this
     should be replaced by a database entry."""
@@ -201,6 +209,17 @@ class FlowStageBinding(SerializerModel, PolicyBindingModel):
         help_text=_("Evaluate policies when the Stage is present to the user."),
     )

+    invalid_response_action = models.TextField(
+        choices=InvalidResponseAction.choices,
+        default=InvalidResponseAction.RETRY,
+        help_text=_(
+            "Configure how the flow executor should handle an invalid response to a "
+            "challenge. RETRY returns the error message and a similar challenge to the "
+            "executor. RESTART restarts the flow from the beginning, and RESTART_WITH_CONTEXT "
+            "restarts the flow while keeping the current context."
+        ),
+    )
+
     order = models.IntegerField()

     objects = InheritanceManager()
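Note: a hedged usage sketch (the helper and its arguments are illustrative, not from the patch) of the new invalid_response_action field; the test added later in this diff configures an identification stage the same way.

    from authentik.flows.models import Flow, FlowStageBinding, InvalidResponseAction, Stage

    def bind_with_restart(flow: Flow, stage: Stage, order: int) -> FlowStageBinding:
        # an invalid challenge response restarts the flow but keeps the collected context
        return FlowStageBinding.objects.create(
            target=flow,
            stage=stage,
            order=order,
            invalid_response_action=InvalidResponseAction.RESTART_WITH_CONTEXT,
        )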
@@ -14,6 +14,7 @@ from authentik.events.models import cleanse_dict
 from authentik.flows.exceptions import EmptyFlowException, FlowNonApplicableException
 from authentik.flows.markers import ReevaluateMarker, StageMarker
 from authentik.flows.models import Flow, FlowStageBinding, Stage
+from authentik.lib.config import CONFIG
 from authentik.policies.engine import PolicyEngine
 from authentik.root.monitoring import UpdatingGauge

@@ -33,6 +34,7 @@ HIST_FLOWS_PLAN_TIME = Histogram(
     "Duration to build a plan for a flow",
     ["flow_slug"],
 )
+CACHE_TIMEOUT = int(CONFIG.y("redis.cache_timeout_flows"))


 def cache_key(flow: Flow, user: Optional[User] = None) -> str:
@@ -50,33 +52,41 @@ class FlowPlan:

     flow_pk: str

-    stages: list[Stage] = field(default_factory=list)
+    bindings: list[FlowStageBinding] = field(default_factory=list)
     context: dict[str, Any] = field(default_factory=dict)
     markers: list[StageMarker] = field(default_factory=list)

-    def append(self, stage: Stage, marker: Optional[StageMarker] = None):
+    def append_stage(self, stage: Stage, marker: Optional[StageMarker] = None):
         """Append `stage` to all stages, optionall with stage marker"""
-        self.stages.append(stage)
+        return self.append(FlowStageBinding(stage=stage), marker)
+
+    def append(self, binding: FlowStageBinding, marker: Optional[StageMarker] = None):
+        """Append `stage` to all stages, optionall with stage marker"""
+        self.bindings.append(binding)
         self.markers.append(marker or StageMarker())

-    def insert(self, stage: Stage, marker: Optional[StageMarker] = None):
+    def insert_stage(self, stage: Stage, marker: Optional[StageMarker] = None):
         """Insert stage into plan, as immediate next stage"""
-        self.stages.insert(1, stage)
+        self.bindings.insert(1, FlowStageBinding(stage=stage, order=0))
         self.markers.insert(1, marker or StageMarker())

-    def next(self, http_request: Optional[HttpRequest]) -> Optional[Stage]:
+    def next(self, http_request: Optional[HttpRequest]) -> Optional[FlowStageBinding]:
         """Return next pending stage from the bottom of the list"""
         if not self.has_stages:
             return None
-        stage = self.stages[0]
+        binding = self.bindings[0]
         marker = self.markers[0]

         if marker.__class__ is not StageMarker:
-            LOGGER.debug("f(plan_inst): stage has marker", stage=stage, marker=marker)
-        marked_stage = marker.process(self, stage, http_request)
+            LOGGER.debug(
+                "f(plan_inst): stage has marker", binding=binding, marker=marker
+            )
+        marked_stage = marker.process(self, binding, http_request)
         if not marked_stage:
-            LOGGER.debug("f(plan_inst): marker returned none, next stage", stage=stage)
-            self.stages.remove(stage)
+            LOGGER.debug(
+                "f(plan_inst): marker returned none, next stage", binding=binding
+            )
+            self.bindings.remove(binding)
             self.markers.remove(marker)
             if not self.has_stages:
                 return None
@@ -87,12 +97,12 @@ class FlowPlan:
     def pop(self):
         """Pop next pending stage from bottom of list"""
         self.markers.pop(0)
-        self.stages.pop(0)
+        self.bindings.pop(0)

     @property
     def has_stages(self) -> bool:
         """Check if there are any stages left in this plan"""
-        return len(self.markers) + len(self.stages) > 0
+        return len(self.markers) + len(self.bindings) > 0


 class FlowPlanner:
@@ -157,9 +167,9 @@ class FlowPlanner:
                 "f(plan): building plan",
             )
             plan = self._build_plan(user, request, default_context)
-            cache.set(cache_key(self.flow, user), plan)
+            cache.set(cache_key(self.flow, user), plan, CACHE_TIMEOUT)
             GAUGE_FLOWS_CACHED.update()
-            if not plan.stages and not self.allow_empty_flows:
+            if not plan.bindings and not self.allow_empty_flows:
                 raise EmptyFlowException()
             return plan

@@ -214,9 +224,9 @@ class FlowPlanner:
                        "f(plan): stage has re-evaluate marker",
                        stage=binding.stage,
                    )
-                    marker = ReevaluateMarker(binding=binding, user=user)
+                    marker = ReevaluateMarker(binding=binding)
                if stage:
-                    plan.append(stage, marker)
+                    plan.append(binding, marker)
            HIST_FLOWS_PLAN_TIME.labels(flow_slug=self.flow.slug)
            self._logger.debug(
                "f(plan): finished building",
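Note: a hedged sketch (standalone helper; argument values are illustrative) of the reworked FlowPlan API shown above: plans now carry FlowStageBinding objects rather than bare stages.

    from typing import Optional

    from django.http import HttpRequest

    from authentik.flows.models import FlowStageBinding, Stage
    from authentik.flows.planner import FlowPlan

    def demo_plan(
        flow_pk: str,
        stage: Stage,
        binding: FlowStageBinding,
        request: Optional[HttpRequest],
    ) -> Optional[FlowStageBinding]:
        plan = FlowPlan(flow_pk=flow_pk)
        plan.append_stage(stage)   # wraps the stage in an in-memory FlowStageBinding
        plan.append(binding)       # or append an existing binding directly
        return plan.next(request)  # now returns the next FlowStageBinding, or None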
@@ -16,29 +16,14 @@ from authentik.flows.challenge import (
     HttpChallengeResponse,
     WithUserInfoChallenge,
 )
+from authentik.flows.models import InvalidResponseAction
 from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
 from authentik.flows.views import FlowExecutorView
-from authentik.lib.sentry import SentryIgnoredException

 PLAN_CONTEXT_PENDING_USER_IDENTIFIER = "pending_user_identifier"
 LOGGER = get_logger()


-class InvalidChallengeError(SentryIgnoredException):
-    """Error raised when a challenge from a stage is not valid"""
-
-    def __init__(self, errors, stage_view: View, challenge: Challenge) -> None:
-        super().__init__()
-        self.errors = errors
-        self.stage_view = stage_view
-        self.challenge = challenge
-
-    def __str__(self) -> str:
-        return (
-            f"Invalid challenge from {self.stage_view}: {self.errors}\n{self.challenge}"
-        )
-
-
 class StageView(View):
     """Abstract Stage, inherits TemplateView but can be combined with FormView"""

@@ -85,7 +70,13 @@ class ChallengeStageView(StageView):
         """Return a challenge for the frontend to solve"""
         challenge = self._get_challenge(*args, **kwargs)
         if not challenge.is_valid():
-            LOGGER.warning(challenge.errors, stage_view=self, challenge=challenge)
+            LOGGER.warning(
+                "f(ch): Invalid challenge",
+                binding=self.executor.current_binding,
+                errors=challenge.errors,
+                stage_view=self,
+                challenge=challenge,
+            )
         return HttpChallengeResponse(challenge)

     # pylint: disable=unused-argument
@@ -93,6 +84,21 @@ class ChallengeStageView(StageView):
         """Handle challenge response"""
         challenge: ChallengeResponse = self.get_response_instance(data=request.data)
         if not challenge.is_valid():
+            if self.executor.current_binding.invalid_response_action in [
+                InvalidResponseAction.RESTART,
+                InvalidResponseAction.RESTART_WITH_CONTEXT,
+            ]:
+                keep_context = (
+                    self.executor.current_binding.invalid_response_action
+                    == InvalidResponseAction.RESTART_WITH_CONTEXT
+                )
+                LOGGER.debug(
+                    "f(ch): Invalid response, restarting flow",
+                    binding=self.executor.current_binding,
+                    stage_view=self,
+                    keep_context=keep_context,
+                )
+                return self.executor.restart_flow(keep_context)
             return self.challenge_invalid(challenge)
         return self.challenge_valid(challenge)

@@ -142,5 +148,10 @@ class ChallengeStageView(StageView):
         )
         challenge_response.initial_data["response_errors"] = full_errors
         if not challenge_response.is_valid():
-            LOGGER.warning(challenge_response.errors)
+            LOGGER.warning(
+                "f(ch): invalid challenge response",
+                binding=self.executor.current_binding,
+                errors=challenge_response.errors,
+                stage_view=self,
+            )
         return HttpChallengeResponse(challenge_response)
@@ -182,8 +182,8 @@ class TestFlowPlanner(TestCase):
         planner = FlowPlanner(flow)
         plan = planner.plan(request)

-        self.assertEqual(plan.stages[0], binding.stage)
-        self.assertEqual(plan.stages[1], binding2.stage)
+        self.assertEqual(plan.bindings[0], binding)
+        self.assertEqual(plan.bindings[1], binding2)

         self.assertIsInstance(plan.markers[0], StageMarker)
         self.assertIsInstance(plan.markers[1], ReevaluateMarker)
@@ -11,15 +11,23 @@ from authentik.core.models import User
 from authentik.flows.challenge import ChallengeTypes
 from authentik.flows.exceptions import FlowNonApplicableException
 from authentik.flows.markers import ReevaluateMarker, StageMarker
-from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding
+from authentik.flows.models import (
+    Flow,
+    FlowDesignation,
+    FlowStageBinding,
+    InvalidResponseAction,
+)
 from authentik.flows.planner import FlowPlan, FlowPlanner
 from authentik.flows.stage import PLAN_CONTEXT_PENDING_USER_IDENTIFIER, StageView
 from authentik.flows.views import NEXT_ARG_NAME, SESSION_KEY_PLAN, FlowExecutorView
 from authentik.lib.config import CONFIG
 from authentik.policies.dummy.models import DummyPolicy
 from authentik.policies.models import PolicyBinding
+from authentik.policies.reputation.models import ReputationPolicy
 from authentik.policies.types import PolicyResult
+from authentik.stages.deny.models import DenyStage
 from authentik.stages.dummy.models import DummyStage
+from authentik.stages.identification.models import IdentificationStage, UserFields

 POLICY_RETURN_FALSE = PropertyMock(return_value=PolicyResult(False))
 POLICY_RETURN_TRUE = MagicMock(return_value=PolicyResult(True))
@@ -52,8 +60,9 @@ class TestFlowExecutor(TestCase):
             designation=FlowDesignation.AUTHENTICATION,
         )
         stage = DummyStage.objects.create(name="dummy")
+        binding = FlowStageBinding(target=flow, stage=stage, order=0)
         plan = FlowPlan(
-            flow_pk=flow.pk.hex + "a", stages=[stage], markers=[StageMarker()]
+            flow_pk=flow.pk.hex + "a", bindings=[binding], markers=[StageMarker()]
         )
         session = self.client.session
         session[SESSION_KEY_PLAN] = plan
@@ -163,7 +172,7 @@ class TestFlowExecutor(TestCase):
         # Check that two stages are in plan
         session = self.client.session
         plan: FlowPlan = session[SESSION_KEY_PLAN]
-        self.assertEqual(len(plan.stages), 2)
+        self.assertEqual(len(plan.bindings), 2)
         # Second request, submit form, one stage left
         response = self.client.post(exec_url)
         # Second request redirects to the same URL
@@ -172,7 +181,7 @@ class TestFlowExecutor(TestCase):
         # Check that two stages are in plan
         session = self.client.session
         plan: FlowPlan = session[SESSION_KEY_PLAN]
-        self.assertEqual(len(plan.stages), 1)
+        self.assertEqual(len(plan.bindings), 1)

    @patch(
        "authentik.flows.views.to_stage_response",
@@ -213,8 +222,8 @@ class TestFlowExecutor(TestCase):

         plan: FlowPlan = self.client.session[SESSION_KEY_PLAN]

-        self.assertEqual(plan.stages[0], binding.stage)
-        self.assertEqual(plan.stages[1], binding2.stage)
+        self.assertEqual(plan.bindings[0], binding)
+        self.assertEqual(plan.bindings[1], binding2)

         self.assertIsInstance(plan.markers[0], StageMarker)
         self.assertIsInstance(plan.markers[1], ReevaluateMarker)
@@ -267,9 +276,9 @@ class TestFlowExecutor(TestCase):
         self.assertEqual(response.status_code, 200)
         plan: FlowPlan = self.client.session[SESSION_KEY_PLAN]

-        self.assertEqual(plan.stages[0], binding.stage)
-        self.assertEqual(plan.stages[1], binding2.stage)
-        self.assertEqual(plan.stages[2], binding3.stage)
+        self.assertEqual(plan.bindings[0], binding)
+        self.assertEqual(plan.bindings[1], binding2)
+        self.assertEqual(plan.bindings[2], binding3)

         self.assertIsInstance(plan.markers[0], StageMarker)
         self.assertIsInstance(plan.markers[1], ReevaluateMarker)
@@ -281,8 +290,8 @@ class TestFlowExecutor(TestCase):

         plan: FlowPlan = self.client.session[SESSION_KEY_PLAN]

-        self.assertEqual(plan.stages[0], binding2.stage)
-        self.assertEqual(plan.stages[1], binding3.stage)
+        self.assertEqual(plan.bindings[0], binding2)
+        self.assertEqual(plan.bindings[1], binding3)

         self.assertIsInstance(plan.markers[0], StageMarker)
         self.assertIsInstance(plan.markers[1], StageMarker)
@@ -338,9 +347,9 @@ class TestFlowExecutor(TestCase):
         self.assertEqual(response.status_code, 200)
         plan: FlowPlan = self.client.session[SESSION_KEY_PLAN]

-        self.assertEqual(plan.stages[0], binding.stage)
-        self.assertEqual(plan.stages[1], binding2.stage)
-        self.assertEqual(plan.stages[2], binding3.stage)
+        self.assertEqual(plan.bindings[0], binding)
+        self.assertEqual(plan.bindings[1], binding2)
+        self.assertEqual(plan.bindings[2], binding3)

         self.assertIsInstance(plan.markers[0], StageMarker)
         self.assertIsInstance(plan.markers[1], ReevaluateMarker)
@@ -352,8 +361,8 @@ class TestFlowExecutor(TestCase):

         plan: FlowPlan = self.client.session[SESSION_KEY_PLAN]

-        self.assertEqual(plan.stages[0], binding2.stage)
-        self.assertEqual(plan.stages[1], binding3.stage)
+        self.assertEqual(plan.bindings[0], binding2)
+        self.assertEqual(plan.bindings[1], binding3)

         self.assertIsInstance(plan.markers[0], StageMarker)
         self.assertIsInstance(plan.markers[1], StageMarker)
@@ -364,7 +373,7 @@ class TestFlowExecutor(TestCase):

         plan: FlowPlan = self.client.session[SESSION_KEY_PLAN]

-        self.assertEqual(plan.stages[0], binding3.stage)
+        self.assertEqual(plan.bindings[0], binding3)

         self.assertIsInstance(plan.markers[0], StageMarker)

@@ -438,10 +447,10 @@ class TestFlowExecutor(TestCase):

         plan: FlowPlan = self.client.session[SESSION_KEY_PLAN]

-        self.assertEqual(plan.stages[0], binding.stage)
-        self.assertEqual(plan.stages[1], binding2.stage)
-        self.assertEqual(plan.stages[2], binding3.stage)
-        self.assertEqual(plan.stages[3], binding4.stage)
+        self.assertEqual(plan.bindings[0], binding)
+        self.assertEqual(plan.bindings[1], binding2)
+        self.assertEqual(plan.bindings[2], binding3)
+        self.assertEqual(plan.bindings[3], binding4)

         self.assertIsInstance(plan.markers[0], StageMarker)
         self.assertIsInstance(plan.markers[1], ReevaluateMarker)
@@ -512,3 +521,78 @@ class TestFlowExecutor(TestCase):

         stage_view = StageView(executor)
         self.assertEqual(ident, stage_view.get_pending_user(for_display=True).username)
+
+    def test_invalid_restart(self):
+        """Test flow that restarts on invalid entry"""
+        flow = Flow.objects.create(
+            name="restart-on-invalid",
+            slug="restart-on-invalid",
+            designation=FlowDesignation.AUTHENTICATION,
+        )
+        # Stage 0 is a deny stage that is added dynamically
+        # when the reputation policy says so
+        deny_stage = DenyStage.objects.create(name="deny")
+        reputation_policy = ReputationPolicy.objects.create(
+            name="reputation", threshold=-1, check_ip=False
+        )
+        deny_binding = FlowStageBinding.objects.create(
+            target=flow,
+            stage=deny_stage,
+            order=0,
+            evaluate_on_plan=False,
+            re_evaluate_policies=True,
+        )
+        PolicyBinding.objects.create(
+            policy=reputation_policy, target=deny_binding, order=0
+        )
+
+        # Stage 1 is an identification stage
+        ident_stage = IdentificationStage.objects.create(
+            name="ident",
+            user_fields=[UserFields.E_MAIL],
+        )
+        FlowStageBinding.objects.create(
+            target=flow,
+            stage=ident_stage,
+            order=1,
+            invalid_response_action=InvalidResponseAction.RESTART_WITH_CONTEXT,
+        )
+        exec_url = reverse(
+            "authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}
+        )
+        # First request, run the planner
+        response = self.client.get(exec_url)
+        self.assertEqual(response.status_code, 200)
+        self.assertJSONEqual(
+            force_str(response.content),
+            {
+                "type": ChallengeTypes.NATIVE.value,
+                "component": "ak-stage-identification",
+                "flow_info": {
+                    "background": flow.background_url,
+                    "cancel_url": reverse("authentik_flows:cancel"),
+                    "title": "",
+                },
+                "password_fields": False,
+                "primary_action": "Log in",
+                "sources": [],
+                "user_fields": [UserFields.E_MAIL],
+            },
+        )
+        response = self.client.post(
+            exec_url, {"uid_field": "invalid-string"}, follow=True
+        )
+        self.assertEqual(response.status_code, 200)
+        self.assertJSONEqual(
+            force_str(response.content),
+            {
+                "component": "ak-stage-access-denied",
+                "error_message": None,
+                "flow_info": {
+                    "background": flow.background_url,
+                    "cancel_url": reverse("authentik_flows:cancel"),
+                    "title": "",
+                },
+                "type": ChallengeTypes.NATIVE.value,
+            },
+        )
@@ -40,15 +40,11 @@ def transaction_rollback():
 class FlowImporter:
     """Import Flow from json"""

-    __import: FlowBundle
-
-    __pk_map: dict[Any, Model]
-
     logger: BoundLogger

     def __init__(self, json_input: str):
+        self.__pk_map: dict[Any, Model] = {}
         self.logger = get_logger()
-        self.__pk_map = {}
         import_dict = loads(json_input)
         try:
             self.__import = from_dict(FlowBundle, import_dict)
@@ -4,6 +4,7 @@ from typing import Any, Optional

 from django.conf import settings
 from django.contrib.auth.mixins import LoginRequiredMixin
+from django.core.cache import cache
 from django.http import Http404, HttpRequest, HttpResponse, HttpResponseRedirect
 from django.http.request import QueryDict
 from django.shortcuts import get_object_or_404, redirect
@@ -37,13 +38,20 @@ from authentik.flows.challenge import (
     WithUserInfoChallenge,
 )
 from authentik.flows.exceptions import EmptyFlowException, FlowNonApplicableException
-from authentik.flows.models import ConfigurableStage, Flow, FlowDesignation, Stage
+from authentik.flows.models import (
+    ConfigurableStage,
+    Flow,
+    FlowDesignation,
+    FlowStageBinding,
+    Stage,
+)
 from authentik.flows.planner import (
     PLAN_CONTEXT_PENDING_USER,
     PLAN_CONTEXT_REDIRECT,
     FlowPlan,
     FlowPlanner,
 )
+from authentik.lib.sentry import SentryIgnoredException
 from authentik.lib.utils.reflection import all_subclasses, class_to_path
 from authentik.lib.utils.urls import is_url_absolute, redirect_with_qs
 from authentik.tenants.models import Tenant
@@ -93,6 +101,10 @@ def challenge_response_types():
     return Inner()


+class InvalidStageError(SentryIgnoredException):
+    """Error raised when a challenge from a stage is not valid"""
+
+
 @method_decorator(xframe_options_sameorigin, name="dispatch")
 class FlowExecutorView(APIView):
     """Stage 1 Flow executor, passing requests to Stage Views"""
@@ -102,6 +114,7 @@ class FlowExecutorView(APIView):
     flow: Flow

     plan: Optional[FlowPlan] = None
+    current_binding: FlowStageBinding
     current_stage: Stage
     current_stage_view: View

@@ -121,7 +134,7 @@ class FlowExecutorView(APIView):
             message = exc.__doc__ if exc.__doc__ else str(exc)
             return self.stage_invalid(error_message=message)

-    # pylint: disable=unused-argument
+    # pylint: disable=unused-argument, too-many-return-statements
     def dispatch(self, request: HttpRequest, flow_slug: str) -> HttpResponse:
         # Early check if theres an active Plan for the current session
         if SESSION_KEY_PLAN in self.request.session:
@@ -154,22 +167,41 @@ class FlowExecutorView(APIView):
             request.session[SESSION_KEY_GET] = QueryDict(request.GET.get("query", ""))
         # We don't save the Plan after getting the next stage
         # as it hasn't been successfully passed yet
-        next_stage = self.plan.next(self.request)
-        if not next_stage:
+        try:
+            # This is the first time we actually access any attribute on the selected plan
+            # if the cached plan is from an older version, it might have different attributes
+            # in which case we just delete the plan and invalidate everything
+            next_binding = self.plan.next(self.request)
+        except Exception as exc:  # pylint: disable=broad-except
+            self._logger.warning(
+                "f(exec): found incompatible flow plan, invalidating run", exc=exc
+            )
+            keys = cache.keys("flow_*")
+            cache.delete_many(keys)
+            return self.stage_invalid()
+        if not next_binding:
             self._logger.debug("f(exec): no more stages, flow is done.")
             return self._flow_done()
-        self.current_stage = next_stage
+        self.current_binding = next_binding
+        self.current_stage = next_binding.stage
         self._logger.debug(
             "f(exec): Current stage",
             current_stage=self.current_stage,
             flow_slug=self.flow.slug,
         )
-        stage_cls = self.current_stage.type
+        try:
+            stage_cls = self.current_stage.type
+        except NotImplementedError as exc:
+            self._logger.debug("Error getting stage type", exc=exc)
+            return self.stage_invalid()
         self.current_stage_view = stage_cls(self)
         self.current_stage_view.args = self.args
         self.current_stage_view.kwargs = self.kwargs
         self.current_stage_view.request = request
-        return super().dispatch(request)
+        try:
+            return super().dispatch(request)
+        except InvalidStageError as exc:
+            return self.stage_invalid(str(exc))

     @extend_schema(
         responses={
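Note: a sketch (standalone helper for clarity, not from the patch) of the invalidation step used above: when a cached FlowPlan was pickled by an older authentik version, every "flow_*" cache key is dropped so the next request re-plans from scratch. cache.keys() is the django-redis extension that authentik relies on, not the plain Django cache API.

    from django.core.cache import cache

    def invalidate_cached_flow_plans() -> None:
        # drop every cached plan so incompatible pickles can never be loaded again
        keys = cache.keys("flow_*")
        cache.delete_many(keys)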
@@ -256,8 +288,31 @@ class FlowExecutorView(APIView):
         planner = FlowPlanner(self.flow)
         plan = planner.plan(self.request)
         self.request.session[SESSION_KEY_PLAN] = plan
+        try:
+            # Call the has_stages getter to check that
+            # there are no issues with the class we might've gotten
+            # from the cache. If there are errors, just delete all cached flows
+            _ = plan.has_stages
+        except Exception:  # pylint: disable=broad-except
+            keys = cache.keys("flow_*")
+            cache.delete_many(keys)
+            return self._initiate_plan()
         return plan

+    def restart_flow(self, keep_context=False) -> HttpResponse:
+        """Restart the currently active flow, optionally keeping the current context"""
+        planner = FlowPlanner(self.flow)
+        default_context = None
+        if keep_context:
+            default_context = self.plan.context
+        plan = planner.plan(self.request, default_context)
+        self.request.session[SESSION_KEY_PLAN] = plan
+        kwargs = self.kwargs
+        kwargs.update({"flow_slug": self.flow.slug})
+        return redirect_with_qs(
+            "authentik_api:flow-executor", self.request.GET, **kwargs
+        )
+
     def _flow_done(self) -> HttpResponse:
         """User Successfully passed all stages"""
         # Since this is wrapped by the ExecutorShell, the next argument is saved in the session
@@ -281,10 +336,10 @@ class FlowExecutorView(APIView):
         )
         self.plan.pop()
         self.request.session[SESSION_KEY_PLAN] = self.plan
-        if self.plan.stages:
+        if self.plan.bindings:
             self._logger.debug(
                 "f(exec): Continuing with next stage",
-                remaining=len(self.plan.stages),
+                remaining=len(self.plan.bindings),
             )
             kwargs = self.kwargs
             kwargs.update({"flow_slug": self.flow.slug})
@@ -353,8 +408,11 @@ class FlowErrorResponse(TemplateResponse):
         context = {}
         context["error"] = self.error
         if self._request.user and self._request.user.is_authenticated:
-            if self._request.user.is_superuser or self._request.user.attributes.get(
-                USER_ATTRIBUTE_DEBUG, False
+            if (
+                self._request.user.is_superuser
+                or self._request.user.group_attributes().get(
+                    USER_ATTRIBUTE_DEBUG, False
+                )
             ):
                 context["tb"] = "".join(format_tb(self.error.__traceback__))
         return context
@@ -26,10 +26,9 @@ class ConfigLoader:

     loaded_file = []

-    __config = {}
-
     def __init__(self):
         super().__init__()
+        self.__config = {}
         base_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), "../.."))
         for path in SEARCH_PATHS:
             # Check if path is relative, and if so join with base_dir
@@ -62,7 +61,7 @@ class ConfigLoader:
         output.update(kwargs)
         print(dumps(output))

-    def update(self, root, updatee):
+    def update(self, root: dict[str, Any], updatee: dict[str, Any]) -> dict[str, Any]:
         """Recursively update dictionary"""
         for key, value in updatee.items():
             if isinstance(value, Mapping):
@@ -73,7 +72,7 @@ class ConfigLoader:
                 root[key] = value
         return root

-    def parse_uri(self, value):
+    def parse_uri(self, value: str) -> str:
         """Parse string values which start with a URI"""
         url = urlparse(value)
         if url.scheme == "env":
@@ -99,7 +98,10 @@ class ConfigLoader:
             raise ImproperlyConfigured from exc
         except PermissionError as exc:
             self._log(
-                "warning", "Permission denied while reading file", path=path, error=exc
+                "warning",
+                "Permission denied while reading file",
+                path=path,
+                error=str(exc),
             )

     def update_from_dict(self, update: dict):
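Note: a hedged usage sketch of ConfigLoader.parse_uri, mirroring the tests added later in this diff (the environment variable names here are illustrative): env:// reads an environment variable and the query string supplies a default when the lookup fails.

    from os import environ

    from authentik.lib.config import ConfigLoader

    config = ConfigLoader()
    environ["MY_VAR"] = "bar"                          # illustrative variable name
    print(config.parse_uri("env://MY_VAR"))            # -> "bar"
    print(config.parse_uri("env://MISSING?fallback"))  # -> "fallback"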
@@ -9,6 +9,7 @@ postgresql:
 web:
   listen: 0.0.0.0:9000
   listen_tls: 0.0.0.0:9443
+  load_local_files: false

 redis:
   host: localhost
@@ -16,6 +17,10 @@ redis:
   cache_db: 0
   message_queue_db: 1
   ws_db: 2
+  cache_timeout: 300
+  cache_timeout_flows: 300
+  cache_timeout_policies: 300
+  cache_timeout_reputation: 300

 debug: false

@@ -45,12 +50,12 @@ outposts:
   # %(build_hash)s: Build hash if you're running a beta version
   docker_image_base: "ghcr.io/goauthentik/%(type)s:%(version)s"

 authentik:
-  avatars: gravatar # gravatar or none
+  avatars: env://AUTHENTIK_AUTHENTIK__AVATARS?gravatar
   geoip: "./GeoLite2-City.mmdb"
-  # Optionally add links to the footer on the login page
+  # Can't currently be configured via environment variables, only yaml
   footer_links:
     - name: Documentation
       href: https://goauthentik.io/docs/
     - name: authentik Website
       href: https://goauthentik.io/
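Note: a sketch (assuming the defaults shipped above) of how the new cache timeouts are read in code; the earlier planner hunk does the same for redis.cache_timeout_flows.

    from authentik.lib.config import CONFIG

    flow_cache_timeout = int(CONFIG.y("redis.cache_timeout_flows"))        # 300 by default
    policy_cache_timeout = int(CONFIG.y("redis.cache_timeout_policies"))   # 300 by default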
@@ -3,6 +3,7 @@ import re
 from textwrap import indent
 from typing import Any, Iterable, Optional

+from django.core.exceptions import FieldError
 from requests import Session
 from rest_framework.serializers import ValidationError
 from sentry_sdk.hub import Hub
@@ -29,10 +30,10 @@ class BaseEvaluator:
         # update website/docs/expressions/_objects.md
         # update website/docs/expressions/_functions.md
         self._globals = {
-            "regex_match": BaseEvaluator.expr_filter_regex_match,
-            "regex_replace": BaseEvaluator.expr_filter_regex_replace,
-            "ak_is_group_member": BaseEvaluator.expr_func_is_group_member,
-            "ak_user_by": BaseEvaluator.expr_func_user_by,
+            "regex_match": BaseEvaluator.expr_regex_match,
+            "regex_replace": BaseEvaluator.expr_regex_replace,
+            "ak_is_group_member": BaseEvaluator.expr_is_group_member,
+            "ak_user_by": BaseEvaluator.expr_user_by,
             "ak_logger": get_logger(),
             "requests": Session(),
         }
@@ -40,25 +41,28 @@ class BaseEvaluator:
         self._filename = "BaseEvalautor"

     @staticmethod
-    def expr_filter_regex_match(value: Any, regex: str) -> bool:
+    def expr_regex_match(value: Any, regex: str) -> bool:
         """Expression Filter to run re.search"""
-        return re.search(regex, value) is None
+        return re.search(regex, value) is not None

     @staticmethod
-    def expr_filter_regex_replace(value: Any, regex: str, repl: str) -> str:
+    def expr_regex_replace(value: Any, regex: str, repl: str) -> str:
         """Expression Filter to run re.sub"""
         return re.sub(regex, repl, value)

     @staticmethod
-    def expr_func_user_by(**filters) -> Optional[User]:
+    def expr_user_by(**filters) -> Optional[User]:
         """Get user by filters"""
-        users = User.objects.filter(**filters)
-        if users:
-            return users.first()
-        return None
+        try:
+            users = User.objects.filter(**filters)
+            if users:
+                return users.first()
+            return None
+        except FieldError:
+            return None

     @staticmethod
-    def expr_func_is_group_member(user: User, **group_filters) -> bool:
+    def expr_is_group_member(user: User, **group_filters) -> bool:
         """Check if `user` is member of group with name `group_name`"""
         return user.ak_groups.filter(**group_filters).exists()

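Note: a hedged sketch (a standalone helper inside a configured authentik environment; the group name is illustrative) of the renamed expression helpers, which are exposed to policy and property-mapping expressions as regex_match, regex_replace, ak_user_by and ak_is_group_member.

    from typing import Optional

    from authentik.core.models import User
    from authentik.lib.expression.evaluator import BaseEvaluator

    def lookup_and_check(username: str) -> bool:
        # the regex helpers are plain static methods, usable outside expressions too
        assert BaseEvaluator.expr_regex_match("foo", "foo")               # fixed: now True on a match
        assert BaseEvaluator.expr_regex_replace("foo", "o", "a") == "faa"
        user: Optional[User] = BaseEvaluator.expr_user_by(username=username)
        if not user:
            return False  # unknown user or invalid filter field returns None, no exception
        return BaseEvaluator.expr_is_group_member(user, name="admins")    # "admins" is illustrative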
61
authentik/lib/tests/test_config.py
Normal file
61
authentik/lib/tests/test_config.py
Normal file
@ -0,0 +1,61 @@
|
|||||||
|
"""Test config loader"""
|
||||||
|
from os import chmod, environ, unlink, write
|
||||||
|
from tempfile import mkstemp
|
||||||
|
|
||||||
|
from django.conf import ImproperlyConfigured
|
||||||
|
from django.test import TestCase
|
||||||
|
|
||||||
|
from authentik.lib.config import ENV_PREFIX, ConfigLoader
|
||||||
|
|
||||||
|
|
||||||
|
class TestConfig(TestCase):
|
||||||
|
"""Test config loader"""
|
||||||
|
|
||||||
|
def test_env(self):
|
||||||
|
"""Test simple instance"""
|
||||||
|
config = ConfigLoader()
|
||||||
|
environ[ENV_PREFIX + "_test__test"] = "bar"
|
||||||
|
config.update_from_env()
|
||||||
|
self.assertEqual(config.y("test.test"), "bar")
|
||||||
|
|
||||||
|
def test_patch(self):
|
||||||
|
"""Test patch decorator"""
|
||||||
|
config = ConfigLoader()
|
||||||
|
config.y_set("foo.bar", "bar")
|
||||||
|
self.assertEqual(config.y("foo.bar"), "bar")
|
||||||
|
with config.patch("foo.bar", "baz"):
|
||||||
|
self.assertEqual(config.y("foo.bar"), "baz")
|
||||||
|
self.assertEqual(config.y("foo.bar"), "bar")
|
||||||
|
|
||||||
|
def test_uri_env(self):
|
||||||
|
"""Test URI parsing (environment)"""
|
||||||
|
config = ConfigLoader()
|
||||||
|
environ["foo"] = "bar"
|
||||||
|
self.assertEqual(config.parse_uri("env://foo"), "bar")
|
||||||
|
self.assertEqual(config.parse_uri("env://fo?bar"), "bar")
|
||||||
|
|
||||||
|
def test_uri_file(self):
|
||||||
|
"""Test URI parsing (file load)"""
|
||||||
|
config = ConfigLoader()
|
||||||
|
file, file_name = mkstemp()
|
||||||
|
write(file, "foo".encode())
|
||||||
|
_, file2_name = mkstemp()
|
||||||
|
chmod(file2_name, 0o000) # Remove all permissions so we can't read the file
|
||||||
|
self.assertEqual(config.parse_uri(f"file://{file_name}"), "foo")
|
||||||
|
self.assertEqual(config.parse_uri(f"file://{file2_name}?def"), "def")
|
||||||
|
unlink(file_name)
|
||||||
|
unlink(file2_name)
|
||||||
|
|
||||||
|
def test_file_update(self):
|
||||||
|
"""Test update_from_file"""
|
||||||
|
config = ConfigLoader()
|
||||||
|
file, file_name = mkstemp()
|
||||||
|
write(file, "{".encode())
|
||||||
|
file2, file2_name = mkstemp()
|
||||||
|
write(file2, "{".encode())
|
||||||
|
chmod(file2_name, 0o000) # Remove all permissions so we can't read the file
|
||||||
|
with self.assertRaises(ImproperlyConfigured):
|
||||||
|
config.update_from_file(file_name)
|
||||||
|
config.update_from_file(file2_name)
|
||||||
|
unlink(file_name)
|
||||||
|
unlink(file2_name)
|
||||||
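The new config tests exercise an env:// and file:// URI convention in which the query string acts as the fallback value. A rough standalone sketch of that convention as the assertions above imply it (this is not the actual ConfigLoader implementation, just an illustration of the behaviour being tested):

from os import environ
from urllib.parse import urlparse


def parse_uri(value: str) -> str:
    # env://NAME?default reads an environment variable, file://path?default
    # reads a file; the query string is returned when the lookup fails.
    url = urlparse(value)
    if url.scheme == "env":
        return environ.get(url.netloc, url.query)
    if url.scheme == "file":
        try:
            with open(url.netloc + url.path, encoding="utf8") as handle:
                return handle.read()
        except OSError:
            return url.query
    return value


environ["foo"] = "bar"
assert parse_uri("env://foo") == "bar"        # variable exists
assert parse_uri("env://fo?bar") == "bar"     # falls back to the ?default
assert parse_uri("file:///does/not/exist?def") == "def"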
authentik/lib/tests/test_evaluator.py (new file, 32 lines)
@@ -0,0 +1,32 @@
+"""Test Evaluator base functions"""
+from django.test import TestCase
+
+from authentik.core.models import User
+from authentik.lib.expression.evaluator import BaseEvaluator
+
+
+class TestEvaluator(TestCase):
+    """Test Evaluator base functions"""
+
+    def test_regex_match(self):
+        """Test expr_regex_match"""
+        self.assertFalse(BaseEvaluator.expr_regex_match("foo", "bar"))
+        self.assertTrue(BaseEvaluator.expr_regex_match("foo", "foo"))
+
+    def test_regex_replace(self):
+        """Test expr_regex_replace"""
+        self.assertEqual(BaseEvaluator.expr_regex_replace("foo", "o", "a"), "faa")
+
+    def test_user_by(self):
+        """Test expr_user_by"""
+        self.assertIsNotNone(BaseEvaluator.expr_user_by(username="akadmin"))
+        self.assertIsNone(BaseEvaluator.expr_user_by(username="bar"))
+        self.assertIsNone(BaseEvaluator.expr_user_by(foo="bar"))
+
+    def test_is_group_member(self):
+        """Test expr_is_group_member"""
+        self.assertFalse(
+            BaseEvaluator.expr_is_group_member(
+                User.objects.get(username="akadmin"), name="test"
+            )
+        )
@@ -33,7 +33,7 @@ def _get_outpost_override_ip(request: HttpRequest) -> Optional[str]:
         return None
     if OUTPOST_REMOTE_IP_HEADER not in request.META:
         return None
-    if request.user.attributes.get(USER_ATTRIBUTE_CAN_OVERRIDE_IP, False):
+    if request.user.group_attributes().get(USER_ATTRIBUTE_CAN_OVERRIDE_IP, False):
         return None
     return request.META[OUTPOST_REMOTE_IP_HEADER]

@@ -51,7 +51,7 @@ class OutpostSerializer(ModelSerializer):
                 raise ValidationError(
                     (
                         f"Outpost type {self.initial_data['type']} can't be used with "
-                        f"{type(provider)} providers."
+                        f"{provider.__class__.__name__} providers."
                     )
                 )
         return providers
@@ -67,14 +67,9 @@ class OutpostConsumer(AuthJsonConsumer):
         self.accept()
         self.outpost = outpost.first()
         self.last_uid = self.channel_name
-        LOGGER.debug(
-            "added outpost instace to cache",
-            outpost=self.outpost,
-            channel_name=self.channel_name,
-        )

     # pylint: disable=unused-argument
-    def disconnect(self, close_code):
+    def disconnect(self, code):
         if self.outpost and self.last_uid:
             state = OutpostState.for_instance_uid(self.outpost, self.last_uid)
             if self.channel_name in state.channel_ids:
@@ -108,6 +103,11 @@ class OutpostConsumer(AuthJsonConsumer):
                 outpost=self.outpost.name,
                 uid=self.last_uid,
             ).inc()
+            LOGGER.debug(
+                "added outpost instace to cache",
+                outpost=self.outpost,
+                instance_uuid=self.last_uid,
+            )
             self.first_msg = True

         if msg.instruction == WebsocketMessageInstruction.HELLO:
@@ -36,8 +36,10 @@ class DockerController(BaseController):

     def _get_env(self) -> dict[str, str]:
         return {
-            "AUTHENTIK_HOST": self.outpost.config.authentik_host,
+            "AUTHENTIK_HOST": self.outpost.config.authentik_host.lower(),
-            "AUTHENTIK_INSECURE": str(self.outpost.config.authentik_host_insecure),
+            "AUTHENTIK_INSECURE": str(
+                self.outpost.config.authentik_host_insecure
+            ).lower(),
             "AUTHENTIK_TOKEN": self.outpost.token.key,
         }

@@ -45,11 +47,34 @@ class DockerController(BaseController):
         """Check if container's env is equal to what we would set. Return true if container needs
         to be rebuilt."""
         should_be = self._get_env()
-        container_env = container.attrs.get("Config", {}).get("Env", {})
+        container_env = container.attrs.get("Config", {}).get("Env", [])
         for key, expected_value in should_be.items():
-            if key not in container_env:
-                continue
-            if container_env[key] != expected_value:
+            entry = f"{key.upper()}={expected_value}"
+            if entry not in container_env:
                 return True
+        return False
+
+    def _comp_ports(self, container: Container) -> bool:
+        """Check that the container has the correct ports exposed. Return true if container needs
+        to be rebuilt."""
+        # with TEST enabled, we use host-network
+        if settings.TEST:
+            return False
+        # When the container isn't running, the API doesn't report any port mappings
+        if container.status != "running":
+            return False
+        # {'3389/tcp': [
+        #     {'HostIp': '0.0.0.0', 'HostPort': '389'},
+        #     {'HostIp': '::', 'HostPort': '389'}
+        # ]}
+        for port in self.deployment_ports:
+            key = f"{port.inner_port or port.port}/{port.protocol.lower()}"
+            if key not in container.ports:
+                return True
+            host_matching = False
+            for host_port in container.ports[key]:
+                host_matching = host_port.get("HostPort") == str(port.port)
+            if not host_matching:
+                return True
         return False

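Docker reports a container's environment as a list of KEY=value strings and its published ports as a mapping like the one quoted in the comment above; that is what the rewritten _comp_env and the new _comp_ports compare against. A condensed, standalone sketch of both checks (plain dict/list inputs instead of a docker-py Container, any() instead of the explicit flag loop, and made-up example values):

def env_needs_rebuild(should_be: dict[str, str], container_env: list[str]) -> bool:
    # True when any expected KEY=value pair is missing from the container env list.
    return any(
        f"{key.upper()}={value}" not in container_env
        for key, value in should_be.items()
    )


def ports_need_rebuild(wanted: dict[str, int], container_ports: dict) -> bool:
    # True when a wanted "<port>/<proto>" key is unmapped or maps to the wrong host port.
    for key, host_port in wanted.items():
        mappings = container_ports.get(key, [])
        if not any(m.get("HostPort") == str(host_port) for m in mappings):
            return True
    return False


env = ["AUTHENTIK_HOST=https://authentik.example.com", "AUTHENTIK_INSECURE=false"]
assert not env_needs_rebuild({"authentik_insecure": "false"}, env)
assert env_needs_rebuild({"authentik_insecure": "true"}, env)

ports = {"3389/tcp": [{"HostIp": "0.0.0.0", "HostPort": "389"}, {"HostIp": "::", "HostPort": "389"}]}
assert not ports_need_rebuild({"3389/tcp": 389}, ports)
assert ports_need_rebuild({"636/tcp": 636}, ports)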
@@ -58,7 +83,7 @@ class DockerController(BaseController):
         try:
             return self.client.containers.get(container_name), False
         except NotFound:
-            self.logger.info("Container does not exist, creating")
+            self.logger.info("(Re-)creating container...")
             image_name = self.get_container_image()
             self.client.images.pull(image_name)
             container_args = {
@@ -66,7 +91,7 @@ class DockerController(BaseController):
                 "name": container_name,
                 "detach": True,
                 "ports": {
-                    f"{port.port}/{port.protocol.lower()}": port.inner_port or port.port
+                    f"{port.inner_port or port.port}/{port.protocol.lower()}": port.port
                     for port in self.deployment_ports
                 },
                 "environment": self._get_env(),
@@ -86,6 +111,7 @@ class DockerController(BaseController):
         try:
             container, has_been_created = self._get_container()
             if has_been_created:
+                container.start()
                 return None
             # Check if the container is out of date, delete it and retry
             if len(container.image.tags) > 0:
@@ -98,6 +124,11 @@ class DockerController(BaseController):
                 )
                 self.down()
                 return self.up()
+            # Check container's ports
+            if self._comp_ports(container):
+                self.logger.info("Container has mis-matched ports, re-creating...")
+                self.down()
+                return self.up()
             # Check that container values match our values
             if self._comp_env(container):
                 self.logger.info("Container has outdated config, re-creating...")
@@ -138,6 +169,7 @@ class DockerController(BaseController):
                 self.logger.info("Container is not running, restarting...")
                 container.start()
                 return None
+            self.logger.info("Container is running")
             return None
         except DockerException as exc:
             raise ControllerException(str(exc)) from exc
@@ -405,7 +405,10 @@ class Outpost(models.Model):

     def get_required_objects(self) -> Iterable[Union[models.Model, str]]:
         """Get an iterator of all objects the user needs read access to"""
-        objects: list[Union[models.Model, str]] = [self]
+        objects: list[Union[models.Model, str]] = [
+            self,
+            "authentik_events.add_event",
+        ]
         for provider in (
             Provider.objects.filter(outpost=self).select_related().select_subclasses()
         ):
@@ -9,7 +9,7 @@ CELERY_BEAT_SCHEDULE = {
     },
     "outposts_service_connection_check": {
        "task": "authentik.outposts.tasks.outpost_service_connection_monitor",
-        "schedule": crontab(minute="*/60"),
+        "schedule": crontab(minute="*/5"),
        "options": {"queue": "authentik_scheduled"},
    },
    "outpost_token_ensurer": {
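For the schedule change: with celery's crontab, minute="*/60" only ever matches minute 0, so the service-connection monitor effectively ran hourly; "*/5" runs it every five minutes. A quick illustration (only the two schedule strings from the diff are used, nothing else is assumed):

from celery.schedules import crontab

# "*/60" expands to {0}: the step covers the whole 0-59 range in one jump,
# so the task only fires at the top of the hour.
hourly_only = crontab(minute="*/60")
# "*/5" expands to every fifth minute: 0, 5, 10, ... 55.
every_five = crontab(minute="*/5")

print(sorted(hourly_only.minute))  # [0]
print(sorted(every_five.minute))   # [0, 5, 10, ..., 55]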
@@ -1,7 +1,7 @@
 """authentik outpost signals"""
 from django.core.cache import cache
 from django.db.models import Model
-from django.db.models.signals import post_save, pre_delete, pre_save
+from django.db.models.signals import m2m_changed, post_save, pre_delete, pre_save
 from django.dispatch import receiver
 from structlog.stdlib import get_logger

@@ -46,6 +46,14 @@ def pre_save_outpost(sender, instance: Outpost, **_):
         outpost_controller.delay(instance.pk.hex, action="down", from_cache=True)


+@receiver(m2m_changed, sender=Outpost.providers.through)
+# pylint: disable=unused-argument
+def m2m_changed_update(sender, instance: Model, action: str, **_):
+    """Update outpost on m2m change, when providers are added or removed"""
+    if action in ["post_add", "post_remove", "post_clear"]:
+        outpost_post_save.delay(class_to_path(instance.__class__), instance.pk)
+
+
 @receiver(post_save)
 # pylint: disable=unused-argument
 def post_save_update(sender, instance: Model, **_):
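For context on the action filter in the new receiver: Django fires m2m_changed both before and after each mutation (pre_add/post_add and so on), so reacting only to the post_* variants avoids scheduling the outpost update twice and guarantees the database already reflects the change. A minimal generic sketch (the print and the unbound receiver are illustrative; the real receiver is bound to Outpost.providers.through as shown above):

from django.db.models.signals import m2m_changed
from django.dispatch import receiver


@receiver(m2m_changed)
def log_m2m_change(sender, instance, action, **_):
    # React once per change: only after the relation table was updated.
    if action in ["post_add", "post_remove", "post_clear"]:
        print(f"{instance!r}: related objects changed ({action})")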
@@ -82,13 +82,13 @@ class PolicyBindingSerializer(ModelSerializer):
             "timeout",
         ]

-    def validate(self, data: OrderedDict) -> OrderedDict:
+    def validate(self, attrs: OrderedDict) -> OrderedDict:
         """Check that either policy, group or user is set."""
         count = sum(
             [
-                bool(data.get("policy", None)),
-                bool(data.get("group", None)),
-                bool(data.get("user", None)),
+                bool(attrs.get("policy", None)),
+                bool(attrs.get("group", None)),
+                bool(attrs.get("user", None)),
             ]
         )
         invalid = count > 1
@@ -97,7 +97,7 @@ class PolicyBindingSerializer(ModelSerializer):
             raise ValidationError("Only one of 'policy', 'group' or 'user' can be set.")
         if empty:
             raise ValidationError("One of 'policy', 'group' or 'user' must be set.")
-        return data
+        return attrs


 class PolicyBindingViewSet(UsedByMixin, ModelViewSet):
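The serializer change only renames data to attrs (DRF's conventional argument name for validate()); the rule itself is unchanged: a binding must target exactly one of policy, group or user. A condensed standalone sketch of that rule, assuming the unshown "empty" flag is count < 1 and using a plain ValueError instead of DRF's ValidationError:

def validate_binding_target(attrs: dict) -> dict:
    # Exactly one of "policy", "group" or "user" may be set on a binding.
    count = sum(bool(attrs.get(key)) for key in ("policy", "group", "user"))
    if count > 1:
        raise ValueError("Only one of 'policy', 'group' or 'user' can be set.")
    if count < 1:
        raise ValueError("One of 'policy', 'group' or 'user' must be set.")
    return attrs


validate_binding_target({"policy": "some-policy-uuid"})  # passes; uuid value is made up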
@@ -37,7 +37,9 @@ class AccessDeniedResponse(TemplateResponse):
         if self._request.user and self._request.user.is_authenticated:
             if (
                 self._request.user.is_superuser
-                or self._request.user.attributes.get(USER_ATTRIBUTE_DEBUG, False)
+                or self._request.user.group_attributes().get(
+                    USER_ATTRIBUTE_DEBUG, False
+                )
             ):
                 context["policy_result"] = self.policy_result
         return context
@@ -62,12 +62,6 @@ class PolicyEngine:
     # Allow objects with no policies attached to pass
     empty_result: bool

-    __pbm: PolicyBindingModel
-    __cached_policies: list[PolicyResult]
-    __processes: list[PolicyProcessInfo]
-
-    __expected_result_count: int
-
     def __init__(
         self, pbm: PolicyBindingModel, user: User, request: HttpRequest = None
     ):
@@ -83,8 +77,8 @@ class PolicyEngine:
         self.request.obj = pbm
         if request:
             self.request.set_http_request(request)
-        self.__cached_policies = []
-        self.__processes = []
+        self.__cached_policies: list[PolicyResult] = []
+        self.__processes: list[PolicyProcessInfo] = []
         self.use_cache = True
         self.__expected_result_count = 0

@@ -10,6 +10,7 @@ from sentry_sdk.tracing import Span
 from structlog.stdlib import get_logger

 from authentik.events.models import Event, EventAction
+from authentik.lib.config import CONFIG
 from authentik.lib.utils.errors import exception_to_string
 from authentik.policies.exceptions import PolicyException
 from authentik.policies.models import PolicyBinding
@@ -18,6 +19,7 @@ from authentik.policies.types import PolicyRequest, PolicyResult
 LOGGER = get_logger()

 FORK_CTX = get_context("fork")
+CACHE_TIMEOUT = int(CONFIG.y("redis.cache_timeout_policies"))
 PROCESS_CLASS = FORK_CTX.Process
 HIST_POLICIES_EXECUTION_TIME = Histogram(
     "authentik_policies_execution_time",
@@ -114,7 +116,7 @@ class PolicyProcess(PROCESS_CLASS):
         policy_result.source_binding = self.binding
         if not self.request.debug:
             key = cache_key(self.binding, self.request)
-            cache.set(key, policy_result)
+            cache.set(key, policy_result, CACHE_TIMEOUT)
             LOGGER.debug(
                 "P_ENG(proc): finished and cached ",
                 policy=self.binding.policy,
@@ -33,21 +33,21 @@ class ReputationPolicy(Policy):

     def passes(self, request: PolicyRequest) -> PolicyResult:
         remote_ip = get_client_ip(request.http_request)
-        passing = True
+        passing = False
         if self.check_ip:
             score = cache.get_or_set(CACHE_KEY_IP_PREFIX + remote_ip, 0)
-            passing = passing and score <= self.threshold
+            passing += passing or score <= self.threshold
             LOGGER.debug("Score for IP", ip=remote_ip, score=score, passing=passing)
         if self.check_username:
             score = cache.get_or_set(CACHE_KEY_USER_PREFIX + request.user.username, 0)
-            passing = passing and score <= self.threshold
+            passing += passing or score <= self.threshold
             LOGGER.debug(
                 "Score for Username",
                 username=request.user.username,
                 score=score,
                 passing=passing,
             )
-        return PolicyResult(passing)
+        return PolicyResult(bool(passing))

     class Meta:
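The reputation change flips the aggregation: instead of starting at True and AND-ing every enabled check (so a single bad score failed the whole policy), it starts at a falsy value, accumulates passing checks, and returns bool() of the sum. A standalone sketch of the new behaviour with made-up scores and thresholds:

def reputation_passes(ip_score: int, user_score: int, threshold: int,
                      check_ip: bool = True, check_username: bool = True) -> bool:
    # Mirrors the updated accumulation in ReputationPolicy.passes().
    passing = False
    if check_ip:
        passing += passing or ip_score <= threshold
    if check_username:
        passing += passing or user_score <= threshold
    return bool(passing)


# One over-threshold score no longer vetoes the request when the other check is fine:
assert reputation_passes(ip_score=10, user_score=0, threshold=5) is True
# Both scores over the threshold still fail:
assert reputation_passes(ip_score=10, user_score=10, threshold=5) is False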
@@ -5,6 +5,7 @@ from django.dispatch import receiver
 from django.http import HttpRequest
 from structlog.stdlib import get_logger

+from authentik.lib.config import CONFIG
 from authentik.lib.utils.http import get_client_ip
 from authentik.policies.reputation.models import (
     CACHE_KEY_IP_PREFIX,
@@ -13,6 +14,7 @@ from authentik.policies.reputation.models import (
 from authentik.stages.identification.signals import identification_failed

 LOGGER = get_logger()
+CACHE_TIMEOUT = int(CONFIG.y("redis.cache_timeout_reputation"))


 def update_score(request: HttpRequest, username: str, amount: int):
@@ -20,10 +22,10 @@ def update_score(request: HttpRequest, username: str, amount: int):
     remote_ip = get_client_ip(request)

     # We only update the cache here, as its faster than writing to the DB
-    cache.get_or_set(CACHE_KEY_IP_PREFIX + remote_ip, 0)
+    cache.get_or_set(CACHE_KEY_IP_PREFIX + remote_ip, 0, CACHE_TIMEOUT)
     cache.incr(CACHE_KEY_IP_PREFIX + remote_ip, amount)

-    cache.get_or_set(CACHE_KEY_USER_PREFIX + username, 0)
+    cache.get_or_set(CACHE_KEY_USER_PREFIX + username, 0, CACHE_TIMEOUT)
     cache.incr(CACHE_KEY_USER_PREFIX + username, amount)

     LOGGER.debug("Updated score", amount=amount, for_user=username, for_ip=remote_ip)
@@ -105,6 +105,7 @@ class PolicyAccessView(AccessMixin, View):
         policy_engine = PolicyEngine(
             self.application, user or self.request.user, self.request
         )
+        policy_engine.use_cache = False
         policy_engine.build()
         result = policy_engine.result
         LOGGER.debug(
@@ -51,6 +51,7 @@ class RefreshTokenModelSerializer(ExpiringBaseGrantModelSerializer):
             "expires",
             "scope",
             "id_token",
+            "revoked",
         ]
         depth = 2

@@ -9,7 +9,7 @@ return {}
 """
 SCOPE_EMAIL_EXPRESSION = """
 return {
-    "email": user.email,
+    "email": request.user.email,
     "email_verified": True
 }
 """
@@ -17,14 +17,14 @@ SCOPE_PROFILE_EXPRESSION = """
 return {
     # Because authentik only saves the user's full name, and has no concept of first and last names,
     # the full name is used as given name.
-    # You can override this behaviour in custom mappings, i.e. `user.name.split(" ")`
-    "name": user.name,
-    "given_name": user.name,
+    # You can override this behaviour in custom mappings, i.e. `request.user.name.split(" ")`
+    "name": request.user.name,
+    "given_name": request.user.name,
     "family_name": "",
-    "preferred_username": user.username,
-    "nickname": user.username,
+    "preferred_username": request.user.username,
+    "nickname": request.user.username,
     # groups is not part of the official userinfo schema, but is a quasi-standard
-    "groups": [group.name for group in user.ak_groups.all()],
+    "groups": [group.name for group in request.user.ak_groups.all()],
 }
 """
@@ -0,0 +1,23 @@
+# Generated by Django 3.2.4 on 2021-07-03 13:13
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_providers_oauth2", "0014_alter_oauth2provider_rsa_key"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="authorizationcode",
+            name="revoked",
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name="refreshtoken",
+            name="revoked",
+            field=models.BooleanField(default=False),
+        ),
+    ]
@@ -278,7 +278,7 @@ class OAuth2Provider(Provider):
         """Guess launch_url based on first redirect_uri"""
         if self.redirect_uris == "":
             return None
-        main_url = self.redirect_uris.split("\n")[0]
+        main_url = self.redirect_uris.split("\n", maxsplit=1)[0]
         launch_url = urlparse(main_url)
         return main_url.replace(launch_url.path, "")

@@ -318,6 +318,7 @@ class BaseGrantModel(models.Model):
     provider = models.ForeignKey(OAuth2Provider, on_delete=models.CASCADE)
     user = models.ForeignKey(User, verbose_name=_("User"), on_delete=models.CASCADE)
     _scope = models.TextField(default="", verbose_name=_("Scopes"))
+    revoked = models.BooleanField(default=False)

     @property
     def scope(self) -> list[str]:
@@ -473,9 +474,7 @@ class RefreshToken(ExpiringModel, BaseGrantModel):
         # Convert datetimes into timestamps.
         now = int(time.time())
         iat_time = now
-        exp_time = int(
-            now + timedelta_from_string(self.provider.token_validity).seconds
-        )
+        exp_time = int(dateformat.format(self.expires, "U"))
         # We use the timestamp of the user's last successful login (EventAction.LOGIN) for auth_time
         auth_events = Event.objects.filter(
             action=EventAction.LOGIN, user=get_user(user)
@@ -6,6 +6,8 @@ from django.urls import reverse
 from django.utils.encoding import force_str

 from authentik.core.models import Application, User
+from authentik.crypto.models import CertificateKeyPair
+from authentik.events.models import Event, EventAction
 from authentik.flows.models import Flow
 from authentik.providers.oauth2.constants import (
     GRANT_TYPE_AUTHORIZATION_CODE,
@@ -39,7 +41,8 @@ class TestToken(OAuthTestCase):
             client_id=generate_client_id(),
             client_secret=generate_client_secret(),
             authorization_flow=Flow.objects.first(),
-            redirect_uris="http://local.invalid",
+            redirect_uris="http://testserver",
+            rsa_key=CertificateKeyPair.objects.first(),
         )
         header = b64encode(
             f"{provider.client_id}:{provider.client_secret}".encode()
@@ -53,11 +56,13 @@ class TestToken(OAuthTestCase):
             data={
                 "grant_type": GRANT_TYPE_AUTHORIZATION_CODE,
                 "code": code.code,
-                "redirect_uri": "http://local.invalid",
+                "redirect_uri": "http://testserver",
             },
             HTTP_AUTHORIZATION=f"Basic {header}",
         )
-        params = TokenParams.from_request(request)
+        params = TokenParams.parse(
+            request, provider, provider.client_id, provider.client_secret
+        )
         self.assertEqual(params.provider, provider)

     def test_request_refresh_token(self):
@@ -68,6 +73,7 @@ class TestToken(OAuthTestCase):
             client_secret=generate_client_secret(),
             authorization_flow=Flow.objects.first(),
             redirect_uris="http://local.invalid",
+            rsa_key=CertificateKeyPair.objects.first(),
         )
         header = b64encode(
             f"{provider.client_id}:{provider.client_secret}".encode()
@@ -87,7 +93,9 @@ class TestToken(OAuthTestCase):
             },
             HTTP_AUTHORIZATION=f"Basic {header}",
         )
-        params = TokenParams.from_request(request)
+        params = TokenParams.parse(
+            request, provider, provider.client_id, provider.client_secret
+        )
         self.assertEqual(params.provider, provider)

     def test_auth_code_view(self):
@@ -98,6 +106,7 @@ class TestToken(OAuthTestCase):
             client_secret=generate_client_secret(),
             authorization_flow=Flow.objects.first(),
             redirect_uris="http://local.invalid",
+            rsa_key=CertificateKeyPair.objects.first(),
         )
         # Needs to be assigned to an application for iss to be set
         self.app.provider = provider
@@ -141,6 +150,7 @@ class TestToken(OAuthTestCase):
             client_secret=generate_client_secret(),
             authorization_flow=Flow.objects.first(),
             redirect_uris="http://local.invalid",
+            rsa_key=CertificateKeyPair.objects.first(),
         )
         # Needs to be assigned to an application for iss to be set
         self.app.provider = provider
@@ -193,6 +203,7 @@ class TestToken(OAuthTestCase):
             client_secret=generate_client_secret(),
             authorization_flow=Flow.objects.first(),
             redirect_uris="http://local.invalid",
+            rsa_key=CertificateKeyPair.objects.first(),
         )
         header = b64encode(
             f"{provider.client_id}:{provider.client_secret}".encode()
@@ -230,3 +241,65 @@ class TestToken(OAuthTestCase):
                 ),
             },
         )
+
+    def test_refresh_token_revoke(self):
+        """test request param"""
+        provider = OAuth2Provider.objects.create(
+            name="test",
+            client_id=generate_client_id(),
+            client_secret=generate_client_secret(),
+            authorization_flow=Flow.objects.first(),
+            redirect_uris="http://testserver",
+            rsa_key=CertificateKeyPair.objects.first(),
+        )
+        # Needs to be assigned to an application for iss to be set
+        self.app.provider = provider
+        self.app.save()
+        header = b64encode(
+            f"{provider.client_id}:{provider.client_secret}".encode()
+        ).decode()
+        user = User.objects.get(username="akadmin")
+        token: RefreshToken = RefreshToken.objects.create(
+            provider=provider,
+            user=user,
+            refresh_token=generate_client_id(),
+        )
+        # Create initial refresh token
+        response = self.client.post(
+            reverse("authentik_providers_oauth2:token"),
+            data={
+                "grant_type": GRANT_TYPE_REFRESH_TOKEN,
+                "refresh_token": token.refresh_token,
+                "redirect_uri": "http://testserver",
+            },
+            HTTP_AUTHORIZATION=f"Basic {header}",
+        )
+        new_token: RefreshToken = (
+            RefreshToken.objects.filter(user=user).exclude(pk=token.pk).first()
+        )
+        # Post again with initial token -> get new refresh token
+        # and revoke old one
+        response = self.client.post(
+            reverse("authentik_providers_oauth2:token"),
+            data={
+                "grant_type": GRANT_TYPE_REFRESH_TOKEN,
+                "refresh_token": new_token.refresh_token,
+                "redirect_uri": "http://local.invalid",
+            },
+            HTTP_AUTHORIZATION=f"Basic {header}",
+        )
+        self.assertEqual(response.status_code, 200)
+        # Post again with old token, is now revoked and should error
+        response = self.client.post(
+            reverse("authentik_providers_oauth2:token"),
+            data={
+                "grant_type": GRANT_TYPE_REFRESH_TOKEN,
+                "refresh_token": new_token.refresh_token,
+                "redirect_uri": "http://local.invalid",
+            },
+            HTTP_AUTHORIZATION=f"Basic {header}",
+        )
+        self.assertEqual(response.status_code, 400)
+        self.assertTrue(
+            Event.objects.filter(action=EventAction.SUSPICIOUS_REQUEST).exists()
+        )
@@ -10,6 +10,7 @@ from django.http.response import HttpResponseRedirect
 from django.utils.cache import patch_vary_headers
 from structlog.stdlib import get_logger

+from authentik.events.models import Event, EventAction
 from authentik.providers.oauth2.errors import BearerTokenError
 from authentik.providers.oauth2.models import RefreshToken

@@ -50,7 +51,7 @@ def cors_allow(request: HttpRequest, response: HttpResponse, *allowed_origins: s
     if not allowed:
         LOGGER.warning(
             "CORS: Origin is not an allowed origin",
-            requested=origin,
+            requested=received_origin,
             allowed=allowed_origins,
         )
         return response
@@ -132,22 +133,31 @@ def protected_resource_view(scopes: list[str]):
                     raise BearerTokenError("invalid_token")

                 try:
-                    kwargs["token"] = RefreshToken.objects.get(
+                    token: RefreshToken = RefreshToken.objects.get(
                         access_token=access_token
                     )
                 except RefreshToken.DoesNotExist:
                     LOGGER.debug("Token does not exist", access_token=access_token)
                     raise BearerTokenError("invalid_token")

-                if kwargs["token"].is_expired:
+                if token.is_expired:
                     LOGGER.debug("Token has expired", access_token=access_token)
                     raise BearerTokenError("invalid_token")

-                if not set(scopes).issubset(set(kwargs["token"].scope)):
+                if token.revoked:
+                    LOGGER.warning("Revoked token was used", access_token=access_token)
+                    Event.new(
+                        action=EventAction.SUSPICIOUS_REQUEST,
+                        message="Revoked refresh token was used",
+                        token=access_token,
+                    ).from_http(request)
+                    raise BearerTokenError("invalid_token")
+
+                if not set(scopes).issubset(set(token.scope)):
                     LOGGER.warning(
                         "Scope missmatch.",
                         required=set(scopes),
-                        token_has=set(kwargs["token"].scope),
+                        token_has=set(token.scope),
                     )
                     raise BearerTokenError("insufficient_scope")
             except BearerTokenError as error:
@@ -156,7 +166,7 @@ def protected_resource_view(scopes: list[str]):
                     "WWW-Authenticate"
                 ] = f'error="{error.code}", error_description="{error.description}"'
                 return response
+            kwargs["token"] = token
             return view(request, *args, **kwargs)

         return view_wrapper
@@ -374,9 +374,9 @@ class OAuthFulfillmentStage(StageView):
         query_fragment["code"] = code.code

         query_fragment["token_type"] = "bearer"
-        query_fragment["expires_in"] = timedelta_from_string(
-            self.provider.token_validity
-        ).seconds
+        query_fragment["expires_in"] = int(
+            timedelta_from_string(self.provider.token_validity).total_seconds()
+        )
         query_fragment["state"] = self.params.state if self.params.state else ""

         return query_fragment
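The expires_in fix matters because timedelta.seconds only holds the sub-day component and silently drops whole days, whereas total_seconds() is the real duration. A quick worked example (the 30-day validity is just an illustration):

from datetime import timedelta

validity = timedelta(days=30)
# .seconds is the leftover time-of-day component -> 0 for a whole number of days
assert validity.seconds == 0
# total_seconds() is the actual lifetime, which is what expires_in should carry
assert int(validity.total_seconds()) == 30 * 24 * 60 * 60  # 2592000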
@@ -468,14 +468,14 @@ class AuthorizationFlowInitView(PolicyAccessView):
         # OpenID clients can specify a `prompt` parameter, and if its set to consent we
         # need to inject a consent stage
         if PROMPT_CONSNET in self.params.prompt:
-            if not any(isinstance(x, ConsentStageView) for x in plan.stages):
+            if not any(isinstance(x.stage, ConsentStageView) for x in plan.bindings):
                 # Plan does not have any consent stage, so we add an in-memory one
                 stage = ConsentStage(
                     name="OAuth2 Provider In-memory consent stage",
                     mode=ConsentMode.ALWAYS_REQUIRE,
                 )
-                plan.append(stage)
+                plan.append_stage(stage)
-        plan.append(in_memory_stage(OAuthFulfillmentStage))
+        plan.append_stage(in_memory_stage(OAuthFulfillmentStage))
         self.request.session[SESSION_KEY_PLAN] = plan
         return redirect_with_qs(
             "authentik_core:if-flow",
@@ -8,6 +8,7 @@ from django.http import HttpRequest, HttpResponse
 from django.views import View
 from structlog.stdlib import get_logger

+from authentik.events.models import Event, EventAction
 from authentik.lib.utils.time import timedelta_from_string
 from authentik.providers.oauth2.constants import (
     GRANT_TYPE_AUTHORIZATION_CODE,
@@ -30,6 +31,7 @@ LOGGER = get_logger()


 @dataclass
+# pylint: disable=too-many-instance-attributes
 class TokenParams:
     """Token params"""

@@ -40,6 +42,8 @@ class TokenParams:
     state: str
     scope: list[str]

+    provider: OAuth2Provider
+
     authorization_code: Optional[AuthorizationCode] = None
     refresh_token: Optional[RefreshToken] = None

@@ -47,35 +51,34 @@ class TokenParams:

     raw_code: InitVar[str] = ""
     raw_token: InitVar[str] = ""
+    request: InitVar[Optional[HttpRequest]] = None

     @staticmethod
-    def from_request(request: HttpRequest) -> "TokenParams":
-        """Extract Token Parameters from http request"""
-        client_id, client_secret = extract_client_auth(request)
+    def parse(
+        request: HttpRequest,
+        provider: OAuth2Provider,
+        client_id: str,
+        client_secret: str,
+    ) -> "TokenParams":
+        """Parse params for request"""
         return TokenParams(
+            # Init vars
+            raw_code=request.POST.get("code", ""),
+            raw_token=request.POST.get("refresh_token", ""),
+            request=request,
+            # Regular params
+            provider=provider,
             client_id=client_id,
             client_secret=client_secret,
             redirect_uri=request.POST.get("redirect_uri", ""),
             grant_type=request.POST.get("grant_type", ""),
-            raw_code=request.POST.get("code", ""),
-            raw_token=request.POST.get("refresh_token", ""),
             state=request.POST.get("state", ""),
             scope=request.POST.get("scope", "").split(),
             # PKCE parameter.
             code_verifier=request.POST.get("code_verifier"),
         )

-    def __post_init__(self, raw_code, raw_token):
-        try:
-            provider: OAuth2Provider = OAuth2Provider.objects.get(
-                client_id=self.client_id
-            )
-            self.provider = provider
-        except OAuth2Provider.DoesNotExist:
-            LOGGER.warning("OAuth2Provider does not exist", client_id=self.client_id)
-            raise TokenError("invalid_client")
-
+    def __post_init__(self, raw_code: str, raw_token: str, request: HttpRequest):
         if self.provider.client_type == ClientTypes.CONFIDENTIAL:
             if self.provider.client_secret != self.client_secret:
                 LOGGER.warning(
@@ -87,7 +90,6 @@ class TokenParams:

         if self.grant_type == GRANT_TYPE_AUTHORIZATION_CODE:
             self.__post_init_code(raw_code)
-
         elif self.grant_type == GRANT_TYPE_REFRESH_TOKEN:
             if not raw_token:
                 LOGGER.warning("Missing refresh token")
@@ -107,7 +109,14 @@ class TokenParams:
                     token=raw_token,
                 )
                 raise TokenError("invalid_grant")
+            if self.refresh_token.revoked:
+                LOGGER.warning("Refresh token is revoked", token=raw_token)
+                Event.new(
+                    action=EventAction.SUSPICIOUS_REQUEST,
+                    message="Revoked refresh token was used",
+                    token=raw_token,
+                ).from_http(request)
+                raise TokenError("invalid_grant")
         else:
             LOGGER.warning("Invalid grant type", grant_type=self.grant_type)
             raise TokenError("unsupported_grant_type")
@@ -159,13 +168,14 @@ class TokenParams:
 class TokenView(View):
     """Generate tokens for clients"""

+    provider: Optional[OAuth2Provider] = None
     params: Optional[TokenParams] = None

     def dispatch(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
         response = super().dispatch(request, *args, **kwargs)
         allowed_origins = []
-        if self.params:
-            allowed_origins = self.params.provider.redirect_uris.split("\n")
+        if self.provider:
+            allowed_origins = self.provider.redirect_uris.split("\n")
         cors_allow(self.request, response, *allowed_origins)
         return response

@@ -175,19 +185,32 @@ class TokenView(View):
     def post(self, request: HttpRequest) -> HttpResponse:
         """Generate tokens for clients"""
         try:
-            self.params = TokenParams.from_request(request)
+            client_id, client_secret = extract_client_auth(request)
+            try:
+                self.provider = OAuth2Provider.objects.get(client_id=client_id)
+            except OAuth2Provider.DoesNotExist:
+                LOGGER.warning(
+                    "OAuth2Provider does not exist", client_id=self.client_id
+                )
+                raise TokenError("invalid_client")
+
+            if not self.provider:
+                raise ValueError
+            self.params = TokenParams.parse(
+                request, self.provider, client_id, client_secret
+            )

             if self.params.grant_type == GRANT_TYPE_AUTHORIZATION_CODE:
-                return TokenResponse(self.create_code_response_dic())
+                return TokenResponse(self.create_code_response())
             if self.params.grant_type == GRANT_TYPE_REFRESH_TOKEN:
-                return TokenResponse(self.create_refresh_response_dic())
+                return TokenResponse(self.create_refresh_response())
             raise ValueError(f"Invalid grant_type: {self.params.grant_type}")
         except TokenError as error:
             return TokenResponse(error.create_dict(), status=400)
         except UserAuthError as error:
             return TokenResponse(error.create_dict(), status=403)

-    def create_code_response_dic(self) -> dict[str, Any]:
+    def create_code_response(self) -> dict[str, Any]:
         """See https://tools.ietf.org/html/rfc6749#section-4.1"""

         refresh_token = self.params.authorization_code.provider.create_refresh_token(
@@ -211,19 +234,19 @@ class TokenView(View):
         # We don't need to store the code anymore.
         self.params.authorization_code.delete()

-        response_dict = {
+        return {
             "access_token": refresh_token.access_token,
             "refresh_token": refresh_token.refresh_token,
             "token_type": "bearer",
-            "expires_in": timedelta_from_string(
-                self.params.provider.token_validity
-            ).seconds,
+            "expires_in": int(
+                timedelta_from_string(
+                    self.params.provider.token_validity
+                ).total_seconds()
+            ),
             "id_token": refresh_token.provider.encode(refresh_token.id_token.to_dict()),
         }

-        return response_dict
-
-    def create_refresh_response_dic(self) -> dict[str, Any]:
+    def create_refresh_response(self) -> dict[str, Any]:
         """See https://tools.ietf.org/html/rfc6749#section-6"""

         unauthorized_scopes = set(self.params.scope) - set(
@@ -251,17 +274,18 @@ class TokenView(View):
         # Store the refresh_token.
         refresh_token.save()

-        # Forget the old token.
-        self.params.refresh_token.delete()
+        # Mark old token as revoked
+        self.params.refresh_token.revoked = True
+        self.params.refresh_token.save()

-        dic = {
+        return {
             "access_token": refresh_token.access_token,
             "refresh_token": refresh_token.refresh_token,
             "token_type": "bearer",
-            "expires_in": timedelta_from_string(
-                refresh_token.provider.token_validity
-            ).seconds,
+            "expires_in": int(
+                timedelta_from_string(
+                    refresh_token.provider.token_validity
+                ).total_seconds()
+            ),
             "id_token": self.params.provider.encode(refresh_token.id_token.to_dict()),
         }

-        return dic
@@ -1,6 +1,7 @@
 """authentik OAuth2 OpenID Userinfo views"""
 from typing import Any, Optional

+from deepmerge import always_merger
 from django.http import HttpRequest, HttpResponse
 from django.http.response import HttpResponseBadRequest
 from django.views import View
@@ -78,7 +79,7 @@ class UserInfoView(View):
                 )
                 continue
             LOGGER.debug("updated scope", scope=scope)
-            final_claims.update(value)
+            always_merger.merge(final_claims, value)
         return final_claims

     def dispatch(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
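dict.update() replaces a top-level key wholesale, so claims contributed by one scope mapping could clobber nested claims from another mapping that wrote to the same key. deepmerge's always_merger merges recursively instead. A small sketch with made-up claim values:

from deepmerge import always_merger

final_claims = {"ak_proxy": {"user_attributes": {"department": "it"}}}
value = {"ak_proxy": {"backend_override": "https://app.internal"}}

# dict.update() would drop "user_attributes" here; always_merger keeps both keys.
always_merger.merge(final_claims, value)
assert final_claims == {
    "ak_proxy": {
        "user_attributes": {"department": "it"},
        "backend_override": "https://app.internal",
    }
}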
@@ -8,7 +8,7 @@ SCOPE_AK_PROXY_EXPRESSION = """
 # which are used for example for the HTTP-Basic Authentication mapping.
 return {
     "ak_proxy": {
-        "user_attributes": user.group_attributes()
+        "user_attributes": request.user.group_attributes()
     }
 }"""

@@ -3,7 +3,7 @@ from authentik.managed.manager import EnsureExists, ObjectManager
 from authentik.providers.saml.models import SAMLPropertyMapping

 GROUP_EXPRESSION = """
-for group in user.ak_groups.all():
+for group in request.user.ak_groups.all():
     yield group.name
 """

@@ -18,7 +18,7 @@ class SAMLProviderManager(ObjectManager):
             "goauthentik.io/providers/saml/upn",
             name="authentik default SAML Mapping: UPN",
             saml_name="http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn",
-            expression="return user.attributes.get('upn', user.email)",
+            expression="return request.user.attributes.get('upn', request.user.email)",
             friendly_name="",
         ),
         EnsureExists(
@@ -26,7 +26,7 @@ class SAMLProviderManager(ObjectManager):
             "goauthentik.io/providers/saml/name",
             name="authentik default SAML Mapping: Name",
             saml_name="http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name",
-            expression="return user.name",
+            expression="return request.user.name",
             friendly_name="",
         ),
         EnsureExists(
@@ -34,7 +34,7 @@ class SAMLProviderManager(ObjectManager):
             "goauthentik.io/providers/saml/email",
             name="authentik default SAML Mapping: Email",
             saml_name="http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress",
-            expression="return user.email",
+            expression="return request.user.email",
             friendly_name="",
         ),
         EnsureExists(
@@ -42,7 +42,7 @@ class SAMLProviderManager(ObjectManager):
             "goauthentik.io/providers/saml/username",
             name="authentik default SAML Mapping: Username",
             saml_name="http://schemas.goauthentik.io/2021/02/saml/username",
-            expression="return user.username",
+            expression="return request.user.username",
             friendly_name="",
         ),
         EnsureExists(
@@ -50,7 +50,7 @@ class SAMLProviderManager(ObjectManager):
             "goauthentik.io/providers/saml/uid",
             name="authentik default SAML Mapping: User ID",
             saml_name="http://schemas.goauthentik.io/2021/02/saml/uid",
-            expression="return user.pk",
+            expression="return request.user.pk",
             friendly_name="",
         ),
         EnsureExists(
@@ -68,7 +68,7 @@ class SAMLProviderManager(ObjectManager):
             saml_name=(
                 "http://schemas.microsoft.com/ws/2008/06/identity/claims/windowsaccountname"
             ),
-            expression="return user.username",
+            expression="return request.user.username",
             friendly_name="",
         ),
     ]
@@ -24,6 +24,7 @@ from authentik.sources.saml.processors.constants import (
     SAML_NAME_ID_FORMAT_EMAIL,
     SAML_NAME_ID_FORMAT_PERSISTENT,
     SAML_NAME_ID_FORMAT_TRANSIENT,
+    SAML_NAME_ID_FORMAT_UNSPECIFIED,
     SAML_NAME_ID_FORMAT_WINDOWS,
     SAML_NAME_ID_FORMAT_X509,
     SIGN_ALGORITHM_TRANSFORM_MAP,
@@ -165,7 +166,10 @@ class AssertionProcessor:
         if name_id.attrib["Format"] == SAML_NAME_ID_FORMAT_EMAIL:
             name_id.text = self.http_request.user.email
             return name_id
-        if name_id.attrib["Format"] == SAML_NAME_ID_FORMAT_PERSISTENT:
+        if name_id.attrib["Format"] in [
+            SAML_NAME_ID_FORMAT_PERSISTENT,
+            SAML_NAME_ID_FORMAT_UNSPECIFIED,
+        ]:
             name_id.text = persistent
             return name_id
         if name_id.attrib["Format"] == SAML_NAME_ID_FORMAT_X509:
@@ -180,7 +184,7 @@ class AssertionProcessor:
             return name_id
         if name_id.attrib["Format"] == SAML_NAME_ID_FORMAT_TRANSIENT:
             # Use the hash of the user's session, which changes every session
-            session_key: str = self.http_request.user.session.session_key
+            session_key: str = self.http_request.session.session_key
             name_id.text = sha256(session_key.encode()).hexdigest()
             return name_id
         raise UnsupportedNameIDFormat(
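For the transient format above, the NameID is derived from the Django session key rather than from any user attribute, so it rotates with every new session. A standalone sketch of the same hashing (the session key shown is a made-up placeholder; in the processor it comes from `self.http_request.session.session_key`):

```python
from hashlib import sha256

# Hypothetical session key; Django generates one per session.
session_key = "k3yexample1234567890"

# A transient NameID only has to be stable for the lifetime of the session,
# so hashing the session key yields a value that changes on every new login.
transient_name_id = sha256(session_key.encode()).hexdigest()
print(transient_name_id)
```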
@@ -20,10 +20,11 @@ from authentik.sources.saml.processors.constants import (
     RSA_SHA256,
     RSA_SHA384,
     RSA_SHA512,
-    SAML_NAME_ID_FORMAT_EMAIL,
+    SAML_NAME_ID_FORMAT_UNSPECIFIED,
 )

 LOGGER = get_logger()
+ERROR_CANNOT_DECODE_REQUEST = "Cannot decode SAML request."
 ERROR_SIGNATURE_REQUIRED_BUT_ABSENT = (
     "Verification Certificate configured, but request is not signed."
 )
@@ -42,7 +43,7 @@ class AuthNRequest:

     relay_state: Optional[str] = None

-    name_id_policy: str = SAML_NAME_ID_FORMAT_EMAIL
+    name_id_policy: str = SAML_NAME_ID_FORMAT_UNSPECIFIED


 class AuthNRequestParser:
@@ -69,16 +70,21 @@ class AuthNRequestParser:
         auth_n_request = AuthNRequest(id=root.attrib["ID"], relay_state=relay_state)

         # Check if AuthnRequest has a NameID Policy object
-        name_id_policies = root.findall(f"{{{NS_SAML_PROTOCOL}}}:NameIDPolicy")
+        name_id_policies = root.findall(f"{{{NS_SAML_PROTOCOL}}}NameIDPolicy")
         if len(name_id_policies) > 0:
             name_id_policy = name_id_policies[0]
-            auth_n_request.name_id_policy = name_id_policy.attrib["Format"]
+            auth_n_request.name_id_policy = name_id_policy.attrib.get(
+                "Format", SAML_NAME_ID_FORMAT_UNSPECIFIED
+            )

         return auth_n_request

     def parse(self, saml_request: str, relay_state: Optional[str]) -> AuthNRequest:
         """Validate and parse raw request with enveloped signautre."""
-        decoded_xml = b64decode(saml_request.encode()).decode()
+        try:
+            decoded_xml = b64decode(saml_request.encode()).decode()
+        except UnicodeDecodeError:
+            raise CannotHandleAssertion(ERROR_CANNOT_DECODE_REQUEST)

         verifier = self.provider.verification_kp

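The `findall` fix above matters because ElementTree's qualified-tag syntax is `{namespace}LocalName` with no colon after the closing brace, and `.get()` with a default mirrors the new fallback for AuthnRequests that omit a Format attribute. A small self-contained sketch of both behaviours; the namespace URN is assumed to be what `NS_SAML_PROTOCOL` points at (the SAML 2.0 protocol namespace):

```python
from xml.etree import ElementTree

NS_SAML_PROTOCOL = "urn:oasis:names:tc:SAML:2.0:protocol"
SAML_NAME_ID_FORMAT_UNSPECIFIED = "urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified"

# Minimal AuthnRequest whose NameIDPolicy has no Format attribute.
request_xml = f"""
<samlp:AuthnRequest xmlns:samlp="{NS_SAML_PROTOCOL}" ID="_example">
    <samlp:NameIDPolicy AllowCreate="true"/>
</samlp:AuthnRequest>
"""
root = ElementTree.fromstring(request_xml)

# "{ns}:NameIDPolicy" (with the stray colon) would match nothing;
# "{ns}NameIDPolicy" is the correct qualified-tag form.
policies = root.findall(f"{{{NS_SAML_PROTOCOL}}}NameIDPolicy")
assert len(policies) == 1

# Missing Format now falls back to the unspecified NameID format.
name_id_format = policies[0].attrib.get("Format", SAML_NAME_ID_FORMAT_UNSPECIFIED)
print(name_id_format)
```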
@@ -121,7 +127,10 @@ class AuthNRequestParser:
         sig_alg: Optional[str] = None,
     ) -> AuthNRequest:
         """Validate and parse raw request with detached signature"""
-        decoded_xml = decode_base64_and_inflate(saml_request)
+        try:
+            decoded_xml = decode_base64_and_inflate(saml_request)
+        except UnicodeDecodeError:
+            raise CannotHandleAssertion(ERROR_CANNOT_DECODE_REQUEST)

         verifier = self.provider.verification_kp

@@ -14,7 +14,7 @@ from authentik.providers.saml.processors.assertion import AssertionProcessor
 from authentik.providers.saml.processors.request_parser import AuthNRequestParser
 from authentik.sources.saml.exceptions import MismatchedRequestID
 from authentik.sources.saml.models import SAMLSource
-from authentik.sources.saml.processors.constants import SAML_NAME_ID_FORMAT_EMAIL
+from authentik.sources.saml.processors.constants import SAML_NAME_ID_FORMAT_UNSPECIFIED
 from authentik.sources.saml.processors.request import (
     SESSION_REQUEST_ID,
     RequestProcessor,
@@ -206,5 +206,5 @@ class TestAuthNRequest(TestCase):
             REDIRECT_REQUEST, REDIRECT_RELAY_STATE, REDIRECT_SIGNATURE, REDIRECT_SIG_ALG
         )
         self.assertEqual(parsed_request.id, "_dcf55fcd27a887e60a7ef9ee6fd3adab")
-        self.assertEqual(parsed_request.name_id_policy, SAML_NAME_ID_FORMAT_EMAIL)
+        self.assertEqual(parsed_request.name_id_policy, SAML_NAME_ID_FORMAT_UNSPECIFIED)
         self.assertEqual(parsed_request.relay_state, REDIRECT_RELAY_STATE)
@@ -17,6 +17,7 @@ from authentik.providers.saml.models import SAMLBindings, SAMLProvider
 from authentik.providers.saml.processors.assertion import AssertionProcessor
 from authentik.providers.saml.processors.request_parser import AuthNRequest
 from authentik.providers.saml.utils.encoding import deflate_and_base64_encode, nice64
+from authentik.sources.saml.exceptions import SAMLException

 LOGGER = get_logger()
 URL_VALIDATOR = URLValidator(schemes=("http", "https"))
@@ -56,22 +57,30 @@ class SAMLFlowFinalView(ChallengeStageView):
         provider: SAMLProvider = get_object_or_404(
             SAMLProvider, pk=application.provider_id
         )
-        # Log Application Authorization
-        Event.new(
-            EventAction.AUTHORIZE_APPLICATION,
-            authorized_application=application,
-            flow=self.executor.plan.flow_pk,
-        ).from_http(self.request)

         if SESSION_KEY_AUTH_N_REQUEST not in self.request.session:
             return self.executor.stage_invalid()

         auth_n_request: AuthNRequest = self.request.session.pop(
             SESSION_KEY_AUTH_N_REQUEST
         )
-        response = AssertionProcessor(
-            provider, request, auth_n_request
-        ).build_response()
+        try:
+            response = AssertionProcessor(
+                provider, request, auth_n_request
+            ).build_response()
+        except SAMLException as exc:
+            Event.new(
+                EventAction.CONFIGURATION_ERROR,
+                message=f"Failed to process SAML assertion: {str(exc)}",
+                provider=provider,
+            ).from_http(self.request)
+            return self.executor.stage_invalid()
+
+        # Log Application Authorization
+        Event.new(
+            EventAction.AUTHORIZE_APPLICATION,
+            authorized_application=application,
+            flow=self.executor.plan.flow_pk,
+        ).from_http(self.request)

         if provider.sp_binding == SAMLBindings.POST:
             form_attrs = {
@@ -79,7 +79,7 @@ class SAMLSSOView(PolicyAccessView):
                 PLAN_CONTEXT_CONSENT_PERMISSIONS: [],
             },
         )
-        plan.append(in_memory_stage(SAMLFlowFinalView))
+        plan.append_stage(in_memory_stage(SAMLFlowFinalView))
         request.session[SESSION_KEY_PLAN] = plan
         return redirect_with_qs(
             "authentik_core:if-flow",
@@ -44,7 +44,7 @@ class Command(BaseCommand):
             user=user,
             intent=TokenIntents.INTENT_RECOVERY,
             description=f"Recovery Token generated by {getuser()} on {_now}",
-            identifier=f"ak-recovery-{user}",
+            identifier=f"ak-recovery-{user}-{_now}",
         )
         self.stdout.write(
             (
@@ -15,7 +15,7 @@ class MessageConsumer(JsonWebsocketConsumer):
         cache.set(f"user_{self.session_key}_messages_{self.channel_name}", True, None)

     # pylint: disable=unused-argument
-    def disconnect(self, close_code):
+    def disconnect(self, code):
         cache.delete(f"user_{self.session_key}_messages_{self.channel_name}")

     def event_update(self, event: dict):
@@ -15,6 +15,7 @@ import logging
 import os
 import sys
 from json import dumps
+from tempfile import gettempdir
 from time import time

 import structlog
@@ -152,6 +153,7 @@ SPECTACULAR_SETTINGS = {
         "url": "https://github.com/goauthentik/authentik/blob/master/LICENSE",
     },
     "ENUM_NAME_OVERRIDES": {
+        "EventActions": "authentik.events.models.EventAction",
         "ChallengeChoices": "authentik.flows.challenge.ChallengeTypes",
         "FlowDesignationEnum": "authentik.flows.models.FlowDesignation",
         "PolicyEngineMode": "authentik.policies.models.PolicyEngineMode",
@@ -193,6 +195,7 @@ CACHES = {
            f"redis://:{CONFIG.y('redis.password')}@{CONFIG.y('redis.host')}:6379"
            f"/{CONFIG.y('redis.cache_db')}"
         ),
+        "TIMEOUT": int(CONFIG.y("redis.cache_timeout", 300)),
         "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"},
     }
 }
@@ -341,7 +344,7 @@ DBBACKUP_FILENAME_TEMPLATE = "authentik-backup-{datetime}.sql"
 DBBACKUP_CONNECTOR_MAPPING = {
     "django_prometheus.db.backends.postgresql": "dbbackup.db.postgresql.PgDumpConnector",
 }
-
+DBBACKUP_TMP_DIR = gettempdir() if DEBUG else "/tmp"  # nosec
 if CONFIG.y("postgresql.s3_backup"):
     DBBACKUP_STORAGE = "storages.backends.s3boto3.S3Boto3Storage"
     DBBACKUP_STORAGE_OPTIONS = {
@@ -15,6 +15,10 @@ class PytestTestRunner:  # pragma: no cover
         settings.CELERY_TASK_ALWAYS_EAGER = True
         CONFIG.y_set("authentik.avatars", "none")
         CONFIG.y_set("authentik.geoip", "tests/GeoLite2-City-Test.mmdb")
+        CONFIG.y_set(
+            "outposts.docker_image_base",
+            "beryju.org/authentik/outpost-%(type)s:gh-master",
+        )

     def run_tests(self, test_labels):
         """Run pytest and return the exitcode.
@@ -60,14 +60,21 @@ class LDAPPasswordChanger:
     def check_ad_password_complexity_enabled(self) -> bool:
         """Check if DOMAIN_PASSWORD_COMPLEX is enabled"""
         root_dn = self.get_domain_root_dn()
-        root_attrs = self._source.connection.extend.standard.paged_search(
-            search_base=root_dn,
-            search_filter="(objectClass=*)",
-            search_scope=ldap3.BASE,
-            attributes=["pwdProperties"],
-        )
+        try:
+            root_attrs = self._source.connection.extend.standard.paged_search(
+                search_base=root_dn,
+                search_filter="(objectClass=*)",
+                search_scope=ldap3.BASE,
+                attributes=["pwdProperties"],
+            )
+        except ldap3.core.exceptions.LDAPAttributeError:
+            return False
         root_attrs = list(root_attrs)[0]
-        pwd_properties = PwdProperties(root_attrs["attributes"]["pwdProperties"])
+        raw_pwd_properties = root_attrs.get("attributes", {}).get("pwdProperties", None)
+        if raw_pwd_properties is None:
+            return False
+
+        pwd_properties = PwdProperties(raw_pwd_properties)
         if PwdProperties.DOMAIN_PASSWORD_COMPLEX in pwd_properties:
             return True

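For context on the `PwdProperties` test above: Active Directory exposes its password policy as a bit field in the `pwdProperties` attribute, where `DOMAIN_PASSWORD_COMPLEX` is, as far as I can tell, bit 0x1. A hedged, LDAP-free sketch of the flag check (the enum values below are an assumption based on the Microsoft documentation, not taken from this repository):

```python
from enum import IntFlag


class PwdProperties(IntFlag):
    """Assumed subset of AD pwdProperties bits."""

    DOMAIN_PASSWORD_COMPLEX = 1
    DOMAIN_PASSWORD_NO_ANON_CHANGE = 2
    DOMAIN_PASSWORD_NO_CLEAR_CHANGE = 4


# In the code above this value comes from root_attrs["attributes"]["pwdProperties"].
raw_pwd_properties = 1
properties = PwdProperties(raw_pwd_properties)
print(PwdProperties.DOMAIN_PASSWORD_COMPLEX in properties)  # True
```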
@@ -36,7 +36,8 @@ class SourceType:
 class SourceTypeManager:
     """Manager to hold all Source types."""

-    __sources: list[SourceType] = []
+    def __init__(self) -> None:
+        self.__sources: list[SourceType] = []

     def type(self):
         """Class decorator to register classes inline."""
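The change above moves `__sources` from a class attribute to an instance attribute; with a mutable class-level default, every instance shares the same list, so registrations leak between them. A minimal sketch of the pitfall using a hypothetical registry class (names invented for illustration):

```python
class SharedRegistry:
    # Class-level mutable default: one list shared by every instance.
    items: list[str] = []

    def register(self, name: str) -> None:
        self.items.append(name)


class PerInstanceRegistry:
    def __init__(self) -> None:
        # Instance-level list: each registry starts empty.
        self.items: list[str] = []

    def register(self, name: str) -> None:
        self.items.append(name)


a, b = SharedRegistry(), SharedRegistry()
a.register("github")
print(b.items)  # ['github'] - leaked into the other instance

c, d = PerInstanceRegistry(), PerInstanceRegistry()
c.register("github")
print(d.items)  # []
```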
@@ -1,4 +1,5 @@
 """OAuth Callback Views"""
+from json import JSONDecodeError
 from typing import Any, Optional

 from django.conf import settings
@@ -10,6 +11,7 @@ from django.views.generic import View
 from structlog.stdlib import get_logger

 from authentik.core.sources.flow_manager import SourceFlowManager
+from authentik.events.models import Event, EventAction
 from authentik.sources.oauth.models import OAuthSource, UserOAuthSourceConnection
 from authentik.sources.oauth.views.base import OAuthClientMixin

@@ -42,8 +44,16 @@ class OAuthCallback(OAuthClientMixin, View):
         if "error" in token:
             return self.handle_login_failure(token["error"])
         # Fetch profile info
-        raw_info = client.get_profile_info(token)
-        if raw_info is None:
+        try:
+            raw_info = client.get_profile_info(token)
+            if raw_info is None:
+                return self.handle_login_failure("Could not retrieve profile.")
+        except JSONDecodeError as exc:
+            Event.new(
+                EventAction.CONFIGURATION_ERROR,
+                message="Failed to JSON-decode profile.",
+                raw_profile=exc.doc,
+            ).from_http(self.request)
             return self.handle_login_failure("Could not retrieve profile.")
         identifier = self.get_user_id(raw_info)
         if identifier is None:
@@ -2,17 +2,21 @@
 from authentik.lib.sentry import SentryIgnoredException


-class MissingSAMLResponse(SentryIgnoredException):
+class SAMLException(SentryIgnoredException):
+    """Base SAML Exception"""
+
+
+class MissingSAMLResponse(SAMLException):
     """Exception raised when request does not contain SAML Response."""


-class UnsupportedNameIDFormat(SentryIgnoredException):
+class UnsupportedNameIDFormat(SAMLException):
     """Exception raised when SAML Response contains NameID Format not supported."""


-class MismatchedRequestID(SentryIgnoredException):
+class MismatchedRequestID(SAMLException):
     """Exception raised when the returned request ID doesn't match the saved ID."""


-class InvalidSignature(SentryIgnoredException):
+class InvalidSignature(SAMLException):
     """Signature of XML Object is either missing or invalid"""
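With the shared `SAMLException` base class introduced above, call sites can catch the whole family in one clause, which is what the earlier `SAMLFlowFinalView` hunk relies on. A rough sketch of the pattern, with a stand-in for the assertion processor (only the imports correspond to real modules from this diff; the function itself is illustrative):

```python
from authentik.sources.saml.exceptions import SAMLException, UnsupportedNameIDFormat


def build_assertion_or_none(name_id_format: str):
    """Stand-in for AssertionProcessor.build_response(); only the except clause matters here."""
    try:
        if name_id_format == "unsupported":
            raise UnsupportedNameIDFormat("example format is not supported")
        return "<saml:Assertion/>"
    except SAMLException as exc:  # one handler now covers every SAML error subclass
        print(f"Failed to process SAML assertion: {exc}")
        return None
```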
@@ -15,6 +15,9 @@ NS_MAP = {

 SAML_NAME_ID_FORMAT_EMAIL = "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress"
 SAML_NAME_ID_FORMAT_PERSISTENT = "urn:oasis:names:tc:SAML:2.0:nameid-format:persistent"
+SAML_NAME_ID_FORMAT_UNSPECIFIED = (
+    "urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified"
+)
 SAML_NAME_ID_FORMAT_X509 = "urn:oasis:names:tc:SAML:2.0:nameid-format:X509SubjectName"
 SAML_NAME_ID_FORMAT_WINDOWS = (
     "urn:oasis:names:tc:SAML:2.0:nameid-format:WindowsDomainQualifiedName"
@@ -90,7 +90,7 @@ class InitiateView(View):
         planner.allow_empty_flows = True
         plan = planner.plan(self.request, kwargs)
         for stage in stages_to_append:
-            plan.append(stage)
+            plan.append_stage(stage)
         self.request.session[SESSION_KEY_PLAN] = plan
         return redirect_with_qs(
             "authentik_core:if-flow",
@@ -9,7 +9,7 @@ from rest_framework.permissions import IsAdminUser
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.serializers import ModelSerializer
-from rest_framework.viewsets import GenericViewSet, ModelViewSet, ReadOnlyModelViewSet
+from rest_framework.viewsets import GenericViewSet, ModelViewSet

 from authentik.api.authorization import OwnerFilter, OwnerPermissions
 from authentik.core.api.used_by import UsedByMixin
@@ -94,7 +94,7 @@ class DuoDeviceViewSet(
     filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]


-class DuoAdminDeviceViewSet(ReadOnlyModelViewSet):
+class DuoAdminDeviceViewSet(ModelViewSet):
     """Viewset for Duo authenticator devices (for admins)"""

     permission_classes = [IsAdminUser]
@@ -3,6 +3,7 @@ from django.http import HttpRequest, HttpResponse
 from rest_framework.fields import CharField
 from structlog.stdlib import get_logger

+from authentik.events.models import Event, EventAction
 from authentik.flows.challenge import (
     Challenge,
     ChallengeResponse,
@@ -11,6 +12,7 @@ from authentik.flows.challenge import (
 )
 from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
 from authentik.flows.stage import ChallengeStageView
+from authentik.flows.views import InvalidStageError
 from authentik.stages.authenticator_duo.models import AuthenticatorDuoStage, DuoDevice

 LOGGER = get_logger()
@@ -42,7 +44,15 @@ class AuthenticatorDuoStageView(ChallengeStageView):
     def get_challenge(self, *args, **kwargs) -> Challenge:
         user = self.get_pending_user()
         stage: AuthenticatorDuoStage = self.executor.current_stage
-        enroll = stage.client.enroll(user.username)
+        try:
+            enroll = stage.client.enroll(user.username)
+        except RuntimeError as exc:
+            Event.new(
+                EventAction.CONFIGURATION_ERROR,
+                message=f"Failed to enroll user: {str(exc)}",
+                user=user,
+            ).from_http(self.request, user)
+            raise InvalidStageError(str(exc)) from exc
         user_id = enroll["user_id"]
         self.request.session[SESSION_KEY_DUO_USER_ID] = user_id
         self.request.session[SESSION_KEY_DUO_ACTIVATION_CODE] = enroll[
@@ -53,7 +63,7 @@ class AuthenticatorDuoStageView(ChallengeStageView):
                 "type": ChallengeTypes.NATIVE.value,
                 "activation_barcode": enroll["activation_barcode"],
                 "activation_code": enroll["activation_code"],
-                "stage_uuid": stage.stage_uuid,
+                "stage_uuid": str(stage.stage_uuid),
             }
         )

@@ -10,7 +10,7 @@ from authentik.flows.challenge import (
     ChallengeTypes,
     WithUserInfoChallenge,
 )
-from authentik.flows.models import NotConfiguredAction
+from authentik.flows.models import NotConfiguredAction, Stage
 from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
 from authentik.flows.stage import ChallengeStageView
 from authentik.stages.authenticator_validate.challenge import (
@@ -74,12 +74,12 @@ class AuthenticatorValidationChallengeResponse(ChallengeResponse):
             duo, self.stage.request, self.stage.get_pending_user()
         )

-    def validate(self, data: dict):
+    def validate(self, attrs: dict):
         # Checking if the given data is from a valid device class is done above
         # Here we only check if the any data was sent at all
-        if "code" not in data and "webauthn" not in data and "duo" not in data:
+        if "code" not in attrs and "webauthn" not in attrs and "duo" not in attrs:
             raise ValidationError("Empty response")
-        return data
+        return attrs


 class AuthenticatorValidateStageView(ChallengeStageView):
@@ -143,9 +143,12 @@ class AuthenticatorValidateStageView(ChallengeStageView):
             return self.executor.stage_invalid()
         if stage.not_configured_action == NotConfiguredAction.CONFIGURE:
             LOGGER.debug("Authenticator not configured, sending user to configure")
+            # Because the foreign key to stage.configuration_stage points to
+            # a base stage class, we need to do another lookup
+            stage = Stage.objects.get_subclass(pk=stage.configuration_stage.pk)
             # plan.insert inserts at 1 index, so when stage_ok pops 0,
             # the configuration stage is next
-            self.executor.plan.insert(stage.configuration_stage)
+            self.executor.plan.insert_stage(stage)
             return self.executor.stage_ok()
         return super().get(request, *args, **kwargs)

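The extra lookup above exists because `configuration_stage` is a foreign key to the base `Stage` model; `Stage.objects` appears to be a django-model-utils `InheritanceManager` (which is what `get_subclass()` implies), and only `get_subclass`/`select_subclasses` hand back the concrete stage subclass with its stage-specific fields and view. A hedged sketch of the lookup, under that assumption:

```python
# Sketch only: assumes Stage.objects is django-model-utils' InheritanceManager,
# as the Stage.objects.get_subclass(...) call in the hunk above implies.
from authentik.flows.models import Stage


def resolve_configuration_stage(stage_pk) -> Stage:
    """Return the concrete subclass (e.g. a Duo or WebAuthn setup stage) for a Stage FK.

    Stage.objects.get(pk=...) would return a plain Stage row with no stage-specific
    fields; get_subclass() performs the extra joins and returns the most-derived model,
    which is what the flow plan needs before the stage can actually run.
    """
    return Stage.objects.get_subclass(pk=stage_pk)
```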
@@ -160,7 +163,7 @@ class AuthenticatorValidateStageView(ChallengeStageView):

     # pylint: disable=unused-argument
     def challenge_valid(
-        self, challenge: AuthenticatorValidationChallengeResponse
+        self, response: AuthenticatorValidationChallengeResponse
     ) -> HttpResponse:
         # All validation is done by the serializer
         return self.executor.stage_ok()
@@ -4,11 +4,14 @@ from unittest.mock import MagicMock, patch
 from django.contrib.sessions.middleware import SessionMiddleware
 from django.test import TestCase
 from django.test.client import RequestFactory
+from django.urls.base import reverse
+from django.utils.encoding import force_str
 from django_otp.plugins.otp_totp.models import TOTPDevice
 from rest_framework.exceptions import ValidationError

 from authentik.core.models import User
-from authentik.flows.models import NotConfiguredAction
+from authentik.flows.challenge import ChallengeTypes
+from authentik.flows.models import Flow, FlowStageBinding, NotConfiguredAction
 from authentik.flows.tests.test_planner import dummy_get_response
 from authentik.providers.oauth2.generators import (
     generate_client_id,
@@ -24,7 +27,9 @@ from authentik.stages.authenticator_validate.challenge import (
     validate_challenge_duo,
     validate_challenge_webauthn,
 )
+from authentik.stages.authenticator_validate.models import AuthenticatorValidateStage
 from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
+from authentik.stages.identification.models import IdentificationStage, UserFields


 class AuthenticatorValidateStageTests(TestCase):
@@ -34,6 +39,50 @@ class AuthenticatorValidateStageTests(TestCase):
         self.user = User.objects.get(username="akadmin")
         self.request_factory = RequestFactory()

+    def test_not_configured_action(self):
+        """Test not_configured_action"""
+        conf_stage = IdentificationStage.objects.create(
+            name="conf",
+            user_fields=[
+                UserFields.USERNAME,
+            ],
+        )
+        stage = AuthenticatorValidateStage.objects.create(
+            name="foo",
+            not_configured_action=NotConfiguredAction.CONFIGURE,
+            configuration_stage=conf_stage,
+        )
+        flow = Flow.objects.create(name="test", slug="test", title="test")
+        FlowStageBinding.objects.create(target=flow, stage=conf_stage, order=0)
+        FlowStageBinding.objects.create(target=flow, stage=stage, order=1)
+
+        response = self.client.post(
+            reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
+            {"uid_field": "akadmin"},
+        )
+        self.assertEqual(response.status_code, 302)
+        response = self.client.get(
+            reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
+            follow=True,
+        )
+        self.assertEqual(response.status_code, 200)
+        self.assertJSONEqual(
+            force_str(response.content),
+            {
+                "type": ChallengeTypes.NATIVE.value,
+                "component": "ak-stage-identification",
+                "password_fields": False,
+                "primary_action": "Log in",
+                "flow_info": {
+                    "background": flow.background_url,
+                    "cancel_url": reverse("authentik_flows:cancel"),
+                    "title": flow.title,
+                },
+                "user_fields": ["username"],
+                "sources": [],
+            },
+        )
+
     def test_stage_validation(self):
         """Test serializer validation"""
         self.client.force_login(self.user)
@@ -36,12 +36,14 @@ class TestCaptchaStage(TestCase):
             public_key=RECAPTCHA_PUBLIC_KEY,
             private_key=RECAPTCHA_PRIVATE_KEY,
         )
-        FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=2)
+        self.binding = FlowStageBinding.objects.create(
+            target=self.flow, stage=self.stage, order=2
+        )

     def test_valid(self):
         """Test valid captcha"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         session = self.client.session
         session[SESSION_KEY_PLAN] = plan
@@ -39,9 +39,11 @@ class TestConsentStage(TestCase):
         stage = ConsentStage.objects.create(
             name="consent", mode=ConsentMode.ALWAYS_REQUIRE
         )
-        FlowStageBinding.objects.create(target=flow, stage=stage, order=2)
+        binding = FlowStageBinding.objects.create(target=flow, stage=stage, order=2)

-        plan = FlowPlan(flow_pk=flow.pk.hex, stages=[stage], markers=[StageMarker()])
+        plan = FlowPlan(
+            flow_pk=flow.pk.hex, bindings=[binding], markers=[StageMarker()]
+        )
         session = self.client.session
         session[SESSION_KEY_PLAN] = plan
         session.save()
@@ -69,11 +71,11 @@ class TestConsentStage(TestCase):
             designation=FlowDesignation.AUTHENTICATION,
         )
         stage = ConsentStage.objects.create(name="consent", mode=ConsentMode.PERMANENT)
-        FlowStageBinding.objects.create(target=flow, stage=stage, order=2)
+        binding = FlowStageBinding.objects.create(target=flow, stage=stage, order=2)

         plan = FlowPlan(
             flow_pk=flow.pk.hex,
-            stages=[stage],
+            bindings=[binding],
             markers=[StageMarker()],
             context={PLAN_CONTEXT_APPLICATION: self.application},
         )
@@ -110,11 +112,11 @@ class TestConsentStage(TestCase):
         stage = ConsentStage.objects.create(
             name="consent", mode=ConsentMode.EXPIRING, consent_expire_in="seconds=1"
         )
-        FlowStageBinding.objects.create(target=flow, stage=stage, order=2)
+        binding = FlowStageBinding.objects.create(target=flow, stage=stage, order=2)

         plan = FlowPlan(
             flow_pk=flow.pk.hex,
-            stages=[stage],
+            bindings=[binding],
             markers=[StageMarker()],
             context={PLAN_CONTEXT_APPLICATION: self.application},
         )
@@ -26,12 +26,14 @@ class TestUserDenyStage(TestCase):
             designation=FlowDesignation.AUTHENTICATION,
         )
         self.stage = DenyStage.objects.create(name="logout")
-        FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=2)
+        self.binding = FlowStageBinding.objects.create(
+            target=self.flow, stage=self.stage, order=2
+        )

     def test_valid_password(self):
         """Test with a valid pending user and backend"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         session = self.client.session
         session[SESSION_KEY_PLAN] = plan
|
|||||||
|
|
||||||
component = CharField(default="ak-stage-email")
|
component = CharField(default="ak-stage-email")
|
||||||
|
|
||||||
def validate(self, data):
|
def validate(self, attrs):
|
||||||
raise ValidationError("")
|
raise ValidationError("")
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
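The `validate(self, data)` to `validate(self, attrs)` renames here and in the surrounding stage/serializer files line the overrides up with the parameter name used by DRF's `Serializer.validate(self, attrs)`, which keeps linters that compare overridden signatures quiet. A minimal sketch of the convention (a toy serializer, not one from this repository):

```python
from rest_framework import serializers


class ExampleChallengeResponse(serializers.Serializer):
    """Toy serializer; only the validate() signature is the point here."""

    code = serializers.CharField(required=False)

    def validate(self, attrs: dict) -> dict:
        # Matching the base class' "attrs" name avoids signature-mismatch warnings
        # and makes clear this receives the already field-validated data.
        if "code" not in attrs:
            raise serializers.ValidationError("Empty response")
        return attrs
```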
@@ -34,12 +34,14 @@ class TestEmailStageSending(TestCase):
         self.stage = EmailStage.objects.create(
             name="email",
         )
-        FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=2)
+        self.binding = FlowStageBinding.objects.create(
+            target=self.flow, stage=self.stage, order=2
+        )

     def test_pending_user(self):
         """Test with pending user"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
         session = self.client.session
@@ -67,7 +69,7 @@ class TestEmailStageSending(TestCase):
     def test_send_error(self):
         """Test error during sending (sending will be retried)"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
         session = self.client.session
@@ -35,12 +35,14 @@ class TestEmailStage(TestCase):
         self.stage = EmailStage.objects.create(
             name="email",
         )
-        FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=2)
+        self.binding = FlowStageBinding.objects.create(
+            target=self.flow, stage=self.stage, order=2
+        )

     def test_rendering(self):
         """Test with pending user"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
         session = self.client.session
@@ -56,7 +58,7 @@ class TestEmailStage(TestCase):
     def test_without_user(self):
         """Test without pending user"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         session = self.client.session
         session[SESSION_KEY_PLAN] = plan
@@ -71,7 +73,7 @@ class TestEmailStage(TestCase):
     def test_pending_user(self):
         """Test with pending user"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
         session = self.client.session
@@ -102,7 +104,7 @@ class TestEmailStage(TestCase):
         # Make sure token exists
         self.test_pending_user()
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         session = self.client.session
         session[SESSION_KEY_PLAN] = plan
@@ -73,9 +73,9 @@ class IdentificationChallengeResponse(ChallengeResponse):

     pre_user: Optional[User] = None

-    def validate(self, data: dict[str, Any]) -> dict[str, Any]:
+    def validate(self, attrs: dict[str, Any]) -> dict[str, Any]:
         """Validate that user exists, and optionally their password"""
-        uid_field = data["uid_field"]
+        uid_field = attrs["uid_field"]
         current_stage: IdentificationStage = self.stage.executor.current_stage

         pre_user = self.stage.get_user(uid_field)
@@ -85,13 +85,25 @@ class IdentificationChallengeResponse(ChallengeResponse):
             identification_failed.send(
                 sender=self, request=self.stage.request, uid_field=uid_field
             )
+            # We set the pending_user even on failure so it's part of the context, even
+            # when the input is invalid
+            # This is so its part of the current flow plan, and on flow restart can be kept, and
+            # policies can be applied.
+            self.stage.executor.plan.context[PLAN_CONTEXT_PENDING_USER] = User(
+                username=uid_field,
+                email=uid_field,
+            )
+            if not current_stage.show_matched_user:
+                self.stage.executor.plan.context[
+                    PLAN_CONTEXT_PENDING_USER_IDENTIFIER
+                ] = uid_field
             raise ValidationError("Failed to authenticate.")
         self.pre_user = pre_user
         if not current_stage.password_stage:
             # No password stage select, don't validate the password
-            return data
+            return attrs

-        password = data["password"]
+        password = attrs["password"]
         try:
             user = authenticate(
                 self.stage.request,
@@ -104,7 +116,7 @@ class IdentificationChallengeResponse(ChallengeResponse):
             self.pre_user = user
         except PermissionDenied as exc:
             raise ValidationError(str(exc)) from exc
-        return data
+        return attrs


 class IdentificationStageView(ChallengeStageView):
@@ -175,7 +187,6 @@ class IdentificationStageView(ChallengeStageView):
             button = asdict(ui_login_button)
             button["challenge"] = ui_login_button.challenge.data
             ui_sources.append(button)
-        print(ui_sources)
         challenge.initial_data["sources"] = ui_sources
         return challenge

@@ -35,7 +35,9 @@ class TestUserLoginStage(TestCase):
             designation=FlowDesignation.AUTHENTICATION,
         )
         self.stage = InvitationStage.objects.create(name="invitation")
-        FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=2)
+        self.binding = FlowStageBinding.objects.create(
+            target=self.flow, stage=self.stage, order=2
+        )

     @patch(
         "authentik.flows.views.to_stage_response",
@@ -44,7 +46,7 @@ class TestUserLoginStage(TestCase):
     def test_without_invitation_fail(self):
         """Test without any invitation, continue_flow_without_invitation not set."""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
         plan.context[PLAN_CONTEXT_AUTHENTICATION_BACKEND] = BACKEND_DJANGO
@@ -75,7 +77,7 @@ class TestUserLoginStage(TestCase):
         self.stage.continue_flow_without_invitation = True
         self.stage.save()
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
         plan.context[PLAN_CONTEXT_AUTHENTICATION_BACKEND] = BACKEND_DJANGO
@@ -103,7 +105,7 @@ class TestUserLoginStage(TestCase):
     def test_with_invitation_get(self):
         """Test with invitation, check data in session"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         session = self.client.session
         session[SESSION_KEY_PLAN] = plan
@@ -143,7 +145,7 @@ class TestUserLoginStage(TestCase):
         )

         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         plan.context[PLAN_CONTEXT_PROMPT] = {INVITATION_TOKEN_KEY: invite.pk.hex}
         session = self.client.session
@@ -39,7 +39,9 @@ class TestPasswordStage(TestCase):
         self.stage = PasswordStage.objects.create(
             name="password", backends=[BACKEND_DJANGO]
         )
-        FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=2)
+        self.binding = FlowStageBinding.objects.create(
+            target=self.flow, stage=self.stage, order=2
+        )

     @patch(
         "authentik.flows.views.to_stage_response",
@@ -48,7 +50,7 @@ class TestPasswordStage(TestCase):
     def test_without_user(self):
         """Test without user"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         session = self.client.session
         session[SESSION_KEY_PLAN] = plan
@@ -84,7 +86,7 @@ class TestPasswordStage(TestCase):
         )

         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         session = self.client.session
         session[SESSION_KEY_PLAN] = plan
@@ -101,7 +103,7 @@ class TestPasswordStage(TestCase):
     def test_valid_password(self):
         """Test with a valid pending user and valid password"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
         session = self.client.session
@@ -129,7 +131,7 @@ class TestPasswordStage(TestCase):
     def test_invalid_password(self):
         """Test with a valid pending user and invalid password"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
         session = self.client.session
@@ -148,7 +150,7 @@ class TestPasswordStage(TestCase):
     def test_invalid_password_lockout(self):
         """Test with a valid pending user and invalid password (trigger logout counter)"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
         session = self.client.session
@@ -189,7 +191,7 @@ class TestPasswordStage(TestCase):
         """Test with a valid pending user and valid password.
         Backend is patched to return PermissionError"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
         session = self.client.session
@@ -90,6 +90,14 @@ class PromptChallengeResponse(ChallengeResponse):
             raise ValidationError(_("Passwords don't match."))

     def validate(self, attrs: dict[str, Any]) -> dict[str, Any]:
+        # Check if we have any static or hidden fields, and ensure they
+        # still have the same value
+        static_hidden_fields: QuerySet[Prompt] = self.stage.fields.filter(
+            type__in=[FieldTypes.HIDDEN, FieldTypes.STATIC]
+        )
+        for static_hidden in static_hidden_fields:
+            attrs[static_hidden.field_key] = static_hidden.placeholder
+
         # Check if we have two password fields, and make sure they are the same
         password_fields: QuerySet[Prompt] = self.stage.fields.filter(
             type=FieldTypes.PASSWORD
@@ -138,8 +146,6 @@ def password_single_validator_factory() -> Callable[[PromptChallenge, str], Any]
 class ListPolicyEngine(PolicyEngine):
     """Slightly modified policy engine, which uses a list instead of a PolicyBindingModel"""

-    __list: list[Policy]
-
     def __init__(
         self, policies: list[Policy], user: User, request: HttpRequest = None
     ) -> None:
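The new block in `PromptChallengeResponse.validate` overwrites whatever the client submitted for hidden and static prompts with the server-side placeholder, so those values cannot be tampered with from the browser. A stripped-down sketch of the idea outside of authentik's models (class and field names here are invented for illustration):

```python
from dataclasses import dataclass


@dataclass
class PromptField:
    field_key: str
    field_type: str  # "text", "hidden", "static", ...
    placeholder: str


def enforce_server_side_values(fields: list[PromptField], attrs: dict) -> dict:
    """Force hidden/static prompt values back to their configured placeholder."""
    for field in fields:
        if field.field_type in ("hidden", "static"):
            attrs[field.field_key] = field.placeholder
    return attrs


fields = [
    PromptField("username", "text", ""),
    PromptField("source", "hidden", "web"),
]
submitted = {"username": "alice", "source": "tampered-by-client"}
print(enforce_server_side_values(fields, submitted))
# {'username': 'alice', 'source': 'web'}
```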
@@ -78,6 +78,12 @@ class TestPromptStage(TestCase):
             required=True,
             placeholder="HIDDEN_PLACEHOLDER",
         )
+        static_prompt = Prompt.objects.create(
+            field_key="static_prompt",
+            type=FieldTypes.STATIC,
+            required=True,
+            placeholder="static",
+        )
         self.stage = PromptStage.objects.create(name="prompt-stage")
         self.stage.fields.set(
             [
@@ -88,6 +94,7 @@ class TestPromptStage(TestCase):
                 password2_prompt,
                 number_prompt,
                 hidden_prompt,
+                static_prompt,
             ]
         )
         self.stage.save()
@@ -100,14 +107,17 @@ class TestPromptStage(TestCase):
             password2_prompt.field_key: "test",
             number_prompt.field_key: 3,
             hidden_prompt.field_key: hidden_prompt.placeholder,
+            static_prompt.field_key: static_prompt.placeholder,
         }
 
-        FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=2)
+        self.binding = FlowStageBinding.objects.create(
+            target=self.flow, stage=self.stage, order=2
+        )
 
     def test_render(self):
         """Test render of form, check if all prompts are rendered correctly"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         session = self.client.session
         session[SESSION_KEY_PLAN] = plan
@@ -125,7 +135,7 @@ class TestPromptStage(TestCase):
     def test_valid_challenge_with_policy(self) -> PromptChallengeResponse:
         """Test challenge_response validation"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         expr = "return request.context['password_prompt'] == request.context['password2_prompt']"
         expr_policy = ExpressionPolicy.objects.create(
@@ -142,7 +152,7 @@ class TestPromptStage(TestCase):
     def test_invalid_challenge(self) -> PromptChallengeResponse:
         """Test challenge_response validation"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         expr = "False"
         expr_policy = ExpressionPolicy.objects.create(
@@ -159,7 +169,7 @@ class TestPromptStage(TestCase):
     def test_valid_challenge_request(self):
         """Test a request with valid challenge_response data"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         session = self.client.session
         session[SESSION_KEY_PLAN] = plan
@@ -196,7 +206,7 @@ class TestPromptStage(TestCase):
     def test_invalid_password(self):
         """Test challenge_response validation"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         self.prompt_data["password2_prompt"] = "qwerqwerqr"
         challenge_response = PromptChallengeResponse(
@@ -215,7 +225,7 @@ class TestPromptStage(TestCase):
     def test_invalid_username(self):
         """Test challenge_response validation"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
        )
         self.prompt_data["username_prompt"] = "akadmin"
         challenge_response = PromptChallengeResponse(
@@ -230,3 +240,17 @@ class TestPromptStage(TestCase):
                 ]
             },
         )
+
+    def test_static_hidden_overwrite(self):
+        """Test that static and hidden fields ignore any value sent to them"""
+        plan = FlowPlan(
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
+        )
+        self.prompt_data["hidden_prompt"] = "foo"
+        self.prompt_data["static_prompt"] = "foo"
+        challenge_response = PromptChallengeResponse(
+            None, stage=self.stage, plan=plan, data=self.prompt_data
+        )
+        self.assertEqual(challenge_response.is_valid(), True)
+        self.assertNotEqual(challenge_response.validated_data["hidden_prompt"], "foo")
+        self.assertNotEqual(challenge_response.validated_data["static_prompt"], "foo")
@@ -30,7 +30,9 @@ class TestUserDeleteStage(TestCase):
             designation=FlowDesignation.AUTHENTICATION,
         )
         self.stage = UserDeleteStage.objects.create(name="delete")
-        FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=2)
+        self.binding = FlowStageBinding.objects.create(
+            target=self.flow, stage=self.stage, order=2
+        )
 
     @patch(
         "authentik.flows.views.to_stage_response",
@@ -39,7 +41,7 @@ class TestUserDeleteStage(TestCase):
     def test_no_user(self):
         """Test without user set"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         session = self.client.session
         session[SESSION_KEY_PLAN] = plan
@@ -66,7 +68,7 @@ class TestUserDeleteStage(TestCase):
     def test_user_delete_get(self):
         """Test Form render"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
         session = self.client.session
@@ -30,12 +30,14 @@ class TestUserLoginStage(TestCase):
             designation=FlowDesignation.AUTHENTICATION,
         )
         self.stage = UserLoginStage.objects.create(name="login")
-        FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=2)
+        self.binding = FlowStageBinding.objects.create(
+            target=self.flow, stage=self.stage, order=2
+        )
 
     def test_valid_password(self):
         """Test with a valid pending user and backend"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
         session = self.client.session
@@ -61,7 +63,7 @@ class TestUserLoginStage(TestCase):
         self.stage.session_duration = "seconds=2"
         self.stage.save()
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
         session = self.client.session
@@ -92,7 +94,7 @@ class TestUserLoginStage(TestCase):
     def test_without_user(self):
         """Test a plan without any pending user, resulting in a denied"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         session = self.client.session
         session[SESSION_KEY_PLAN] = plan
@@ -28,12 +28,14 @@ class TestUserLogoutStage(TestCase):
             designation=FlowDesignation.AUTHENTICATION,
         )
         self.stage = UserLogoutStage.objects.create(name="logout")
-        FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=2)
+        self.binding = FlowStageBinding.objects.create(
+            target=self.flow, stage=self.stage, order=2
+        )
 
     def test_valid_password(self):
         """Test with a valid pending user and backend"""
         plan = FlowPlan(
-            flow_pk=self.flow.pk.hex, stages=[self.stage], markers=[StageMarker()]
+            flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
         )
         plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
         plan.context[PLAN_CONTEXT_AUTHENTICATION_BACKEND] = BACKEND_DJANGO
@@ -12,7 +12,7 @@ class UserWriteStageSerializer(StageSerializer):
     class Meta:
 
         model = UserWriteStage
-        fields = StageSerializer.Meta.fields
+        fields = StageSerializer.Meta.fields + ["create_users_as_inactive"]
 
 
 class UserWriteStageViewSet(UsedByMixin, ModelViewSet):
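With the field added to Meta.fields, the stage's REST serializer accepts and returns create_users_as_inactive. A hedged usage fragment (the import path is an assumption; is_valid/save are standard DRF serializer calls):

# Assumed import path; the serializer class is the one edited above.
# from authentik.stages.user_write.api import UserWriteStageSerializer
serializer = UserWriteStageSerializer(
    data={"name": "enrollment-user-write", "create_users_as_inactive": True}
)
if serializer.is_valid():  # standard DRF validation
    stage = serializer.save()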
@@ -0,0 +1,21 @@
+# Generated by Django 3.2.4 on 2021-06-28 20:31
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_stages_user_write", "0002_auto_20200918_1653"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="userwritestage",
+            name="create_users_as_inactive",
+            field=models.BooleanField(
+                default=False,
+                help_text="When set, newly created users are inactive and cannot login.",
+            ),
+        ),
+    ]
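The migration adds the boolean column with a default of False, so existing user-write stages keep their current behaviour. How the stage consumes the flag is not part of this diff; a hypothetical sketch of the intent only (not authentik's actual code):

# Hypothetical illustration -- the real UserWriteStage logic is not shown in this diff.
def finalize_new_user(user, create_users_as_inactive: bool) -> None:
    """Deactivate a freshly created user when the stage flag is set, then persist."""
    if create_users_as_inactive:
        user.is_active = False  # inactive users cannot log in
    user.save()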
Some files were not shown because too many files have changed in this diff.