Compare commits
259 Commits
version-20 ... version/20
SHA1 | Author | Date | Message
---|---|---|---
7639cdad0a | |||
6533f48912 | |||
2024dac39a | |||
33d5cd2973 | |||
b003e8e1e8 | |||
294d70ae4d | |||
23fd257624 | |||
3e909ae6bb | |||
ff24bc8cb8 | |||
ecf35cfd1d | |||
673520c9f8 | |||
b4f738492d | |||
00a666856d | |||
bff7addb55 | |||
2a90c0b35e | |||
93e27d1959 | |||
02c736d784 | |||
2015d91484 | |||
6433b5982e | |||
f0bc90738f | |||
970a4baf49 | |||
5fbefef56f | |||
1110038eb0 | |||
e945c250db | |||
b46d08cc97 | |||
18eccd995d | |||
6f06ba06d0 | |||
495b068be5 | |||
84c4547005 | |||
065121d280 | |||
8c943e187b | |||
ee54a8b33d | |||
373d94635f | |||
31422c6836 | |||
bca59a2b5a | |||
4ff3bc59b7 | |||
a6b1ee949d | |||
f93e2c5eb6 | |||
8fe38b528b | |||
38dbde191c | |||
39434053b9 | |||
5bdc1a3ddc | |||
36e6d5e394 | |||
0a6efab7cb | |||
c8dc299ae3 | |||
700c66f312 | |||
04861b1b00 | |||
06badf88b2 | |||
67ab4305ad | |||
b35e62e5ae | |||
051016f613 | |||
295f0fe730 | |||
54b7ef42f5 | |||
669b5db8e5 | |||
4882de6ade | |||
95ceabe1ba | |||
769a3424dc | |||
47070261b0 | |||
0d5a7f9b44 | |||
07ceaa20f3 | |||
d1403f6f7d | |||
9430a2eea2 | |||
2592fc3826 | |||
d9ece98bbc | |||
1524efcf51 | |||
8cceacb33f | |||
3b13f322de | |||
a570189c73 | |||
c92c0102ca | |||
c6dddc97f0 | |||
38292a588b | |||
01e54cb986 | |||
e90da9283e | |||
e0e0f4fa6c | |||
90426802fd | |||
8b28039c1b | |||
cdf57d7eea | |||
b237f2ddfb | |||
784a3efaa5 | |||
9e0c4e7e08 | |||
7e62b82d56 | |||
c079f9e339 | |||
72d42249e2 | |||
f9e826d553 | |||
0f5e0a774a | |||
34fe250fb0 | |||
92990b4ded | |||
9e2f165dd8 | |||
88891c99bc | |||
93de363c86 | |||
7db3be604c | |||
ec95a2bddc | |||
de9d483b9f | |||
0c9c3153b5 | |||
557724768a | |||
68608087ec | |||
3118365118 | |||
1f821521c6 | |||
281a460960 | |||
0e131e6b2f | |||
ca9e632b57 | |||
184aa25513 | |||
80df444067 | |||
d18e829d80 | |||
c5dfe189f7 | |||
29f6f1d54f | |||
e952bd671f | |||
421c7df536 | |||
f322198020 | |||
c392aa607d | |||
4e368d1e8d | |||
229468175a | |||
e1f7421c6a | |||
7a836e0d7e | |||
5b57d67b5f | |||
4cd3466e56 | |||
f496b8b5d7 | |||
3d5eebda3b | |||
a26e5f3b17 | |||
fe91bff854 | |||
03958d170b | |||
837fa23af0 | |||
665c1aa81b | |||
ebc6afe015 | |||
45bee4b4dc | |||
c025d64ba3 | |||
a9ef1a3190 | |||
2a53bc4330 | |||
8180d6f9e8 | |||
ccfc1dbcc2 | |||
16f0f89a9d | |||
c5976de500 | |||
1781ab59ba | |||
3367b83368 | |||
f21bb319d0 | |||
f0a8c30ce9 | |||
571049219f | |||
260f0b8710 | |||
787f5a1e96 | |||
b36a3100e6 | |||
e02207f38d | |||
3eafa4711e | |||
9a8240bdd1 | |||
f6ab241219 | |||
ff579fd387 | |||
1693118df7 | |||
b0f09eb2c4 | |||
9c9addb0ce | |||
decb91e5f1 | |||
b39339409a | |||
0d75ce45c3 | |||
8801e39e65 | |||
0faa91c1fe | |||
2d5094fdf7 | |||
8044818a4d | |||
9703e32c1b | |||
f28bfdaeb9 | |||
fdd8e66b91 | |||
562eb8af95 | |||
a43fb026a0 | |||
29b88d0e5c | |||
48c980e8e7 | |||
667ccbe00e | |||
6af2c6a014 | |||
6fe5175f21 | |||
90775d5122 | |||
e52390aa28 | |||
fea493f3a0 | |||
5803575ee2 | |||
1a17ce24f9 | |||
ddd5047cc3 | |||
919946609d | |||
d861a0cec9 | |||
6ea83edd9f | |||
13a8ad3126 | |||
e83465517b | |||
bc23197643 | |||
f887c257f8 | |||
1d4017d94a | |||
8f9e8bb9dd | |||
ded9060af2 | |||
579697b978 | |||
200391c533 | |||
5384a06cb5 | |||
04486d65dc | |||
a449f9c69b | |||
36b346662c | |||
9d392931df | |||
2c60ec50be | |||
77ed25ae34 | |||
b87903a209 | |||
87a418de25 | |||
683d10fa70 | |||
8e84d74634 | |||
d783c632ad | |||
756f3dbedc | |||
eff2e3aeb0 | |||
fb3e302f44 | |||
24d2c94e7c | |||
400adaa282 | |||
6d67ad8451 | |||
7ad1656369 | |||
79b1b21931 | |||
9c9bcb7a01 | |||
75fec19079 | |||
a939e224fc | |||
1fc2bcf02b | |||
b7bfb93928 | |||
4e5dba1d0b | |||
92a448b677 | |||
f875149983 | |||
d70b81fe43 | |||
a64dbc94c1 | |||
b58c913618 | |||
9665e33156 | |||
96d7a5a27f | |||
05aefefb61 | |||
f5dc8c045e | |||
1e1f17aceb | |||
35c1476bbe | |||
18bb4fd0bf | |||
ac77291b6d | |||
5571aa32b6 | |||
66c3535bcb | |||
293c479364 | |||
f9382b8458 | |||
c9fe28dad7 | |||
8bb57a1283 | |||
55a5300bd2 | |||
8ceef82c55 | |||
f933cd99ad | |||
e5b63377a0 | |||
6c81a1929d | |||
e5269306df | |||
7ac5091e5a | |||
bc9ff792a8 | |||
8495ff9fc0 | |||
309cd90c43 | |||
acbc0ee5cc | |||
a60f6e426f | |||
6fd86aa357 | |||
f1e32b989d | |||
6aebbec270 | |||
b86fd7b716 | |||
5693a794b4 | |||
f01bc20d44 | |||
1b03aae7aa | |||
7eb97cd2bc | |||
8aaec3b149 | |||
4c9b49e7a6 | |||
903d1ecc6e | |||
f2197d63f1 | |||
9c0f7e0018 | |||
75ff2480e2 | |||
bc7f84fff4 | |||
1b638adf89 | |||
7eebc40e00 | |||
33ddccf066 | |||
efc8452e72 |
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2021.7.3
+current_version = 2021.8.1-rc1
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)
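The `parse` pattern above is what lets the tool carry an optional pre-release suffix such as `-rc1`. A minimal standalone sketch of how that regex splits a version string, using only Python's `re` module (independent of the bumpversion tooling itself):

```python
import re

# The parse pattern from the config above; the optional "release" group
# captures the "rc1" suffix of 2021.8.1-rc1 and is empty for 2021.7.3.
PARSE = r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)"

match = re.fullmatch(PARSE, "2021.8.1-rc1")
print(match.groupdict())
# {'major': '2021', 'minor': '8', 'patch': '1', 'release': 'rc1'}
```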
.github/workflows/codeql-analysis.yml (new file, vendored, 60 lines)
@@ -0,0 +1,60 @@
+name: "CodeQL"
+
+on:
+  push:
+    branches: [ master, '*', next, version* ]
+  pull_request:
+    # The branches below must be a subset of the branches above
+    branches: [ master ]
+  schedule:
+    - cron: '30 6 * * 5'
+
+jobs:
+  analyze:
+    name: Analyze
+    runs-on: ubuntu-latest
+    permissions:
+      actions: read
+      contents: read
+      security-events: write
+
+    strategy:
+      fail-fast: false
+      matrix:
+        language: [ 'go', 'javascript', 'python' ]
+        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
+        # Learn more:
+        # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
+
+    steps:
+    - name: Checkout repository
+      uses: actions/checkout@v2
+
+    # Initializes the CodeQL tools for scanning.
+    - name: Initialize CodeQL
+      uses: github/codeql-action/init@v1
+      with:
+        languages: ${{ matrix.language }}
+        # If you wish to specify custom queries, you can do so here or in a config file.
+        # By default, queries listed here will override any specified in a config file.
+        # Prefix the list here with "+" to use these queries and those in the config file.
+        # queries: ./path/to/local/query, your-org/your-repo/queries@main
+
+    # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
+    # If this step fails, then you should remove it and run the build manually (see below)
+    - name: Autobuild
+      uses: github/codeql-action/autobuild@v1
+
+    # ℹ️ Command-line programs to run using the OS shell.
+    # 📚 https://git.io/JvXDl
+
+    # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
+    #    and modify them (or add more) to build your code if your project
+    #    uses a compiled language
+
+    #- run: |
+    #   make bootstrap
+    #   make release
+
+    - name: Perform CodeQL Analysis
+      uses: github/codeql-action/analyze@v1
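For reference, the `cron: '30 6 * * 5'` schedule in the new workflow uses the standard five cron fields, so the scheduled scan runs weekly, independent of pushes and pull requests. A tiny sketch that just labels those fields (plain Python, no scheduler library):

```python
# Standard cron field order: minute, hour, day-of-month, month, day-of-week.
# Day-of-week 5 is Friday, so this schedule fires Fridays at 06:30 UTC.
minute, hour, day_of_month, month, day_of_week = "30 6 * * 5".split()
print(f"CodeQL scan scheduled at {hour}:{minute} UTC, day-of-week {day_of_week} (Friday)")
```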
.github/workflows/release.yml (vendored, 22 lines changed)
@@ -33,14 +33,14 @@ jobs:
         with:
           push: ${{ github.event_name == 'release' }}
           tags: |
-            beryju/authentik:2021.7.3,
+            beryju/authentik:2021.8.1-rc1,
             beryju/authentik:latest,
-            ghcr.io/goauthentik/server:2021.7.3,
+            ghcr.io/goauthentik/server:2021.8.1-rc1,
             ghcr.io/goauthentik/server:latest
           platforms: linux/amd64,linux/arm64
           context: .
       - name: Building Docker Image (stable)
-        if: ${{ github.event_name == 'release' && !contains('2021.7.3', 'rc') }}
+        if: ${{ github.event_name == 'release' && !contains('2021.8.1-rc1', 'rc') }}
        run: |
          docker pull beryju/authentik:latest
          docker tag beryju/authentik:latest beryju/authentik:stable
@@ -75,14 +75,14 @@ jobs:
         with:
           push: ${{ github.event_name == 'release' }}
           tags: |
-            beryju/authentik-proxy:2021.7.3,
+            beryju/authentik-proxy:2021.8.1-rc1,
             beryju/authentik-proxy:latest,
-            ghcr.io/goauthentik/proxy:2021.7.3,
+            ghcr.io/goauthentik/proxy:2021.8.1-rc1,
             ghcr.io/goauthentik/proxy:latest
           file: proxy.Dockerfile
           platforms: linux/amd64,linux/arm64
       - name: Building Docker Image (stable)
-        if: ${{ github.event_name == 'release' && !contains('2021.7.3', 'rc') }}
+        if: ${{ github.event_name == 'release' && !contains('2021.8.1-rc1', 'rc') }}
        run: |
          docker pull beryju/authentik-proxy:latest
          docker tag beryju/authentik-proxy:latest beryju/authentik-proxy:stable
@@ -117,14 +117,14 @@ jobs:
         with:
           push: ${{ github.event_name == 'release' }}
           tags: |
-            beryju/authentik-ldap:2021.7.3,
+            beryju/authentik-ldap:2021.8.1-rc1,
             beryju/authentik-ldap:latest,
-            ghcr.io/goauthentik/ldap:2021.7.3,
+            ghcr.io/goauthentik/ldap:2021.8.1-rc1,
             ghcr.io/goauthentik/ldap:latest
           file: ldap.Dockerfile
           platforms: linux/amd64,linux/arm64
       - name: Building Docker Image (stable)
-        if: ${{ github.event_name == 'release' && !contains('2021.7.3', 'rc') }}
+        if: ${{ github.event_name == 'release' && !contains('2021.8.1-rc1', 'rc') }}
        run: |
          docker pull beryju/authentik-ldap:latest
          docker tag beryju/authentik-ldap:latest beryju/authentik-ldap:stable
@@ -157,7 +157,7 @@ jobs:
     steps:
     - uses: actions/checkout@v2
     - name: Setup Node.js environment
-      uses: actions/setup-node@v2.3.0
+      uses: actions/setup-node@v2.4.0
      with:
        node-version: 12.x
    - name: Build web api client and web ui
@@ -176,7 +176,7 @@ jobs:
         SENTRY_PROJECT: authentik
         SENTRY_URL: https://sentry.beryju.org
       with:
-        version: authentik@2021.7.3
+        version: authentik@2021.8.1-rc1
        environment: beryjuorg-prod
        sourcemaps: './web/dist'
        url_prefix: '~/static/dist'
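The `!contains('2021.8.1-rc1', 'rc')` guard in the release workflow is what keeps release candidates from being re-tagged as `stable`. A hedged Python sketch of the same check, written only to make the condition explicit:

```python
def should_tag_stable(version: str, is_release_event: bool) -> bool:
    """Mirror of the workflow condition: only retag 'latest' as 'stable'
    for release events whose version string does not contain 'rc'."""
    return is_release_event and "rc" not in version

print(should_tag_stable("2021.8.1-rc1", True))  # False: rc builds are not promoted
print(should_tag_stable("2021.7.3", True))      # True
```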
.github/workflows/tag.yml (vendored, 2 lines changed)
@@ -27,7 +27,7 @@ jobs:
           docker-compose run -u root server test
       - name: Extract version number
         id: get_version
-        uses: actions/github-script@v4.0.2
+        uses: actions/github-script@v4.1
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
@@ -54,7 +54,7 @@ ENV NODE_ENV=production
 RUN cd /static && npm i && npm run build

 # Stage 5: Build go proxy
-FROM golang:1.16.6 AS builder
+FROM golang:1.17.0 AS builder

 WORKDIR /work

Makefile (3 lines changed)
@@ -43,7 +43,8 @@ gen-web:
 		-g typescript-fetch \
 		-o /local/web/api \
 		--additional-properties=typescriptThreePlus=true,supportsES6=true,npmName=authentik-api,npmVersion=1.0.0
-	cd web/api && npx tsc
+	# npm i runs tsc as part of the installation process
+	cd web/api && npm i

 gen-outpost:
 	docker run \
Pipfile (2 lines changed)
@@ -39,7 +39,7 @@ sentry-sdk = "*"
 service_identity = "*"
 structlog = "*"
 swagger-spec-validator = "*"
-twisted = "==20.3.0"
+twisted = "==21.7.0"
 urllib3 = {extras = ["secure"],version = "*"}
 uvicorn = {extras = ["standard"],version = "*"}
 webauthn = "*"
Pipfile.lock (generated, 233 lines changed)
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"_meta": {
|
||||
"hash": {
|
||||
"sha256": "e4f2e57bd5c709809515ab2b95eb3f5fa337d4a9334f4110a24bf28c3f9d5f8f"
|
||||
"sha256": "f0befa9b3dacc1c3363b9442fa7a43f6be2c46a8fcb80a994230d288a384e54d"
|
||||
},
|
||||
"pipfile-spec": 6,
|
||||
"requires": {
|
||||
@ -122,19 +122,19 @@
|
||||
},
|
||||
"boto3": {
|
||||
"hashes": [
|
||||
"sha256:a012570d3535ec6c4db97e60ef51c2f39f38246429e1455cecc26c633ed81c10",
|
||||
"sha256:c7f45b0417395d3020c98cdc10f942939883018210e29dbfe6fbfc0a74e503ec"
|
||||
"sha256:057196ac15de4de2221a24a3a0a41692414fa1dd697994d062ebd447163265e7",
|
||||
"sha256:852e776cea4287f74edcb45564f8345fb6b0168dde0fd5bf46668b94c3f21177"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.18.7"
|
||||
"version": "==1.18.25"
|
||||
},
|
||||
"botocore": {
|
||||
"hashes": [
|
||||
"sha256:34c8b151a25616ed7791218f6d7780c3a97725fe3ceeaa28085b345a8513af6e",
|
||||
"sha256:dcf399d21170bb899e00d2a693bddcc79e61471fbfead8500a65578700a3190a"
|
||||
"sha256:201e10d3b1b40d65b7c9214be7087d78ed65de00e7362bd1e020741301d09fbc",
|
||||
"sha256:b9820ee29d70059c9b0e2a69ec13ebf80f4a0bc85f47578f17e951438c506b2d"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.21.7"
|
||||
"version": "==1.21.25"
|
||||
},
|
||||
"cachetools": {
|
||||
"hashes": [
|
||||
@ -254,11 +254,11 @@
|
||||
},
|
||||
"charset-normalizer": {
|
||||
"hashes": [
|
||||
"sha256:88fce3fa5b1a84fdcb3f603d889f723d1dd89b26059d0123ca435570e848d5e1",
|
||||
"sha256:c46c3ace2d744cfbdebceaa3c19ae691f53ae621b39fd7570f59d14fb7f2fd12"
|
||||
"sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b",
|
||||
"sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3"
|
||||
],
|
||||
"markers": "python_version >= '3'",
|
||||
"version": "==2.0.3"
|
||||
"version": "==2.0.4"
|
||||
},
|
||||
"click": {
|
||||
"hashes": [
|
||||
@ -314,6 +314,8 @@
|
||||
"sha256:3d8427734c781ea5f1b41d6589c293089704d4759e34597dce91014ac125aad1",
|
||||
"sha256:7ec5d3b029f5fa2b179325908b9cd93db28ab7b85bb6c1db56b10e0b54235177",
|
||||
"sha256:8e56e16617872b0957d1c9742a3f94b43533447fd78321514abbe7db216aa250",
|
||||
"sha256:b01fd6f2737816cb1e08ed4807ae194404790eac7ad030b34f2ce72b332f5586",
|
||||
"sha256:bf40af59ca2465b24e54f671b2de2c59257ddc4f7e5706dbd6930e26823668d3",
|
||||
"sha256:de4e5f7f68220d92b7637fc99847475b59154b7a1b3868fb7385337af54ac9ca",
|
||||
"sha256:eb8cc2afe8b05acbd84a43905832ec78e7b3873fb124ca190f574dca7389a87d",
|
||||
"sha256:ee77aa129f481be46f8d92a1a7db57269a2f23052d5f2433b4621bb457081cc9"
|
||||
@ -354,11 +356,11 @@
|
||||
},
|
||||
"django": {
|
||||
"hashes": [
|
||||
"sha256:3da05fea54fdec2315b54a563d5b59f3b4e2b1e69c3a5841dda35019c01855cd",
|
||||
"sha256:c58b5f19c5ae0afe6d75cbdd7df561e6eb929339985dbbda2565e1cabb19a62e"
|
||||
"sha256:7f92413529aa0e291f3be78ab19be31aefb1e1c9a52cd59e130f505f27a51f13",
|
||||
"sha256:f27f8544c9d4c383bbe007c57e3235918e258364577373d4920e9162837be022"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==3.2.5"
|
||||
"version": "==3.2.6"
|
||||
},
|
||||
"django-dbbackup": {
|
||||
"git": "https://github.com/django-dbbackup/django-dbbackup.git",
|
||||
@ -485,11 +487,11 @@
|
||||
},
|
||||
"google-auth": {
|
||||
"hashes": [
|
||||
"sha256:036dd68c1e8baa422b6b61619b8e02793da2e20f55e69514612de6c080468755",
|
||||
"sha256:7665c04f2df13cc938dc7d9066cddb1f8af62b038bc8b2306848c1b23121865f"
|
||||
"sha256:c012c8be7c442c8309ca8fa0876fef33f5fd977c467be1e1c1c2f721e8ebd73c",
|
||||
"sha256:ea1af050b3e06eb73e4470f704d23007307bc0e87c13e015f6b90460f1407bd3"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'",
|
||||
"version": "==1.33.1"
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==2.0.1"
|
||||
},
|
||||
"gunicorn": {
|
||||
"hashes": [
|
||||
@ -628,11 +630,11 @@
|
||||
},
|
||||
"kubernetes": {
|
||||
"hashes": [
|
||||
"sha256:225a95a0aadbd5b645ab389d941a7980db8cdad2a776fde64d1b43fc3299bde9",
|
||||
"sha256:c69b318696ba797dcf63eb928a8d4370c52319f4140023c502d7dfdf2080eb79"
|
||||
"sha256:0c72d00e7883375bd39ae99758425f5e6cb86388417cf7cc84305c211b2192cf",
|
||||
"sha256:ff31ec17437293e7d4e1459f1228c42d27c7724dfb56b4868aba7a901a5b72c9"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==17.17.0"
|
||||
"version": "==18.20.0"
|
||||
},
|
||||
"ldap3": {
|
||||
"hashes": [
|
||||
@ -666,6 +668,7 @@
|
||||
"sha256:542d454665a3e277f76954418124d67516c5f88e51a900365ed54a9806122b83",
|
||||
"sha256:5a0a14e264069c03e46f926be0d8919f4105c1623d620e7ec0e612a2e9bf1c04",
|
||||
"sha256:5c8c163396cc0df3fd151b927e74f6e4acd67160d6c33304e805b84293351d16",
|
||||
"sha256:64812391546a18896adaa86c77c59a4998f33c24788cadc35789e55b727a37f4",
|
||||
"sha256:66e575c62792c3f9ca47cb8b6fab9e35bab91360c783d1606f758761810c9791",
|
||||
"sha256:6f12e1427285008fd32a6025e38e977d44d6382cf28e7201ed10d6c1698d2a9a",
|
||||
"sha256:74f7d8d439b18fa4c385f3f5dfd11144bb87c1da034a466c5b5577d23a1d9b51",
|
||||
@ -680,6 +683,7 @@
|
||||
"sha256:b007cbb845b28db4fb8b6a5cdcbf65bacb16a8bd328b53cbc0698688a68e1caa",
|
||||
"sha256:bc4313cbeb0e7a416a488d72f9680fffffc645f8a838bd2193809881c67dd106",
|
||||
"sha256:bccbfc27563652de7dc9bdc595cb25e90b59c5f8e23e806ed0fd623755b6565d",
|
||||
"sha256:c1a40c06fd5ba37ad39caa0b3144eb3772e813b5fb5b084198a985431c2f1e8d",
|
||||
"sha256:c47ff7e0a36d4efac9fd692cfa33fbd0636674c102e9e8d9b26e1b93a94e7617",
|
||||
"sha256:c4f05c5a7c49d2fb70223d0d5bcfbe474cf928310ac9fa6a7c6dddc831d0b1d4",
|
||||
"sha256:cdaf11d2bd275bf391b5308f86731e5194a21af45fbaaaf1d9e8147b9160ea92",
|
||||
@ -927,14 +931,6 @@
|
||||
"index": "pypi",
|
||||
"version": "==3.10.1"
|
||||
},
|
||||
"pyhamcrest": {
|
||||
"hashes": [
|
||||
"sha256:412e00137858f04bde0729913874a48485665f2d36fe9ee449f26be864af9316",
|
||||
"sha256:7ead136e03655af85069b6f47b23eb7c3e5c221aa9f022a4fbb499f5b7308f29"
|
||||
],
|
||||
"markers": "python_version >= '3.5'",
|
||||
"version": "==2.0.2"
|
||||
},
|
||||
"pyjwt": {
|
||||
"hashes": [
|
||||
"sha256:934d73fbba91b0483d3857d1aff50e96b2a892384ee2c17417ed3203f173fca1",
|
||||
@ -1072,7 +1068,7 @@
|
||||
"sha256:78f9a9bf4e7be0c5ded4583326e7461e3a3c5aae24073648b4bdfa797d78c9d2",
|
||||
"sha256:9d689e6ca1b3038bc82bf8d23e944b6b6037bc02301a574935b2dd946e0353b9"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"markers": "python_version >= '3.5' and python_version < '4'",
|
||||
"version": "==4.7.2"
|
||||
},
|
||||
"s3transfer": {
|
||||
@ -1085,11 +1081,11 @@
|
||||
},
|
||||
"sentry-sdk": {
|
||||
"hashes": [
|
||||
"sha256:5210a712dd57d88d225c1fc3fe3a3626fee493637bcd54e204826cf04b8d769c",
|
||||
"sha256:6864dcb6f7dec692635e5518c2a5c80010adf673c70340817f1a1b713d65bb41"
|
||||
"sha256:ebe99144fa9618d4b0e7617e7929b75acd905d258c3c779edcd34c0adfffe26c",
|
||||
"sha256:f33d34c886d0ba24c75ea8885a8b3a172358853c7cbde05979fc99c29ef7bc52"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.3.0"
|
||||
"version": "==1.3.1"
|
||||
},
|
||||
"service-identity": {
|
||||
"hashes": [
|
||||
@ -1136,32 +1132,11 @@
|
||||
"tls"
|
||||
],
|
||||
"hashes": [
|
||||
"sha256:040eb6641125d2a9a09cf198ec7b83dd8858c6f51f6770325ed9959c00f5098f",
|
||||
"sha256:147780b8caf21ba2aef3688628eaf13d7e7fe02a86747cd54bfaf2140538f042",
|
||||
"sha256:158ddb80719a4813d292293ac44ba41d8b56555ed009d90994a278237ee63d2c",
|
||||
"sha256:2182000d6ffc05d269e6c03bfcec8b57e20259ca1086180edaedec3f1e689292",
|
||||
"sha256:25ffcf37944bdad4a99981bc74006d735a678d2b5c193781254fbbb6d69e3b22",
|
||||
"sha256:3281d9ce889f7b21bdb73658e887141aa45a102baf3b2320eafcfba954fcefec",
|
||||
"sha256:356e8d8dd3590e790e3dba4db139eb8a17aca64b46629c622e1b1597a4a92478",
|
||||
"sha256:70952c56e4965b9f53b180daecf20a9595cf22b8d0935cd3bd664c90273c3ab2",
|
||||
"sha256:7408c6635ee1b96587289283ebe90ee15dbf9614b05857b446055116bc822d29",
|
||||
"sha256:7c547fd0215db9da8a1bc23182b309e84a232364cc26d829e9ee196ce840b114",
|
||||
"sha256:894f6f3cfa57a15ea0d0714e4283913a5f2511dbd18653dd148eba53b3919797",
|
||||
"sha256:94ac3d55a58c90e2075c5fe1853f2aa3892b73e3bf56395f743aefde8605eeaa",
|
||||
"sha256:a58e61a2a01e5bcbe3b575c0099a2bcb8d70a75b1a087338e0c48dd6e01a5f15",
|
||||
"sha256:c09c47ff9750a8e3aa60ad169c4b95006d455a29b80ad0901f031a103b2991cd",
|
||||
"sha256:ca3a0b8c9110800e576d89b5337373e52018b41069bc879f12fa42b7eb2d0274",
|
||||
"sha256:cd1dc5c85b58494138a3917752b54bb1daa0045d234b7c132c37a61d5483ebad",
|
||||
"sha256:cdbc4c7f0cd7a2218b575844e970f05a1be1861c607b0e048c9bceca0c4d42f7",
|
||||
"sha256:d267125cc0f1e8a0eed6319ba4ac7477da9b78a535601c49ecd20c875576433a",
|
||||
"sha256:d72c55b5d56e176563b91d11952d13b01af8725c623e498db5507b6614fc1e10",
|
||||
"sha256:d95803193561a243cb0401b0567c6b7987d3f2a67046770e1dccd1c9e49a9780",
|
||||
"sha256:e92703bed0cc21d6cb5c61d66922b3b1564015ca8a51325bd164a5e33798d504",
|
||||
"sha256:f058bd0168271de4dcdc39845b52dd0a4a2fecf5f1246335f13f5e96eaebb467",
|
||||
"sha256:f3c19e5bd42bbe4bf345704ad7c326c74d3fd7a1b3844987853bef180be638d4"
|
||||
"sha256:13c1d1d2421ae556d91e81e66cf0d4f4e4e1e4a36a0486933bee4305c6a4fb9b",
|
||||
"sha256:2cd652542463277378b0d349f47c62f20d9306e57d1247baabd6d1d38a109006"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==20.3.0"
|
||||
"version": "==21.7.0"
|
||||
},
|
||||
"txaio": {
|
||||
"hashes": [
|
||||
@ -1211,26 +1186,32 @@
|
||||
"standard"
|
||||
],
|
||||
"hashes": [
|
||||
"sha256:2a76bb359171a504b3d1c853409af3adbfa5cef374a4a59e5881945a97a93eae",
|
||||
"sha256:45ad7dfaaa7d55cab4cd1e85e03f27e9d60bc067ddc59db52a2b0aeca8870292"
|
||||
"sha256:17f898c64c71a2640514d4089da2689e5db1ce5d4086c2d53699bf99513421c1",
|
||||
"sha256:d9a3c0dd1ca86728d3e235182683b4cf94cd53a867c288eaeca80ee781b2caff"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.14.0"
|
||||
"version": "==0.15.0"
|
||||
},
|
||||
"uvloop": {
|
||||
"hashes": [
|
||||
"sha256:0de811931e90ae2da9e19ce70ffad73047ab0c1dba7c6e74f9ae1a3aabeb89bd",
|
||||
"sha256:1ff05116ede1ebdd81802df339e5b1d4cab1dfbd99295bf27e90b4cec64d70e9",
|
||||
"sha256:2d8ffe44ae709f839c54bacf14ed283f41bee90430c3b398e521e10f8d117b3a",
|
||||
"sha256:5cda65fc60a645470b8525ce014516b120b7057b576fa876cdfdd5e60ab1efbb",
|
||||
"sha256:63a3288abbc9c8ee979d7e34c34e780b2fbab3e7e53d00b6c80271119f277399",
|
||||
"sha256:7522df4e45e4f25b50adbbbeb5bb9847495c438a628177099d2721f2751ff825",
|
||||
"sha256:7f4b8a905df909a407c5791fb582f6c03b0d3b491ecdc1cdceaefbc9bf9e08f6",
|
||||
"sha256:905f0adb0c09c9f44222ee02f6b96fd88b493478fffb7a345287f9444e926030",
|
||||
"sha256:ae2b325c0f6d748027f7463077e457006b4fdb35a8788f01754aadba825285ee",
|
||||
"sha256:e71fb9038bfcd7646ca126c5ef19b17e48d4af9e838b2bcfda7a9f55a6552a32"
|
||||
"sha256:04ff57aa137230d8cc968f03481176041ae789308b4d5079118331ab01112450",
|
||||
"sha256:089b4834fd299d82d83a25e3335372f12117a7d38525217c2258e9b9f4578897",
|
||||
"sha256:1e5f2e2ff51aefe6c19ee98af12b4ae61f5be456cd24396953244a30880ad861",
|
||||
"sha256:30ba9dcbd0965f5c812b7c2112a1ddf60cf904c1c160f398e7eed3a6b82dcd9c",
|
||||
"sha256:3a19828c4f15687675ea912cc28bbcb48e9bb907c801873bd1519b96b04fb805",
|
||||
"sha256:6224f1401025b748ffecb7a6e2652b17768f30b1a6a3f7b44660e5b5b690b12d",
|
||||
"sha256:647e481940379eebd314c00440314c81ea547aa636056f554d491e40503c8464",
|
||||
"sha256:6ccd57ae8db17d677e9e06192e9c9ec4bd2066b77790f9aa7dede2cc4008ee8f",
|
||||
"sha256:772206116b9b57cd625c8a88f2413df2fcfd0b496eb188b82a43bed7af2c2ec9",
|
||||
"sha256:8e0d26fa5875d43ddbb0d9d79a447d2ace4180d9e3239788208527c4784f7cab",
|
||||
"sha256:98d117332cc9e5ea8dfdc2b28b0a23f60370d02e1395f88f40d1effd2cb86c4f",
|
||||
"sha256:b572256409f194521a9895aef274cea88731d14732343da3ecdb175228881638",
|
||||
"sha256:bd53f7f5db562f37cd64a3af5012df8cac2c464c97e732ed556800129505bd64",
|
||||
"sha256:bd8f42ea1ea8f4e84d265769089964ddda95eb2bb38b5cbe26712b0616c3edee",
|
||||
"sha256:e814ac2c6f9daf4c36eb8e85266859f42174a4ff0d71b99405ed559257750382",
|
||||
"sha256:f74bc20c7b67d1c27c72601c78cf95be99d5c2cdd4514502b4f3eb0933ff1228"
|
||||
],
|
||||
"version": "==0.15.3"
|
||||
"version": "==0.16.0"
|
||||
},
|
||||
"vine": {
|
||||
"hashes": [
|
||||
@ -1264,11 +1245,11 @@
|
||||
},
|
||||
"websocket-client": {
|
||||
"hashes": [
|
||||
"sha256:b68e4959d704768fa20e35c9d508c8dc2bbc041fd8d267c0d7345cffe2824568",
|
||||
"sha256:e5c333bfa9fa739538b652b6f8c8fc2559f1d364243c8a689d7c0e1d41c2e611"
|
||||
"sha256:0133d2f784858e59959ce82ddac316634229da55b498aac311f1620567a710ec",
|
||||
"sha256:8dfb715d8a992f5712fff8c843adae94e22b22a99b2c5e6b0ec4a1a981cc4e0d"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.1.0"
|
||||
"version": "==1.2.1"
|
||||
},
|
||||
"websockets": {
|
||||
"hashes": [
|
||||
@ -1436,11 +1417,11 @@
|
||||
},
|
||||
"astroid": {
|
||||
"hashes": [
|
||||
"sha256:7b963d1c590d490f60d2973e57437115978d3a2529843f160b5003b721e1e925",
|
||||
"sha256:83e494b02d75d07d4e347b27c066fd791c0c74fc96c613d1ea3de0c82c48168f"
|
||||
"sha256:3975a0bd5373bdce166e60c851cfcbaf21ee96de80ec518c1f4cb3e94c3fb334",
|
||||
"sha256:ab7f36e8a78b8e54a62028ba6beef7561db4cdb6f2a5009ecc44a6f42b5697ef"
|
||||
],
|
||||
"markers": "python_version ~= '3.6'",
|
||||
"version": "==2.6.5"
|
||||
"version": "==2.6.6"
|
||||
},
|
||||
"attrs": {
|
||||
"hashes": [
|
||||
@ -1483,11 +1464,11 @@
|
||||
},
|
||||
"charset-normalizer": {
|
||||
"hashes": [
|
||||
"sha256:88fce3fa5b1a84fdcb3f603d889f723d1dd89b26059d0123ca435570e848d5e1",
|
||||
"sha256:c46c3ace2d744cfbdebceaa3c19ae691f53ae621b39fd7570f59d14fb7f2fd12"
|
||||
"sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b",
|
||||
"sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3"
|
||||
],
|
||||
"markers": "python_version >= '3'",
|
||||
"version": "==2.0.3"
|
||||
"version": "==2.0.4"
|
||||
},
|
||||
"click": {
|
||||
"hashes": [
|
||||
@ -1595,11 +1576,11 @@
|
||||
},
|
||||
"isort": {
|
||||
"hashes": [
|
||||
"sha256:eed17b53c3e7912425579853d078a0832820f023191561fcee9d7cae424e0813",
|
||||
"sha256:f65ce5bd4cbc6abdfbe29afc2f0245538ab358c14590912df638033f157d555e"
|
||||
"sha256:9c2ea1e62d871267b78307fe511c0838ba0da28698c5732d54e2790bf3ba9899",
|
||||
"sha256:e17d6e2b81095c9db0a03a8025a957f334d6ea30b26f9ec70805411e5c7c81f2"
|
||||
],
|
||||
"markers": "python_version < '4' and python_full_version >= '3.6.1'",
|
||||
"version": "==5.9.2"
|
||||
"version": "==5.9.3"
|
||||
},
|
||||
"lazy-object-proxy": {
|
||||
"hashes": [
|
||||
@ -1684,11 +1665,11 @@
|
||||
},
|
||||
"pylint": {
|
||||
"hashes": [
|
||||
"sha256:1f333dc72ef7f5ea166b3230936ebcfb1f3b722e76c980cb9fe6b9f95e8d3172",
|
||||
"sha256:748f81e5776d6273a6619506e08f1b48ff9bcb8198366a56821cf11aac14fc87"
|
||||
"sha256:2e1a0eb2e8ab41d6b5dbada87f066492bb1557b12b76c47c2ee8aa8a11186594",
|
||||
"sha256:8b838c8983ee1904b2de66cce9d0b96649a91901350e956d78f289c3bc87b48e"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2.9.5"
|
||||
"version": "==2.9.6"
|
||||
},
|
||||
"pylint-django": {
|
||||
"hashes": [
|
||||
@ -1766,49 +1747,41 @@
|
||||
},
|
||||
"regex": {
|
||||
"hashes": [
|
||||
"sha256:0eb2c6e0fcec5e0f1d3bcc1133556563222a2ffd2211945d7b1480c1b1a42a6f",
|
||||
"sha256:15dddb19823f5147e7517bb12635b3c82e6f2a3a6b696cc3e321522e8b9308ad",
|
||||
"sha256:173bc44ff95bc1e96398c38f3629d86fa72e539c79900283afa895694229fe6a",
|
||||
"sha256:1c78780bf46d620ff4fff40728f98b8afd8b8e35c3efd638c7df67be2d5cddbf",
|
||||
"sha256:2366fe0479ca0e9afa534174faa2beae87847d208d457d200183f28c74eaea59",
|
||||
"sha256:2bceeb491b38225b1fee4517107b8491ba54fba77cf22a12e996d96a3c55613d",
|
||||
"sha256:2ddeabc7652024803666ea09f32dd1ed40a0579b6fbb2a213eba590683025895",
|
||||
"sha256:2fe5e71e11a54e3355fa272137d521a40aace5d937d08b494bed4529964c19c4",
|
||||
"sha256:319eb2a8d0888fa6f1d9177705f341bc9455a2c8aca130016e52c7fe8d6c37a3",
|
||||
"sha256:3f5716923d3d0bfb27048242a6e0f14eecdb2e2a7fac47eda1d055288595f222",
|
||||
"sha256:422dec1e7cbb2efbbe50e3f1de36b82906def93ed48da12d1714cabcd993d7f0",
|
||||
"sha256:4c9c3155fe74269f61e27617529b7f09552fbb12e44b1189cebbdb24294e6e1c",
|
||||
"sha256:4f64fc59fd5b10557f6cd0937e1597af022ad9b27d454e182485f1db3008f417",
|
||||
"sha256:564a4c8a29435d1f2256ba247a0315325ea63335508ad8ed938a4f14c4116a5d",
|
||||
"sha256:59506c6e8bd9306cd8a41511e32d16d5d1194110b8cfe5a11d102d8b63cf945d",
|
||||
"sha256:598c0a79b4b851b922f504f9f39a863d83ebdfff787261a5ed061c21e67dd761",
|
||||
"sha256:59c00bb8dd8775473cbfb967925ad2c3ecc8886b3b2d0c90a8e2707e06c743f0",
|
||||
"sha256:6110bab7eab6566492618540c70edd4d2a18f40ca1d51d704f1d81c52d245026",
|
||||
"sha256:6afe6a627888c9a6cfbb603d1d017ce204cebd589d66e0703309b8048c3b0854",
|
||||
"sha256:791aa1b300e5b6e5d597c37c346fb4d66422178566bbb426dd87eaae475053fb",
|
||||
"sha256:8394e266005f2d8c6f0bc6780001f7afa3ef81a7a2111fa35058ded6fce79e4d",
|
||||
"sha256:875c355360d0f8d3d827e462b29ea7682bf52327d500a4f837e934e9e4656068",
|
||||
"sha256:89e5528803566af4df368df2d6f503c84fbfb8249e6631c7b025fe23e6bd0cde",
|
||||
"sha256:99d8ab206a5270c1002bfcf25c51bf329ca951e5a169f3b43214fdda1f0b5f0d",
|
||||
"sha256:9a854b916806c7e3b40e6616ac9e85d3cdb7649d9e6590653deb5b341a736cec",
|
||||
"sha256:b85ac458354165405c8a84725de7bbd07b00d9f72c31a60ffbf96bb38d3e25fa",
|
||||
"sha256:bc84fb254a875a9f66616ed4538542fb7965db6356f3df571d783f7c8d256edd",
|
||||
"sha256:c92831dac113a6e0ab28bc98f33781383fe294df1a2c3dfd1e850114da35fd5b",
|
||||
"sha256:cbe23b323988a04c3e5b0c387fe3f8f363bf06c0680daf775875d979e376bd26",
|
||||
"sha256:ccb3d2190476d00414aab36cca453e4596e8f70a206e2aa8db3d495a109153d2",
|
||||
"sha256:d8bbce0c96462dbceaa7ac4a7dfbbee92745b801b24bce10a98d2f2b1ea9432f",
|
||||
"sha256:db2b7df831c3187a37f3bb80ec095f249fa276dbe09abd3d35297fc250385694",
|
||||
"sha256:e586f448df2bbc37dfadccdb7ccd125c62b4348cb90c10840d695592aa1b29e0",
|
||||
"sha256:e5983c19d0beb6af88cb4d47afb92d96751fb3fa1784d8785b1cdf14c6519407",
|
||||
"sha256:e6a1e5ca97d411a461041d057348e578dc344ecd2add3555aedba3b408c9f874",
|
||||
"sha256:eaf58b9e30e0e546cdc3ac06cf9165a1ca5b3de8221e9df679416ca667972035",
|
||||
"sha256:ed693137a9187052fc46eedfafdcb74e09917166362af4cc4fddc3b31560e93d",
|
||||
"sha256:edd1a68f79b89b0c57339bce297ad5d5ffcc6ae7e1afdb10f1947706ed066c9c",
|
||||
"sha256:f080248b3e029d052bf74a897b9d74cfb7643537fbde97fe8225a6467fb559b5",
|
||||
"sha256:f9392a4555f3e4cb45310a65b403d86b589adc773898c25a39184b1ba4db8985",
|
||||
"sha256:f98dc35ab9a749276f1a4a38ab3e0e2ba1662ce710f6530f5b0a6656f1c32b58"
|
||||
"sha256:026beb631097a4a3def7299aa5825e05e057de3c6d72b139c37813bfa351274b",
|
||||
"sha256:14caacd1853e40103f59571f169704367e79fb78fac3d6d09ac84d9197cadd16",
|
||||
"sha256:16d9eaa8c7e91537516c20da37db975f09ac2e7772a0694b245076c6d68f85da",
|
||||
"sha256:18fdc51458abc0a974822333bd3a932d4e06ba2a3243e9a1da305668bd62ec6d",
|
||||
"sha256:28e8af338240b6f39713a34e337c3813047896ace09d51593d6907c66c0708ba",
|
||||
"sha256:3835de96524a7b6869a6c710b26c90e94558c31006e96ca3cf6af6751b27dca1",
|
||||
"sha256:3905c86cc4ab6d71635d6419a6f8d972cab7c634539bba6053c47354fd04452c",
|
||||
"sha256:3c09d88a07483231119f5017904db8f60ad67906efac3f1baa31b9b7f7cca281",
|
||||
"sha256:4551728b767f35f86b8e5ec19a363df87450c7376d7419c3cac5b9ceb4bce576",
|
||||
"sha256:459bbe342c5b2dec5c5223e7c363f291558bc27982ef39ffd6569e8c082bdc83",
|
||||
"sha256:4f421e3cdd3a273bace013751c345f4ebeef08f05e8c10757533ada360b51a39",
|
||||
"sha256:577737ec3d4c195c4aef01b757905779a9e9aee608fa1cf0aec16b5576c893d3",
|
||||
"sha256:57fece29f7cc55d882fe282d9de52f2f522bb85290555b49394102f3621751ee",
|
||||
"sha256:7976d410e42be9ae7458c1816a416218364e06e162b82e42f7060737e711d9ce",
|
||||
"sha256:85f568892422a0e96235eb8ea6c5a41c8ccbf55576a2260c0160800dbd7c4f20",
|
||||
"sha256:8764a78c5464ac6bde91a8c87dd718c27c1cabb7ed2b4beaf36d3e8e390567f9",
|
||||
"sha256:8935937dad2c9b369c3d932b0edbc52a62647c2afb2fafc0c280f14a8bf56a6a",
|
||||
"sha256:8fe58d9f6e3d1abf690174fd75800fda9bdc23d2a287e77758dc0e8567e38ce6",
|
||||
"sha256:937b20955806381e08e54bd9d71f83276d1f883264808521b70b33d98e4dec5d",
|
||||
"sha256:9569da9e78f0947b249370cb8fadf1015a193c359e7e442ac9ecc585d937f08d",
|
||||
"sha256:a3b73390511edd2db2d34ff09aa0b2c08be974c71b4c0505b4a048d5dc128c2b",
|
||||
"sha256:a4eddbe2a715b2dd3849afbdeacf1cc283160b24e09baf64fa5675f51940419d",
|
||||
"sha256:a5c6dbe09aff091adfa8c7cfc1a0e83fdb8021ddb2c183512775a14f1435fe16",
|
||||
"sha256:b63e3571b24a7959017573b6455e05b675050bbbea69408f35f3cb984ec54363",
|
||||
"sha256:bb350eb1060591d8e89d6bac4713d41006cd4d479f5e11db334a48ff8999512f",
|
||||
"sha256:bf6d987edd4a44dd2fa2723fca2790f9442ae4de2c8438e53fcb1befdf5d823a",
|
||||
"sha256:bfa6a679410b394600eafd16336b2ce8de43e9b13f7fb9247d84ef5ad2b45e91",
|
||||
"sha256:c856ec9b42e5af4fe2d8e75970fcc3a2c15925cbcc6e7a9bcb44583b10b95e80",
|
||||
"sha256:cea56288eeda8b7511d507bbe7790d89ae7049daa5f51ae31a35ae3c05408531",
|
||||
"sha256:ea212df6e5d3f60341aef46401d32fcfded85593af1d82b8b4a7a68cd67fdd6b",
|
||||
"sha256:f35567470ee6dbfb946f069ed5f5615b40edcbb5f1e6e1d3d2b114468d505fc6",
|
||||
"sha256:fbc20975eee093efa2071de80df7f972b7b35e560b213aafabcec7c0bd00bd8c",
|
||||
"sha256:ff4a8ad9638b7ca52313d8732f37ecd5fd3c8e3aff10a8ccb93176fd5b3812f6"
|
||||
],
|
||||
"version": "==2021.7.6"
|
||||
"version": "==2021.8.3"
|
||||
},
|
||||
"requests": {
|
||||
"hashes": [
|
||||
@ -1852,11 +1825,11 @@
|
||||
},
|
||||
"stevedore": {
|
||||
"hashes": [
|
||||
"sha256:3a5bbd0652bf552748871eaa73a4a8dc2899786bc497a2aa1fcb4dcdb0debeee",
|
||||
"sha256:50d7b78fbaf0d04cd62411188fa7eedcb03eb7f4c4b37005615ceebe582aa82a"
|
||||
"sha256:59b58edb7f57b11897f150475e7bc0c39c5381f0b8e3fa9f5c20ce6c89ec4aa1",
|
||||
"sha256:920ce6259f0b2498aaa4545989536a27e4e4607b8318802d7ddc3a533d3d069e"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==3.3.0"
|
||||
"version": "==3.4.0"
|
||||
},
|
||||
"toml": {
|
||||
"hashes": [
|
||||
|
README.md (16 lines changed)
@@ -4,13 +4,13 @@

 ---

 [](https://discord.gg/jg33eMhnj6)
 [](https://dev.azure.com/beryjuorg/authentik/_build?definitionId=6)
 [](https://dev.azure.com/beryjuorg/authentik/_build?definitionId=6)
 [](https://codecov.io/gh/goauthentik/authentik)



+[Transifex](https://www.transifex.com/beryjuorg/authentik/)

 ## What is authentik?
@@ -21,7 +21,7 @@ authentik is an open-source Identity Provider focused on flexibility and versati

 For small/test setups it is recommended to use docker-compose, see the [documentation](https://goauthentik.io/docs/installation/docker-compose/)

-For bigger setups, there is a Helm Chart [here])(https://github.com/goauthentik/helm). This is documented [here](https://goauthentik.io/docs/installation/kubernetes/)
+For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/helm). This is documented [here](https://goauthentik.io/docs/installation/kubernetes/)

 ## Screenshots

@@ -1,3 +1,3 @@
 """authentik"""
-__version__ = "2021.7.3"
+__version__ = "2021.8.1-rc1"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
@@ -23,9 +23,7 @@ def get_events_per_1h(**filter_kwargs) -> list[dict[str, int]]:
     date_from = now() - timedelta(days=1)
     result = (
         Event.objects.filter(created__gte=date_from, **filter_kwargs)
-        .annotate(
-            age=ExpressionWrapper(now() - F("created"), output_field=DurationField())
-        )
+        .annotate(age=ExpressionWrapper(now() - F("created"), output_field=DurationField()))
         .annotate(age_hours=ExtractHour("age"))
         .values("age_hours")
         .annotate(count=Count("pk"))
@@ -37,8 +35,7 @@ def get_events_per_1h(**filter_kwargs) -> list[dict[str, int]]:
     for hour in range(0, -24, -1):
         results.append(
             {
-                "x_cord": time.mktime((_now + timedelta(hours=hour)).timetuple())
-                * 1000,
+                "x_cord": time.mktime((_now + timedelta(hours=hour)).timetuple()) * 1000,
                 "y_cord": data[hour * -1],
             }
         )
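The `x_cord` expression above packs each hour bucket into a Unix timestamp in milliseconds, the unit typically expected by JavaScript charting code. A small standalone check of that arithmetic (using a naive `datetime.now()` purely for illustration):

```python
import time
from datetime import datetime, timedelta

_now = datetime.now()
hour = -3  # three hours ago, as produced by the range(0, -24, -1) loop above
x_cord = time.mktime((_now + timedelta(hours=hour)).timetuple()) * 1000
print(int(x_cord))  # epoch milliseconds for that bucket
```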
@@ -16,6 +16,8 @@ from rest_framework.response import Response
 from rest_framework.views import APIView

 from authentik.core.api.utils import PassiveSerializer
+from authentik.outposts.managed import MANAGED_OUTPOST
+from authentik.outposts.models import Outpost


 class RuntimeDict(TypedDict):
@@ -38,6 +40,7 @@ class SystemSerializer(PassiveSerializer):
     runtime = SerializerMethodField()
     tenant = SerializerMethodField()
     server_time = SerializerMethodField()
+    embedded_outpost_host = SerializerMethodField()

     def get_http_headers(self, request: Request) -> dict[str, str]:
         """Get HTTP Request headers"""
@@ -61,9 +64,7 @@ class SystemSerializer(PassiveSerializer):
         return {
             "python_version": python_version,
             "gunicorn_version": ".".join(str(x) for x in gunicorn_version),
-            "environment": "kubernetes"
-            if SERVICE_HOST_ENV_NAME in os.environ
-            else "compose",
+            "environment": "kubernetes" if SERVICE_HOST_ENV_NAME in os.environ else "compose",
             "architecture": platform.machine(),
             "platform": platform.platform(),
             "uname": " ".join(platform.uname()),
@@ -77,6 +78,13 @@ class SystemSerializer(PassiveSerializer):
         """Current server time"""
         return now()

+    def get_embedded_outpost_host(self, request: Request) -> str:
+        """Get the FQDN configured on the embeddded outpost"""
+        outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST)
+        if not outposts.exists():
+            return ""
+        return outposts.first().config.authentik_host
+

 class SystemView(APIView):
     """Get system information."""
@@ -92,10 +92,7 @@ class TaskViewSet(ViewSet):
             task_func.delay(*task.task_call_args, **task.task_call_kwargs)
             messages.success(
                 self.request,
-                _(
-                    "Successfully re-scheduled Task %(name)s!"
-                    % {"name": task.task_name}
-                ),
+                _("Successfully re-scheduled Task %(name)s!" % {"name": task.task_name}),
             )
             return Response(status=204)
         except ImportError:  # pragma: no cover
@@ -41,9 +41,7 @@ class VersionSerializer(PassiveSerializer):

     def get_outdated(self, instance) -> bool:
         """Check if we're running the latest version"""
-        return parse(self.get_version_current(instance)) < parse(
-            self.get_version_latest(instance)
-        )
+        return parse(self.get_version_current(instance)) < parse(self.get_version_latest(instance))


 class VersionView(APIView):
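`get_outdated` leans on `packaging`'s version parsing, which understands pre-release ordering. A quick illustration with the two versions involved in this compare (a sketch, not taken from the repository's tests):

```python
from packaging.version import parse

# Pre-releases sort before the corresponding final release,
# and both sort after the previous stable version.
assert parse("2021.7.3") < parse("2021.8.1-rc1")
assert parse("2021.8.1-rc1") < parse("2021.8.1")
print("outdated:", parse("2021.7.3") < parse("2021.8.1-rc1"))  # outdated: True
```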
@@ -17,9 +17,7 @@ class WorkerView(APIView):

     permission_classes = [IsAdminUser]

-    @extend_schema(
-        responses=inline_serializer("Workers", fields={"count": IntegerField()})
-    )
+    @extend_schema(responses=inline_serializer("Workers", fields={"count": IntegerField()}))
     def get(self, request: Request) -> Response:
         """Get currently connected worker count."""
         count = len(CELERY_APP.control.ping(timeout=0.5))
@@ -37,18 +37,14 @@ def _set_prom_info():
 def update_latest_version(self: MonitoredTask):
     """Update latest version info"""
     try:
-        response = get(
-            "https://api.github.com/repos/goauthentik/authentik/releases/latest"
-        )
+        response = get("https://api.github.com/repos/goauthentik/authentik/releases/latest")
         response.raise_for_status()
         data = response.json()
         tag_name = data.get("tag_name")
         upstream_version = tag_name.split("/")[1]
         cache.set(VERSION_CACHE_KEY, upstream_version, VERSION_CACHE_TIMEOUT)
         self.set_status(
-            TaskResult(
-                TaskResultStatus.SUCCESSFUL, ["Successfully updated latest Version"]
-            )
+            TaskResult(TaskResultStatus.SUCCESSFUL, ["Successfully updated latest Version"])
         )
         _set_prom_info()
         # Check if upstream version is newer than what we're running,
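`tag_name.split("/")[1]` assumes upstream releases are tagged with a prefix and a slash. A hedged example; the exact tag string below is an assumption based on the `version/...` branch naming seen in this compare, not something confirmed by the diff:

```python
# Hypothetical tag name following the "version/<number>" convention.
tag_name = "version/2021.8.1-rc1"
upstream_version = tag_name.split("/")[1]
print(upstream_version)  # 2021.8.1-rc1
```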
@@ -27,9 +27,7 @@ class TestAdminAPI(TestCase):
         response = self.client.get(reverse("authentik_api:admin_system_tasks-list"))
         self.assertEqual(response.status_code, 200)
         body = loads(response.content)
-        self.assertTrue(
-            any(task["task_name"] == "clean_expired_models" for task in body)
-        )
+        self.assertTrue(any(task["task_name"] == "clean_expired_models" for task in body))

     def test_tasks_single(self):
         """Test Task API (read single)"""
@@ -45,9 +43,7 @@ class TestAdminAPI(TestCase):
         self.assertEqual(body["status"], TaskResultStatus.SUCCESSFUL.name)
         self.assertEqual(body["task_name"], "clean_expired_models")
         response = self.client.get(
-            reverse(
-                "authentik_api:admin_system_tasks-detail", kwargs={"pk": "qwerqwer"}
-            )
+            reverse("authentik_api:admin_system_tasks-detail", kwargs={"pk": "qwerqwer"})
         )
         self.assertEqual(response.status_code, 404)

@@ -3,18 +3,20 @@ from base64 import b64decode
 from binascii import Error
 from typing import Any, Optional, Union

+from django.conf import settings
 from rest_framework.authentication import BaseAuthentication, get_authorization_header
 from rest_framework.exceptions import AuthenticationFailed
 from rest_framework.request import Request
 from structlog.stdlib import get_logger

 from authentik.core.models import Token, TokenIntents, User
+from authentik.outposts.models import Outpost

 LOGGER = get_logger()


 # pylint: disable=too-many-return-statements
-def token_from_header(raw_header: bytes) -> Optional[Token]:
+def bearer_auth(raw_header: bytes) -> Optional[User]:
     """raw_header in the Format of `Bearer dGVzdDp0ZXN0`"""
     auth_credentials = raw_header.decode()
     if auth_credentials == "" or " " not in auth_credentials:
@@ -38,8 +40,26 @@ def token_from_header(raw_header: bytes) -> Optional[Token]:
         raise AuthenticationFailed("Malformed header")
     tokens = Token.filter_not_expired(key=password, intent=TokenIntents.INTENT_API)
     if not tokens.exists():
-        raise AuthenticationFailed("Token invalid/expired")
-    return tokens.first()
+        LOGGER.info("Authenticating via secret_key")
+        user = token_secret_key(password)
+        if not user:
+            raise AuthenticationFailed("Token invalid/expired")
+        return user
+    return tokens.first().user
+
+
+def token_secret_key(value: str) -> Optional[User]:
+    """Check if the token is the secret key
+    and return the service account for the managed outpost"""
+    from authentik.outposts.managed import MANAGED_OUTPOST
+
+    if value != settings.SECRET_KEY:
+        return None
+    outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST)
+    if not outposts:
+        return None
+    outpost = outposts.first()
+    return outpost.user


 class TokenAuthentication(BaseAuthentication):
@@ -49,9 +69,9 @@ class TokenAuthentication(BaseAuthentication):
         """Token-based authentication using HTTP Bearer authentication"""
         auth = get_authorization_header(request)

-        token = token_from_header(auth)
+        user = bearer_auth(auth)
         # None is only returned when the header isn't set.
-        if not token:
+        if not user:
             return None

-        return (token.user, None)  # pragma: no cover
+        return (user, None)  # pragma: no cover
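The renamed `bearer_auth` accepts both header styles exercised by the tests further down: `Basic` with a base64-encoded `:<token>` pair, and a plain `Bearer <token>`. A hedged client-side sketch of building such headers; the base URL and token are placeholders, not values documented in this diff:

```python
from base64 import b64encode

import requests

token = "EXAMPLE-TOKEN"  # placeholder API token

# Style 1: HTTP Basic with an empty username and the token as password.
basic = b64encode(f":{token}".encode()).decode()
headers_basic = {"Authorization": f"Basic {basic}"}

# Style 2: plain Bearer token, as in the `Bearer dGVzdDp0ZXN0` docstring above.
headers_bearer = {"Authorization": f"Bearer {token}"}

# Placeholder URL; substitute a real authentik API endpoint.
response = requests.get("https://authentik.example.com/api/", headers=headers_bearer)
print(response.status_code)
```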
@@ -7,9 +7,7 @@ from rest_framework.response import Response
 from rest_framework.viewsets import ModelViewSet


-def permission_required(
-    perm: Optional[str] = None, other_perms: Optional[list[str]] = None
-):
+def permission_required(perm: Optional[str] = None, other_perms: Optional[list[str]] = None):
     """Check permissions for a single custom action"""

     def wrapper_outter(func: Callable):
@@ -63,9 +63,7 @@ def postprocess_schema_responses(result, generator, **kwargs):  # noqa: W0613
         method["responses"].setdefault("400", validation_error.ref)
         method["responses"].setdefault("403", generic_error.ref)

-    result["components"] = generator.registry.build(
-        spectacular_settings.APPEND_COMPONENTS
-    )
+    result["components"] = generator.registry.build(spectacular_settings.APPEND_COMPONENTS)

     # This is a workaround for authentik/stages/prompt/stage.py
     # since the serializer PromptChallengeResponse
@@ -1,12 +1,14 @@
 """Test API Authentication"""
 from base64 import b64encode

+from django.conf import settings
 from django.test import TestCase
 from guardian.shortcuts import get_anonymous_user
 from rest_framework.exceptions import AuthenticationFailed

-from authentik.api.authentication import token_from_header
-from authentik.core.models import Token, TokenIntents
+from authentik.api.authentication import bearer_auth
+from authentik.core.models import USER_ATTRIBUTE_SA, Token, TokenIntents
+from authentik.outposts.managed import OutpostManager


 class TestAPIAuth(TestCase):
@@ -14,36 +16,41 @@ class TestAPIAuth(TestCase):

     def test_valid_basic(self):
         """Test valid token"""
-        token = Token.objects.create(
-            intent=TokenIntents.INTENT_API, user=get_anonymous_user()
-        )
+        token = Token.objects.create(intent=TokenIntents.INTENT_API, user=get_anonymous_user())
         auth = b64encode(f":{token.key}".encode()).decode()
-        self.assertEqual(token_from_header(f"Basic {auth}".encode()), token)
+        self.assertEqual(bearer_auth(f"Basic {auth}".encode()), token.user)

     def test_valid_bearer(self):
         """Test valid token"""
-        token = Token.objects.create(
-            intent=TokenIntents.INTENT_API, user=get_anonymous_user()
-        )
-        self.assertEqual(token_from_header(f"Bearer {token.key}".encode()), token)
+        token = Token.objects.create(intent=TokenIntents.INTENT_API, user=get_anonymous_user())
+        self.assertEqual(bearer_auth(f"Bearer {token.key}".encode()), token.user)

     def test_invalid_type(self):
         """Test invalid type"""
         with self.assertRaises(AuthenticationFailed):
-            token_from_header("foo bar".encode())
+            bearer_auth("foo bar".encode())

     def test_invalid_decode(self):
         """Test invalid bas64"""
         with self.assertRaises(AuthenticationFailed):
-            token_from_header("Basic bar".encode())
+            bearer_auth("Basic bar".encode())

     def test_invalid_empty_password(self):
         """Test invalid with empty password"""
         with self.assertRaises(AuthenticationFailed):
-            token_from_header("Basic :".encode())
+            bearer_auth("Basic :".encode())

     def test_invalid_no_token(self):
         """Test invalid with no token"""
         with self.assertRaises(AuthenticationFailed):
             auth = b64encode(":abc".encode()).decode()
-            self.assertIsNone(token_from_header(f"Basic :{auth}".encode()))
+            self.assertIsNone(bearer_auth(f"Basic :{auth}".encode()))
+
+    def test_managed_outpost(self):
+        """Test managed outpost"""
+        with self.assertRaises(AuthenticationFailed):
+            user = bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
+
+        OutpostManager().run()
+        user = bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
+        self.assertEqual(user.attributes[USER_ATTRIBUTE_SA], True)
@@ -5,7 +5,7 @@ from django.conf import settings
 from django.db import models
 from drf_spectacular.utils import extend_schema
 from kubernetes.config.incluster_config import SERVICE_HOST_ENV_NAME
-from rest_framework.fields import BooleanField, CharField, ChoiceField, ListField
+from rest_framework.fields import BooleanField, CharField, ChoiceField, IntegerField, ListField
 from rest_framework.permissions import AllowAny
 from rest_framework.request import Request
 from rest_framework.response import Response
@@ -33,6 +33,11 @@ class ConfigSerializer(PassiveSerializer):

     capabilities = ListField(child=ChoiceField(choices=Capabilities.choices))

+    cache_timeout = IntegerField(required=True)
+    cache_timeout_flows = IntegerField(required=True)
+    cache_timeout_policies = IntegerField(required=True)
+    cache_timeout_reputation = IntegerField(required=True)
+

 class ConfigView(APIView):
     """Read-only view set that returns the current session's Configs"""
@@ -49,7 +54,7 @@ class ConfigView(APIView):
             caps.append(Capabilities.CAN_GEO_IP)
         if SERVICE_HOST_ENV_NAME in environ:
             # Running in k8s, only s3 backup is supported
-            if CONFIG.y_bool("postgresql.s3_backup"):
+            if CONFIG.y("postgresql.s3_backup"):
                 caps.append(Capabilities.CAN_BACKUP)
         else:
             # Running in compose, backup is always supported
@@ -65,6 +70,10 @@ class ConfigView(APIView):
                 "error_reporting_environment": CONFIG.y("error_reporting.environment"),
                 "error_reporting_send_pii": CONFIG.y("error_reporting.send_pii"),
                 "capabilities": self.get_capabilities(),
+                "cache_timeout": int(CONFIG.y("redis.cache_timeout")),
+                "cache_timeout_flows": int(CONFIG.y("redis.cache_timeout_flows")),
+                "cache_timeout_policies": int(CONFIG.y("redis.cache_timeout_policies")),
+                "cache_timeout_reputation": int(CONFIG.y("redis.cache_timeout_reputation")),
             }
         )
         return Response(config.data)
@@ -52,21 +52,14 @@ from authentik.policies.reputation.api import (
 from authentik.providers.ldap.api import LDAPOutpostConfigViewSet, LDAPProviderViewSet
 from authentik.providers.oauth2.api.provider import OAuth2ProviderViewSet
 from authentik.providers.oauth2.api.scope import ScopeMappingViewSet
-from authentik.providers.oauth2.api.tokens import (
-    AuthorizationCodeViewSet,
-    RefreshTokenViewSet,
-)
-from authentik.providers.proxy.api import (
-    ProxyOutpostConfigViewSet,
-    ProxyProviderViewSet,
-)
+from authentik.providers.oauth2.api.tokens import AuthorizationCodeViewSet, RefreshTokenViewSet
+from authentik.providers.proxy.api import ProxyOutpostConfigViewSet, ProxyProviderViewSet
 from authentik.providers.saml.api import SAMLPropertyMappingViewSet, SAMLProviderViewSet
 from authentik.sources.ldap.api import LDAPPropertyMappingViewSet, LDAPSourceViewSet
 from authentik.sources.oauth.api.source import OAuthSourceViewSet
-from authentik.sources.oauth.api.source_connection import (
-    UserOAuthSourceConnectionViewSet,
-)
-from authentik.sources.plex.api import PlexSourceViewSet
+from authentik.sources.oauth.api.source_connection import UserOAuthSourceConnectionViewSet
+from authentik.sources.plex.api.source import PlexSourceViewSet
+from authentik.sources.plex.api.source_connection import PlexSourceConnectionViewSet
 from authentik.sources.saml.api import SAMLSourceViewSet
 from authentik.stages.authenticator_duo.api import (
     AuthenticatorDuoStageViewSet,
@@ -83,9 +76,7 @@ from authentik.stages.authenticator_totp.api import (
     TOTPAdminDeviceViewSet,
     TOTPDeviceViewSet,
 )
-from authentik.stages.authenticator_validate.api import (
-    AuthenticatorValidateStageViewSet,
-)
+from authentik.stages.authenticator_validate.api import AuthenticatorValidateStageViewSet
 from authentik.stages.authenticator_webauthn.api import (
     AuthenticateWebAuthnStageViewSet,
     WebAuthnAdminDeviceViewSet,
@@ -122,9 +113,7 @@ router.register("core/tenants", TenantViewSet)
 router.register("outposts/instances", OutpostViewSet)
 router.register("outposts/service_connections/all", ServiceConnectionViewSet)
 router.register("outposts/service_connections/docker", DockerServiceConnectionViewSet)
-router.register(
-    "outposts/service_connections/kubernetes", KubernetesServiceConnectionViewSet
-)
+router.register("outposts/service_connections/kubernetes", KubernetesServiceConnectionViewSet)
 router.register("outposts/proxy", ProxyOutpostConfigViewSet)
 router.register("outposts/ldap", LDAPOutpostConfigViewSet)

@@ -139,7 +128,8 @@ router.register("events/transports", NotificationTransportViewSet)
 router.register("events/rules", NotificationRuleViewSet)

 router.register("sources/all", SourceViewSet)
-router.register("sources/oauth_user_connections", UserOAuthSourceConnectionViewSet)
+router.register("sources/user_connections/oauth", UserOAuthSourceConnectionViewSet)
+router.register("sources/user_connections/plex", PlexSourceConnectionViewSet)
 router.register("sources/ldap", LDAPSourceViewSet)
 router.register("sources/saml", SAMLSourceViewSet)
 router.register("sources/oauth", OAuthSourceViewSet)
@@ -184,9 +174,7 @@ router.register(
     StaticAdminDeviceViewSet,
     basename="admin-staticdevice",
 )
-router.register(
-    "authenticators/admin/totp", TOTPAdminDeviceViewSet, basename="admin-totpdevice"
-)
+router.register("authenticators/admin/totp", TOTPAdminDeviceViewSet, basename="admin-totpdevice")
 router.register(
     "authenticators/admin/webauthn",
     WebAuthnAdminDeviceViewSet,
@@ -147,9 +147,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
         """Custom list method that checks Policy based access instead of guardian"""
         should_cache = request.GET.get("search", "") == ""

-        superuser_full_list = (
-            str(request.GET.get("superuser_full_list", "false")).lower() == "true"
-        )
+        superuser_full_list = str(request.GET.get("superuser_full_list", "false")).lower() == "true"
         if superuser_full_list and request.user.is_superuser:
             return super().list(request)

@@ -240,9 +238,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
         app.save()
         return Response({})

-    @permission_required(
-        "authentik_core.view_application", ["authentik_events.view_event"]
-    )
+    @permission_required("authentik_core.view_application", ["authentik_events.view_event"])
     @extend_schema(responses={200: CoordinateSerializer(many=True)})
     @action(detail=True, pagination_class=None, filter_backends=[])
     # pylint: disable=unused-argument
@@ -68,9 +68,7 @@ class AuthenticatedSessionSerializer(ModelSerializer):
         """Get parsed user agent"""
         return user_agent_parser.Parse(instance.last_user_agent)

-    def get_geo_ip(
-        self, instance: AuthenticatedSession
-    ) -> Optional[GeoIPDict]:  # pragma: no cover
+    def get_geo_ip(self, instance: AuthenticatedSession) -> Optional[GeoIPDict]:  # pragma: no cover
         """Get parsed user agent"""
         return GEOIP_READER.city_dict(instance.last_ip)

@@ -15,11 +15,7 @@ from rest_framework.viewsets import GenericViewSet

 from authentik.api.decorators import permission_required
 from authentik.core.api.used_by import UsedByMixin
-from authentik.core.api.utils import (
-    MetaNameSerializer,
-    PassiveSerializer,
-    TypeCreateSerializer,
-)
+from authentik.core.api.utils import MetaNameSerializer, PassiveSerializer, TypeCreateSerializer
 from authentik.core.expression import PropertyMappingEvaluator
 from authentik.core.models import PropertyMapping
 from authentik.lib.utils.reflection import all_subclasses
@@ -141,9 +137,7 @@ class PropertyMappingViewSet(
                 self.request,
                 **test_params.validated_data.get("context", {}),
             )
-            response_data["result"] = dumps(
-                result, indent=(4 if format_result else None)
-            )
+            response_data["result"] = dumps(result, indent=(4 if format_result else None))
         except Exception as exc:  # pylint: disable=broad-except
             response_data["result"] = str(exc)
             response_data["successful"] = False
@@ -74,6 +74,8 @@ class SourceViewSet(
         for subclass in all_subclasses(self.queryset.model):
             subclass: Source
             component = ""
+            if len(subclass.__subclasses__()) > 0:
+                continue
             if subclass._meta.abstract:
                 component = subclass.__bases__[0]().component
             else:
@@ -93,9 +95,7 @@ class SourceViewSet(
     @action(detail=False, pagination_class=None, filter_backends=[])
     def user_settings(self, request: Request) -> Response:
         """Get all sources the user can configure"""
-        _all_sources: Iterable[Source] = Source.objects.filter(
-            enabled=True
-        ).select_subclasses()
+        _all_sources: Iterable[Source] = Source.objects.filter(enabled=True).select_subclasses()
         matching_sources: list[UserSettingSerializer] = []
         for source in _all_sources:
             user_settings = source.ui_user_settings
@@ -70,9 +70,7 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
        serializer.save(
            user=self.request.user,
            intent=TokenIntents.INTENT_API,
            expiring=self.request.user.attributes.get(
                USER_ATTRIBUTE_TOKEN_EXPIRING, True
            ),
            expiring=self.request.user.attributes.get(USER_ATTRIBUTE_TOKEN_EXPIRING, True),
        )

    @permission_required("authentik_core.view_token_key")

@@ -89,7 +87,5 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
        token: Token = self.get_object()
        if token.is_expired:
            raise Http404
        Event.new(EventAction.SECRET_VIEW, secret=token).from_http(  # noqa # nosec
            request
        )
        Event.new(EventAction.SECRET_VIEW, secret=token).from_http(request)  # noqa # nosec
        return Response(TokenViewSerializer({"key": token.key}).data)
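The create handler above keys token expiry off the USER_ATTRIBUTE_TOKEN_EXPIRING user attribute. A minimal sketch of turning expiry off for an automation account (assumes a Django shell inside an authentik install; the username is an example):

    # Minimal sketch, run in an authentik Django shell; "automation" is an example username.
    from authentik.core.models import USER_ATTRIBUTE_TOKEN_EXPIRING, User

    user = User.objects.get(username="automation")
    user.attributes[USER_ATTRIBUTE_TOKEN_EXPIRING] = False  # API tokens created by this user no longer expire
    user.save()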
@@ -79,9 +79,7 @@ class UsedByMixin:
        ).all():
            # Only merge shadows on first object
            if first_object:
                shadows += getattr(
                    manager.model._meta, "authentik_used_by_shadows", []
                )
                shadows += getattr(manager.model._meta, "authentik_used_by_shadows", [])
            first_object = False
            serializer = UsedBySerializer(
                data={
@@ -1,13 +1,16 @@
"""User API Views"""
from json import loads
from typing import Optional

from django.db.models.query import QuerySet
from django.urls import reverse_lazy
from django.utils.http import urlencode
from django_filters.filters import BooleanFilter, CharFilter
from django.utils.translation import gettext as _
from django_filters.filters import BooleanFilter, CharFilter, ModelMultipleChoiceFilter
from django_filters.filterset import FilterSet
from drf_spectacular.utils import extend_schema, extend_schema_field
from guardian.utils import get_anonymous_user
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, extend_schema, extend_schema_field
from guardian.shortcuts import get_anonymous_user, get_objects_for_user
from rest_framework.decorators import action
from rest_framework.fields import CharField, JSONField, SerializerMethodField
from rest_framework.permissions import IsAuthenticated

@@ -17,24 +20,29 @@ from rest_framework.serializers import (
    BooleanField,
    ListSerializer,
    ModelSerializer,
    PrimaryKeyRelatedField,
    Serializer,
    ValidationError,
)
from rest_framework.viewsets import ModelViewSet
from rest_framework_guardian.filters import ObjectPermissionsFilter
from structlog.stdlib import get_logger

from authentik.admin.api.metrics import CoordinateSerializer, get_events_per_1h
from authentik.api.decorators import permission_required
from authentik.core.api.groups import GroupSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import LinkSerializer, PassiveSerializer, is_dict
from authentik.core.middleware import (
    SESSION_IMPERSONATE_ORIGINAL_USER,
    SESSION_IMPERSONATE_USER,
)
from authentik.core.models import Token, TokenIntents, User
from authentik.core.middleware import SESSION_IMPERSONATE_ORIGINAL_USER, SESSION_IMPERSONATE_USER
from authentik.core.models import Group, Token, TokenIntents, User
from authentik.events.models import EventAction
from authentik.stages.email.models import EmailStage
from authentik.stages.email.tasks import send_mails
from authentik.stages.email.utils import TemplateEmailMessage
from authentik.tenants.models import Tenant

LOGGER = get_logger()

class UserSerializer(ModelSerializer):
    """User Serializer"""

@@ -42,7 +50,10 @@ class UserSerializer(ModelSerializer):
    is_superuser = BooleanField(read_only=True)
    avatar = CharField(read_only=True)
    attributes = JSONField(validators=[is_dict], required=False)
    groups = ListSerializer(child=GroupSerializer(), read_only=True, source="ak_groups")
    groups = PrimaryKeyRelatedField(
        allow_empty=True, many=True, source="ak_groups", queryset=Group.objects.all()
    )
    groups_obj = ListSerializer(child=GroupSerializer(), read_only=True, source="ak_groups")
    uid = CharField(read_only=True)

    class Meta:

@@ -56,6 +67,7 @@ class UserSerializer(ModelSerializer):
            "last_login",
            "is_superuser",
            "groups",
            "groups_obj",
            "email",
            "avatar",
            "attributes",
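With groups now writable as a list of primary keys and the nested group objects moved to groups_obj, group membership can be set directly through the users endpoint. A minimal sketch with requests; the base URL, token and IDs are placeholders, and the exact API prefix depends on the deployed version:

    # Minimal sketch, not an official client; URL, token and IDs are placeholders.
    import requests

    API = "https://authentik.example.com/api/v2beta"  # adjust to your deployment/version
    HEADERS = {"Authorization": "Bearer <api-token>"}

    # Assign groups by primary key; the response lists full group objects under "groups_obj".
    resp = requests.patch(
        f"{API}/core/users/42/",
        headers=HEADERS,
        json={"groups": ["<group-uuid-1>", "<group-uuid-2>"]},
    )
    resp.raise_for_status()
    print(resp.json()["groups_obj"])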
@@ -116,17 +128,13 @@ class UserMetricsSerializer(PassiveSerializer):
    def get_logins_failed_per_1h(self, _):
        """Get failed logins per hour for the last 24 hours"""
        user = self.context["user"]
        return get_events_per_1h(
            action=EventAction.LOGIN_FAILED, context__username=user.username
        )
        return get_events_per_1h(action=EventAction.LOGIN_FAILED, context__username=user.username)

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_authorizations_per_1h(self, _):
        """Get failed logins per hour for the last 24 hours"""
        user = self.context["user"]
        return get_events_per_1h(
            action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk
        )
        return get_events_per_1h(action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk)

class UsersFilter(FilterSet):

@@ -141,6 +149,16 @@ class UsersFilter(FilterSet):

    is_superuser = BooleanFilter(field_name="ak_groups", lookup_expr="is_superuser")

    groups_by_name = ModelMultipleChoiceFilter(
        field_name="ak_groups__name",
        to_field_name="name",
        queryset=Group.objects.all(),
    )
    groups_by_pk = ModelMultipleChoiceFilter(
        field_name="ak_groups",
        queryset=Group.objects.all(),
    )

    # pylint: disable=unused-argument
    def filter_attributes(self, queryset, name, value):
        """Filter attributes by query args"""

@@ -164,6 +182,8 @@ class UsersFilter(FilterSet):
            "is_active",
            "is_superuser",
            "attributes",
            "groups_by_name",
            "groups_by_pk",
        ]

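The two new ModelMultipleChoiceFilters allow narrowing the user list by group name or group primary key straight from the query string; repeating the parameter matches users in any of the given groups. A minimal sketch (same placeholder URL and token as above):

    # Minimal sketch; URL and token are placeholders.
    import requests

    API = "https://authentik.example.com/api/v2beta"  # adjust to your deployment/version
    HEADERS = {"Authorization": "Bearer <api-token>"}

    resp = requests.get(
        f"{API}/core/users/",
        headers=HEADERS,
        params=[("groups_by_name", "authentik Admins"), ("groups_by_name", "auditors")],
    )
    for user in resp.json()["results"]:  # assumes the default paginated response shape
        print(user["username"])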
@@ -178,14 +198,34 @@ class UserViewSet(UsedByMixin, ModelViewSet):
    def get_queryset(self):  # pragma: no cover
        return User.objects.all().exclude(pk=get_anonymous_user().pk)

    def _create_recovery_link(self) -> tuple[Optional[str], Optional[Token]]:
        """Create a recovery link (when the current tenant has a recovery flow set),
        that can either be shown to an admin or sent to the user directly"""
        tenant: Tenant = self.request._request.tenant
        # Check that there is a recovery flow, if not return an error
        flow = tenant.flow_recovery
        if not flow:
            LOGGER.debug("No recovery flow set")
            return None, None
        user: User = self.get_object()
        token, __ = Token.objects.get_or_create(
            identifier=f"{user.uid}-password-reset",
            user=user,
            intent=TokenIntents.INTENT_RECOVERY,
        )
        querystring = urlencode({"token": token.key})
        link = self.request.build_absolute_uri(
            reverse_lazy("authentik_core:if-flow", kwargs={"flow_slug": flow.slug})
            + f"?{querystring}"
        )
        return link, token

    @extend_schema(responses={200: SessionUserSerializer(many=False)})
    @action(detail=False, pagination_class=None, filter_backends=[])
    # pylint: disable=invalid-name
    def me(self, request: Request) -> Response:
        """Get information about current user"""
        serializer = SessionUserSerializer(
            data={"user": UserSerializer(request.user).data}
        )
        serializer = SessionUserSerializer(data={"user": UserSelfSerializer(request.user).data})
        if SESSION_IMPERSONATE_USER in request._request.session:
            serializer.initial_data["original"] = UserSelfSerializer(
                request._request.session[SESSION_IMPERSONATE_ORIGINAL_USER]
@@ -193,9 +233,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
        serializer.is_valid()
        return Response(serializer.data)

    @extend_schema(
        request=UserSelfSerializer, responses={200: SessionUserSerializer(many=False)}
    )
    @extend_schema(request=UserSelfSerializer, responses={200: SessionUserSerializer(many=False)})
    @action(
        methods=["PUT"],
        detail=False,

@@ -205,9 +243,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
    )
    def update_self(self, request: Request) -> Response:
        """Allow users to change information on their own profile"""
        data = UserSelfSerializer(
            instance=User.objects.get(pk=request.user.pk), data=request.data
        )
        data = UserSelfSerializer(instance=User.objects.get(pk=request.user.pk), data=request.data)
        if not data.is_valid():
            return Response(data.errors)
        new_user = data.save()
@@ -239,24 +275,60 @@ class UserViewSet(UsedByMixin, ModelViewSet):
    # pylint: disable=invalid-name, unused-argument
    def recovery(self, request: Request, pk: int) -> Response:
        """Create a temporary link that a user can use to recover their accounts"""
        tenant: Tenant = request._request.tenant
        # Check that there is a recovery flow, if not return an error
        flow = tenant.flow_recovery
        if not flow:
        link, _ = self._create_recovery_link()
        if not link:
            LOGGER.debug("Couldn't create token")
            return Response({"link": ""}, status=404)
        user: User = self.get_object()
        token, __ = Token.objects.get_or_create(
            identifier=f"{user.uid}-password-reset",
            user=user,
            intent=TokenIntents.INTENT_RECOVERY,
        )
        querystring = urlencode({"token": token.key})
        link = request.build_absolute_uri(
            reverse_lazy("authentik_core:if-flow", kwargs={"flow_slug": flow.slug})
            + f"?{querystring}"
        )
        return Response({"link": link})

    @permission_required("authentik_core.reset_user_password")
    @extend_schema(
        parameters=[
            OpenApiParameter(
                name="email_stage",
                location=OpenApiParameter.QUERY,
                type=OpenApiTypes.STR,
                required=True,
            )
        ],
        responses={
            "204": Serializer(),
            "404": Serializer(),
        },
    )
    @action(detail=True, pagination_class=None, filter_backends=[])
    # pylint: disable=invalid-name, unused-argument
    def recovery_email(self, request: Request, pk: int) -> Response:
        """Create a temporary link that a user can use to recover their accounts"""
        for_user = self.get_object()
        if for_user.email == "":
            LOGGER.debug("User doesn't have an email address")
            return Response(status=404)
        link, token = self._create_recovery_link()
        if not link:
            LOGGER.debug("Couldn't create token")
            return Response(status=404)
        # Lookup the email stage to assure the current user can access it
        stages = get_objects_for_user(
            request.user, "authentik_stages_email.view_emailstage"
        ).filter(pk=request.query_params.get("email_stage"))
        if not stages.exists():
            LOGGER.debug("Email stage does not exist/user has no permissions")
            return Response(status=404)
        email_stage: EmailStage = stages.first()
        message = TemplateEmailMessage(
            subject=_(email_stage.subject),
            template_name=email_stage.template,
            to=[for_user.email],
            template_context={
                "url": link,
                "user": for_user,
                "expires": token.expires,
            },
        )
        send_mails(email_stage, message)
        return Response(status=204)

    def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet:
        """Custom filter_queryset method which ignores guardian, but still supports sorting"""
        for backend in list(self.filter_backends):
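Both recovery actions above can be driven from any API client once the tenant has a recovery flow. A minimal sketch; URL, token, user ID and the email stage UUID are placeholders, and recovery_email requires the new email_stage query parameter:

    # Minimal sketch; URL, token, user ID and stage UUID are placeholders.
    import requests

    API = "https://authentik.example.com/api/v2beta"  # adjust to your deployment/version
    HEADERS = {"Authorization": "Bearer <api-token>"}

    # Fetch a one-time recovery link for user 42 (requires a recovery flow on the tenant).
    link = requests.get(f"{API}/core/users/42/recovery/", headers=HEADERS).json()["link"]

    # Or let authentik send the link itself through a specific, accessible email stage.
    resp = requests.get(
        f"{API}/core/users/42/recovery_email/",
        headers=HEADERS,
        params={"email_stage": "<email-stage-uuid>"},
    )
    assert resp.status_code == 204  # 404: no flow, no email address, or no access to the stage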
@@ -3,20 +3,14 @@ from typing import Any

from django.db.models import Model
from rest_framework.fields import CharField, IntegerField
from rest_framework.serializers import (
    Serializer,
    SerializerMethodField,
    ValidationError,
)
from rest_framework.serializers import Serializer, SerializerMethodField, ValidationError


def is_dict(value: Any):
    """Ensure a value is a dictionary, useful for JSONFields"""
    if isinstance(value, dict):
        return
    raise ValidationError(
        "Value must be a dictionary, and not have any duplicate keys."
    )
    raise ValidationError("Value must be a dictionary, and not have any duplicate keys.")


class PassiveSerializer(Serializer):

@@ -25,9 +19,7 @@ class PassiveSerializer(Serializer):
    def create(self, validated_data: dict) -> Model:  # pragma: no cover
        return Model()

    def update(
        self, instance: Model, validated_data: dict
    ) -> Model:  # pragma: no cover
    def update(self, instance: Model, validated_data: dict) -> Model:  # pragma: no cover
        return Model()

    class Meta:
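is_dict is attached as a plain DRF field validator (see the attributes field on UserSerializer above), so non-dict values are rejected before the view runs. A standalone illustration that only mirrors the function shown in this hunk:

    # Standalone illustration of the validator's behaviour.
    from rest_framework.serializers import ValidationError

    from authentik.core.api.utils import is_dict

    is_dict({"key": "value"})  # accepted, returns None
    try:
        is_dict(["not", "a", "dict"])
    except ValidationError as exc:
        print(exc.detail)  # the "Value must be a dictionary..." message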
@@ -4,7 +4,7 @@ from channels.generic.websocket import JsonWebsocketConsumer
from rest_framework.exceptions import AuthenticationFailed
from structlog.stdlib import get_logger

from authentik.api.authentication import token_from_header
from authentik.api.authentication import bearer_auth
from authentik.core.models import User

LOGGER = get_logger()

@@ -24,12 +24,12 @@ class AuthJsonConsumer(JsonWebsocketConsumer):
        raw_header = headers[b"authorization"]

        try:
            token = token_from_header(raw_header)
            # token is only None when no header was given, in which case we deny too
            if not token:
            user = bearer_auth(raw_header)
            # user is only None when no header was given, in which case we deny too
            if not user:
                raise DenyConnection()
        except AuthenticationFailed as exc:
            LOGGER.warning("Failed to authenticate", exc=exc)
            raise DenyConnection()

        self.user = token.user
        self.user = user
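The consumer now resolves the Authorization header straight to a user via bearer_auth instead of fetching a token first. The general shape of such a helper, as an illustrative sketch only (not authentik's actual implementation, which lives in authentik.api.authentication):

    # Illustrative sketch of the general pattern; not authentik's actual bearer_auth.
    from typing import Optional

    from rest_framework.exceptions import AuthenticationFailed

    from authentik.core.models import Token, TokenIntents, User


    def bearer_auth_sketch(raw_header: bytes) -> Optional[User]:
        """Resolve an `Authorization: Bearer <key>` header to a user, or None if no header was sent."""
        if not raw_header:
            return None
        auth_type, _, key = raw_header.decode().partition(" ")
        if auth_type.lower() != "bearer" or not key:
            raise AuthenticationFailed("Malformed header")
        token = Token.objects.filter(key=key, intent=TokenIntents.INTENT_API).first()
        if not token or token.is_expired:
            raise AuthenticationFailed("Token invalid/expired")
        return token.user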
@ -38,9 +38,7 @@ class Migration(migrations.Migration):
|
||||
("password", models.CharField(max_length=128, verbose_name="password")),
|
||||
(
|
||||
"last_login",
|
||||
models.DateTimeField(
|
||||
blank=True, null=True, verbose_name="last login"
|
||||
),
|
||||
models.DateTimeField(blank=True, null=True, verbose_name="last login"),
|
||||
),
|
||||
(
|
||||
"is_superuser",
|
||||
@ -53,35 +51,25 @@ class Migration(migrations.Migration):
|
||||
(
|
||||
"username",
|
||||
models.CharField(
|
||||
error_messages={
|
||||
"unique": "A user with that username already exists."
|
||||
},
|
||||
error_messages={"unique": "A user with that username already exists."},
|
||||
help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.",
|
||||
max_length=150,
|
||||
unique=True,
|
||||
validators=[
|
||||
django.contrib.auth.validators.UnicodeUsernameValidator()
|
||||
],
|
||||
validators=[django.contrib.auth.validators.UnicodeUsernameValidator()],
|
||||
verbose_name="username",
|
||||
),
|
||||
),
|
||||
(
|
||||
"first_name",
|
||||
models.CharField(
|
||||
blank=True, max_length=30, verbose_name="first name"
|
||||
),
|
||||
models.CharField(blank=True, max_length=30, verbose_name="first name"),
|
||||
),
|
||||
(
|
||||
"last_name",
|
||||
models.CharField(
|
||||
blank=True, max_length=150, verbose_name="last name"
|
||||
),
|
||||
models.CharField(blank=True, max_length=150, verbose_name="last name"),
|
||||
),
|
||||
(
|
||||
"email",
|
||||
models.EmailField(
|
||||
blank=True, max_length=254, verbose_name="email address"
|
||||
),
|
||||
models.EmailField(blank=True, max_length=254, verbose_name="email address"),
|
||||
),
|
||||
(
|
||||
"is_staff",
|
||||
@ -217,9 +205,7 @@ class Migration(migrations.Migration):
|
||||
),
|
||||
(
|
||||
"expires",
|
||||
models.DateTimeField(
|
||||
default=authentik.core.models.default_token_duration
|
||||
),
|
||||
models.DateTimeField(default=authentik.core.models.default_token_duration),
|
||||
),
|
||||
("expiring", models.BooleanField(default=True)),
|
||||
("description", models.TextField(blank=True, default="")),
|
||||
@ -306,9 +292,7 @@ class Migration(migrations.Migration):
|
||||
("name", models.TextField(help_text="Application's display Name.")),
|
||||
(
|
||||
"slug",
|
||||
models.SlugField(
|
||||
help_text="Internal application name, used in URLs."
|
||||
),
|
||||
models.SlugField(help_text="Internal application name, used in URLs."),
|
||||
),
|
||||
("skip_authorization", models.BooleanField(default=False)),
|
||||
("meta_launch_url", models.URLField(blank=True, default="")),
|
||||
|
@ -17,9 +17,7 @@ def create_default_user(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
username="akadmin", email="root@localhost", name="authentik Default Admin"
|
||||
)
|
||||
if "TF_BUILD" in environ or "AK_ADMIN_PASS" in environ or settings.TEST:
|
||||
akadmin.set_password(
|
||||
environ.get("AK_ADMIN_PASS", "akadmin"), signal=False
|
||||
) # noqa # nosec
|
||||
akadmin.set_password(environ.get("AK_ADMIN_PASS", "akadmin"), signal=False) # noqa # nosec
|
||||
else:
|
||||
akadmin.set_unusable_password()
|
||||
akadmin.save()
|
||||
|
@ -13,8 +13,6 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="source",
|
||||
name="slug",
|
||||
field=models.SlugField(
|
||||
help_text="Internal source name, used in URLs.", unique=True
|
||||
),
|
||||
field=models.SlugField(help_text="Internal source name, used in URLs.", unique=True),
|
||||
),
|
||||
]
|
||||
|
@ -13,8 +13,6 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="user",
|
||||
name="first_name",
|
||||
field=models.CharField(
|
||||
blank=True, max_length=150, verbose_name="first name"
|
||||
),
|
||||
field=models.CharField(blank=True, max_length=150, verbose_name="first name"),
|
||||
),
|
||||
]
|
||||
|
@ -40,9 +40,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="user",
|
||||
name="pb_groups",
|
||||
field=models.ManyToManyField(
|
||||
related_name="users", to="authentik_core.Group"
|
||||
),
|
||||
field=models.ManyToManyField(related_name="users", to="authentik_core.Group"),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="group",
|
||||
|
@ -42,9 +42,7 @@ class Migration(migrations.Migration):
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="token",
|
||||
index=models.Index(
|
||||
fields=["identifier"], name="authentik_co_identif_1a34a8_idx"
|
||||
),
|
||||
index=models.Index(fields=["identifier"], name="authentik_co_identif_1a34a8_idx"),
|
||||
),
|
||||
migrations.RunPython(set_default_token_key),
|
||||
]
|
||||
|
@ -17,8 +17,6 @@ class Migration(migrations.Migration):
|
||||
migrations.AddField(
|
||||
model_name="application",
|
||||
name="meta_icon",
|
||||
field=models.FileField(
|
||||
blank=True, default="", upload_to="application-icons/"
|
||||
),
|
||||
field=models.FileField(blank=True, default="", upload_to="application-icons/"),
|
||||
),
|
||||
]
|
||||
|
@ -25,9 +25,7 @@ class Migration(migrations.Migration):
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="token",
|
||||
index=models.Index(
|
||||
fields=["identifier"], name="authentik_c_identif_d9d032_idx"
|
||||
),
|
||||
index=models.Index(fields=["identifier"], name="authentik_c_identif_d9d032_idx"),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="token",
|
||||
|
@ -32,16 +32,12 @@ class Migration(migrations.Migration):
|
||||
fields=[
|
||||
(
|
||||
"expires",
|
||||
models.DateTimeField(
|
||||
default=authentik.core.models.default_token_duration
|
||||
),
|
||||
models.DateTimeField(default=authentik.core.models.default_token_duration),
|
||||
),
|
||||
("expiring", models.BooleanField(default=True)),
|
||||
(
|
||||
"uuid",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4, primary_key=True, serialize=False
|
||||
),
|
||||
models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False),
|
||||
),
|
||||
("session_key", models.CharField(max_length=40)),
|
||||
("last_ip", models.TextField()),
|
||||
|
@ -13,8 +13,6 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="application",
|
||||
name="meta_icon",
|
||||
field=models.FileField(
|
||||
default=None, null=True, upload_to="application-icons/"
|
||||
),
|
||||
field=models.FileField(default=None, null=True, upload_to="application-icons/"),
|
||||
),
|
||||
]
|
||||
|
@ -17,4 +17,11 @@ class Migration(migrations.Migration):
|
||||
default=None, max_length=500, null=True, upload_to="application-icons/"
|
||||
),
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="authenticatedsession",
|
||||
options={
|
||||
"verbose_name": "Authenticated Session",
|
||||
"verbose_name_plural": "Authenticated Sessions",
|
||||
},
|
||||
),
|
||||
]
|
||||
|
authentik/core/migrations/0027_bootstrap_token.py (new file, 37 lines)
@@ -0,0 +1,37 @@
# Generated by Django 3.2.5 on 2021-08-11 19:40
from os import environ

from django.apps.registry import Apps
from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor


def create_default_user_token(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
    # We have to use a direct import here, otherwise we get an object manager error
    from authentik.core.models import Token, TokenIntents, User

    db_alias = schema_editor.connection.alias

    akadmin = User.objects.using(db_alias).filter(username="akadmin")
    if not akadmin.exists():
        return
    if "AK_ADMIN_TOKEN" not in environ:
        return
    Token.objects.using(db_alias).create(
        identifier="authentik-boostrap-token",
        user=akadmin.first(),
        intent=TokenIntents.INTENT_API,
        expiring=False,
        key=environ["AK_ADMIN_TOKEN"],
    )


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0026_alter_application_meta_icon"),
    ]

    operations = [
        migrations.RunPython(create_default_user_token),
    ]
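This new migration lets a deployment pre-seed a non-expiring API token for akadmin by exporting AK_ADMIN_TOKEN before the migrations run, which is useful for bootstrapping automation. A minimal sketch of using that token afterwards (the base URL is a placeholder):

    # Minimal sketch; the base URL is a placeholder, the token is whatever AK_ADMIN_TOKEN was set to.
    from os import environ

    import requests

    API = "https://authentik.example.com/api/v2beta"  # adjust to your deployment/version
    token = environ["AK_ADMIN_TOKEN"]

    resp = requests.get(f"{API}/core/users/", headers={"Authorization": f"Bearer {token}"})
    resp.raise_for_status()
    print(resp.status_code)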
@ -154,9 +154,7 @@ class User(GuardianUserMixin, AbstractUser):
|
||||
("s", "158"),
|
||||
("r", "g"),
|
||||
]
|
||||
gravatar_url = (
|
||||
f"{GRAVATAR_URL}/avatar/{mail_hash}?{urlencode(parameters, doseq=True)}"
|
||||
)
|
||||
gravatar_url = f"{GRAVATAR_URL}/avatar/{mail_hash}?{urlencode(parameters, doseq=True)}"
|
||||
return escape(gravatar_url)
|
||||
return mode % {
|
||||
"username": self.username,
|
||||
@ -186,9 +184,7 @@ class Provider(SerializerModel):
|
||||
related_name="provider_authorization",
|
||||
)
|
||||
|
||||
property_mappings = models.ManyToManyField(
|
||||
"PropertyMapping", default=None, blank=True
|
||||
)
|
||||
property_mappings = models.ManyToManyField("PropertyMapping", default=None, blank=True)
|
||||
|
||||
objects = InheritanceManager()
|
||||
|
||||
@ -218,9 +214,7 @@ class Application(PolicyBindingModel):
|
||||
add custom fields and other properties"""
|
||||
|
||||
name = models.TextField(help_text=_("Application's display Name."))
|
||||
slug = models.SlugField(
|
||||
help_text=_("Internal application name, used in URLs."), unique=True
|
||||
)
|
||||
slug = models.SlugField(help_text=_("Internal application name, used in URLs."), unique=True)
|
||||
provider = models.OneToOneField(
|
||||
"Provider", null=True, blank=True, default=None, on_delete=models.SET_DEFAULT
|
||||
)
|
||||
@ -244,9 +238,7 @@ class Application(PolicyBindingModel):
|
||||
it is returned as-is"""
|
||||
if not self.meta_icon:
|
||||
return None
|
||||
if self.meta_icon.name.startswith("http") or self.meta_icon.name.startswith(
|
||||
"/static"
|
||||
):
|
||||
if self.meta_icon.name.startswith("http") or self.meta_icon.name.startswith("/static"):
|
||||
return self.meta_icon.name
|
||||
return self.meta_icon.url
|
||||
|
||||
@ -301,14 +293,10 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
|
||||
"""Base Authentication source, i.e. an OAuth Provider, SAML Remote or LDAP Server"""
|
||||
|
||||
name = models.TextField(help_text=_("Source's display Name."))
|
||||
slug = models.SlugField(
|
||||
help_text=_("Internal source name, used in URLs."), unique=True
|
||||
)
|
||||
slug = models.SlugField(help_text=_("Internal source name, used in URLs."), unique=True)
|
||||
|
||||
enabled = models.BooleanField(default=True)
|
||||
property_mappings = models.ManyToManyField(
|
||||
"PropertyMapping", default=None, blank=True
|
||||
)
|
||||
property_mappings = models.ManyToManyField("PropertyMapping", default=None, blank=True)
|
||||
|
||||
authentication_flow = models.ForeignKey(
|
||||
Flow,
|
||||
@ -482,9 +470,7 @@ class PropertyMapping(SerializerModel, ManagedModel):
|
||||
"""Get serializer for this model"""
|
||||
raise NotImplementedError
|
||||
|
||||
def evaluate(
|
||||
self, user: Optional[User], request: Optional[HttpRequest], **kwargs
|
||||
) -> Any:
|
||||
def evaluate(self, user: Optional[User], request: Optional[HttpRequest], **kwargs) -> Any:
|
||||
"""Evaluate `self.expression` using `**kwargs` as Context."""
|
||||
from authentik.core.expression import PropertyMappingEvaluator
|
||||
|
||||
@ -523,9 +509,7 @@ class AuthenticatedSession(ExpiringModel):
|
||||
last_used = models.DateTimeField(auto_now=True)
|
||||
|
||||
@staticmethod
|
||||
def from_request(
|
||||
request: HttpRequest, user: User
|
||||
) -> Optional["AuthenticatedSession"]:
|
||||
def from_request(request: HttpRequest, user: User) -> Optional["AuthenticatedSession"]:
|
||||
"""Create a new session from a http request"""
|
||||
if not hasattr(request, "session") or not request.session.session_key:
|
||||
return None
|
||||
@ -536,3 +520,8 @@ class AuthenticatedSession(ExpiringModel):
|
||||
last_user_agent=request.META.get("HTTP_USER_AGENT", ""),
|
||||
expires=request.session.get_expiry_date(),
|
||||
)
|
||||
|
||||
class Meta:
|
||||
|
||||
verbose_name = _("Authenticated Session")
|
||||
verbose_name_plural = _("Authenticated Sessions")
|
||||
|
@ -14,9 +14,7 @@ from prometheus_client import Gauge
|
||||
# Arguments: user: User, password: str
|
||||
password_changed = Signal()
|
||||
|
||||
GAUGE_MODELS = Gauge(
|
||||
"authentik_models", "Count of various objects", ["model_name", "app"]
|
||||
)
|
||||
GAUGE_MODELS = Gauge("authentik_models", "Count of various objects", ["model_name", "app"])
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from authentik.core.models import AuthenticatedSession, User
|
||||
@ -60,15 +58,11 @@ def user_logged_out_session(sender, request: HttpRequest, user: "User", **_):
|
||||
"""Delete AuthenticatedSession if it exists"""
|
||||
from authentik.core.models import AuthenticatedSession
|
||||
|
||||
AuthenticatedSession.objects.filter(
|
||||
session_key=request.session.session_key
|
||||
).delete()
|
||||
AuthenticatedSession.objects.filter(session_key=request.session.session_key).delete()
|
||||
|
||||
|
||||
@receiver(pre_delete)
|
||||
def authenticated_session_delete(
|
||||
sender: Type[Model], instance: "AuthenticatedSession", **_
|
||||
):
|
||||
def authenticated_session_delete(sender: Type[Model], instance: "AuthenticatedSession", **_):
|
||||
"""Delete session when authenticated session is deleted"""
|
||||
from authentik.core.models import AuthenticatedSession
|
||||
|
||||
|
@ -11,16 +11,8 @@ from django.urls import reverse
|
||||
from django.utils.translation import gettext as _
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.models import (
|
||||
Source,
|
||||
SourceUserMatchingModes,
|
||||
User,
|
||||
UserSourceConnection,
|
||||
)
|
||||
from authentik.core.sources.stage import (
|
||||
PLAN_CONTEXT_SOURCES_CONNECTION,
|
||||
PostUserEnrollmentStage,
|
||||
)
|
||||
from authentik.core.models import Source, SourceUserMatchingModes, User, UserSourceConnection
|
||||
from authentik.core.sources.stage import PLAN_CONTEXT_SOURCES_CONNECTION, PostUserEnrollmentStage
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.flows.models import Flow, Stage, in_memory_stage
|
||||
from authentik.flows.planner import (
|
||||
@ -76,9 +68,7 @@ class SourceFlowManager:
|
||||
# pylint: disable=too-many-return-statements
|
||||
def get_action(self, **kwargs) -> tuple[Action, Optional[UserSourceConnection]]:
|
||||
"""decide which action should be taken"""
|
||||
new_connection = self.connection_type(
|
||||
source=self.source, identifier=self.identifier
|
||||
)
|
||||
new_connection = self.connection_type(source=self.source, identifier=self.identifier)
|
||||
# When request is authenticated, always link
|
||||
if self.request.user.is_authenticated:
|
||||
new_connection.user = self.request.user
|
||||
@ -113,9 +103,7 @@ class SourceFlowManager:
|
||||
SourceUserMatchingModes.USERNAME_DENY,
|
||||
]:
|
||||
if not self.enroll_info.get("username", None):
|
||||
self._logger.warning(
|
||||
"Refusing to use none username", source=self.source
|
||||
)
|
||||
self._logger.warning("Refusing to use none username", source=self.source)
|
||||
return Action.DENY, None
|
||||
query = Q(username__exact=self.enroll_info.get("username", None))
|
||||
self._logger.debug("trying to link with existing user", query=query)
|
||||
@ -229,10 +217,7 @@ class SourceFlowManager:
|
||||
"""Login user and redirect."""
|
||||
messages.success(
|
||||
self.request,
|
||||
_(
|
||||
"Successfully authenticated with %(source)s!"
|
||||
% {"source": self.source.name}
|
||||
),
|
||||
_("Successfully authenticated with %(source)s!" % {"source": self.source.name}),
|
||||
)
|
||||
flow_kwargs = {PLAN_CONTEXT_PENDING_USER: connection.user}
|
||||
return self._handle_login_flow(self.source.authentication_flow, **flow_kwargs)
|
||||
@ -270,10 +255,7 @@ class SourceFlowManager:
|
||||
"""User was not authenticated and previous request was not authenticated."""
|
||||
messages.success(
|
||||
self.request,
|
||||
_(
|
||||
"Successfully authenticated with %(source)s!"
|
||||
% {"source": self.source.name}
|
||||
),
|
||||
_("Successfully authenticated with %(source)s!" % {"source": self.source.name}),
|
||||
)
|
||||
|
||||
# We run the Flow planner here so we can pass the Pending user in the context
|
||||
|
@@ -7,12 +7,14 @@ from boto3.exceptions import Boto3Error
from botocore.exceptions import BotoCoreError, ClientError
from dbbackup.db.exceptions import CommandConnectorError
from django.contrib.humanize.templatetags.humanize import naturaltime
from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.core import management
from django.core.cache import cache
from django.utils.timezone import now
from kubernetes.config.incluster_config import SERVICE_HOST_ENV_NAME
from structlog.stdlib import get_logger

from authentik.core.models import ExpiringModel
from authentik.core.models import AuthenticatedSession, ExpiringModel
from authentik.events.monitored_tasks import MonitoredTask, TaskResult, TaskResultStatus
from authentik.lib.config import CONFIG
from authentik.root.celery import CELERY_APP

@@ -27,15 +29,23 @@ def clean_expired_models(self: MonitoredTask):
    for cls in ExpiringModel.__subclasses__():
        cls: ExpiringModel
        objects = (
            cls.objects.all()
            .exclude(expiring=False)
            .exclude(expiring=True, expires__gt=now())
            cls.objects.all().exclude(expiring=False).exclude(expiring=True, expires__gt=now())
        )
        for obj in objects:
            obj.expire_action()
        amount = objects.count()
        LOGGER.debug("Expired models", model=cls, amount=amount)
        messages.append(f"Expired {amount} {cls._meta.verbose_name_plural}")
    # Special case
    amount = 0
    for session in AuthenticatedSession.objects.all():
        cache_key = f"{KEY_PREFIX}{session.session_key}"
        value = cache.get(cache_key)
        if not value:
            session.delete()
            amount += 1
    LOGGER.debug("Expired sessions", model=AuthenticatedSession, amount=amount)
    messages.append(f"Expired {amount} {AuthenticatedSession._meta.verbose_name_plural}")
    self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL, messages))

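The new special case exists because AuthenticatedSession rows carry no hard expiry of their own; one is only stale once Django's cache no longer holds the backing session under KEY_PREFIX + session_key. For ad-hoc cleanup the task can simply be queued by hand, a small sketch assuming it is registered with Celery as in the rest of this module:

    # Small sketch; assumes clean_expired_models is registered as a Celery task in this module.
    from authentik.core.tasks import clean_expired_models

    clean_expired_models.delay()     # queue the cleanup on a worker
    clean_expired_models.apply()     # or run it eagerly in a shell for debugging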
@ -17,9 +17,7 @@ class TestApplicationsAPI(APITestCase):
|
||||
self.denied = Application.objects.create(name="denied", slug="denied")
|
||||
PolicyBinding.objects.create(
|
||||
target=self.denied,
|
||||
policy=DummyPolicy.objects.create(
|
||||
name="deny", result=False, wait_min=1, wait_max=2
|
||||
),
|
||||
policy=DummyPolicy.objects.create(name="deny", result=False, wait_min=1, wait_max=2),
|
||||
order=0,
|
||||
)
|
||||
|
||||
@ -33,9 +31,7 @@ class TestApplicationsAPI(APITestCase):
|
||||
)
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertJSONEqual(
|
||||
force_str(response.content), {"messages": [], "passing": True}
|
||||
)
|
||||
self.assertJSONEqual(force_str(response.content), {"messages": [], "passing": True})
|
||||
response = self.client.get(
|
||||
reverse(
|
||||
"authentik_api:application-check-access",
|
||||
@ -43,9 +39,7 @@ class TestApplicationsAPI(APITestCase):
|
||||
)
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertJSONEqual(
|
||||
force_str(response.content), {"messages": ["dummy"], "passing": False}
|
||||
)
|
||||
self.assertJSONEqual(force_str(response.content), {"messages": ["dummy"], "passing": False})
|
||||
|
||||
def test_list(self):
|
||||
"""Test list operation without superuser_full_list"""
|
||||
|
@ -46,9 +46,7 @@ class TestImpersonation(TestCase):
|
||||
self.client.force_login(self.other_user)
|
||||
|
||||
self.client.get(
|
||||
reverse(
|
||||
"authentik_core:impersonate-init", kwargs={"user_id": self.akadmin.pk}
|
||||
)
|
||||
reverse("authentik_core:impersonate-init", kwargs={"user_id": self.akadmin.pk})
|
||||
)
|
||||
|
||||
response = self.client.get(reverse("authentik_api:user-me"))
|
||||
|
@ -22,9 +22,7 @@ class TestModels(TestCase):
|
||||
|
||||
def test_token_expire_no_expire(self):
|
||||
"""Test token expiring with "expiring" set"""
|
||||
token = Token.objects.create(
|
||||
expires=now(), user=get_anonymous_user(), expiring=False
|
||||
)
|
||||
token = Token.objects.create(expires=now(), user=get_anonymous_user(), expiring=False)
|
||||
sleep(0.5)
|
||||
self.assertFalse(token.is_expired)
|
||||
|
||||
|
@ -16,9 +16,7 @@ class TestPropertyMappings(TestCase):
|
||||
|
||||
def test_expression(self):
|
||||
"""Test expression"""
|
||||
mapping = PropertyMapping.objects.create(
|
||||
name="test", expression="return 'test'"
|
||||
)
|
||||
mapping = PropertyMapping.objects.create(name="test", expression="return 'test'")
|
||||
self.assertEqual(mapping.evaluate(None, None), "test")
|
||||
|
||||
def test_expression_syntax(self):
|
||||
|
@ -23,9 +23,7 @@ class TestPropertyMappingAPI(APITestCase):
|
||||
def test_test_call(self):
|
||||
"""Test PropertMappings's test endpoint"""
|
||||
response = self.client.post(
|
||||
reverse(
|
||||
"authentik_api:propertymapping-test", kwargs={"pk": self.mapping.pk}
|
||||
),
|
||||
reverse("authentik_api:propertymapping-test", kwargs={"pk": self.mapping.pk}),
|
||||
data={
|
||||
"user": self.user.pk,
|
||||
},
|
||||
|
@ -4,12 +4,7 @@ from django.utils.timezone import now
|
||||
from guardian.shortcuts import get_anonymous_user
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.models import (
|
||||
USER_ATTRIBUTE_TOKEN_EXPIRING,
|
||||
Token,
|
||||
TokenIntents,
|
||||
User,
|
||||
)
|
||||
from authentik.core.models import USER_ATTRIBUTE_TOKEN_EXPIRING, Token, TokenIntents, User
|
||||
from authentik.core.tasks import clean_expired_models
|
||||
|
||||
|
||||
|
@ -3,6 +3,9 @@ from django.urls.base import reverse
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.models import User
|
||||
from authentik.flows.models import Flow, FlowDesignation
|
||||
from authentik.stages.email.models import EmailStage
|
||||
from authentik.tenants.models import Tenant
|
||||
|
||||
|
||||
class TestUsersAPI(APITestCase):
|
||||
@ -27,3 +30,78 @@ class TestUsersAPI(APITestCase):
|
||||
reverse("authentik_api:user-metrics", kwargs={"pk": self.user.pk})
|
||||
)
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
def test_recovery_no_flow(self):
|
||||
"""Test user recovery link (no recovery flow set)"""
|
||||
self.client.force_login(self.admin)
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:user-recovery", kwargs={"pk": self.user.pk})
|
||||
)
|
||||
self.assertEqual(response.status_code, 404)
|
||||
|
||||
def test_recovery(self):
|
||||
"""Test user recovery link (no recovery flow set)"""
|
||||
flow = Flow.objects.create(
|
||||
name="test", title="test", slug="test", designation=FlowDesignation.RECOVERY
|
||||
)
|
||||
tenant: Tenant = Tenant.objects.first()
|
||||
tenant.flow_recovery = flow
|
||||
tenant.save()
|
||||
self.client.force_login(self.admin)
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:user-recovery", kwargs={"pk": self.user.pk})
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
def test_recovery_email_no_flow(self):
|
||||
"""Test user recovery link (no recovery flow set)"""
|
||||
self.client.force_login(self.admin)
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk})
|
||||
)
|
||||
self.assertEqual(response.status_code, 404)
|
||||
self.user.email = "foo@bar.baz"
|
||||
self.user.save()
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk})
|
||||
)
|
||||
self.assertEqual(response.status_code, 404)
|
||||
|
||||
def test_recovery_email_no_stage(self):
|
||||
"""Test user recovery link (no email stage)"""
|
||||
self.user.email = "foo@bar.baz"
|
||||
self.user.save()
|
||||
flow = Flow.objects.create(
|
||||
name="test", title="test", slug="test", designation=FlowDesignation.RECOVERY
|
||||
)
|
||||
tenant: Tenant = Tenant.objects.first()
|
||||
tenant.flow_recovery = flow
|
||||
tenant.save()
|
||||
self.client.force_login(self.admin)
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk})
|
||||
)
|
||||
self.assertEqual(response.status_code, 404)
|
||||
|
||||
def test_recovery_email(self):
|
||||
"""Test user recovery link"""
|
||||
self.user.email = "foo@bar.baz"
|
||||
self.user.save()
|
||||
flow = Flow.objects.create(
|
||||
name="test", title="test", slug="test", designation=FlowDesignation.RECOVERY
|
||||
)
|
||||
tenant: Tenant = Tenant.objects.first()
|
||||
tenant.flow_recovery = flow
|
||||
tenant.save()
|
||||
|
||||
stage = EmailStage.objects.create(name="email")
|
||||
|
||||
self.client.force_login(self.admin)
|
||||
response = self.client.get(
|
||||
reverse(
|
||||
"authentik_api:user-recovery-email",
|
||||
kwargs={"pk": self.user.pk},
|
||||
)
|
||||
+ f"?email_stage={stage.pk}"
|
||||
)
|
||||
self.assertEqual(response.status_code, 204)
|
||||
|
@ -5,10 +5,7 @@ from django.shortcuts import get_object_or_404, redirect
|
||||
from django.views import View
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.middleware import (
|
||||
SESSION_IMPERSONATE_ORIGINAL_USER,
|
||||
SESSION_IMPERSONATE_USER,
|
||||
)
|
||||
from authentik.core.middleware import SESSION_IMPERSONATE_ORIGINAL_USER, SESSION_IMPERSONATE_USER
|
||||
from authentik.core.models import User
|
||||
from authentik.events.models import Event, EventAction
|
||||
|
||||
@ -21,9 +18,7 @@ class ImpersonateInitView(View):
|
||||
def get(self, request: HttpRequest, user_id: int) -> HttpResponse:
|
||||
"""Impersonation handler, checks permissions"""
|
||||
if not request.user.has_perm("impersonate"):
|
||||
LOGGER.debug(
|
||||
"User attempted to impersonate without permissions", user=request.user
|
||||
)
|
||||
LOGGER.debug("User attempted to impersonate without permissions", user=request.user)
|
||||
return HttpResponse("Unauthorized", status=401)
|
||||
|
||||
user_to_be = get_object_or_404(User, pk=user_id)
|
||||
|
@ -14,9 +14,7 @@ class EndSessionView(TemplateView, PolicyAccessView):
|
||||
template_name = "if/end_session.html"
|
||||
|
||||
def resolve_provider_application(self):
|
||||
self.application = get_object_or_404(
|
||||
Application, slug=self.kwargs["application_slug"]
|
||||
)
|
||||
self.application = get_object_or_404(Application, slug=self.kwargs["application_slug"])
|
||||
|
||||
def get_context_data(self, **kwargs: Any) -> dict[str, Any]:
|
||||
context = super().get_context_data(**kwargs)
|
||||
|
@ -10,12 +10,7 @@ from django_filters.filters import BooleanFilter
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.fields import (
|
||||
CharField,
|
||||
DateTimeField,
|
||||
IntegerField,
|
||||
SerializerMethodField,
|
||||
)
|
||||
from rest_framework.fields import CharField, DateTimeField, IntegerField, SerializerMethodField
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import ModelSerializer, ValidationError
|
||||
@ -86,9 +81,7 @@ class CertificateKeyPairSerializer(ModelSerializer):
|
||||
backend=default_backend(),
|
||||
)
|
||||
except (ValueError, TypeError):
|
||||
raise ValidationError(
|
||||
"Unable to load private key (possibly encrypted?)."
|
||||
)
|
||||
raise ValidationError("Unable to load private key (possibly encrypted?).")
|
||||
return value
|
||||
|
||||
class Meta:
|
||||
@ -123,9 +116,7 @@ class CertificateGenerationSerializer(PassiveSerializer):
|
||||
"""Certificate generation parameters"""
|
||||
|
||||
common_name = CharField()
|
||||
subject_alt_name = CharField(
|
||||
required=False, allow_blank=True, label=_("Subject-alt name")
|
||||
)
|
||||
subject_alt_name = CharField(required=False, allow_blank=True, label=_("Subject-alt name"))
|
||||
validity_days = IntegerField(initial=365)
|
||||
|
||||
|
||||
@ -170,9 +161,7 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
|
||||
builder = CertificateBuilder()
|
||||
builder.common_name = data.validated_data["common_name"]
|
||||
builder.build(
|
||||
subject_alt_names=data.validated_data.get("subject_alt_name", "").split(
|
||||
","
|
||||
),
|
||||
subject_alt_names=data.validated_data.get("subject_alt_name", "").split(","),
|
||||
validity_days=int(data.validated_data["validity_days"]),
|
||||
)
|
||||
instance = builder.save()
|
||||
@ -208,9 +197,7 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
|
||||
"Content-Disposition"
|
||||
] = f'attachment; filename="{certificate.name}_certificate.pem"'
|
||||
return response
|
||||
return Response(
|
||||
CertificateDataSerializer({"data": certificate.certificate_data}).data
|
||||
)
|
||||
return Response(CertificateDataSerializer({"data": certificate.certificate_data}).data)
|
||||
|
||||
@extend_schema(
|
||||
parameters=[
|
||||
@ -234,9 +221,7 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
|
||||
).from_http(request)
|
||||
if "download" in request._request.GET:
|
||||
# Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html
|
||||
response = HttpResponse(
|
||||
certificate.key_data, content_type="application/x-pem-file"
|
||||
)
|
||||
response = HttpResponse(certificate.key_data, content_type="application/x-pem-file")
|
||||
response[
|
||||
"Content-Disposition"
|
||||
] = f'attachment; filename="{certificate.name}_private_key.pem"'
|
||||
|
@ -46,9 +46,7 @@ class CertificateBuilder:
|
||||
public_exponent=65537, key_size=2048, backend=default_backend()
|
||||
)
|
||||
self.__public_key = self.__private_key.public_key()
|
||||
alt_names: list[x509.GeneralName] = [
|
||||
x509.DNSName(x) for x in subject_alt_names or []
|
||||
]
|
||||
alt_names: list[x509.GeneralName] = [x509.DNSName(x) for x in subject_alt_names or []]
|
||||
self.__builder = (
|
||||
x509.CertificateBuilder()
|
||||
.subject_name(
|
||||
@ -59,9 +57,7 @@ class CertificateBuilder:
|
||||
self.common_name,
|
||||
),
|
||||
x509.NameAttribute(NameOID.ORGANIZATION_NAME, "authentik"),
|
||||
x509.NameAttribute(
|
||||
NameOID.ORGANIZATIONAL_UNIT_NAME, "Self-signed"
|
||||
),
|
||||
x509.NameAttribute(NameOID.ORGANIZATIONAL_UNIT_NAME, "Self-signed"),
|
||||
]
|
||||
)
|
||||
)
|
||||
@ -77,9 +73,7 @@ class CertificateBuilder:
|
||||
)
|
||||
.add_extension(x509.SubjectAlternativeName(alt_names), critical=True)
|
||||
.not_valid_before(datetime.datetime.today() - one_day)
|
||||
.not_valid_after(
|
||||
datetime.datetime.today() + datetime.timedelta(days=validity_days)
|
||||
)
|
||||
.not_valid_after(datetime.datetime.today() + datetime.timedelta(days=validity_days))
|
||||
.serial_number(int(uuid.uuid4()))
|
||||
.public_key(self.__public_key)
|
||||
)
|
||||
|
@ -57,9 +57,7 @@ class CertificateKeyPair(CreatedUpdatedModel):
|
||||
if not self._private_key and self._private_key != "":
|
||||
try:
|
||||
self._private_key = load_pem_private_key(
|
||||
str.encode(
|
||||
"\n".join([x.strip() for x in self.key_data.split("\n")])
|
||||
),
|
||||
str.encode("\n".join([x.strip() for x in self.key_data.split("\n")])),
|
||||
password=None,
|
||||
backend=default_backend(),
|
||||
)
|
||||
@ -70,24 +68,18 @@ class CertificateKeyPair(CreatedUpdatedModel):
|
||||
@property
|
||||
def fingerprint_sha256(self) -> str:
|
||||
"""Get SHA256 Fingerprint of certificate_data"""
|
||||
return hexlify(self.certificate.fingerprint(hashes.SHA256()), ":").decode(
|
||||
"utf-8"
|
||||
)
|
||||
return hexlify(self.certificate.fingerprint(hashes.SHA256()), ":").decode("utf-8")
|
||||
|
||||
@property
|
||||
def fingerprint_sha1(self) -> str:
|
||||
"""Get SHA1 Fingerprint of certificate_data"""
|
||||
return hexlify(
|
||||
self.certificate.fingerprint(hashes.SHA1()), ":" # nosec
|
||||
).decode("utf-8")
|
||||
return hexlify(self.certificate.fingerprint(hashes.SHA1()), ":").decode("utf-8") # nosec
|
||||
|
||||
@property
|
||||
def kid(self):
|
||||
"""Get Key ID used for JWKS"""
|
||||
return "{0}".format(
|
||||
md5(self.key_data.encode("utf-8")).hexdigest() # nosec
|
||||
if self.key_data
|
||||
else ""
|
||||
md5(self.key_data.encode("utf-8")).hexdigest() if self.key_data else "" # nosec
|
||||
)
|
||||
|
||||
def __str__(self) -> str:
|
||||
|
@@ -143,7 +143,5 @@ class EventViewSet(ModelViewSet):
        """Get all actions"""
        data = []
        for value, name in EventAction.choices:
            data.append(
                {"name": name, "description": "", "component": value, "model_name": ""}
            )
            data.append({"name": name, "description": "", "component": value, "model_name": ""})
        return Response(TypeCreateSerializer(data, many=True).data)
@@ -30,3 +30,5 @@ class NotificationRuleViewSet(UsedByMixin, ModelViewSet):

    queryset = NotificationRule.objects.all()
    serializer_class = NotificationRuleSerializer
    filterset_fields = ["name", "severity", "group__name"]
    ordering = ["name"]
@ -5,11 +5,12 @@ from rest_framework.decorators import action
|
||||
from rest_framework.fields import CharField, ListField, SerializerMethodField
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import ModelSerializer, Serializer
|
||||
from rest_framework.serializers import ModelSerializer
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.api.decorators import permission_required
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
from authentik.events.models import (
|
||||
Notification,
|
||||
NotificationSeverity,
|
||||
@ -41,23 +42,19 @@ class NotificationTransportSerializer(ModelSerializer):
|
||||
]
|
||||
|
||||
|
||||
class NotificationTransportTestSerializer(Serializer):
|
||||
class NotificationTransportTestSerializer(PassiveSerializer):
|
||||
"""Notification test serializer"""
|
||||
|
||||
messages = ListField(child=CharField())
|
||||
|
||||
def create(self, validated_data: Request) -> Response:
|
||||
raise NotImplementedError
|
||||
|
||||
def update(self, request: Request) -> Response:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class NotificationTransportViewSet(UsedByMixin, ModelViewSet):
|
||||
"""NotificationTransport Viewset"""
|
||||
|
||||
queryset = NotificationTransport.objects.all()
|
||||
serializer_class = NotificationTransportSerializer
|
||||
filterset_fields = ["name", "mode", "webhook_url", "send_once"]
|
||||
ordering = ["name"]
|
||||
|
||||
@permission_required("authentik_events.change_notificationtransport")
|
||||
@extend_schema(
|
||||
|
@ -29,12 +29,8 @@ class AuditMiddleware:
|
||||
|
||||
def __call__(self, request: HttpRequest) -> HttpResponse:
|
||||
# Connect signal for automatic logging
|
||||
if hasattr(request, "user") and getattr(
|
||||
request.user, "is_authenticated", False
|
||||
):
|
||||
post_save_handler = partial(
|
||||
self.post_save_handler, user=request.user, request=request
|
||||
)
|
||||
if hasattr(request, "user") and getattr(request.user, "is_authenticated", False):
|
||||
post_save_handler = partial(self.post_save_handler, user=request.user, request=request)
|
||||
pre_delete_handler = partial(
|
||||
self.pre_delete_handler, user=request.user, request=request
|
||||
)
|
||||
@ -94,13 +90,9 @@ class AuditMiddleware:
|
||||
|
||||
@staticmethod
|
||||
# pylint: disable=unused-argument
|
||||
def pre_delete_handler(
|
||||
user: User, request: HttpRequest, sender, instance: Model, **_
|
||||
):
|
||||
def pre_delete_handler(user: User, request: HttpRequest, sender, instance: Model, **_):
|
||||
"""Signal handler for all object's pre_delete"""
|
||||
if isinstance(
|
||||
instance, (Event, Notification, UserObjectPermission)
|
||||
): # pragma: no cover
|
||||
if isinstance(instance, (Event, Notification, UserObjectPermission)): # pragma: no cover
|
||||
return
|
||||
|
||||
EventNewThread(
|
||||
|
@ -14,9 +14,7 @@ def convert_user_to_json(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
event.delete()
|
||||
# Because event objects cannot be updated, we have to re-create them
|
||||
event.pk = None
|
||||
event.user_json = (
|
||||
authentik.events.models.get_user(event.user) if event.user else {}
|
||||
)
|
||||
event.user_json = authentik.events.models.get_user(event.user) if event.user else {}
|
||||
event._state.adding = True
|
||||
event.save()
|
||||
|
||||
@ -58,7 +56,5 @@ class Migration(migrations.Migration):
|
||||
model_name="event",
|
||||
name="user",
|
||||
),
|
||||
migrations.RenameField(
|
||||
model_name="event", old_name="user_json", new_name="user"
|
||||
),
|
||||
migrations.RenameField(model_name="event", old_name="user_json", new_name="user"),
|
||||
]
|
||||
|
@ -11,16 +11,12 @@ def notify_configuration_error(apps: Apps, schema_editor: BaseDatabaseSchemaEdit
|
||||
db_alias = schema_editor.connection.alias
|
||||
Group = apps.get_model("authentik_core", "Group")
|
||||
PolicyBinding = apps.get_model("authentik_policies", "PolicyBinding")
|
||||
EventMatcherPolicy = apps.get_model(
|
||||
"authentik_policies_event_matcher", "EventMatcherPolicy"
|
||||
)
|
||||
EventMatcherPolicy = apps.get_model("authentik_policies_event_matcher", "EventMatcherPolicy")
|
||||
NotificationRule = apps.get_model("authentik_events", "NotificationRule")
|
||||
NotificationTransport = apps.get_model("authentik_events", "NotificationTransport")
|
||||
|
||||
admin_group = (
|
||||
Group.objects.using(db_alias)
|
||||
.filter(name="authentik Admins", is_superuser=True)
|
||||
.first()
|
||||
Group.objects.using(db_alias).filter(name="authentik Admins", is_superuser=True).first()
|
||||
)
|
||||
|
||||
policy, _ = EventMatcherPolicy.objects.using(db_alias).update_or_create(
|
||||
@ -32,9 +28,7 @@ def notify_configuration_error(apps: Apps, schema_editor: BaseDatabaseSchemaEdit
|
||||
defaults={"group": admin_group, "severity": NotificationSeverity.ALERT},
|
||||
)
|
||||
trigger.transports.set(
|
||||
NotificationTransport.objects.using(db_alias).filter(
|
||||
name="default-email-transport"
|
||||
)
|
||||
NotificationTransport.objects.using(db_alias).filter(name="default-email-transport")
|
||||
)
|
||||
trigger.save()
|
||||
PolicyBinding.objects.using(db_alias).update_or_create(
|
||||
@ -50,16 +44,12 @@ def notify_update(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
db_alias = schema_editor.connection.alias
|
||||
Group = apps.get_model("authentik_core", "Group")
|
||||
PolicyBinding = apps.get_model("authentik_policies", "PolicyBinding")
|
||||
EventMatcherPolicy = apps.get_model(
|
||||
"authentik_policies_event_matcher", "EventMatcherPolicy"
|
||||
)
|
||||
EventMatcherPolicy = apps.get_model("authentik_policies_event_matcher", "EventMatcherPolicy")
|
||||
NotificationRule = apps.get_model("authentik_events", "NotificationRule")
|
||||
NotificationTransport = apps.get_model("authentik_events", "NotificationTransport")
|
||||
|
||||
admin_group = (
|
||||
Group.objects.using(db_alias)
|
||||
.filter(name="authentik Admins", is_superuser=True)
|
||||
.first()
|
||||
Group.objects.using(db_alias).filter(name="authentik Admins", is_superuser=True).first()
|
||||
)
|
||||
|
||||
policy, _ = EventMatcherPolicy.objects.using(db_alias).update_or_create(
|
||||
@ -71,9 +61,7 @@ def notify_update(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
defaults={"group": admin_group, "severity": NotificationSeverity.ALERT},
|
||||
)
|
||||
trigger.transports.set(
|
||||
NotificationTransport.objects.using(db_alias).filter(
|
||||
name="default-email-transport"
|
||||
)
|
||||
NotificationTransport.objects.using(db_alias).filter(name="default-email-transport")
|
||||
)
|
||||
trigger.save()
|
||||
PolicyBinding.objects.using(db_alias).update_or_create(
|
||||
@ -89,16 +77,12 @@ def notify_exception(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
db_alias = schema_editor.connection.alias
|
||||
Group = apps.get_model("authentik_core", "Group")
|
||||
PolicyBinding = apps.get_model("authentik_policies", "PolicyBinding")
|
||||
EventMatcherPolicy = apps.get_model(
|
||||
"authentik_policies_event_matcher", "EventMatcherPolicy"
|
||||
)
|
||||
EventMatcherPolicy = apps.get_model("authentik_policies_event_matcher", "EventMatcherPolicy")
|
||||
NotificationRule = apps.get_model("authentik_events", "NotificationRule")
|
||||
NotificationTransport = apps.get_model("authentik_events", "NotificationTransport")
|
||||
|
||||
admin_group = (
|
||||
Group.objects.using(db_alias)
|
||||
.filter(name="authentik Admins", is_superuser=True)
|
||||
.first()
|
||||
Group.objects.using(db_alias).filter(name="authentik Admins", is_superuser=True).first()
|
||||
)
|
||||
|
||||
policy_policy_exc, _ = EventMatcherPolicy.objects.using(db_alias).update_or_create(
|
||||
@ -114,9 +98,7 @@ def notify_exception(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
defaults={"group": admin_group, "severity": NotificationSeverity.ALERT},
|
||||
)
|
||||
trigger.transports.set(
|
||||
NotificationTransport.objects.using(db_alias).filter(
|
||||
name="default-email-transport"
|
||||
)
|
||||
NotificationTransport.objects.using(db_alias).filter(name="default-email-transport")
|
||||
)
|
||||
trigger.save()
|
||||
PolicyBinding.objects.using(db_alias).update_or_create(
|
||||
|
@@ -38,9 +38,7 @@ def progress_bar(

def print_progress_bar(iteration):
"""Progress Bar Printing Function"""
percent = ("{0:." + str(decimals) + "f}").format(
100 * (iteration / float(total))
)
percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total)))
filledLength = int(length * iteration // total)
bar = fill * filledLength + "-" * (length - filledLength)
print(f"\r{prefix} |{bar}| {percent}% {suffix}", end=print_end)
@@ -78,9 +76,7 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name="event",
name="expires",
field=models.DateTimeField(
default=authentik.events.models.default_event_duration
),
field=models.DateTimeField(default=authentik.events.models.default_event_duration),
),
migrations.AddField(
model_name="event",
@@ -15,9 +15,7 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name="event",
name="tenant",
field=models.JSONField(
blank=True, default=authentik.events.models.default_tenant
),
field=models.JSONField(blank=True, default=authentik.events.models.default_tenant),
),
migrations.AlterField(
model_name="event",
@@ -15,10 +15,7 @@ from requests import RequestException, post
from structlog.stdlib import get_logger

from authentik import __version__
from authentik.core.middleware import (
SESSION_IMPERSONATE_ORIGINAL_USER,
SESSION_IMPERSONATE_USER,
)
from authentik.core.middleware import SESSION_IMPERSONATE_ORIGINAL_USER, SESSION_IMPERSONATE_USER
from authentik.core.models import ExpiringModel, Group, User
from authentik.events.geo import GEOIP_READER
from authentik.events.utils import cleanse_dict, get_user, model_to_dict, sanitize_dict
@@ -159,9 +156,7 @@ class Event(ExpiringModel):
if hasattr(request, "user"):
original_user = None
if hasattr(request, "session"):
original_user = request.session.get(
SESSION_IMPERSONATE_ORIGINAL_USER, None
)
original_user = request.session.get(SESSION_IMPERSONATE_ORIGINAL_USER, None)
self.user = get_user(request.user, original_user)
if user:
self.user = get_user(user)
@@ -169,9 +164,7 @@ class Event(ExpiringModel):
if hasattr(request, "session"):
if SESSION_IMPERSONATE_ORIGINAL_USER in request.session:
self.user = get_user(request.session[SESSION_IMPERSONATE_ORIGINAL_USER])
self.user["on_behalf_of"] = get_user(
request.session[SESSION_IMPERSONATE_USER]
)
self.user["on_behalf_of"] = get_user(request.session[SESSION_IMPERSONATE_USER])
# User 255.255.255.255 as fallback if IP cannot be determined
self.client_ip = get_client_ip(request)
# Apply GeoIP Data, when enabled
@@ -414,9 +407,7 @@ class NotificationRule(PolicyBindingModel):
severity = models.TextField(
choices=NotificationSeverity.choices,
default=NotificationSeverity.NOTICE,
help_text=_(
"Controls which severity level the created notifications will have."
),
help_text=_("Controls which severity level the created notifications will have."),
)
group = models.ForeignKey(
Group,
@@ -135,9 +135,7 @@ class MonitoredTask(Task):
self._result = result

# pylint: disable=too-many-arguments
def after_return(
self, status, retval, task_id, args: list[Any], kwargs: dict[str, Any], einfo
):
def after_return(self, status, retval, task_id, args: list[Any], kwargs: dict[str, Any], einfo):
if self._result:
if not self._result.uid:
self._result.uid = self._uid
@@ -159,9 +157,7 @@ class MonitoredTask(Task):
# pylint: disable=too-many-arguments
def on_failure(self, exc, task_id, args, kwargs, einfo):
if not self._result:
self._result = TaskResult(
status=TaskResultStatus.ERROR, messages=[str(exc)]
)
self._result = TaskResult(status=TaskResultStatus.ERROR, messages=[str(exc)])
if not self._result.uid:
self._result.uid = self._uid
TaskInfo(
@@ -179,8 +175,7 @@ class MonitoredTask(Task):
Event.new(
EventAction.SYSTEM_TASK_EXCEPTION,
message=(
f"Task {self.__name__} encountered an error: "
"\n".join(self._result.messages)
f"Task {self.__name__} encountered an error: " "\n".join(self._result.messages)
),
).save()
return super().on_failure(exc, task_id, args, kwargs, einfo=einfo)
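The MonitoredTask hunks above record a TaskResult from Celery's after_return and on_failure hooks. As a rough illustration of that hook-based pattern (independent of authentik's own TaskInfo storage; the class below is a hypothetical helper):

from typing import Any
from celery import Task


class ResultRecordingTask(Task):
    """Sketch: remember the outcome of the last run of a task."""

    def __init__(self):
        super().__init__()
        self._messages: list[str] = []

    def on_failure(self, exc, task_id, args, kwargs, einfo):
        # Celery calls this hook whenever the task body raises
        self._messages = [str(exc)]
        return super().on_failure(exc, task_id, args, kwargs, einfo=einfo)

    def after_return(self, status, retval, task_id, args: list[Any], kwargs: dict[str, Any], einfo):
        # Runs after every execution, successful or not
        if self._messages:
            print(f"task {task_id} finished with errors: {self._messages}")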
@@ -2,11 +2,7 @@
from threading import Thread
from typing import Any, Optional

from django.contrib.auth.signals import (
user_logged_in,
user_logged_out,
user_login_failed,
)
from django.contrib.auth.signals import user_logged_in, user_logged_out, user_login_failed
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.http import HttpRequest
@@ -30,9 +26,7 @@ class EventNewThread(Thread):
kwargs: dict[str, Any]
user: Optional[User] = None

def __init__(
self, action: str, request: HttpRequest, user: Optional[User] = None, **kwargs
):
def __init__(self, action: str, request: HttpRequest, user: Optional[User] = None, **kwargs):
super().__init__()
self.action = action
self.request = request
@@ -68,9 +62,7 @@ def on_user_logged_out(sender, request: HttpRequest, user: User, **_):

@receiver(user_write)
# pylint: disable=unused-argument
def on_user_write(
sender, request: HttpRequest, user: User, data: dict[str, Any], **kwargs
):
def on_user_write(sender, request: HttpRequest, user: User, data: dict[str, Any], **kwargs):
"""Log User write"""
thread = EventNewThread(EventAction.USER_WRITE, request, **data)
thread.kwargs["created"] = kwargs.get("created", False)
@@ -80,9 +72,7 @@ def on_user_write(

@receiver(user_login_failed)
# pylint: disable=unused-argument
def on_user_login_failed(
sender, credentials: dict[str, str], request: HttpRequest, **_
):
def on_user_login_failed(sender, credentials: dict[str, str], request: HttpRequest, **_):
"""Failed Login"""
thread = EventNewThread(EventAction.LOGIN_FAILED, request, **credentials)
thread.run()
@@ -22,9 +22,7 @@ LOGGER = get_logger()
def event_notification_handler(event_uuid: str):
"""Start task for each trigger definition"""
for trigger in NotificationRule.objects.all():
event_trigger_handler.apply_async(
args=[event_uuid, trigger.name], queue="authentik_events"
)
event_trigger_handler.apply_async(args=[event_uuid, trigger.name], queue="authentik_events")

@CELERY_APP.task()
@@ -43,17 +41,13 @@ def event_trigger_handler(event_uuid: str, trigger_name: str):
if "policy_uuid" in event.context:
policy_uuid = event.context["policy_uuid"]
if PolicyBinding.objects.filter(
target__in=NotificationRule.objects.all().values_list(
"pbm_uuid", flat=True
),
target__in=NotificationRule.objects.all().values_list("pbm_uuid", flat=True),
policy=policy_uuid,
).exists():
# If policy that caused this event to be created is attached
# to *any* NotificationRule, we return early.
# This is the most effective way to prevent infinite loops.
LOGGER.debug(
"e(trigger): attempting to prevent infinite loop", trigger=trigger
)
LOGGER.debug("e(trigger): attempting to prevent infinite loop", trigger=trigger)
return

if not trigger.group:
@@ -62,9 +56,7 @@ def event_trigger_handler(event_uuid: str, trigger_name: str):

LOGGER.debug("e(trigger): checking if trigger applies", trigger=trigger)
try:
user = (
User.objects.filter(pk=event.user.get("pk")).first() or get_anonymous_user()
)
user = User.objects.filter(pk=event.user.get("pk")).first() or get_anonymous_user()
except User.DoesNotExist:
LOGGER.warning("e(trigger): failed to get user", trigger=trigger)
return
@@ -99,20 +91,14 @@ def event_trigger_handler(event_uuid: str, trigger_name: str):
retry_backoff=True,
base=MonitoredTask,
)
def notification_transport(
self: MonitoredTask, notification_pk: int, transport_pk: int
):
def notification_transport(self: MonitoredTask, notification_pk: int, transport_pk: int):
"""Send notification over specified transport"""
self.save_on_success = False
try:
notification: Notification = Notification.objects.filter(
pk=notification_pk
).first()
notification: Notification = Notification.objects.filter(pk=notification_pk).first()
if not notification:
return
transport: NotificationTransport = NotificationTransport.objects.get(
pk=transport_pk
)
transport: NotificationTransport = NotificationTransport.objects.get(pk=transport_pk)
transport.send(notification)
self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL))
except NotificationTransportError as exc:
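event_notification_handler above fans out one Celery task per NotificationRule via apply_async with an explicit queue. The general fan-out pattern, sketched with a placeholder task body:

from celery import shared_task


@shared_task
def handle_one(event_uuid: str, trigger_name: str):
    # placeholder body; the real handler loads the Event and evaluates policies
    print(event_uuid, trigger_name)


def fan_out(event_uuid: str, trigger_names: list[str]):
    for name in trigger_names:
        # routing to a dedicated queue keeps event handling off the default worker
        handle_one.apply_async(args=[event_uuid, name], queue="authentik_events")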
@@ -38,7 +38,5 @@ class TestEvents(TestCase):
event = Event.new("unittest", model=temp_model)
event.save() # We save to ensure nothing is un-saveable
model_content_type = ContentType.objects.get_for_model(temp_model)
self.assertEqual(
event.context.get("model").get("app"), model_content_type.app_label
)
self.assertEqual(event.context.get("model").get("app"), model_content_type.app_label)
self.assertEqual(event.context.get("model").get("pk"), temp_model.pk.hex)
@@ -81,12 +81,8 @@ class TestEventsNotifications(TestCase):

execute_mock = MagicMock()
passes = MagicMock(side_effect=PolicyException)
with patch(
"authentik.policies.event_matcher.models.EventMatcherPolicy.passes", passes
):
with patch(
"authentik.events.models.NotificationTransport.send", execute_mock
):
with patch("authentik.policies.event_matcher.models.EventMatcherPolicy.passes", passes):
with patch("authentik.events.models.NotificationTransport.send", execute_mock):
Event.new(EventAction.CUSTOM_PREFIX).save()
self.assertEqual(passes.call_count, 1)

@@ -96,9 +92,7 @@ class TestEventsNotifications(TestCase):
self.group.users.add(user2)
self.group.save()

transport = NotificationTransport.objects.create(
name="transport", send_once=True
)
transport = NotificationTransport.objects.create(name="transport", send_once=True)
NotificationRule.objects.filter(name__startswith="default").delete()
trigger = NotificationRule.objects.create(name="trigger", group=self.group)
trigger.transports.add(transport)
@@ -14,12 +14,7 @@ from rest_framework.fields import BooleanField, FileField, ReadOnlyField
from rest_framework.parsers import MultiPartParser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import (
CharField,
ModelSerializer,
Serializer,
SerializerMethodField,
)
from rest_framework.serializers import CharField, ModelSerializer, Serializer, SerializerMethodField
from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger

@@ -152,11 +147,7 @@ class FlowViewSet(UsedByMixin, ModelViewSet):
],
)
@extend_schema(
request={
"multipart/form-data": inline_serializer(
"SetIcon", fields={"file": FileField()}
)
},
request={"multipart/form-data": inline_serializer("SetIcon", fields={"file": FileField()})},
responses={
204: OpenApiResponse(description="Successfully imported flow"),
400: OpenApiResponse(description="Bad request"),
@@ -221,9 +212,7 @@ class FlowViewSet(UsedByMixin, ModelViewSet):
.order_by("order")
):
for p_index, policy_binding in enumerate(
get_objects_for_user(
request.user, "authentik_policies.view_policybinding"
)
get_objects_for_user(request.user, "authentik_policies.view_policybinding")
.filter(target=stage_binding)
.exclude(policy__isnull=True)
.order_by("order")
@@ -256,20 +245,14 @@ class FlowViewSet(UsedByMixin, ModelViewSet):
element: DiagramElement = body[index]
if element.type == "condition":
# Policy passes, link policy yes to next stage
footer.append(
f"{element.identifier}(yes, right)->{body[index + 1].identifier}"
)
footer.append(f"{element.identifier}(yes, right)->{body[index + 1].identifier}")
# Policy doesn't pass, go to stage after next stage
no_element = body[index + 1]
if no_element.type != "end":
no_element = body[index + 2]
footer.append(
f"{element.identifier}(no, bottom)->{no_element.identifier}"
)
footer.append(f"{element.identifier}(no, bottom)->{no_element.identifier}")
elif element.type == "operation":
footer.append(
f"{element.identifier}(bottom)->{body[index + 1].identifier}"
)
footer.append(f"{element.identifier}(bottom)->{body[index + 1].identifier}")
diagram = "\n".join([str(x) for x in header + body + footer])
return Response({"diagram": diagram})
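The FlowViewSet hunk above collapses an extend_schema(...) call onto one line; the decorator documents a multipart upload body through drf-spectacular's inline_serializer. A standalone sketch of the same schema declaration (the mixin and endpoint names here are illustrative, not authentik's):

from drf_spectacular.utils import OpenApiResponse, extend_schema, inline_serializer
from rest_framework.decorators import action
from rest_framework.fields import FileField
from rest_framework.parsers import MultiPartParser
from rest_framework.request import Request
from rest_framework.response import Response


class IconUploadMixin:
    @extend_schema(
        request={"multipart/form-data": inline_serializer("SetIcon", fields={"file": FileField()})},
        responses={204: OpenApiResponse(description="Icon updated")},
    )
    @action(detail=True, methods=["post"], parser_classes=(MultiPartParser,))
    def set_icon(self, request: Request, pk: str) -> Response:
        # a real implementation would store request.FILES["file"] on the instance
        return Response(status=204)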
@@ -95,9 +95,7 @@ class Command(BaseCommand): # pragma: no cover
"""Output results human readable"""
total_max: int = max([max(inner) for inner in values])
total_min: int = min([min(inner) for inner in values])
total_avg = sum([sum(inner) for inner in values]) / sum(
[len(inner) for inner in values]
)
total_avg = sum([sum(inner) for inner in values]) / sum([len(inner) for inner in values])

print(f"Version: {__version__}")
print(f"Processes: {len(values)}")
@@ -9,21 +9,15 @@ from authentik.stages.identification.models import UserFields
from authentik.stages.password import BACKEND_DJANGO, BACKEND_LDAP

def create_default_authentication_flow(
apps: Apps, schema_editor: BaseDatabaseSchemaEditor
):
def create_default_authentication_flow(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
Flow = apps.get_model("authentik_flows", "Flow")
FlowStageBinding = apps.get_model("authentik_flows", "FlowStageBinding")
PasswordStage = apps.get_model("authentik_stages_password", "PasswordStage")
UserLoginStage = apps.get_model("authentik_stages_user_login", "UserLoginStage")
IdentificationStage = apps.get_model(
"authentik_stages_identification", "IdentificationStage"
)
IdentificationStage = apps.get_model("authentik_stages_identification", "IdentificationStage")
db_alias = schema_editor.connection.alias

identification_stage, _ = IdentificationStage.objects.using(
db_alias
).update_or_create(
identification_stage, _ = IdentificationStage.objects.using(db_alias).update_or_create(
name="default-authentication-identification",
defaults={
"user_fields": [UserFields.E_MAIL, UserFields.USERNAME],
@@ -69,17 +63,13 @@ def create_default_authentication_flow(
)

def create_default_invalidation_flow(
apps: Apps, schema_editor: BaseDatabaseSchemaEditor
):
def create_default_invalidation_flow(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
Flow = apps.get_model("authentik_flows", "Flow")
FlowStageBinding = apps.get_model("authentik_flows", "FlowStageBinding")
UserLogoutStage = apps.get_model("authentik_stages_user_logout", "UserLogoutStage")
db_alias = schema_editor.connection.alias

UserLogoutStage.objects.using(db_alias).update_or_create(
name="default-invalidation-logout"
)
UserLogoutStage.objects.using(db_alias).update_or_create(name="default-invalidation-logout")

flow, _ = Flow.objects.using(db_alias).update_or_create(
slug="default-invalidation-flow",
@@ -15,16 +15,12 @@ PROMPT_POLICY_EXPRESSION = """# Check if we've not been given a username by the
return 'username' not in context.get('prompt_data', {})"""

def create_default_source_enrollment_flow(
apps: Apps, schema_editor: BaseDatabaseSchemaEditor
):
def create_default_source_enrollment_flow(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
Flow = apps.get_model("authentik_flows", "Flow")
FlowStageBinding = apps.get_model("authentik_flows", "FlowStageBinding")
PolicyBinding = apps.get_model("authentik_policies", "PolicyBinding")

ExpressionPolicy = apps.get_model(
"authentik_policies_expression", "ExpressionPolicy"
)
ExpressionPolicy = apps.get_model("authentik_policies_expression", "ExpressionPolicy")

PromptStage = apps.get_model("authentik_stages_prompt", "PromptStage")
Prompt = apps.get_model("authentik_stages_prompt", "Prompt")
@@ -99,16 +95,12 @@ def create_default_source_enrollment_flow(
)

def create_default_source_authentication_flow(
apps: Apps, schema_editor: BaseDatabaseSchemaEditor
):
def create_default_source_authentication_flow(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
Flow = apps.get_model("authentik_flows", "Flow")
FlowStageBinding = apps.get_model("authentik_flows", "FlowStageBinding")
PolicyBinding = apps.get_model("authentik_policies", "PolicyBinding")

ExpressionPolicy = apps.get_model(
"authentik_policies_expression", "ExpressionPolicy"
)
ExpressionPolicy = apps.get_model("authentik_policies_expression", "ExpressionPolicy")

UserLoginStage = apps.get_model("authentik_stages_user_login", "UserLoginStage")
@@ -7,9 +7,7 @@ from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from authentik.flows.models import FlowDesignation

def create_default_provider_authorization_flow(
apps: Apps, schema_editor: BaseDatabaseSchemaEditor
):
def create_default_provider_authorization_flow(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
Flow = apps.get_model("authentik_flows", "Flow")
FlowStageBinding = apps.get_model("authentik_flows", "FlowStageBinding")
@@ -32,9 +32,7 @@ def create_default_oobe_flow(apps: Apps, schema_editor: BaseDatabaseSchemaEditor
PromptStage = apps.get_model("authentik_stages_prompt", "PromptStage")
Prompt = apps.get_model("authentik_stages_prompt", "Prompt")

ExpressionPolicy = apps.get_model(
"authentik_policies_expression", "ExpressionPolicy"
)
ExpressionPolicy = apps.get_model("authentik_policies_expression", "ExpressionPolicy")

db_alias = schema_editor.connection.alias

@@ -52,9 +50,7 @@ def create_default_oobe_flow(apps: Apps, schema_editor: BaseDatabaseSchemaEditor
name="default-oobe-prefill-user",
defaults={"expression": PREFILL_POLICY_EXPRESSION},
)
password_usable_policy, _ = ExpressionPolicy.objects.using(
db_alias
).update_or_create(
password_usable_policy, _ = ExpressionPolicy.objects.using(db_alias).update_or_create(
name="default-oobe-password-usable",
defaults={"expression": PW_USABLE_POLICY_EXPRESSION},
)
@@ -83,9 +79,7 @@ def create_default_oobe_flow(apps: Apps, schema_editor: BaseDatabaseSchemaEditor
prompt_stage, _ = PromptStage.objects.using(db_alias).update_or_create(
name="default-oobe-password",
)
prompt_stage.fields.set(
[prompt_header, prompt_email, password_first, password_second]
)
prompt_stage.fields.set([prompt_header, prompt_email, password_first, password_second])
prompt_stage.save()

user_write, _ = UserWriteStage.objects.using(db_alias).update_or_create(
@@ -138,9 +138,7 @@ class Flow(SerializerModel, PolicyBindingModel):
it is returned as-is"""
if not self.background:
return "/static/dist/assets/images/flow_background.jpg"
if self.background.name.startswith("http") or self.background.name.startswith(
"/static"
):
if self.background.name.startswith("http") or self.background.name.startswith("/static"):
return self.background.name
return self.background.url

@@ -165,9 +163,7 @@ class Flow(SerializerModel, PolicyBindingModel):
if result.passing:
LOGGER.debug("with_policy: flow passing", flow=flow)
return flow
LOGGER.warning(
"with_policy: flow not passing", flow=flow, messages=result.messages
)
LOGGER.warning("with_policy: flow not passing", flow=flow, messages=result.messages)
LOGGER.debug("with_policy: no flow found", filters=flow_filter)
return None
@@ -78,14 +78,10 @@ class FlowPlan:
marker = self.markers[0]

if marker.__class__ is not StageMarker:
LOGGER.debug(
"f(plan_inst): stage has marker", binding=binding, marker=marker
)
LOGGER.debug("f(plan_inst): stage has marker", binding=binding, marker=marker)
marked_stage = marker.process(self, binding, http_request)
if not marked_stage:
LOGGER.debug(
"f(plan_inst): marker returned none, next stage", binding=binding
)
LOGGER.debug("f(plan_inst): marker returned none, next stage", binding=binding)
self.bindings.remove(binding)
self.markers.remove(marker)
if not self.has_stages:
@@ -193,9 +189,9 @@ class FlowPlanner:
if default_context:
plan.context = default_context
# Check Flow policies
for binding in FlowStageBinding.objects.filter(
target__pk=self.flow.pk
).order_by("order"):
for binding in FlowStageBinding.objects.filter(target__pk=self.flow.pk).order_by(
"order"
):
binding: FlowStageBinding
stage = binding.stage
marker = StageMarker()
@@ -26,9 +26,7 @@ def invalidate_flow_cache(sender, instance, **_):
LOGGER.debug("Invalidating Flow cache", flow=instance, len=total)
if isinstance(instance, FlowStageBinding):
total = delete_cache_prefix(f"{cache_key(instance.target)}*")
LOGGER.debug(
"Invalidating Flow cache from FlowStageBinding", binding=instance, len=total
)
LOGGER.debug("Invalidating Flow cache from FlowStageBinding", binding=instance, len=total)
if isinstance(instance, Stage):
total = 0
for binding in FlowStageBinding.objects.filter(stage=instance):
@@ -42,14 +42,9 @@ class StageView(View):
other things besides the form display.

If no user is pending, returns request.user"""
if (
PLAN_CONTEXT_PENDING_USER_IDENTIFIER in self.executor.plan.context
and for_display
):
if PLAN_CONTEXT_PENDING_USER_IDENTIFIER in self.executor.plan.context and for_display:
return User(
username=self.executor.plan.context.get(
PLAN_CONTEXT_PENDING_USER_IDENTIFIER
),
username=self.executor.plan.context.get(PLAN_CONTEXT_PENDING_USER_IDENTIFIER),
email="",
)
if PLAN_CONTEXT_PENDING_USER in self.executor.plan.context:
@@ -89,14 +89,10 @@ class TestFlowPlanner(TestCase):

planner = FlowPlanner(flow)
planner.plan(request)
self.assertEqual(
CACHE_MOCK.set.call_count, 1
) # Ensure plan is written to cache
self.assertEqual(CACHE_MOCK.set.call_count, 1) # Ensure plan is written to cache
planner = FlowPlanner(flow)
planner.plan(request)
self.assertEqual(
CACHE_MOCK.set.call_count, 1
) # Ensure nothing is written to cache
self.assertEqual(CACHE_MOCK.set.call_count, 1) # Ensure nothing is written to cache
self.assertEqual(CACHE_MOCK.get.call_count, 2) # Get is called twice

def test_planner_default_context(self):
@@ -176,9 +172,7 @@ class TestFlowPlanner(TestCase):
request.session.save()

# Here we patch the dummy policy to evaluate to true so the stage is included
with patch(
"authentik.policies.dummy.models.DummyPolicy.passes", POLICY_RETURN_TRUE
):
with patch("authentik.policies.dummy.models.DummyPolicy.passes", POLICY_RETURN_TRUE):
planner = FlowPlanner(flow)
plan = planner.plan(request)
@@ -76,9 +76,7 @@ class TestFlowTransfer(TransactionTestCase):
PolicyBinding.objects.create(policy=flow_policy, target=flow, order=0)

user_login = UserLoginStage.objects.create(name=stage_name)
fsb = FlowStageBinding.objects.create(
target=flow, stage=user_login, order=0
)
fsb = FlowStageBinding.objects.create(target=flow, stage=user_login, order=0)
PolicyBinding.objects.create(policy=flow_policy, target=fsb, order=0)

exporter = FlowExporter(flow)
@@ -11,12 +11,7 @@ from authentik.core.models import User
from authentik.flows.challenge import ChallengeTypes
from authentik.flows.exceptions import FlowNonApplicableException
from authentik.flows.markers import ReevaluateMarker, StageMarker
from authentik.flows.models import (
Flow,
FlowDesignation,
FlowStageBinding,
InvalidResponseAction,
)
from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding, InvalidResponseAction
from authentik.flows.planner import FlowPlan, FlowPlanner
from authentik.flows.stage import PLAN_CONTEXT_PENDING_USER_IDENTIFIER, StageView
from authentik.flows.views import NEXT_ARG_NAME, SESSION_KEY_PLAN, FlowExecutorView
@@ -61,9 +56,7 @@ class TestFlowExecutor(TestCase):
)
stage = DummyStage.objects.create(name="dummy")
binding = FlowStageBinding(target=flow, stage=stage, order=0)
plan = FlowPlan(
flow_pk=flow.pk.hex + "a", bindings=[binding], markers=[StageMarker()]
)
plan = FlowPlan(flow_pk=flow.pk.hex + "a", bindings=[binding], markers=[StageMarker()])
session = self.client.session
session[SESSION_KEY_PLAN] = plan
session.save()
@@ -163,9 +156,7 @@ class TestFlowExecutor(TestCase):
target=flow, stage=DummyStage.objects.create(name="dummy2"), order=1
)

exec_url = reverse(
"authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}
)
exec_url = reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug})
# First Request, start planning, renders form
response = self.client.get(exec_url)
self.assertEqual(response.status_code, 200)
@@ -209,13 +200,9 @@ class TestFlowExecutor(TestCase):
PolicyBinding.objects.create(policy=false_policy, target=binding2, order=0)

# Here we patch the dummy policy to evaluate to true so the stage is included
with patch(
"authentik.policies.dummy.models.DummyPolicy.passes", POLICY_RETURN_TRUE
):
with patch("authentik.policies.dummy.models.DummyPolicy.passes", POLICY_RETURN_TRUE):

exec_url = reverse(
"authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}
)
exec_url = reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug})
# First request, run the planner
response = self.client.get(exec_url)
self.assertEqual(response.status_code, 200)
@@ -263,13 +250,9 @@ class TestFlowExecutor(TestCase):
PolicyBinding.objects.create(policy=false_policy, target=binding2, order=0)

# Here we patch the dummy policy to evaluate to true so the stage is included
with patch(
"authentik.policies.dummy.models.DummyPolicy.passes", POLICY_RETURN_TRUE
):
with patch("authentik.policies.dummy.models.DummyPolicy.passes", POLICY_RETURN_TRUE):

exec_url = reverse(
"authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}
)
exec_url = reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug})
# First request, run the planner
response = self.client.get(exec_url)

@@ -334,13 +317,9 @@ class TestFlowExecutor(TestCase):
PolicyBinding.objects.create(policy=true_policy, target=binding2, order=0)

# Here we patch the dummy policy to evaluate to true so the stage is included
with patch(
"authentik.policies.dummy.models.DummyPolicy.passes", POLICY_RETURN_TRUE
):
with patch("authentik.policies.dummy.models.DummyPolicy.passes", POLICY_RETURN_TRUE):

exec_url = reverse(
"authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}
)
exec_url = reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug})
# First request, run the planner
response = self.client.get(exec_url)

@@ -422,13 +401,9 @@ class TestFlowExecutor(TestCase):
PolicyBinding.objects.create(policy=false_policy, target=binding3, order=0)

# Here we patch the dummy policy to evaluate to true so the stage is included
with patch(
"authentik.policies.dummy.models.DummyPolicy.passes", POLICY_RETURN_TRUE
):
with patch("authentik.policies.dummy.models.DummyPolicy.passes", POLICY_RETURN_TRUE):

exec_url = reverse(
"authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}
)
exec_url = reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug})
# First request, run the planner
response = self.client.get(exec_url)
self.assertEqual(response.status_code, 200)
@@ -511,9 +486,7 @@ class TestFlowExecutor(TestCase):
)
request.user = user
planner = FlowPlanner(flow)
plan = planner.plan(
request, default_context={PLAN_CONTEXT_PENDING_USER_IDENTIFIER: ident}
)
plan = planner.plan(request, default_context={PLAN_CONTEXT_PENDING_USER_IDENTIFIER: ident})

executor = FlowExecutorView()
executor.plan = plan
@@ -542,9 +515,7 @@ class TestFlowExecutor(TestCase):
evaluate_on_plan=False,
re_evaluate_policies=True,
)
PolicyBinding.objects.create(
policy=reputation_policy, target=deny_binding, order=0
)
PolicyBinding.objects.create(policy=reputation_policy, target=deny_binding, order=0)

# Stage 1 is an identification stage
ident_stage = IdentificationStage.objects.create(
@@ -557,9 +528,7 @@ class TestFlowExecutor(TestCase):
order=1,
invalid_response_action=InvalidResponseAction.RESTART_WITH_CONTEXT,
)
exec_url = reverse(
"authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}
)
exec_url = reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug})
# First request, run the planner
response = self.client.get(exec_url)
self.assertEqual(response.status_code, 200)
@@ -579,9 +548,7 @@ class TestFlowExecutor(TestCase):
"user_fields": [UserFields.E_MAIL],
},
)
response = self.client.post(
exec_url, {"uid_field": "invalid-string"}, follow=True
)
response = self.client.post(exec_url, {"uid_field": "invalid-string"}, follow=True)
self.assertEqual(response.status_code, 200)
self.assertJSONEqual(
force_str(response.content),
@@ -21,9 +21,7 @@ class TestHelperView(TestCase):
response = self.client.get(
reverse("authentik_flows:default-invalidation"),
)
expected_url = reverse(
"authentik_core:if-flow", kwargs={"flow_slug": flow.slug}
)
expected_url = reverse("authentik_core:if-flow", kwargs={"flow_slug": flow.slug})
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, expected_url)

@@ -40,8 +38,6 @@ class TestHelperView(TestCase):
response = self.client.get(
reverse("authentik_flows:default-invalidation"),
)
expected_url = reverse(
"authentik_core:if-flow", kwargs={"flow_slug": flow.slug}
)
expected_url = reverse("authentik_core:if-flow", kwargs={"flow_slug": flow.slug})
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, expected_url)
@@ -44,9 +44,7 @@ class FlowBundleEntry:
attrs: dict[str, Any]

@staticmethod
def from_model(
model: SerializerModel, *extra_identifier_names: str
) -> "FlowBundleEntry":
def from_model(model: SerializerModel, *extra_identifier_names: str) -> "FlowBundleEntry":
"""Convert a SerializerModel instance to a Bundle Entry"""
identifiers = {
"pk": model.pk,
@@ -6,11 +6,7 @@ from uuid import UUID
from django.db.models import Q

from authentik.flows.models import Flow, FlowStageBinding, Stage
from authentik.flows.transfer.common import (
DataclassEncoder,
FlowBundle,
FlowBundleEntry,
)
from authentik.flows.transfer.common import DataclassEncoder, FlowBundle, FlowBundleEntry
from authentik.policies.models import Policy, PolicyBinding
from authentik.stages.prompt.models import PromptStage

@@ -37,9 +33,7 @@ class FlowExporter:

def walk_stages(self) -> Iterator[FlowBundleEntry]:
"""Convert all stages attached to self.flow into FlowBundleEntry objects"""
stages = (
Stage.objects.filter(flow=self.flow).select_related().select_subclasses()
)
stages = Stage.objects.filter(flow=self.flow).select_related().select_subclasses()
for stage in stages:
if isinstance(stage, PromptStage):
pass
@@ -56,9 +50,7 @@ class FlowExporter:
a direct foreign key to a policy."""
# Special case for PromptStage as that has a direct M2M to policy, we have to ensure
# all policies referenced in there we also include here
prompt_stages = PromptStage.objects.filter(flow=self.flow).values_list(
"pk", flat=True
)
prompt_stages = PromptStage.objects.filter(flow=self.flow).values_list("pk", flat=True)
query = Q(bindings__in=self.pbm_uuids) | Q(promptstage__in=prompt_stages)
policies = Policy.objects.filter(query).select_related()
for policy in policies:
@@ -67,9 +59,7 @@ class FlowExporter:
def walk_policy_bindings(self) -> Iterator[FlowBundleEntry]:
"""Walk over all policybindings relative to us. This is run at the end of the export, as
we are sure all objects exist now."""
bindings = PolicyBinding.objects.filter(
target__in=self.pbm_uuids
).select_related()
bindings = PolicyBinding.objects.filter(target__in=self.pbm_uuids).select_related()
for binding in bindings:
yield FlowBundleEntry.from_model(binding, "policy", "target", "order")
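The exporter above serialises dataclass bundle entries with a custom JSON encoder (DataclassEncoder). The underlying technique is standard-library dataclasses plus json; a generic sketch under assumed names (BundleEntry and DataclassJSONEncoder are illustrative, not authentik's classes):

import json
from dataclasses import asdict, dataclass, is_dataclass
from typing import Any


@dataclass
class BundleEntry:
    identifiers: dict[str, Any]
    model: str
    attrs: dict[str, Any]


class DataclassJSONEncoder(json.JSONEncoder):
    """Encode dataclasses by converting them to plain dicts first."""

    def default(self, o):
        if is_dataclass(o):
            return asdict(o)
        return super().default(o)


entry = BundleEntry(identifiers={"slug": "default-invalidation-flow"}, model="authentik_flows.flow", attrs={})
print(json.dumps(entry, cls=DataclassJSONEncoder))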
@@ -16,11 +16,7 @@ from rest_framework.serializers import BaseSerializer, Serializer
from structlog.stdlib import BoundLogger, get_logger

from authentik.flows.models import Flow, FlowStageBinding, Stage
from authentik.flows.transfer.common import (
EntryInvalidError,
FlowBundle,
FlowBundleEntry,
)
from authentik.flows.transfer.common import EntryInvalidError, FlowBundle, FlowBundleEntry
from authentik.lib.models import SerializerModel
from authentik.policies.models import Policy, PolicyBinding
from authentik.stages.prompt.models import Prompt
@@ -105,9 +101,7 @@ class FlowImporter:
if isinstance(value, dict) and "pk" in value:
del updated_identifiers[key]
updated_identifiers[f"{key}"] = value["pk"]
existing_models = model.objects.filter(
self.__query_from_identifier(updated_identifiers)
)
existing_models = model.objects.filter(self.__query_from_identifier(updated_identifiers))

serializer_kwargs = {}
if existing_models.exists():
@@ -120,9 +114,7 @@ class FlowImporter:
)
serializer_kwargs["instance"] = model_instance
else:
self.logger.debug(
"initialise new instance", model=model, **updated_identifiers
)
self.logger.debug("initialise new instance", model=model, **updated_identifiers)
full_data = self.__update_pks_for_attrs(entry.attrs)
full_data.update(updated_identifiers)
serializer_kwargs["data"] = full_data
@@ -38,13 +38,7 @@ from authentik.flows.challenge import (
WithUserInfoChallenge,
)
from authentik.flows.exceptions import EmptyFlowException, FlowNonApplicableException
from authentik.flows.models import (
ConfigurableStage,
Flow,
FlowDesignation,
FlowStageBinding,
Stage,
)
from authentik.flows.models import ConfigurableStage, Flow, FlowDesignation, FlowStageBinding, Stage
from authentik.flows.planner import (
PLAN_CONTEXT_PENDING_USER,
PLAN_CONTEXT_REDIRECT,
@@ -155,9 +149,7 @@ class FlowExecutorView(APIView):
try:
self.plan = self._initiate_plan()
except FlowNonApplicableException as exc:
self._logger.warning(
"f(exec): Flow not applicable to current user", exc=exc
)
self._logger.warning("f(exec): Flow not applicable to current user", exc=exc)
return to_stage_response(self.request, self.handle_invalid_flow(exc))
except EmptyFlowException as exc:
self._logger.warning("f(exec): Flow is empty", exc=exc)
@@ -174,9 +166,7 @@ class FlowExecutorView(APIView):
# in which case we just delete the plan and invalidate everything
next_binding = self.plan.next(self.request)
except Exception as exc: # pylint: disable=broad-except
self._logger.warning(
"f(exec): found incompatible flow plan, invalidating run", exc=exc
)
self._logger.warning("f(exec): found incompatible flow plan, invalidating run", exc=exc)
keys = cache.keys("flow_*")
cache.delete_many(keys)
return self.stage_invalid()
@@ -314,9 +304,7 @@ class FlowExecutorView(APIView):
self.request.session[SESSION_KEY_PLAN] = plan
kwargs = self.kwargs
kwargs.update({"flow_slug": self.flow.slug})
return redirect_with_qs(
"authentik_api:flow-executor", self.request.GET, **kwargs
)
return redirect_with_qs("authentik_api:flow-executor", self.request.GET, **kwargs)

def _flow_done(self) -> HttpResponse:
"""User Successfully passed all stages"""
@@ -350,9 +338,7 @@ class FlowExecutorView(APIView):
)
kwargs = self.kwargs
kwargs.update({"flow_slug": self.flow.slug})
return redirect_with_qs(
"authentik_api:flow-executor", self.request.GET, **kwargs
)
return redirect_with_qs("authentik_api:flow-executor", self.request.GET, **kwargs)
# User passed all stages
self._logger.debug(
"f(exec): User passed all stages",
@@ -408,18 +394,13 @@ class FlowErrorResponse(TemplateResponse):
super().__init__(request=request, template="flows/error.html")
self.error = error

def resolve_context(
self, context: Optional[dict[str, Any]]
) -> Optional[dict[str, Any]]:
def resolve_context(self, context: Optional[dict[str, Any]]) -> Optional[dict[str, Any]]:
if not context:
context = {}
context["error"] = self.error
if self._request.user and self._request.user.is_authenticated:
if (
self._request.user.is_superuser
or self._request.user.group_attributes().get(
USER_ATTRIBUTE_DEBUG, False
)
if self._request.user.is_superuser or self._request.user.group_attributes().get(
USER_ATTRIBUTE_DEBUG, False
):
context["tb"] = "".join(format_tb(self.error.__traceback__))
return context
@@ -464,9 +445,7 @@ class ToDefaultFlow(View):
flow_slug=flow.slug,
)
del self.request.session[SESSION_KEY_PLAN]
return redirect_with_qs(
"authentik_core:if-flow", request.GET, flow_slug=flow.slug
)
return redirect_with_qs("authentik_core:if-flow", request.GET, flow_slug=flow.slug)

def to_stage_response(request: HttpRequest, source: HttpResponse) -> HttpResponse:
@@ -115,9 +115,7 @@ class ConfigLoader:
for key, value in os.environ.items():
if not key.startswith(ENV_PREFIX):
continue
relative_key = (
key.replace(f"{ENV_PREFIX}_", "", 1).replace("__", ".").lower()
)
relative_key = key.replace(f"{ENV_PREFIX}_", "", 1).replace("__", ".").lower()
# Recursively convert path from a.b.c into outer[a][b][c]
current_obj = outer
dot_parts = relative_key.split(".")
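The ConfigLoader hunk above folds environment variables into a nested config dict by splitting on double underscores. Assuming an ENV_PREFIX of "AUTHENTIK" (the prefix value itself is not shown in this diff), the transformation works roughly like this:

ENV_PREFIX = "AUTHENTIK"  # assumed value, for illustration only


def env_to_nested(environ: dict) -> dict:
    outer: dict = {}
    for key, value in environ.items():
        if not key.startswith(ENV_PREFIX):
            continue
        # AUTHENTIK_REDIS__CACHE_DB -> "redis.cache_db"
        relative_key = key.replace(f"{ENV_PREFIX}_", "", 1).replace("__", ".").lower()
        current = outer
        parts = relative_key.split(".")
        for part in parts[:-1]:
            current = current.setdefault(part, {})
        current[parts[-1]] = value
    return outer


print(env_to_nested({"AUTHENTIK_REDIS__CACHE_DB": "0"}))  # {'redis': {'cache_db': '0'}}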
@@ -20,6 +20,7 @@ redis:
cache_db: 0
message_queue_db: 1
ws_db: 2
outpost_session_db: 3
cache_timeout: 300
cache_timeout_flows: 300
cache_timeout_policies: 300
@@ -37,15 +37,11 @@ class InheritanceAutoManager(InheritanceManager):
return super().get_queryset().select_subclasses()

class InheritanceForwardManyToOneDescriptor(
models.fields.related.ForwardManyToOneDescriptor
):
class InheritanceForwardManyToOneDescriptor(models.fields.related.ForwardManyToOneDescriptor):
"""Forward ManyToOne Descriptor that selects subclass. Requires InheritanceAutoManager."""

def get_queryset(self, **hints):
return self.field.remote_field.model.objects.db_manager(
hints=hints
).select_subclasses()
return self.field.remote_field.model.objects.db_manager(hints=hints).select_subclasses()

class InheritanceForeignKey(models.ForeignKey):
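The descriptor above builds on django-model-utils' InheritanceManager, whose select_subclasses() returns instances of the concrete child classes instead of the bare base model. A minimal usage sketch with hypothetical models defined inside a Django app:

from django.db import models
from model_utils.managers import InheritanceManager


class BaseStage(models.Model):
    name = models.TextField()
    objects = InheritanceManager()


class ExamplePromptStage(BaseStage):
    pass


# Yields ExamplePromptStage instances where the row belongs to that subclass,
# instead of plain BaseStage rows.
for stage in BaseStage.objects.select_subclasses():
    print(type(stage).__name__, stage.name)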
@@ -2,17 +2,13 @@
from typing import Optional

from aioredis.errors import ConnectionClosedError, ReplyError
from billiard.exceptions import WorkerLostError
from billiard.exceptions import SoftTimeLimitExceeded, WorkerLostError
from botocore.client import ClientError
from botocore.exceptions import BotoCoreError
from celery.exceptions import CeleryError
from channels.middleware import BaseMiddleware
from channels_redis.core import ChannelFull
from django.core.exceptions import (
ImproperlyConfigured,
SuspiciousOperation,
ValidationError,
)
from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation, ValidationError
from django.db import InternalError, OperationalError, ProgrammingError
from django.http.response import Http404
from django_redis.exceptions import ConnectionInterrupted
@@ -49,6 +45,9 @@ class SentryIgnoredException(Exception):

def before_send(event: dict, hint: dict) -> Optional[dict]:
"""Check if error is database error, and ignore if so"""
# pylint: disable=no-name-in-module
from psycopg2.errors import Error

ignored_classes = (
# Inbuilt types
KeyboardInterrupt,
@@ -56,6 +55,7 @@ def before_send(event: dict, hint: dict) -> Optional[dict]:
OSError,
PermissionError,
# Django Errors
Error,
ImproperlyConfigured,
OperationalError,
InternalError,
@@ -77,6 +77,7 @@ def before_send(event: dict, hint: dict) -> Optional[dict]:
# celery errors
WorkerLostError,
CeleryError,
SoftTimeLimitExceeded,
# S3 errors
BotoCoreError,
ClientError,
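before_send above drops known-noisy exception classes before events reach Sentry. The hook itself is plain sentry-sdk configuration; a reduced sketch of the same filtering idea (the IGNORED tuple here is illustrative, not authentik's full list):

from typing import Optional

IGNORED = (KeyboardInterrupt, ConnectionError, OSError)


def before_send(event: dict, hint: dict) -> Optional[dict]:
    """Return None to drop the event, or the event dict to keep it."""
    if "exc_info" in hint:
        _, exc_value, _ = hint["exc_info"]
        if isinstance(exc_value, IGNORED):
            return None
    return event


# sentry_sdk.init(dsn="...", before_send=before_send)  # how the hook is registered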
@@ -26,7 +26,5 @@ class TestEvaluator(TestCase):
def test_is_group_member(self):
"""Test expr_is_group_member"""
self.assertFalse(
BaseEvaluator.expr_is_group_member(
User.objects.get(username="akadmin"), name="test"
)
BaseEvaluator.expr_is_group_member(User.objects.get(username="akadmin"), name="test")
)
@@ -1,17 +1,8 @@
"""Test HTTP Helpers"""
from django.test import RequestFactory, TestCase

from authentik.core.models import (
USER_ATTRIBUTE_CAN_OVERRIDE_IP,
Token,
TokenIntents,
User,
)
from authentik.lib.utils.http import (
OUTPOST_REMOTE_IP_HEADER,
OUTPOST_TOKEN_HEADER,
get_client_ip,
)
from authentik.core.models import USER_ATTRIBUTE_CAN_OVERRIDE_IP, Token, TokenIntents, User
from authentik.lib.utils.http import OUTPOST_REMOTE_IP_HEADER, OUTPOST_TOKEN_HEADER, get_client_ip

class TestHTTP(TestCase):
@@ -9,9 +9,7 @@ class TestSentry(TestCase):

def test_error_not_sent(self):
"""Test SentryIgnoredError not sent"""
self.assertIsNone(
before_send({}, {"exc_info": (0, SentryIgnoredException(), 0)})
)
self.assertIsNone(before_send({}, {"exc_info": (0, SentryIgnoredException(), 0)}))

def test_error_sent(self):
"""Test error sent"""
@@ -29,16 +29,9 @@ def _get_outpost_override_ip(request: HttpRequest) -> Optional[str]:
"""Get the actual remote IP when set by an outpost. Only
allowed when the request is authenticated, by a user with USER_ATTRIBUTE_CAN_OVERRIDE_IP set
to outpost"""
from authentik.core.models import (
USER_ATTRIBUTE_CAN_OVERRIDE_IP,
Token,
TokenIntents,
)
from authentik.core.models import USER_ATTRIBUTE_CAN_OVERRIDE_IP, Token, TokenIntents

if (
OUTPOST_REMOTE_IP_HEADER not in request.META
or OUTPOST_TOKEN_HEADER not in request.META
):
if OUTPOST_REMOTE_IP_HEADER not in request.META or OUTPOST_TOKEN_HEADER not in request.META:
return None
fake_ip = request.META[OUTPOST_REMOTE_IP_HEADER]
tokens = Token.filter_not_expired(
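_get_outpost_override_ip above only honours the forwarded-IP header when both outpost headers are present and the supplied token maps to a user that is explicitly allowed to override IPs. A simplified sketch of that guard (the header constant values and the token_is_valid callable are assumptions for illustration; they are not taken from this diff):

from typing import Callable, Optional

OUTPOST_REMOTE_IP_HEADER = "HTTP_X_AUTHENTIK_REMOTE_IP"  # assumed raw META key
OUTPOST_TOKEN_HEADER = "HTTP_X_AUTHENTIK_OUTPOST_TOKEN"  # assumed raw META key


def override_ip(meta: dict, token_is_valid: Callable[[str], bool]) -> Optional[str]:
    if OUTPOST_REMOTE_IP_HEADER not in meta or OUTPOST_TOKEN_HEADER not in meta:
        return None
    if not token_is_valid(meta[OUTPOST_TOKEN_HEADER]):
        # reject silently and fall back to the regular client IP
        return None
    return meta[OUTPOST_REMOTE_IP_HEADER]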