Compare commits
464 commits: version/20...version-20
The version bump in the bumpversion configuration:

```diff
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2022.1.2
+current_version = 2022.3.3
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)
```
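The parse entry is an ordinary regular expression with named groups; a minimal standard-library sketch of how it splits the new version string (the pattern is copied from the config above, the sample strings are illustrative):

```python
import re

# Pattern copied from the bumpversion config shown above.
PARSE = r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)"

print(re.match(PARSE, "2022.3.3").groupdict())
# {'major': '2022', 'minor': '3', 'patch': '3', 'release': ''}

# A pre-release suffix lands in the optional "release" group.
print(re.match(PARSE, "2022.3.3-rc1").groupdict())
# {'major': '2022', 'minor': '3', 'patch': '3', 'release': 'rc1'}
```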
.github/stale.yml (2)

```diff
@@ -7,7 +7,7 @@ exemptLabels:
 - pinned
 - security
 - pr_wanted
-- enhancement/confirmed
+- enhancement
 # Comment to post when marking an issue as stale. Set to `false` to disable
 markComment: >
 This issue has been automatically marked as stale because it has not had
```
.github/workflows/ci-main.yml (39)

```diff
@@ -31,9 +31,9 @@ jobs:
 - pending-migrations
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
-- uses: actions/setup-python@v2
+- uses: actions/setup-python@v3
-- uses: actions/setup-node@v2
+- uses: actions/setup-node@v3.0.0
 with:
 node-version: '16'
 - id: cache-poetry
@@ -50,8 +50,8 @@ jobs:
 test-migrations:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
-- uses: actions/setup-python@v2
+- uses: actions/setup-python@v3
 - id: cache-poetry
 uses: actions/cache@v2.1.7
 with:
@@ -66,10 +66,10 @@ jobs:
 test-migrations-from-stable:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
 with:
 fetch-depth: 0
-- uses: actions/setup-python@v2
+- uses: actions/setup-python@v3
 - name: prepare variables
 id: ev
 run: |
@@ -86,10 +86,9 @@ jobs:
 cp authentik/lib/default.yml local.env.yml
 cp -R .github ..
 cp -R scripts ..
-cp -R poetry.lock pyproject.toml ..
 git checkout $(git describe --abbrev=0 --match 'version/*')
 rm -rf .github/ scripts/
-mv ../.github ../scripts ../poetry.lock ../pyproject.toml .
+mv ../.github ../scripts .
 - name: prepare
 env:
 INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }}
@@ -115,8 +114,8 @@ jobs:
 test-unittest:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
-- uses: actions/setup-python@v2
+- uses: actions/setup-python@v3
 - id: cache-poetry
 uses: actions/cache@v2.1.7
 with:
@@ -142,8 +141,8 @@ jobs:
 test-integration:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
-- uses: actions/setup-python@v2
+- uses: actions/setup-python@v3
 - id: cache-poetry
 uses: actions/cache@v2.1.7
 with:
@@ -171,9 +170,9 @@ jobs:
 test-e2e-provider:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
-- uses: actions/setup-python@v2
+- uses: actions/setup-python@v3
-- uses: actions/setup-node@v2
+- uses: actions/setup-node@v3.0.0
 with:
 node-version: '16'
 cache: 'npm'
@@ -216,9 +215,9 @@ jobs:
 test-e2e-rest:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
-- uses: actions/setup-python@v2
+- uses: actions/setup-python@v3
-- uses: actions/setup-node@v2
+- uses: actions/setup-node@v3.0.0
 with:
 node-version: '16'
 cache: 'npm'
@@ -280,7 +279,7 @@ jobs:
 arch:
 - 'linux/amd64'
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
 - name: Set up QEMU
 uses: docker/setup-qemu-action@v1.2.0
 - name: Set up Docker Buildx
```
.github/workflows/ci-outpost.yml (24)

```diff
@@ -14,7 +14,7 @@ jobs:
 lint-golint:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
 - uses: actions/setup-go@v2
 with:
 go-version: "^1.17"
@@ -30,9 +30,25 @@ jobs:
 -w /app \
 golangci/golangci-lint:v1.43 \
 golangci-lint run -v --timeout 200s
+test-unittest:
+runs-on: ubuntu-latest
+steps:
+- uses: actions/checkout@v3
+- uses: actions/setup-go@v2
+with:
+go-version: "^1.17"
+- name: Get dependencies
+run: |
+go get github.com/axw/gocov/gocov
+go get github.com/AlekSi/gocov-xml
+go get github.com/jstemmer/go-junit-report
+- name: Go unittests
+run: |
+go test -timeout 0 -v -race -coverprofile=coverage.out -covermode=atomic -cover ./... | go-junit-report > junit.xml
 ci-outpost-mark:
 needs:
 - lint-golint
+- test-unittest
 runs-on: ubuntu-latest
 steps:
 - run: echo mark
@@ -50,7 +66,7 @@ jobs:
 - 'linux/amd64'
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
 - name: Set up QEMU
 uses: docker/setup-qemu-action@v1.2.0
 - name: Set up Docker Buildx
@@ -94,11 +110,11 @@ jobs:
 goos: [linux]
 goarch: [amd64, arm64]
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
 - uses: actions/setup-go@v2
 with:
 go-version: "^1.17"
-- uses: actions/setup-node@v2
+- uses: actions/setup-node@v3.0.0
 with:
 node-version: '16'
 cache: 'npm'
```
.github/workflows/ci-web.yml (16)

```diff
@@ -14,8 +14,8 @@ jobs:
 lint-eslint:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
-- uses: actions/setup-node@v2
+- uses: actions/setup-node@v3.0.0
 with:
 node-version: '16'
 cache: 'npm'
@@ -32,8 +32,8 @@ jobs:
 lint-prettier:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
-- uses: actions/setup-node@v2
+- uses: actions/setup-node@v3.0.0
 with:
 node-version: '16'
 cache: 'npm'
@@ -50,8 +50,8 @@ jobs:
 lint-lit-analyse:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
-- uses: actions/setup-node@v2
+- uses: actions/setup-node@v3.0.0
 with:
 node-version: '16'
 cache: 'npm'
@@ -78,8 +78,8 @@ jobs:
 - ci-web-mark
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
-- uses: actions/setup-node@v2
+- uses: actions/setup-node@v3.0.0
 with:
 node-version: '16'
 cache: 'npm'
```
.github/workflows/codeql-analysis.yml (2)

```diff
@@ -28,7 +28,7 @@ jobs:
 
 steps:
 - name: Checkout repository
-uses: actions/checkout@v2
+uses: actions/checkout@v3
 
 # Initializes the CodeQL tools for scanning.
 - name: Initialize CodeQL
```
.github/workflows/release-publish.yml (40)

```diff
@@ -9,7 +9,7 @@ jobs:
 build-server:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
 - name: Set up QEMU
 uses: docker/setup-qemu-action@v1.2.0
 - name: Set up Docker Buildx
@@ -30,21 +30,12 @@ jobs:
 with:
 push: ${{ github.event_name == 'release' }}
 tags: |
-beryju/authentik:2022.1.2,
+beryju/authentik:2022.3.3,
 beryju/authentik:latest,
-ghcr.io/goauthentik/server:2022.1.2,
+ghcr.io/goauthentik/server:2022.3.3,
 ghcr.io/goauthentik/server:latest
 platforms: linux/amd64,linux/arm64
 context: .
-- name: Building Docker Image (stable)
-if: ${{ github.event_name == 'release' && !contains('2022.1.2', 'rc') }}
-run: |
-docker pull beryju/authentik:latest
-docker tag beryju/authentik:latest beryju/authentik:stable
-docker push beryju/authentik:stable
-docker pull ghcr.io/goauthentik/server:latest
-docker tag ghcr.io/goauthentik/server:latest ghcr.io/goauthentik/server:stable
-docker push ghcr.io/goauthentik/server:stable
 build-outpost:
 runs-on: ubuntu-latest
 strategy:
@@ -54,7 +45,7 @@ jobs:
 - proxy
 - ldap
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
 - uses: actions/setup-go@v2
 with:
 go-version: "^1.17"
@@ -78,21 +69,12 @@ jobs:
 with:
 push: ${{ github.event_name == 'release' }}
 tags: |
-beryju/authentik-${{ matrix.type }}:2022.1.2,
+beryju/authentik-${{ matrix.type }}:2022.3.3,
 beryju/authentik-${{ matrix.type }}:latest,
-ghcr.io/goauthentik/${{ matrix.type }}:2022.1.2,
+ghcr.io/goauthentik/${{ matrix.type }}:2022.3.3,
 ghcr.io/goauthentik/${{ matrix.type }}:latest
 file: ${{ matrix.type }}.Dockerfile
 platforms: linux/amd64,linux/arm64
-- name: Building Docker Image (stable)
-if: ${{ github.event_name == 'release' && !contains('2022.1.2', 'rc') }}
-run: |
-docker pull beryju/authentik-${{ matrix.type }}:latest
-docker tag beryju/authentik-${{ matrix.type }}:latest beryju/authentik-${{ matrix.type }}:stable
-docker push beryju/authentik-${{ matrix.type }}:stable
-docker pull ghcr.io/goauthentik/${{ matrix.type }}:latest
-docker tag ghcr.io/goauthentik/${{ matrix.type }}:latest ghcr.io/goauthentik/${{ matrix.type }}:stable
-docker push ghcr.io/goauthentik/${{ matrix.type }}:stable
 build-outpost-binary:
 timeout-minutes: 120
 runs-on: ubuntu-latest
@@ -105,11 +87,11 @@ jobs:
 goos: [linux, darwin]
 goarch: [amd64, arm64]
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
 - uses: actions/setup-go@v2
 with:
 go-version: "^1.17"
-- uses: actions/setup-node@v2
+- uses: actions/setup-node@v3.0.0
 with:
 node-version: '16'
 cache: 'npm'
@@ -139,7 +121,7 @@ jobs:
 - build-outpost-binary
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
 - name: Run test suite in final docker images
 run: |
 echo "PG_PASS=$(openssl rand -base64 32)" >> .env
@@ -155,7 +137,7 @@ jobs:
 - build-outpost-binary
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
 - name: Get static files from docker image
 run: |
 docker pull ghcr.io/goauthentik/server:latest
@@ -170,7 +152,7 @@ jobs:
 SENTRY_PROJECT: authentik
 SENTRY_URL: https://sentry.beryju.org
 with:
-version: authentik@2022.1.2
+version: authentik@2022.3.3
 environment: beryjuorg-prod
 sourcemaps: './web/dist'
 url_prefix: '~/static/dist'
```
.github/workflows/release-tag.yml (4)

```diff
@@ -10,7 +10,7 @@ jobs:
 name: Create Release from Tag
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
 - name: Pre-release test
 run: |
 echo "PG_PASS=$(openssl rand -base64 32)" >> .env
@@ -27,7 +27,7 @@ jobs:
 docker-compose run -u root server test
 - name: Extract version number
 id: get_version
-uses: actions/github-script@v5
+uses: actions/github-script@v6
 with:
 github-token: ${{ secrets.GITHUB_TOKEN }}
 script: |
```
.github/workflows/translation-compile.yml (4)

```diff
@@ -20,8 +20,8 @@ jobs:
 compile:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
-- uses: actions/setup-python@v2
+- uses: actions/setup-python@v3
 - id: cache-poetry
 uses: actions/cache@v2.1.7
 with:
```
.github/workflows/web-api-publish.yml (4)

```diff
@@ -8,9 +8,9 @@ jobs:
 build:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v2
+- uses: actions/checkout@v3
 # Setup .npmrc file to publish to npm
-- uses: actions/setup-node@v2
+- uses: actions/setup-node@v3.0.0
 with:
 node-version: '16'
 registry-url: 'https://registry.npmjs.org'
```
.vscode/settings.json (3)

```diff
@@ -12,7 +12,8 @@
 "totp",
 "webauthn",
 "traefik",
-"passwordless"
+"passwordless",
+"kubernetes"
 ],
 "python.linting.pylintEnabled": true,
 "todo-tree.tree.showCountsInTree": true,
```
Main server image Dockerfile:

```diff
@@ -16,7 +16,7 @@ ENV NODE_ENV=production
 RUN cd /work/web && npm i && npm run build
 
 # Stage 3: Build go proxy
-FROM docker.io/golang:1.17.6-bullseye AS builder
+FROM docker.io/golang:1.18.0-bullseye AS builder
 
 WORKDIR /work
 
@@ -32,7 +32,7 @@ COPY ./go.sum /work/go.sum
 RUN go build -o /work/authentik ./cmd/server/main.go
 
 # Stage 4: Run
-FROM docker.io/python:3.10.2-slim-bullseye
+FROM docker.io/python:3.10.3-slim-bullseye
 
 LABEL org.opencontainers.image.url https://goauthentik.io
 LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info.
@@ -60,9 +60,9 @@ RUN apt-get update && \
 apt-get clean && \
 rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
 adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
-mkdir -p /backups /certs /media && \
+mkdir -p /certs /media && \
 mkdir -p /authentik/.ssh && \
-chown authentik:authentik /backups /certs /media /authentik/.ssh
+chown authentik:authentik /certs /media /authentik/.ssh
 
 COPY ./authentik/ /authentik
 COPY ./pyproject.toml /
```
Makefile (13)

```diff
@@ -15,6 +15,9 @@ test-e2e-provider:
 test-e2e-rest:
 coverage run manage.py test tests/e2e/test_flows* tests/e2e/test_source*
 
+test-go:
+go test -timeout 0 -v -race -cover ./...
+
 test:
 coverage run manage.py test authentik
 coverage html
@@ -127,3 +130,13 @@ ci-pyright: ci--meta-debug
 
 ci-pending-migrations: ci--meta-debug
 ./manage.py makemigrations --check
+
+install:
+poetry install
+cd web && npm i
+cd website && npm i
+
+a: install
+tmux \
+new-session 'make run' \; \
+split-window 'make web-watch'
```
Sponsor acknowledgements:

```diff
@@ -57,4 +57,4 @@ DigitalOcean provides development and testing resources for authentik.
 </a>
 </p>
 
-Netlify hosts the [goauthentik.io](goauthentik.io) site.
+Netlify hosts the [goauthentik.io](https://goauthentik.io) site.
```
Supported versions table (security policy):

```diff
@@ -6,8 +6,8 @@
 
 | Version | Supported |
 | ---------- | ------------------ |
-| 2021.10.x | :white_check_mark: |
+| 2022.2.x | :white_check_mark: |
-| 2021.12.x | :white_check_mark: |
+| 2022.3.x | :white_check_mark: |
 
 ## Reporting a Vulnerability
```
Package version constant:

```diff
@@ -2,7 +2,7 @@
 from os import environ
 from typing import Optional
 
-__version__ = "2022.1.2"
+__version__ = "2022.3.3"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
```
Admin tasks API:

```diff
@@ -12,10 +12,13 @@ from rest_framework.permissions import IsAdminUser
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.viewsets import ViewSet
+from structlog.stdlib import get_logger
 
 from authentik.core.api.utils import PassiveSerializer
 from authentik.events.monitored_tasks import TaskInfo, TaskResultStatus
 
+LOGGER = get_logger()
+
 
 class TaskSerializer(PassiveSerializer):
 """Serialize TaskInfo and TaskResult"""
@@ -89,6 +92,7 @@ class TaskViewSet(ViewSet):
 try:
 task_module = import_module(task.task_call_module)
 task_func = getattr(task_module, task.task_call_func)
+LOGGER.debug("Running task", task=task_func)
 task_func.delay(*task.task_call_args, **task.task_call_kwargs)
 messages.success(
 self.request,
@@ -96,6 +100,7 @@ class TaskViewSet(ViewSet):
 )
 return Response(status=204)
 except (ImportError, AttributeError):  # pragma: no cover
+LOGGER.warning("Failed to run task, remove state", task=task)
 # if we get an import error, the module path has probably changed
 task.delete()
 return Response(status=500)
```
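The new LOGGER lines use structlog's keyword-argument event logging; a minimal sketch of that pattern on its own, with the default structlog configuration and illustrative task names:

```python
import structlog

LOGGER = structlog.get_logger()

# Event name first, structured context as keyword arguments.
LOGGER.debug("Running task", task="clean_expired_models")
LOGGER.warning("Failed to run task, remove state", task="stale_task_name")
```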
Core config API:

```diff
@@ -1,10 +1,9 @@
 """core Configs API"""
-from os import environ, path
+from os import path
 
 from django.conf import settings
 from django.db import models
 from drf_spectacular.utils import extend_schema
-from kubernetes.config.incluster_config import SERVICE_HOST_ENV_NAME
 from rest_framework.fields import (
 BooleanField,
 CharField,
@@ -28,7 +27,6 @@ class Capabilities(models.TextChoices):
 
 CAN_SAVE_MEDIA = "can_save_media"
 CAN_GEO_IP = "can_geo_ip"
-CAN_BACKUP = "can_backup"
 
 
 class ErrorReportingConfigSerializer(PassiveSerializer):
@@ -65,13 +63,6 @@ class ConfigView(APIView):
 caps.append(Capabilities.CAN_SAVE_MEDIA)
 if GEOIP_READER.enabled:
 caps.append(Capabilities.CAN_GEO_IP)
-if SERVICE_HOST_ENV_NAME in environ:
-# Running in k8s, only s3 backup is supported
-if CONFIG.y("postgresql.s3_backup"):
-caps.append(Capabilities.CAN_BACKUP)
-else:
-# Running in compose, backup is always supported
-caps.append(Capabilities.CAN_BACKUP)
 return caps
 
 @extend_schema(responses={200: ConfigSerializer(many=False)})
```
Application API views:

```diff
@@ -1,4 +1,6 @@
 """Application API Views"""
+from typing import Optional
+
 from django.core.cache import cache
 from django.db.models import QuerySet
 from django.http.response import HttpResponseBadRequest
@@ -7,7 +9,7 @@ from drf_spectacular.types import OpenApiTypes
 from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
 from guardian.shortcuts import get_objects_for_user
 from rest_framework.decorators import action
-from rest_framework.fields import ReadOnlyField
+from rest_framework.fields import ReadOnlyField, SerializerMethodField
 from rest_framework.parsers import MultiPartParser
 from rest_framework.request import Request
 from rest_framework.response import Response
@@ -39,11 +41,16 @@ def user_app_cache_key(user_pk: str) -> str:
 class ApplicationSerializer(ModelSerializer):
 """Application Serializer"""
 
-launch_url = ReadOnlyField(source="get_launch_url")
+launch_url = SerializerMethodField()
 provider_obj = ProviderSerializer(source="get_provider", required=False)
 
 meta_icon = ReadOnlyField(source="get_meta_icon")
 
+def get_launch_url(self, app: Application) -> Optional[str]:
+"""Allow formatting of launch URL"""
+user = self.context["request"].user
+return app.get_launch_url(user)
+
 class Meta:
 
 model = Application
```
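launch_url moves from a plain ReadOnlyField to a SerializerMethodField so the value can depend on the requesting user. A minimal DRF sketch of that pattern, with illustrative names (GreetingSerializer and its fields are not part of authentik):

```python
from rest_framework import serializers

class GreetingSerializer(serializers.Serializer):
    title = serializers.CharField()
    # Computed per request; DRF calls get_<field_name>() automatically.
    launch_hint = serializers.SerializerMethodField()

    def get_launch_hint(self, obj) -> str:
        user = self.context["request"].user
        return f"{obj['title']} for {user.username}"

# The view (or test) must supply the request via serializer context:
#   GreetingSerializer(instance, context={"request": request}).data
```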
Token API views:

```diff
@@ -3,7 +3,7 @@ from typing import Any
 
 from django_filters.rest_framework import DjangoFilterBackend
 from drf_spectacular.utils import OpenApiResponse, extend_schema
-from guardian.shortcuts import get_anonymous_user
+from guardian.shortcuts import assign_perm, get_anonymous_user
 from rest_framework.decorators import action
 from rest_framework.exceptions import ValidationError
 from rest_framework.fields import CharField
@@ -95,10 +95,12 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
 
 def perform_create(self, serializer: TokenSerializer):
 if not self.request.user.is_superuser:
-return serializer.save(
+instance = serializer.save(
 user=self.request.user,
 expiring=self.request.user.attributes.get(USER_ATTRIBUTE_TOKEN_EXPIRING, True),
 )
+assign_perm("authentik_core.view_token_key", self.request.user, instance)
+return instance
 return super().perform_create(serializer)
 
 @permission_required("authentik_core.view_token_key")
```
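The added assign_perm call grants an object-level permission with django-guardian, so the creating user can read the key of exactly the token they just created. A minimal sketch of that idea (the helper function is illustrative; the permission codename is the one used above):

```python
from guardian.shortcuts import assign_perm

def create_token(serializer, user):
    # Save first, then grant the per-object permission on the new instance.
    instance = serializer.save(user=user)
    assign_perm("authentik_core.view_token_key", user, instance)
    return instance

# Object-level check later:
#   user.has_perm("authentik_core.view_token_key", instance)  -> True for the creator
```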
User API views:

```diff
@@ -24,7 +24,6 @@ from drf_spectacular.utils import (
 from guardian.shortcuts import get_anonymous_user, get_objects_for_user
 from rest_framework.decorators import action
 from rest_framework.fields import CharField, DictField, JSONField, SerializerMethodField
-from rest_framework.permissions import IsAuthenticated
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.serializers import (
@@ -46,9 +45,6 @@ from authentik.core.api.used_by import UsedByMixin
 from authentik.core.api.utils import LinkSerializer, PassiveSerializer, is_dict
 from authentik.core.middleware import SESSION_IMPERSONATE_ORIGINAL_USER, SESSION_IMPERSONATE_USER
 from authentik.core.models import (
-USER_ATTRIBUTE_CHANGE_EMAIL,
-USER_ATTRIBUTE_CHANGE_NAME,
-USER_ATTRIBUTE_CHANGE_USERNAME,
 USER_ATTRIBUTE_SA,
 USER_ATTRIBUTE_TOKEN_EXPIRING,
 Group,
@@ -57,7 +53,6 @@ from authentik.core.models import (
 User,
 )
 from authentik.events.models import EventAction
-from authentik.lib.config import CONFIG
 from authentik.stages.email.models import EmailStage
 from authentik.stages.email.tasks import send_mails
 from authentik.stages.email.utils import TemplateEmailMessage
@@ -126,43 +121,6 @@ class UserSelfSerializer(ModelSerializer):
 "pk": group.pk,
 }
 
-def validate_email(self, email: str):
-"""Check if the user is allowed to change their email"""
-if self.instance.group_attributes().get(
-USER_ATTRIBUTE_CHANGE_EMAIL, CONFIG.y_bool("default_user_change_email", True)
-):
-return email
-if email != self.instance.email:
-raise ValidationError("Not allowed to change email.")
-return email
-
-def validate_name(self, name: str):
-"""Check if the user is allowed to change their name"""
-if self.instance.group_attributes().get(
-USER_ATTRIBUTE_CHANGE_NAME, CONFIG.y_bool("default_user_change_name", True)
-):
-return name
-if name != self.instance.name:
-raise ValidationError("Not allowed to change name.")
-return name
-
-def validate_username(self, username: str):
-"""Check if the user is allowed to change their username"""
-if self.instance.group_attributes().get(
-USER_ATTRIBUTE_CHANGE_USERNAME, CONFIG.y_bool("default_user_change_username", True)
-):
-return username
-if username != self.instance.username:
-raise ValidationError("Not allowed to change username.")
-return username
-
-def save(self, **kwargs):
-if self.instance:
-attributes: dict = self.instance.attributes
-attributes.update(self.validated_data.get("attributes", {}))
-self.validated_data["attributes"] = attributes
-return super().save(**kwargs)
-
 class Meta:
 
 model = User
@@ -241,6 +199,7 @@ class UsersFilter(FilterSet):
 )
 
 is_superuser = BooleanFilter(field_name="ak_groups", lookup_expr="is_superuser")
+uuid = CharFilter(field_name="uuid")
 
 groups_by_name = ModelMultipleChoiceFilter(
 field_name="ak_groups__name",
@@ -290,7 +249,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
 queryset = User.objects.none()
 ordering = ["username"]
 serializer_class = UserSerializer
-search_fields = ["username", "name", "is_active", "email"]
+search_fields = ["username", "name", "is_active", "email", "uuid"]
 filterset_class = UsersFilter
 
 def get_queryset(self):  # pragma: no cover
@@ -407,26 +366,6 @@ class UserViewSet(UsedByMixin, ModelViewSet):
 update_session_auth_hash(self.request, user)
 return Response(status=204)
 
-@extend_schema(request=UserSelfSerializer, responses={200: SessionUserSerializer(many=False)})
-@action(
-methods=["PUT"],
-detail=False,
-pagination_class=None,
-filter_backends=[],
-permission_classes=[IsAuthenticated],
-)
-def update_self(self, request: Request) -> Response:
-"""Allow users to change information on their own profile"""
-data = UserSelfSerializer(instance=User.objects.get(pk=request.user.pk), data=request.data)
-if not data.is_valid():
-return Response(data.errors, status=400)
-new_user = data.save()
-# If we're impersonating, we need to update that user object
-# since it caches the full object
-if SESSION_IMPERSONATE_USER in request.session:
-request.session[SESSION_IMPERSONATE_USER] = new_user
-return Response({"user": data.data})
-
 @permission_required("authentik_core.view_user", ["authentik_events.view_event"])
 @extend_schema(responses={200: UserMetricsSerializer(many=False)})
 @action(detail=True, pagination_class=None, filter_backends=[])
```
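The UsersFilter gains an explicit uuid filter and the viewset's search_fields now include "uuid". A minimal django-filter sketch of declaring such a filter (the class name is illustrative; authentik's real FilterSet declares many more fields):

```python
from django_filters import CharFilter, FilterSet

class UserFilter(FilterSet):
    # Exact match on the model's uuid field, exposed to API clients as ?uuid=<value>.
    uuid = CharFilter(field_name="uuid")

# Wired into a DRF viewset via `filterset_class = UserFilter`,
# GET .../users/?uuid=<value> filters the queryset on that field.
```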
Inbuilt authentication backend:

```diff
@@ -30,7 +30,7 @@ class InbuiltBackend(ModelBackend):
 return
 # Since we can't directly pass other variables to signals, and we want to log the method
 # and the token used, we assume we're running in a flow and set a variable in the context
-flow_plan: FlowPlan = request.session[SESSION_KEY_PLAN]
+flow_plan: FlowPlan = request.session.get(SESSION_KEY_PLAN, FlowPlan(""))
 flow_plan.context[PLAN_CONTEXT_METHOD] = method
 flow_plan.context[PLAN_CONTEXT_METHOD_ARGS] = cleanse_dict(sanitize_dict(kwargs))
 request.session[SESSION_KEY_PLAN] = flow_plan
```
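Reading the plan with .get() and a default avoids a KeyError when authentication happens outside a flow (no plan stored in the session). The same fallback pattern, sketched with plain dicts standing in for the session and plan:

```python
session = {}  # a session with no flow plan stored

# Indexing would raise KeyError; .get() falls back to an empty plan instead.
plan = session.get("plan", {"context": {}})
plan["context"]["auth_method"] = "password"
session["plan"] = plan
```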
Core models (Application):

```diff
@@ -14,7 +14,7 @@ from django.db import models
 from django.db.models import Q, QuerySet, options
 from django.http import HttpRequest
 from django.templatetags.static import static
-from django.utils.functional import cached_property
+from django.utils.functional import SimpleLazyObject, cached_property
 from django.utils.html import escape
 from django.utils.timezone import now
 from django.utils.translation import gettext_lazy as _
@@ -284,13 +284,24 @@ class Application(PolicyBindingModel):
 return self.meta_icon.name
 return self.meta_icon.url
 
-def get_launch_url(self) -> Optional[str]:
+def get_launch_url(self, user: Optional["User"] = None) -> Optional[str]:
 """Get launch URL if set, otherwise attempt to get launch URL based on provider."""
-if self.meta_launch_url:
-return self.meta_launch_url
+url = None
 if provider := self.get_provider():
-return provider.launch_url
-return None
+url = provider.launch_url
+if self.meta_launch_url:
+url = self.meta_launch_url
+if user and url:
+if isinstance(user, SimpleLazyObject):
+user._setup()
+user = user._wrapped
+try:
+return url % user.__dict__
+# pylint: disable=broad-except
+except Exception as exc:
+LOGGER.warning("Failed to format launch url", exc=exc)
+return url
+return url
 
 def get_provider(self) -> Optional[Provider]:
 """Get casted provider instance"""
```
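get_launch_url() now interpolates the launch URL with old-style %-formatting against the user's attribute dict, which is what makes the %(username)s placeholder in the tests below work. A minimal sketch without Django (the User class here is a stand-in):

```python
class User:
    def __init__(self, username: str):
        self.username = username

url = "https://goauthentik.io/%(username)s"
user = User("akadmin")

print(url % user.__dict__)  # https://goauthentik.io/akadmin

# A missing placeholder raises KeyError, which is why the real method wraps the
# interpolation in try/except and falls back to the unformatted URL.
```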
Core background tasks:

```diff
@@ -1,17 +1,7 @@
 """authentik core tasks"""
-from datetime import datetime
-from io import StringIO
-from os import environ
-
-from boto3.exceptions import Boto3Error
-from botocore.exceptions import BotoCoreError, ClientError
-from dbbackup.db.exceptions import CommandConnectorError
-from django.contrib.humanize.templatetags.humanize import naturaltime
 from django.contrib.sessions.backends.cache import KEY_PREFIX
-from django.core import management
 from django.core.cache import cache
 from django.utils.timezone import now
-from kubernetes.config.incluster_config import SERVICE_HOST_ENV_NAME
 from structlog.stdlib import get_logger
 
 from authentik.core.models import AuthenticatedSession, ExpiringModel
@@ -21,7 +11,6 @@ from authentik.events.monitored_tasks import (
 TaskResultStatus,
 prefill_task,
 )
-from authentik.lib.config import CONFIG
 from authentik.root.celery import CELERY_APP
 
 LOGGER = get_logger()
@@ -53,46 +42,3 @@ def clean_expired_models(self: MonitoredTask):
 LOGGER.debug("Expired sessions", model=AuthenticatedSession, amount=amount)
 messages.append(f"Expired {amount} {AuthenticatedSession._meta.verbose_name_plural}")
 self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL, messages))
-
-
-def should_backup() -> bool:
-"""Check if we should be doing backups"""
-if SERVICE_HOST_ENV_NAME in environ and not CONFIG.y("postgresql.s3_backup.bucket"):
-LOGGER.info("Running in k8s and s3 backups are not configured, skipping")
-return False
-if not CONFIG.y_bool("postgresql.backup.enabled"):
-return False
-return True
-
-
-@CELERY_APP.task(bind=True, base=MonitoredTask)
-@prefill_task
-def backup_database(self: MonitoredTask):  # pragma: no cover
-"""Database backup"""
-self.result_timeout_hours = 25
-if not should_backup():
-self.set_status(TaskResult(TaskResultStatus.UNKNOWN, ["Backups are not configured."]))
-return
-try:
-start = datetime.now()
-out = StringIO()
-management.call_command("dbbackup", quiet=True, stdout=out)
-self.set_status(
-TaskResult(
-TaskResultStatus.SUCCESSFUL,
-[
-f"Successfully finished database backup {naturaltime(start)} {out.getvalue()}",
-],
-)
-)
-LOGGER.info("Successfully backed up database.")
-except (
-IOError,
-BotoCoreError,
-ClientError,
-Boto3Error,
-PermissionError,
-CommandConnectorError,
-ValueError,
-) as exc:
-self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
```
Base HTML template:

```diff
@@ -16,6 +16,7 @@
 {% block head_before %}
 {% endblock %}
 <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}">
+<link rel="stylesheet" type="text/css" href="{% static 'dist/custom.css' %}">
 <script src="{% static 'dist/poly.js' %}" type="module"></script>
 {% block head %}
 {% endblock %}
```
Admin interface loading template:

```diff
@@ -10,8 +10,8 @@
 {% endblock %}
 
 {% block body %}
-<ak-message-container></ak-message-container>
+<ak-message-container data-refresh-on-locale="true"></ak-message-container>
-<ak-interface-admin>
+<ak-interface-admin data-refresh-on-locale="true">
 <section class="ak-static-page pf-c-page__main-section pf-m-no-padding-mobile pf-m-xl">
 <div class="pf-c-empty-state" style="height: 100vh;">
 <div class="pf-c-empty-state__content">
```
Flow executor loading template:

```diff
@@ -20,8 +20,8 @@
 {% endblock %}
 
 {% block body %}
-<ak-message-container></ak-message-container>
+<ak-message-container data-refresh-on-locale="true"></ak-message-container>
-<ak-flow-executor>
+<ak-flow-executor data-refresh-on-locale="true">
 <section class="ak-static-page pf-c-page__main-section pf-m-no-padding-mobile pf-m-xl">
 <div class="pf-c-empty-state" style="height: 100vh;">
 <div class="pf-c-empty-state__content">
```
User interface loading template:

```diff
@@ -10,8 +10,8 @@
 {% endblock %}
 
 {% block body %}
-<ak-message-container></ak-message-container>
+<ak-message-container data-refresh-on-locale="true"></ak-message-container>
-<ak-interface-user>
+<ak-interface-user data-refresh-on-locale="true">
 <section class="ak-static-page pf-c-page__main-section pf-m-no-padding-mobile pf-m-xl">
 <div class="pf-c-empty-state" style="height: 100vh;">
 <div class="pf-c-empty-state__content">
```
Application API tests:

```diff
@@ -4,8 +4,10 @@ from rest_framework.test import APITestCase
 
 from authentik.core.models import Application
 from authentik.core.tests.utils import create_test_admin_user
+from authentik.flows.models import Flow
 from authentik.policies.dummy.models import DummyPolicy
 from authentik.policies.models import PolicyBinding
+from authentik.providers.oauth2.models import OAuth2Provider
 
 
 class TestApplicationsAPI(APITestCase):
@@ -13,7 +15,20 @@ class TestApplicationsAPI(APITestCase):
 
 def setUp(self) -> None:
 self.user = create_test_admin_user()
-self.allowed = Application.objects.create(name="allowed", slug="allowed")
+self.provider = OAuth2Provider.objects.create(
+name="test",
+redirect_uris="http://some-other-domain",
+authorization_flow=Flow.objects.create(
+name="test",
+slug="test",
+),
+)
+self.allowed = Application.objects.create(
+name="allowed",
+slug="allowed",
+meta_launch_url="https://goauthentik.io/%(username)s",
+provider=self.provider,
+)
 self.denied = Application.objects.create(name="denied", slug="denied")
 PolicyBinding.objects.create(
 target=self.denied,
@@ -62,10 +77,21 @@ class TestApplicationsAPI(APITestCase):
 "pk": str(self.allowed.pk),
 "name": "allowed",
 "slug": "allowed",
-"provider": None,
-"provider_obj": None,
-"launch_url": None,
-"meta_launch_url": "",
+"provider": self.provider.pk,
+"provider_obj": {
+"assigned_application_name": "allowed",
+"assigned_application_slug": "allowed",
+"authorization_flow": str(self.provider.authorization_flow.pk),
+"component": "ak-provider-oauth2-form",
+"meta_model_name": "authentik_providers_oauth2.oauth2provider",
+"name": self.provider.name,
+"pk": self.provider.pk,
+"property_mappings": [],
+"verbose_name": "OAuth2/OpenID Provider",
+"verbose_name_plural": "OAuth2/OpenID Providers",
+},
+"launch_url": f"https://goauthentik.io/{self.user.username}",
+"meta_launch_url": "https://goauthentik.io/%(username)s",
 "meta_icon": None,
 "meta_description": "",
 "meta_publisher": "",
@@ -98,10 +124,21 @@ class TestApplicationsAPI(APITestCase):
 "pk": str(self.allowed.pk),
 "name": "allowed",
 "slug": "allowed",
-"provider": None,
-"provider_obj": None,
-"launch_url": None,
-"meta_launch_url": "",
+"provider": self.provider.pk,
+"provider_obj": {
+"assigned_application_name": "allowed",
+"assigned_application_slug": "allowed",
+"authorization_flow": str(self.provider.authorization_flow.pk),
+"component": "ak-provider-oauth2-form",
+"meta_model_name": "authentik_providers_oauth2.oauth2provider",
+"name": self.provider.name,
+"pk": self.provider.pk,
+"property_mappings": [],
+"verbose_name": "OAuth2/OpenID Provider",
+"verbose_name_plural": "OAuth2/OpenID Providers",
+},
+"launch_url": f"https://goauthentik.io/{self.user.username}",
+"meta_launch_url": "https://goauthentik.io/%(username)s",
 "meta_icon": None,
 "meta_description": "",
 "meta_publisher": "",
```
|
67  authentik/core/tests/test_applications_views.py  Normal file
@@ -0,0 +1,67 @@
+"""Test Applications API"""
+from unittest.mock import MagicMock, patch
+
+from django.urls import reverse
+
+from authentik.core.models import Application
+from authentik.core.tests.utils import create_test_admin_user, create_test_tenant
+from authentik.flows.models import Flow, FlowDesignation
+from authentik.flows.tests import FlowTestCase
+from authentik.tenants.models import Tenant
+
+
+class TestApplicationsViews(FlowTestCase):
+    """Test applications Views"""
+
+    def setUp(self) -> None:
+        self.user = create_test_admin_user()
+        self.allowed = Application.objects.create(
+            name="allowed", slug="allowed", meta_launch_url="https://goauthentik.io/%(username)s"
+        )
+
+    def test_check_redirect(self):
+        """Test redirect"""
+        empty_flow = Flow.objects.create(
+            name="foo",
+            slug="foo",
+            designation=FlowDesignation.AUTHENTICATION,
+        )
+        tenant: Tenant = create_test_tenant()
+        tenant.flow_authentication = empty_flow
+        tenant.save()
+        response = self.client.get(
+            reverse(
+                "authentik_core:application-launch",
+                kwargs={"application_slug": self.allowed.slug},
+            ),
+            follow=True,
+        )
+        self.assertEqual(response.status_code, 200)
+        with patch(
+            "authentik.flows.stage.StageView.get_pending_user", MagicMock(return_value=self.user)
+        ):
+            response = self.client.post(
+                reverse("authentik_api:flow-executor", kwargs={"flow_slug": empty_flow.slug})
+            )
+            self.assertEqual(response.status_code, 200)
+            self.assertStageRedirects(response, f"https://goauthentik.io/{self.user.username}")
+
+    def test_check_redirect_auth(self):
+        """Test redirect"""
+        self.client.force_login(self.user)
+        empty_flow = Flow.objects.create(
+            name="foo",
+            slug="foo",
+            designation=FlowDesignation.AUTHENTICATION,
+        )
+        tenant: Tenant = create_test_tenant()
+        tenant.flow_authentication = empty_flow
+        tenant.save()
+        response = self.client.get(
+            reverse(
+                "authentik_core:application-launch",
+                kwargs={"application_slug": self.allowed.slug},
+            ),
+        )
+        self.assertEqual(response.status_code, 302)
+        self.assertEqual(response.url, f"https://goauthentik.io/{self.user.username}")
@@ -30,6 +30,7 @@ class TestTokenAPI(APITestCase):
         self.assertEqual(token.user, self.user)
         self.assertEqual(token.intent, TokenIntents.INTENT_API)
         self.assertEqual(token.expiring, True)
+        self.assertTrue(self.user.has_perm("authentik_core.view_token_key", token))

     def test_token_create_invalid(self):
         """Test token creation endpoint (invalid data)"""
@@ -2,12 +2,7 @@
 from django.urls.base import reverse
 from rest_framework.test import APITestCase

-from authentik.core.models import (
-    USER_ATTRIBUTE_CHANGE_EMAIL,
-    USER_ATTRIBUTE_CHANGE_NAME,
-    USER_ATTRIBUTE_CHANGE_USERNAME,
-    User,
-)
+from authentik.core.models import User
 from authentik.core.tests.utils import create_test_admin_user, create_test_flow, create_test_tenant
 from authentik.flows.models import FlowDesignation
 from authentik.lib.generators import generate_key
@@ -22,51 +17,6 @@ class TestUsersAPI(APITestCase):
         self.admin = create_test_admin_user()
         self.user = User.objects.create(username="test-user")

-    def test_update_self(self):
-        """Test update_self"""
-        self.admin.attributes["foo"] = "bar"
-        self.admin.save()
-        self.admin.refresh_from_db()
-        self.client.force_login(self.admin)
-        response = self.client.put(
-            reverse("authentik_api:user-update-self"), data={"username": "foo", "name": "foo"}
-        )
-        self.admin.refresh_from_db()
-        self.assertEqual(response.status_code, 200)
-        self.assertEqual(self.admin.attributes["foo"], "bar")
-        self.assertEqual(self.admin.username, "foo")
-        self.assertEqual(self.admin.name, "foo")
-
-    def test_update_self_name_denied(self):
-        """Test update_self"""
-        self.admin.attributes[USER_ATTRIBUTE_CHANGE_NAME] = False
-        self.admin.save()
-        self.client.force_login(self.admin)
-        response = self.client.put(
-            reverse("authentik_api:user-update-self"), data={"username": "foo", "name": "foo"}
-        )
-        self.assertEqual(response.status_code, 400)
-
-    def test_update_self_username_denied(self):
-        """Test update_self"""
-        self.admin.attributes[USER_ATTRIBUTE_CHANGE_USERNAME] = False
-        self.admin.save()
-        self.client.force_login(self.admin)
-        response = self.client.put(
-            reverse("authentik_api:user-update-self"), data={"username": "foo", "name": "foo"}
-        )
-        self.assertEqual(response.status_code, 400)
-
-    def test_update_self_email_denied(self):
-        """Test update_self"""
-        self.admin.attributes[USER_ATTRIBUTE_CHANGE_EMAIL] = False
-        self.admin.save()
-        self.client.force_login(self.admin)
-        response = self.client.put(
-            reverse("authentik_api:user-update-self"), data={"email": "foo", "name": "foo"}
-        )
-        self.assertEqual(response.status_code, 400)
-
     def test_metrics(self):
         """Test user's metrics"""
         self.client.force_login(self.admin)
@@ -29,4 +29,4 @@ class UserSettingSerializer(PassiveSerializer):
     component = CharField()
     title = CharField()
     configure_url = CharField(required=False)
-    icon_url = CharField()
+    icon_url = CharField(required=False)
@@ -5,7 +5,7 @@ from django.views.decorators.csrf import ensure_csrf_cookie
 from django.views.generic import RedirectView
 from django.views.generic.base import TemplateView

-from authentik.core.views import impersonate
+from authentik.core.views import apps, impersonate
 from authentik.core.views.interface import FlowInterfaceView
 from authentik.core.views.session import EndSessionView

@@ -15,6 +15,12 @@ urlpatterns = [
         login_required(RedirectView.as_view(pattern_name="authentik_core:if-user")),
         name="root-redirect",
     ),
+    path(
+        # We have to use this format since everything else uses applications/o or applications/saml
+        "application/launch/<slug:application_slug>/",
+        apps.RedirectToAppLaunch.as_view(),
+        name="application-launch",
+    ),
     # Impersonation
     path(
         "-/impersonation/<int:user_id>/",
75  authentik/core/views/apps.py  Normal file
@@ -0,0 +1,75 @@
+"""app views"""
+from django.http import Http404, HttpRequest, HttpResponse, HttpResponseRedirect
+from django.shortcuts import get_object_or_404
+from django.utils.translation import gettext_lazy as _
+from django.views import View
+
+from authentik.core.models import Application
+from authentik.flows.challenge import (
+    ChallengeResponse,
+    ChallengeTypes,
+    HttpChallengeResponse,
+    RedirectChallenge,
+)
+from authentik.flows.models import in_memory_stage
+from authentik.flows.planner import PLAN_CONTEXT_APPLICATION, FlowPlanner
+from authentik.flows.stage import ChallengeStageView
+from authentik.flows.views.executor import SESSION_KEY_PLAN
+from authentik.lib.utils.urls import redirect_with_qs
+from authentik.stages.consent.stage import (
+    PLAN_CONTEXT_CONSENT_HEADER,
+    PLAN_CONTEXT_CONSENT_PERMISSIONS,
+)
+from authentik.tenants.models import Tenant
+
+
+class RedirectToAppLaunch(View):
+    """Application launch view, redirect to the launch URL"""
+
+    def dispatch(self, request: HttpRequest, application_slug: str) -> HttpResponse:
+        app = get_object_or_404(Application, slug=application_slug)
+        # Check here if the application has any launch URL set, if not 404
+        launch = app.get_launch_url()
+        if not launch:
+            raise Http404
+        # Check if we're authenticated already, saves us the flow run
+        if request.user.is_authenticated:
+            return HttpResponseRedirect(app.get_launch_url(request.user))
+        # otherwise, do a custom flow plan that includes the application that's
+        # being accessed, to improve usability
+        tenant: Tenant = request.tenant
+        flow = tenant.flow_authentication
+        planner = FlowPlanner(flow)
+        planner.allow_empty_flows = True
+        plan = planner.plan(
+            request,
+            {
+                PLAN_CONTEXT_APPLICATION: app,
+                PLAN_CONTEXT_CONSENT_HEADER: _("You're about to sign into %(application)s.")
+                % {"application": app.name},
+                PLAN_CONTEXT_CONSENT_PERMISSIONS: [],
+            },
+        )
+        plan.insert_stage(in_memory_stage(RedirectToAppStage))
+        request.session[SESSION_KEY_PLAN] = plan
+        return redirect_with_qs("authentik_core:if-flow", request.GET, flow_slug=flow.slug)
+
+
+class RedirectToAppStage(ChallengeStageView):
+    """Final stage to be inserted after the user logs in"""
+
+    def get_challenge(self, *args, **kwargs) -> RedirectChallenge:
+        app = self.executor.plan.context[PLAN_CONTEXT_APPLICATION]
+        launch = app.get_launch_url(self.get_pending_user())
+        # sanity check to ensure launch is still set
+        if not launch:
+            raise Http404
+        return RedirectChallenge(
+            instance={
+                "type": ChallengeTypes.REDIRECT.value,
+                "to": launch,
+            }
+        )
+
+    def challenge_valid(self, response: ChallengeResponse) -> HttpResponse:
+        return HttpChallengeResponse(self.get_challenge())
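For orientation, the new route and view above can be exercised the same way the new test file earlier in this diff does. A minimal sketch, assuming it runs inside authentik's configured Django test environment; the slug "allowed" is an illustrative value taken from those tests:

# Minimal sketch of hitting the new launch endpoint with Django's test client.
# Assumes a configured authentik/Django test environment; "allowed" is illustrative.
from django.test import Client
from django.urls import reverse

client = Client()
url = reverse("authentik_core:application-launch", kwargs={"application_slug": "allowed"})
# Unauthenticated requests are redirected into the authentication flow;
# authenticated requests get a 302 straight to the application's launch URL.
response = client.get(url)
print(response.status_code, response.get("Location", ""))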
@@ -61,7 +61,7 @@ def certificate_discovery(self: MonitoredTask):
             else:
                 cert_name = path.name.replace(path.suffix, "")
             try:
-                with open(path, "r+", encoding="utf-8") as _file:
+                with open(path, "r", encoding="utf-8") as _file:
                     body = _file.read()
                     if "PRIVATE KEY" in body:
                         private_keys[cert_name] = ensure_private_key_valid(body)
@@ -1,7 +1,5 @@
 """events GeoIP Reader"""
-from datetime import datetime
 from os import stat
-from time import time
 from typing import Optional, TypedDict

 from geoip2.database import Reader
@@ -46,14 +44,18 @@ class GeoIPReader:
             LOGGER.warning("Failed to load GeoIP database", exc=exc)

     def __check_expired(self):
-        """Check if the geoip database has been opened longer than 8 hours,
-        and re-open it, as it will probably will have been re-downloaded"""
-        now = time()
-        diff = datetime.fromtimestamp(now) - datetime.fromtimestamp(self.__last_mtime)
-        diff_hours = diff.total_seconds() // 3600
-        if diff_hours >= 8:
-            LOGGER.info("GeoIP databased loaded too long, re-opening", diff=diff)
-            self.__open()
+        """Check if the modification date of the GeoIP database has
+        changed, and reload it if so"""
+        path = CONFIG.y("geoip")
+        try:
+            mtime = stat(path).st_mtime
+            diff = self.__last_mtime < mtime
+            if diff > 0:
+                LOGGER.info("Found new GeoIP Database, reopening", diff=diff)
+                self.__open()
+        except OSError as exc:
+            LOGGER.warning("Failed to check GeoIP age", exc=exc)
+            return

     @property
     def enabled(self) -> bool:
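The reload check above boils down to comparing the file's current mtime against the one recorded when the database was opened. A standalone sketch of the same idea, with an illustrative path and variable name:

# Standalone sketch of an mtime-based reload check, mirroring __check_expired above.
# The path argument and last_mtime variable are illustrative placeholders.
from os import stat

last_mtime = 0.0

def database_changed(path: str) -> bool:
    """Return True when the file on disk is newer than what was loaded."""
    try:
        return stat(path).st_mtime > last_mtime
    except OSError:
        # Missing or unreadable file: treat as unchanged instead of raising.
        return False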
@@ -13,7 +13,7 @@ from authentik.core.models import User
 from authentik.events.models import cleanse_dict
 from authentik.flows.exceptions import EmptyFlowException, FlowNonApplicableException
 from authentik.flows.markers import ReevaluateMarker, StageMarker
-from authentik.flows.models import Flow, FlowStageBinding, Stage
+from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding, Stage
 from authentik.lib.config import CONFIG
 from authentik.policies.engine import PolicyEngine

@@ -156,14 +156,15 @@ class FlowPlanner:
             # User is passing so far, check if we have a cached plan
             cached_plan_key = cache_key(self.flow, user)
             cached_plan = cache.get(cached_plan_key, None)
-            if cached_plan and self.use_cache:
-                self._logger.debug(
-                    "f(plan): taking plan from cache",
-                    key=cached_plan_key,
-                )
-                # Reset the context as this isn't factored into caching
-                cached_plan.context = default_context or {}
-                return cached_plan
+            if self.flow.designation not in [FlowDesignation.STAGE_CONFIGURATION]:
+                if cached_plan and self.use_cache:
+                    self._logger.debug(
+                        "f(plan): taking plan from cache",
+                        key=cached_plan_key,
+                    )
+                    # Reset the context as this isn't factored into caching
+                    cached_plan.context = default_context or {}
+                    return cached_plan
             self._logger.debug(
                 "f(plan): building plan",
             )
@@ -5,16 +5,6 @@ postgresql:
   user: authentik
   port: 5432
   password: 'env://POSTGRES_PASSWORD'
-  backup:
-    enabled: false
-  s3_backup:
-    access_key: ""
-    secret_key: ""
-    bucket: ""
-    region: eu-central-1
-    host: ""
-    location: ""
-    insecure_skip_verify: false

 web:
   listen: 0.0.0.0:9000
@@ -46,7 +36,7 @@ error_reporting:
   enabled: false
   environment: customer
   send_pii: false
-  sample_rate: 0.5
+  sample_rate: 0.3

 # Global email settings
 email:
@@ -65,18 +55,15 @@ outposts:
   # %(version)s: Current version; 2021.4.1
   # %(build_hash)s: Build hash if you're running a beta version
   container_image_base: ghcr.io/goauthentik/%(type)s:%(version)s
+  discover: true

 cookie_domain: null
 disable_update_check: false
 disable_startup_analytics: false
 avatars: env://AUTHENTIK_AUTHENTIK__AVATARS?gravatar
-geoip: "./GeoLite2-City.mmdb"
+geoip: "/geoip/GeoLite2-City.mmdb"

-footer_links:
-  - name: Documentation
-    href: https://goauthentik.io/docs/?utm_source=authentik
-  - name: authentik Website
-    href: https://goauthentik.io/?utm_source=authentik
+footer_links: []

 default_user_change_name: true
 default_user_change_email: true
@@ -32,6 +32,7 @@ class BaseEvaluator:
         self._globals = {
             "regex_match": BaseEvaluator.expr_regex_match,
             "regex_replace": BaseEvaluator.expr_regex_replace,
+            "list_flatten": BaseEvaluator.expr_flatten,
             "ak_is_group_member": BaseEvaluator.expr_is_group_member,
             "ak_user_by": BaseEvaluator.expr_user_by,
             "ak_logger": get_logger(),
@@ -40,6 +41,15 @@ class BaseEvaluator:
         self._context = {}
         self._filename = "BaseEvalautor"

+    @staticmethod
+    def expr_flatten(value: list[Any] | Any) -> Optional[Any]:
+        """Flatten `value` if its a list"""
+        if isinstance(value, list):
+            if len(value) < 1:
+                return None
+            return value[0]
+        return value
+
     @staticmethod
     def expr_regex_match(value: Any, regex: str) -> bool:
         """Expression Filter to run re.search"""
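The `list_flatten` helper registered above returns the first element of a list (or None for an empty list) and passes non-list values through unchanged. A quick illustration of the behaviour as defined in the hunk; the import path and the sample values are assumptions for the sketch:

# Behaviour of BaseEvaluator.expr_flatten as added above; values are illustrative.
from authentik.lib.expression.evaluator import BaseEvaluator  # import path assumed

print(BaseEvaluator.expr_flatten(["first", "second"]))  # -> "first"
print(BaseEvaluator.expr_flatten([]))                   # -> None
print(BaseEvaluator.expr_flatten("already-flat"))       # -> "already-flat"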
6  authentik/lib/merge.py  Normal file
@@ -0,0 +1,6 @@
+"""merge utils"""
+from deepmerge import Merger
+
+MERGE_LIST_UNIQUE = Merger(
+    [(list, ["append_unique"]), (dict, ["merge"]), (set, ["union"])], ["override"], ["override"]
+)
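The new merger combines nested mappings while de-duplicating list entries. A small usage sketch; the input dictionaries are made up, and the expected result assumes deepmerge's append_unique list strategy behaves as named:

# Usage sketch for MERGE_LIST_UNIQUE as defined above; the inputs are made up.
from authentik.lib.merge import MERGE_LIST_UNIQUE

base = {"groups": ["admins"], "attrs": {"a": 1}}
extra = {"groups": ["admins", "users"], "attrs": {"b": 2}}
merged = MERGE_LIST_UNIQUE.merge(base, extra)
# Expected (append_unique de-duplicates list items, dicts are merged):
# {"groups": ["admins", "users"], "attrs": {"a": 1, "b": 2}}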
@@ -3,8 +3,6 @@ from typing import Optional

 from aioredis.errors import ConnectionClosedError, ReplyError
 from billiard.exceptions import SoftTimeLimitExceeded, WorkerLostError
-from botocore.client import ClientError
-from botocore.exceptions import BotoCoreError
 from celery.exceptions import CeleryError
 from channels.middleware import BaseMiddleware
 from channels_redis.core import ChannelFull
@@ -81,9 +79,6 @@ def before_send(event: dict, hint: dict) -> Optional[dict]:
             WorkerLostError,
             CeleryError,
             SoftTimeLimitExceeded,
-            # S3 errors
-            BotoCoreError,
-            ClientError,
             # custom baseclass
             SentryIgnoredException,
             # ldap errors
@@ -101,8 +96,6 @@ def before_send(event: dict, hint: dict) -> Optional[dict]:
             return None
     if "logger" in event:
         if event["logger"] in [
-            "dbbackup",
-            "botocore",
             "kombu",
             "asyncio",
             "multiprocessing",
@@ -55,6 +55,10 @@ class OutpostConsumer(AuthJsonConsumer):

     first_msg = False

+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.logger = get_logger()
+
     def connect(self):
         super().connect()
         uuid = self.scope["url_route"]["kwargs"]["pk"]
@@ -65,7 +69,7 @@ class OutpostConsumer(AuthJsonConsumer):
         )
         if not outpost:
             raise DenyConnection()
-        self.logger = get_logger().bind(outpost=outpost)
+        self.logger = self.logger.bind(outpost=outpost)
         try:
             self.accept()
         except RuntimeError as exc:
@@ -2,6 +2,7 @@
 from pathlib import Path

 from kubernetes.client.models.v1_container_port import V1ContainerPort
+from kubernetes.client.models.v1_service_port import V1ServicePort
 from kubernetes.config.incluster_config import SERVICE_TOKEN_FILENAME

 from authentik.outposts.controllers.k8s.triggers import NeedsRecreate
@@ -16,10 +17,31 @@ def get_namespace() -> str:
     return "default"


-def compare_ports(current: list[V1ContainerPort], reference: list[V1ContainerPort]):
+def compare_port(
+    current: V1ServicePort | V1ContainerPort, reference: V1ServicePort | V1ContainerPort
+) -> bool:
+    """Compare a single port"""
+    if current.name != reference.name:
+        return False
+    if current.protocol != reference.protocol:
+        return False
+    if isinstance(current, V1ServicePort) and isinstance(reference, V1ServicePort):
+        # We only care about the target port
+        if current.target_port != reference.target_port:
+            return False
+    if isinstance(current, V1ContainerPort) and isinstance(reference, V1ContainerPort):
+        # We only care about the target port
+        if current.container_port != reference.container_port:
+            return False
+    return True
+
+
+def compare_ports(
+    current: list[V1ServicePort | V1ContainerPort], reference: list[V1ServicePort | V1ContainerPort]
+):
     """Compare ports of a list"""
     if len(current) != len(reference):
         raise NeedsRecreate()
     for port in reference:
-        if port not in current:
+        if not any(compare_port(port, current_port) for current_port in current):
             raise NeedsRecreate()
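compare_port deliberately looks only at the name, the protocol, and the target/container port, so incidental field differences or reordered port lists no longer force a recreate. A small illustration with the kubernetes client models; the port values are made up:

# Illustrative comparison with the kubernetes client models; values are made up.
from kubernetes.client.models.v1_container_port import V1ContainerPort

current = [V1ContainerPort(name="http", container_port=9000, protocol="TCP")]
reference = [V1ContainerPort(name="http", container_port=9000, protocol="TCP")]
# compare_ports() as defined above only raises NeedsRecreate when a reference
# port has no match on name/protocol/container_port anywhere in the current list.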
@@ -3,6 +3,8 @@ import os
 from pathlib import Path
 from tempfile import gettempdir

+from docker.errors import DockerException
+
 from authentik.crypto.models import CertificateKeyPair

 HEADER = "### Managed by authentik"
@@ -27,6 +29,8 @@ class DockerInlineSSH:
     def __init__(self, host: str, keypair: CertificateKeyPair) -> None:
         self.host = host
         self.keypair = keypair
+        if not self.keypair:
+            raise DockerException("keypair must be set for SSH connections")
         self.config_path = Path("~/.ssh/config").expanduser()
         self.header = f"{HEADER} - {self.host}\n"

@@ -23,6 +23,7 @@ from authentik.events.monitored_tasks import (
     TaskResultStatus,
     prefill_task,
 )
+from authentik.lib.config import CONFIG
 from authentik.lib.utils.reflection import path_to_class
 from authentik.outposts.controllers.base import BaseController, ControllerException
 from authentik.outposts.controllers.docker import DockerClient
@@ -231,6 +232,9 @@ def _outpost_single_update(outpost: Outpost, layer=None):
 @CELERY_APP.task()
 def outpost_local_connection():
     """Checks the local environment and create Service connections."""
+    if not CONFIG.y_bool("outposts.discover"):
+        LOGGER.debug("outpost integration discovery is disabled")
+        return
     # Explicitly check against token filename, as that's
     # only present when the integration is enabled
     if Path(SERVICE_TOKEN_FILENAME).exists():
@@ -2,9 +2,12 @@

 GRANT_TYPE_AUTHORIZATION_CODE = "authorization_code"
 GRANT_TYPE_REFRESH_TOKEN = "refresh_token"  # nosec
+GRANT_TYPE_CLIENT_CREDENTIALS = "client_credentials"

 PROMPT_NONE = "none"
 PROMPT_CONSNET = "consent"
 PROMPT_LOGIN = "login"

 SCOPE_OPENID = "openid"
 SCOPE_OPENID_PROFILE = "profile"
 SCOPE_OPENID_EMAIL = "email"
@@ -168,7 +168,7 @@ class TokenError(OAuth2Error):
     https://tools.ietf.org/html/rfc6749#section-5.2
     """

-    _errors = {
+    errors = {
         "invalid_request": "The request is otherwise malformed",
         "invalid_client": "Client authentication failed (e.g., unknown client, "
         "no client authentication included, or unsupported "
@@ -188,7 +188,7 @@ class TokenError(OAuth2Error):
     def __init__(self, error):
         super().__init__()
         self.error = error
-        self.description = self._errors[error]
+        self.description = self.errors[error]


 class BearerTokenError(OAuth2Error):
@@ -7,7 +7,7 @@ from dataclasses import asdict, dataclass, field
 from datetime import datetime
 from hashlib import sha256
 from typing import Any, Optional
-from urllib.parse import urlparse
+from urllib.parse import urlparse, urlunparse

 from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey
 from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
@@ -45,6 +45,13 @@ class GrantTypes(models.TextChoices):
     HYBRID = "hybrid"


+class ResponseMode(models.TextChoices):
+    """https://openid.net/specs/oauth-v2-multiple-response-types-1_0.html#OAuth.Post"""
+
+    QUERY = "query"
+    FRAGMENT = "fragment"
+
+
 class SubModes(models.TextChoices):
     """Mode after which 'sub' attribute is generateed, for compatibility reasons"""

@@ -259,8 +266,8 @@ class OAuth2Provider(Provider):
         if self.redirect_uris == "":
             return None
         main_url = self.redirect_uris.split("\n", maxsplit=1)[0]
-        launch_url = urlparse(main_url)
-        return main_url.replace(launch_url.path, "")
+        launch_url = urlparse(main_url)._replace(path="")
+        return urlunparse(launch_url)

     @property
     def component(self) -> str:
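The launch-URL change swaps a string replace for proper URL handling: the parsed URL's path is cleared and the result is re-assembled with urlunparse. A quick standard-library illustration, using the redirect URI that the tests below also use:

# Why the urlparse/urlunparse change matters; input mirrors the tests below.
from urllib.parse import urlparse, urlunparse

main_url = "http://local.invalid/Foo"
launch_url = urlparse(main_url)._replace(path="")
print(urlunparse(launch_url))  # -> "http://local.invalid"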
@@ -43,7 +43,7 @@ class TestAuthorize(OAuthTestCase):
             name="test",
             client_id="test",
             authorization_flow=create_test_flow(),
-            redirect_uris="http://local.invalid",
+            redirect_uris="http://local.invalid/Foo",
         )
         with self.assertRaises(AuthorizeError):
             request = self.factory.get(
@@ -51,7 +51,7 @@ class TestAuthorize(OAuthTestCase):
                 data={
                     "response_type": "code",
                     "client_id": "test",
-                    "redirect_uri": "http://local.invalid",
+                    "redirect_uri": "http://local.invalid/Foo",
                     "request": "foo",
                 },
             )
@@ -105,26 +105,30 @@ class TestAuthorize(OAuthTestCase):
             name="test",
             client_id="test",
             authorization_flow=create_test_flow(),
-            redirect_uris="http://local.invalid",
+            redirect_uris="http://local.invalid/Foo",
         )
         request = self.factory.get(
             "/",
             data={
                 "response_type": "code",
                 "client_id": "test",
-                "redirect_uri": "http://local.invalid",
+                "redirect_uri": "http://local.invalid/Foo",
             },
         )
         self.assertEqual(
             OAuthAuthorizationParams.from_request(request).grant_type,
             GrantTypes.AUTHORIZATION_CODE,
         )
+        self.assertEqual(
+            OAuthAuthorizationParams.from_request(request).redirect_uri,
+            "http://local.invalid/Foo",
+        )
         request = self.factory.get(
             "/",
             data={
                 "response_type": "id_token",
                 "client_id": "test",
-                "redirect_uri": "http://local.invalid",
+                "redirect_uri": "http://local.invalid/Foo",
                 "scope": "openid",
                 "state": "foo",
             },
@@ -140,7 +144,7 @@ class TestAuthorize(OAuthTestCase):
             data={
                 "response_type": "id_token",
                 "client_id": "test",
-                "redirect_uri": "http://local.invalid",
+                "redirect_uri": "http://local.invalid/Foo",
                 "state": "foo",
             },
         )
@@ -153,7 +157,7 @@ class TestAuthorize(OAuthTestCase):
             data={
                 "response_type": "code token",
                 "client_id": "test",
-                "redirect_uri": "http://local.invalid",
+                "redirect_uri": "http://local.invalid/Foo",
                 "scope": "openid",
                 "state": "foo",
             },
@@ -167,7 +171,7 @@ class TestAuthorize(OAuthTestCase):
             data={
                 "response_type": "invalid",
                 "client_id": "test",
-                "redirect_uri": "http://local.invalid",
+                "redirect_uri": "http://local.invalid/Foo",
             },
         )
         OAuthAuthorizationParams.from_request(request)
@@ -0,0 +1,174 @@
+"""Test token view"""
+from json import loads
+
+from django.test import RequestFactory
+from django.urls import reverse
+from jwt import decode
+
+from authentik.core.models import USER_ATTRIBUTE_SA, Application, Group, Token, TokenIntents
+from authentik.core.tests.utils import create_test_admin_user, create_test_cert, create_test_flow
+from authentik.lib.generators import generate_id, generate_key
+from authentik.managed.manager import ObjectManager
+from authentik.policies.models import PolicyBinding
+from authentik.providers.oauth2.constants import (
+    GRANT_TYPE_CLIENT_CREDENTIALS,
+    SCOPE_OPENID,
+    SCOPE_OPENID_EMAIL,
+    SCOPE_OPENID_PROFILE,
+)
+from authentik.providers.oauth2.errors import TokenError
+from authentik.providers.oauth2.models import OAuth2Provider, ScopeMapping
+from authentik.providers.oauth2.tests.utils import OAuthTestCase
+
+
+class TestTokenClientCredentials(OAuthTestCase):
+    """Test token (client_credentials) view"""
+
+    def setUp(self) -> None:
+        super().setUp()
+        ObjectManager().run()
+        self.factory = RequestFactory()
+        self.provider = OAuth2Provider.objects.create(
+            name="test",
+            client_id=generate_id(),
+            client_secret=generate_key(),
+            authorization_flow=create_test_flow(),
+            redirect_uris="http://testserver",
+            signing_key=create_test_cert(),
+        )
+        self.provider.property_mappings.set(ScopeMapping.objects.all())
+        self.app = Application.objects.create(name="test", slug="test", provider=self.provider)
+        self.user = create_test_admin_user("sa")
+        self.user.attributes[USER_ATTRIBUTE_SA] = True
+        self.user.save()
+        self.token = Token.objects.create(
+            identifier="sa-token",
+            user=self.user,
+            intent=TokenIntents.INTENT_APP_PASSWORD,
+            expiring=False,
+        )
+
+    def test_wrong_user(self):
+        """test invalid username"""
+        response = self.client.post(
+            reverse("authentik_providers_oauth2:token"),
+            {
+                "grant_type": GRANT_TYPE_CLIENT_CREDENTIALS,
+                "scope": SCOPE_OPENID,
+                "client_id": self.provider.client_id,
+                "username": "saa",
+                "password": self.token.key,
+            },
+        )
+        self.assertEqual(response.status_code, 400)
+        self.assertJSONEqual(
+            response.content.decode(),
+            {"error": "invalid_grant", "error_description": TokenError.errors["invalid_grant"]},
+        )
+
+    def test_wrong_token(self):
+        """test invalid token"""
+        response = self.client.post(
+            reverse("authentik_providers_oauth2:token"),
+            {
+                "grant_type": GRANT_TYPE_CLIENT_CREDENTIALS,
+                "scope": SCOPE_OPENID,
+                "client_id": self.provider.client_id,
+                "username": "sa",
+                "password": self.token.key + "foo",
+            },
+        )
+        self.assertEqual(response.status_code, 400)
+        self.assertJSONEqual(
+            response.content.decode(),
+            {"error": "invalid_grant", "error_description": TokenError.errors["invalid_grant"]},
+        )
+
+    def test_non_sa(self):
+        """test non service-account"""
+        self.user.attributes[USER_ATTRIBUTE_SA] = False
+        self.user.save()
+        response = self.client.post(
+            reverse("authentik_providers_oauth2:token"),
+            {
+                "grant_type": GRANT_TYPE_CLIENT_CREDENTIALS,
+                "scope": SCOPE_OPENID,
+                "client_id": self.provider.client_id,
+                "username": "sa",
+                "password": self.token.key,
+            },
+        )
+        self.assertEqual(response.status_code, 400)
+        self.assertJSONEqual(
+            response.content.decode(),
+            {"error": "invalid_grant", "error_description": TokenError.errors["invalid_grant"]},
+        )
+
+    def test_no_provider(self):
+        """test no provider"""
+        self.app.provider = None
+        self.app.save()
+        response = self.client.post(
+            reverse("authentik_providers_oauth2:token"),
+            {
+                "grant_type": GRANT_TYPE_CLIENT_CREDENTIALS,
+                "scope": SCOPE_OPENID,
+                "client_id": self.provider.client_id,
+                "username": "sa",
+                "password": self.token.key,
+            },
+        )
+        self.assertEqual(response.status_code, 400)
+        self.assertJSONEqual(
+            response.content.decode(),
+            {"error": "invalid_grant", "error_description": TokenError.errors["invalid_grant"]},
+        )
+
+    def test_permission_denied(self):
+        """test permission denied"""
+        group = Group.objects.create(name="foo")
+        PolicyBinding.objects.create(
+            group=group,
+            target=self.app,
+            order=0,
+        )
+        response = self.client.post(
+            reverse("authentik_providers_oauth2:token"),
+            {
+                "grant_type": GRANT_TYPE_CLIENT_CREDENTIALS,
+                "scope": SCOPE_OPENID,
+                "client_id": self.provider.client_id,
+                "username": "sa",
+                "password": self.token.key,
+            },
+        )
+        self.assertEqual(response.status_code, 400)
+        self.assertJSONEqual(
+            response.content.decode(),
+            {"error": "invalid_grant", "error_description": TokenError.errors["invalid_grant"]},
+        )
+
+    def test_successful(self):
+        """test successful"""
+        response = self.client.post(
+            reverse("authentik_providers_oauth2:token"),
+            {
+                "grant_type": GRANT_TYPE_CLIENT_CREDENTIALS,
+                "scope": f"{SCOPE_OPENID} {SCOPE_OPENID_EMAIL} {SCOPE_OPENID_PROFILE}",
+                "client_id": self.provider.client_id,
+                "username": "sa",
+                "password": self.token.key,
+            },
+        )
+        self.assertEqual(response.status_code, 200)
+        body = loads(response.content.decode())
+        self.assertEqual(body["token_type"], "bearer")
+        _, alg = self.provider.get_jwt_key()
+        jwt = decode(
+            body["access_token"],
+            key=self.provider.signing_key.public_key,
+            algorithms=[alg],
+            audience=self.provider.client_id,
+        )
+        self.assertEqual(jwt["given_name"], self.user.name)
+        self.assertEqual(jwt["preferred_username"], self.user.username)
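Outside the test suite, the same grant can be exercised with a plain HTTP POST carrying the parameters shown in the tests above. A hedged sketch: the endpoint URL, client ID, and app-password value are placeholders, and the exact token-endpoint path depends on the deployment:

# Hedged sketch of a client_credentials request mirroring the tests above.
# The URL, client_id, and app-password token are placeholders; the token
# endpoint path is assumed and should be checked against the deployment.
import requests

response = requests.post(
    "https://authentik.example.com/application/o/token/",
    data={
        "grant_type": "client_credentials",
        "scope": "openid email profile",
        "client_id": "<client id of the provider>",
        "username": "<service account username>",
        "password": "<app password token key>",
    },
)
print(response.status_code, response.json().get("access_token", ""))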
@@ -44,6 +44,7 @@ from authentik.providers.oauth2.models import (
     AuthorizationCode,
     GrantTypes,
     OAuth2Provider,
+    ResponseMode,
     ResponseTypes,
 )
 from authentik.providers.oauth2.utils import HttpResponseRedirectScheme
|
|||||||
# and POST request.
|
# and POST request.
|
||||||
query_dict = request.POST if request.method == "POST" else request.GET
|
query_dict = request.POST if request.method == "POST" else request.GET
|
||||||
state = query_dict.get("state")
|
state = query_dict.get("state")
|
||||||
redirect_uri = query_dict.get("redirect_uri", "").lower()
|
redirect_uri = query_dict.get("redirect_uri", "")
|
||||||
|
|
||||||
response_type = query_dict.get("response_type", "")
|
response_type = query_dict.get("response_type", "")
|
||||||
grant_type = None
|
grant_type = None
|
||||||
@ -153,7 +154,10 @@ class OAuthAuthorizationParams:
|
|||||||
def check_redirect_uri(self):
|
def check_redirect_uri(self):
|
||||||
"""Redirect URI validation."""
|
"""Redirect URI validation."""
|
||||||
allowed_redirect_urls = self.provider.redirect_uris.split()
|
allowed_redirect_urls = self.provider.redirect_uris.split()
|
||||||
if not self.redirect_uri:
|
# We don't want to actually lowercase the final URL we redirect to,
|
||||||
|
# we only lowercase it for comparison
|
||||||
|
redirect_uri = self.redirect_uri.lower()
|
||||||
|
if not redirect_uri:
|
||||||
LOGGER.warning("Missing redirect uri.")
|
LOGGER.warning("Missing redirect uri.")
|
||||||
raise RedirectUriError("", allowed_redirect_urls)
|
raise RedirectUriError("", allowed_redirect_urls)
|
||||||
|
|
||||||
@ -169,7 +173,7 @@ class OAuthAuthorizationParams:
|
|||||||
allow=self.redirect_uri,
|
allow=self.redirect_uri,
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
if self.redirect_uri not in [x.lower() for x in allowed_redirect_urls]:
|
if redirect_uri not in [x.lower() for x in allowed_redirect_urls]:
|
||||||
LOGGER.warning(
|
LOGGER.warning(
|
||||||
"Invalid redirect uri",
|
"Invalid redirect uri",
|
||||||
redirect_uri=self.redirect_uri,
|
redirect_uri=self.redirect_uri,
|
||||||
@@ -299,13 +303,23 @@ class OAuthFulfillmentStage(StageView):
             code = self.params.create_code(self.request)
             code.save(force_insert=True)

-        if self.params.grant_type == GrantTypes.AUTHORIZATION_CODE:
+        query_dict = self.request.POST if self.request.method == "POST" else self.request.GET
+        response_mode = ResponseMode.QUERY
+        # Get response mode from url param, otherwise decide based on grant type
+        if "response_mode" in query_dict:
+            response_mode = query_dict["response_mode"]
+        elif self.params.grant_type == GrantTypes.AUTHORIZATION_CODE:
+            response_mode = ResponseMode.QUERY
+        elif self.params.grant_type in [GrantTypes.IMPLICIT, GrantTypes.HYBRID]:
+            response_mode = ResponseMode.FRAGMENT
+
+        if response_mode == ResponseMode.QUERY:
             query_params["code"] = code.code
             query_params["state"] = [str(self.params.state) if self.params.state else ""]

             uri = uri._replace(query=urlencode(query_params, doseq=True))
             return urlunsplit(uri)
-        if self.params.grant_type in [GrantTypes.IMPLICIT, GrantTypes.HYBRID]:
+        if response_mode == ResponseMode.FRAGMENT:
             query_fragment = self.create_implicit_response(code)

             uri = uri._replace(
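With this change a client can force the delivery mechanism explicitly by adding response_mode to the authorization request, instead of relying on the default derived from the grant type. An illustrative request payload; the parameter values mirror the tests above and are not prescriptive:

# Illustrative authorization request forcing fragment delivery; values mirror the tests above.
params = {
    "response_type": "code",
    "client_id": "test",
    "redirect_uri": "http://local.invalid/Foo",
    "response_mode": "fragment",  # overrides the default chosen from the grant type
}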
@@ -10,6 +10,7 @@ from authentik.core.models import Application
 from authentik.providers.oauth2.constants import (
     ACR_AUTHENTIK_DEFAULT,
     GRANT_TYPE_AUTHORIZATION_CODE,
+    GRANT_TYPE_CLIENT_CREDENTIALS,
     GRANT_TYPE_REFRESH_TOKEN,
     SCOPE_OPENID,
 )
@@ -78,6 +79,7 @@ class ProviderInfoView(View):
                 GRANT_TYPE_AUTHORIZATION_CODE,
                 GRANT_TYPE_REFRESH_TOKEN,
                 GrantTypes.IMPLICIT,
+                GRANT_TYPE_CLIENT_CREDENTIALS,
             ],
             "id_token_signing_alg_values_supported": [supported_alg],
             # See: http://openid.net/specs/openid-connect-core-1_0.html#SubjectIDTypes
@@ -8,10 +8,13 @@ from django.http import HttpRequest, HttpResponse
 from django.views import View
 from structlog.stdlib import get_logger

+from authentik.core.models import USER_ATTRIBUTE_SA, Application, Token, TokenIntents, User
 from authentik.events.models import Event, EventAction
 from authentik.lib.utils.time import timedelta_from_string
+from authentik.policies.engine import PolicyEngine
 from authentik.providers.oauth2.constants import (
     GRANT_TYPE_AUTHORIZATION_CODE,
+    GRANT_TYPE_CLIENT_CREDENTIALS,
     GRANT_TYPE_REFRESH_TOKEN,
 )
 from authentik.providers.oauth2.errors import TokenError, UserAuthError
|
|||||||
|
|
||||||
authorization_code: Optional[AuthorizationCode] = None
|
authorization_code: Optional[AuthorizationCode] = None
|
||||||
refresh_token: Optional[RefreshToken] = None
|
refresh_token: Optional[RefreshToken] = None
|
||||||
|
user: Optional[User] = None
|
||||||
|
|
||||||
code_verifier: Optional[str] = None
|
code_verifier: Optional[str] = None
|
||||||
|
|
||||||
@ -75,50 +79,23 @@ class TokenParams:
|
|||||||
)
|
)
|
||||||
|
|
||||||
def __post_init__(self, raw_code: str, raw_token: str, request: HttpRequest):
|
def __post_init__(self, raw_code: str, raw_token: str, request: HttpRequest):
|
||||||
if self.provider.client_type == ClientTypes.CONFIDENTIAL:
|
if self.grant_type in [GRANT_TYPE_AUTHORIZATION_CODE, GRANT_TYPE_REFRESH_TOKEN]:
|
||||||
if self.provider.client_secret != self.client_secret:
|
if (
|
||||||
|
self.provider.client_type == ClientTypes.CONFIDENTIAL
|
||||||
|
and self.provider.client_secret != self.client_secret
|
||||||
|
):
|
||||||
LOGGER.warning(
|
LOGGER.warning(
|
||||||
"Invalid client secret: client does not have secret",
|
"Invalid client secret",
|
||||||
client_id=self.provider.client_id,
|
client_id=self.provider.client_id,
|
||||||
secret=self.provider.client_secret,
|
|
||||||
)
|
)
|
||||||
raise TokenError("invalid_client")
|
raise TokenError("invalid_client")
|
||||||
|
|
||||||
if self.grant_type == GRANT_TYPE_AUTHORIZATION_CODE:
|
if self.grant_type == GRANT_TYPE_AUTHORIZATION_CODE:
|
||||||
self.__post_init_code(raw_code)
|
self.__post_init_code(raw_code)
|
||||||
elif self.grant_type == GRANT_TYPE_REFRESH_TOKEN:
|
elif self.grant_type == GRANT_TYPE_REFRESH_TOKEN:
|
||||||
if not raw_token:
|
self.__post_init_refresh(raw_token, request)
|
||||||
LOGGER.warning("Missing refresh token")
|
elif self.grant_type == GRANT_TYPE_CLIENT_CREDENTIALS:
|
||||||
raise TokenError("invalid_grant")
|
self.__post_init_client_credentials(request)
|
||||||
|
|
||||||
try:
|
|
||||||
self.refresh_token = RefreshToken.objects.get(
|
|
||||||
refresh_token=raw_token, provider=self.provider
|
|
||||||
)
|
|
||||||
if self.refresh_token.is_expired:
|
|
||||||
LOGGER.warning(
|
|
||||||
"Refresh token is expired",
|
|
||||||
token=raw_token,
|
|
||||||
)
|
|
||||||
raise TokenError("invalid_grant")
|
|
||||||
# https://tools.ietf.org/html/rfc6749#section-6
|
|
||||||
# Fallback to original token's scopes when none are given
|
|
||||||
if not self.scope:
|
|
||||||
self.scope = self.refresh_token.scope
|
|
||||||
except RefreshToken.DoesNotExist:
|
|
||||||
LOGGER.warning(
|
|
||||||
"Refresh token does not exist",
|
|
||||||
token=raw_token,
|
|
||||||
)
|
|
||||||
raise TokenError("invalid_grant")
|
|
||||||
if self.refresh_token.revoked:
|
|
||||||
LOGGER.warning("Refresh token is revoked", token=raw_token)
|
|
||||||
Event.new(
|
|
||||||
action=EventAction.SUSPICIOUS_REQUEST,
|
|
||||||
message="Revoked refresh token was used",
|
|
||||||
token=raw_token,
|
|
||||||
).from_http(request)
|
|
||||||
raise TokenError("invalid_grant")
|
|
||||||
else:
|
else:
|
||||||
LOGGER.warning("Invalid grant type", grant_type=self.grant_type)
|
LOGGER.warning("Invalid grant type", grant_type=self.grant_type)
|
||||||
raise TokenError("unsupported_grant_type")
|
raise TokenError("unsupported_grant_type")
|
||||||
@@ -175,6 +152,77 @@ class TokenParams:
             LOGGER.warning("Code challenge not matching")
             raise TokenError("invalid_grant")

+    def __post_init_refresh(self, raw_token: str, request: HttpRequest):
+        if not raw_token:
+            LOGGER.warning("Missing refresh token")
+            raise TokenError("invalid_grant")
+
+        try:
+            self.refresh_token = RefreshToken.objects.get(
+                refresh_token=raw_token, provider=self.provider
+            )
+            if self.refresh_token.is_expired:
+                LOGGER.warning(
+                    "Refresh token is expired",
+                    token=raw_token,
+                )
+                raise TokenError("invalid_grant")
+            # https://tools.ietf.org/html/rfc6749#section-6
+            # Fallback to original token's scopes when none are given
+            if not self.scope:
+                self.scope = self.refresh_token.scope
+        except RefreshToken.DoesNotExist:
+            LOGGER.warning(
+                "Refresh token does not exist",
+                token=raw_token,
+            )
+            raise TokenError("invalid_grant")
+        if self.refresh_token.revoked:
+            LOGGER.warning("Refresh token is revoked", token=raw_token)
+            Event.new(
+                action=EventAction.SUSPICIOUS_REQUEST,
+                message="Revoked refresh token was used",
+                token=raw_token,
+            ).from_http(request)
+            raise TokenError("invalid_grant")
+
+    def __post_init_client_credentials(self, request: HttpRequest):
+        # Authenticate user based on credentials
+        username = request.POST.get("username")
+        password = request.POST.get("password")
+        user = User.objects.filter(username=username).first()
+        if not user:
+            raise TokenError("invalid_grant")
+        token: Token = Token.filter_not_expired(
+            key=password, intent=TokenIntents.INTENT_APP_PASSWORD
+        ).first()
+        if not token or token.user.uid != user.uid:
+            raise TokenError("invalid_grant")
+        self.user = user
+        if not self.user.attributes.get(USER_ATTRIBUTE_SA, False):
+            # Non-service accounts are not allowed
+            LOGGER.info("Non-service-account tried to use client credentials", user=self.user)
+            raise TokenError("invalid_grant")
+
+        Event.new(
+            action=EventAction.LOGIN,
+            PLAN_CONTEXT_METHOD="token",
+            PLAN_CONTEXT_METHOD_ARGS={
+                "identifier": token.identifier,
+            },
+        ).from_http(request, user=user)
+
+        # Authorize user access
+        app = Application.objects.filter(provider=self.provider).first()
+        if not app or not app.provider:
+            raise TokenError("invalid_grant")
+        engine = PolicyEngine(app, self.user, request)
+        engine.build()
+        result = engine.result
+        if not result.passing:
+            LOGGER.info("User not authenticated for application", user=self.user, app=app)
+            raise TokenError("invalid_grant")
+
+
 class TokenView(View):
     """Generate tokens for clients"""
@@ -208,11 +256,14 @@ class TokenView(View):
  self.params = TokenParams.parse(request, self.provider, client_id, client_secret)

  if self.params.grant_type == GRANT_TYPE_AUTHORIZATION_CODE:
- LOGGER.info("Converting authorization code to refresh token")
+ LOGGER.debug("Converting authorization code to refresh token")
  return TokenResponse(self.create_code_response())
  if self.params.grant_type == GRANT_TYPE_REFRESH_TOKEN:
- LOGGER.info("Refreshing refresh token")
+ LOGGER.debug("Refreshing refresh token")
  return TokenResponse(self.create_refresh_response())
+ if self.params.grant_type == GRANT_TYPE_CLIENT_CREDENTIALS:
+ LOGGER.debug("Client credentials grant")
+ return TokenResponse(self.create_client_credentials_response())
  raise ValueError(f"Invalid grant_type: {self.params.grant_type}")
  except TokenError as error:
  return TokenResponse(error.create_dict(), status=400)
@@ -292,3 +343,30 @@ class TokenView(View):
  ),
  "id_token": self.params.provider.encode(refresh_token.id_token.to_dict()),
  }
+
+ def create_client_credentials_response(self) -> dict[str, Any]:
+ """See https://datatracker.ietf.org/doc/html/rfc6749#section-4.4"""
+ provider: OAuth2Provider = self.params.provider
+
+ refresh_token: RefreshToken = provider.create_refresh_token(
+ user=self.params.user,
+ scope=self.params.scope,
+ request=self.request,
+ )
+ refresh_token.id_token = refresh_token.create_id_token(
+ user=self.params.user,
+ request=self.request,
+ )
+ refresh_token.id_token.at_hash = refresh_token.at_hash
+
+ # Store the refresh_token.
+ refresh_token.save()
+
+ return {
+ "access_token": refresh_token.access_token,
+ "token_type": "bearer",
+ "expires_in": int(
+ timedelta_from_string(refresh_token.provider.token_validity).total_seconds()
+ ),
+ "id_token": self.params.provider.encode(refresh_token.id_token.to_dict()),
+ }
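Side note (not part of the diff): based on the fields the new client_credentials path reads above (`username` and `password` from the POST body, an app-password Token, and a service-account user), a service account could request a token roughly as sketched below. The token endpoint path and the client_id value are assumptions; adjust them to the actual provider configuration.

```python
import requests

# Hypothetical values; the endpoint path and client_id are assumptions, not taken from the diff.
response = requests.post(
    "https://authentik.example.com/application/o/token/",
    data={
        "grant_type": "client_credentials",
        "client_id": "my-provider-client-id",
        "username": "my-service-account",      # matched against User.username
        "password": "app-password-token-key",  # matched against an INTENT_APP_PASSWORD token
    },
    timeout=5,
)
response.raise_for_status()
print(response.json()["access_token"])
```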
@@ -12,4 +12,8 @@ class AuthentikProviderProxyConfig(AppConfig):
  verbose_name = "authentik Providers.Proxy"

  def ready(self) -> None:
+ from authentik.providers.proxy.tasks import proxy_set_defaults
+
  import_module("authentik.providers.proxy.managed")
+
+ proxy_set_defaults.delay()
@@ -23,17 +23,17 @@ class ProxyDockerController(DockerController):
  proxy_provider: ProxyProvider
  external_host_name = urlparse(proxy_provider.external_host)
  hosts.append(f"`{external_host_name.netloc}`")
- traefik_name = f"ak-outpost-{self.outpost.pk.hex}"
+ traefik_name = self.name
  labels = super()._get_labels()
  labels["traefik.enable"] = "true"
  labels[
  f"traefik.http.routers.{traefik_name}-router.rule"
- ] = f"Host({','.join(hosts)}) && PathPrefix('/akprox')"
+ ] = f"Host({','.join(hosts)}) && PathPrefix(`/outpost.goauthentik.io`)"
  labels[f"traefik.http.routers.{traefik_name}-router.tls"] = "true"
  labels[f"traefik.http.routers.{traefik_name}-router.service"] = f"{traefik_name}-service"
  labels[
  f"traefik.http.services.{traefik_name}-service.loadbalancer.healthcheck.path"
- ] = "/akprox/ping"
+ ] = "/outpost.goauthentik.io/ping"
  labels[
  f"traefik.http.services.{traefik_name}-service.loadbalancer.healthcheck.port"
  ] = "9300"
@@ -92,6 +92,8 @@ class IngressReconciler(KubernetesObjectReconciler[V1Ingress]):
  # Buffer sizes for large headers with JWTs
  "nginx.ingress.kubernetes.io/proxy-buffers-number": "4",
  "nginx.ingress.kubernetes.io/proxy-buffer-size": "16k",
+ # Enable TLS in traefik
+ "traefik.ingress.kubernetes.io/router.tls": "true",
  }
  annotations.update(self.controller.outpost.config.kubernetes_ingress_annotations)
  return annotations
@@ -126,8 +128,8 @@ class IngressReconciler(KubernetesObjectReconciler[V1Ingress]):
  port=V1ServiceBackendPort(name="http"),
  ),
  ),
- path="/akprox",
- path_type="ImplementationSpecific",
+ path="/outpost.goauthentik.io",
+ path_type="Prefix",
  )
  ]
  ),
@@ -145,7 +147,7 @@ class IngressReconciler(KubernetesObjectReconciler[V1Ingress]):
  ),
  ),
  path="/",
- path_type="ImplementationSpecific",
+ path_type="Prefix",
  )
  ]
  ),
@@ -119,7 +119,10 @@ class TraefikMiddlewareReconciler(KubernetesObjectReconciler[TraefikMiddleware])
  ),
  spec=TraefikMiddlewareSpec(
  forwardAuth=TraefikMiddlewareSpecForwardAuth(
- address=f"http://{self.name}.{self.namespace}:9000/akprox/auth/traefik",
+ address=(
+ f"http://{self.name}.{self.namespace}:9000/"
+ "outpost.goauthentik.io/auth/traefik"
+ ),
  authResponseHeaders=[
  "X-authentik-username",
  "X-authentik-groups",
@@ -27,7 +27,7 @@ def get_cookie_secret():

  def _get_callback_url(uri: str) -> str:
- return urljoin(uri, "/akprox/callback")
+ return urljoin(uri, "outpost.goauthentik.io/callback")


  class ProxyMode(models.TextChoices):
authentik/providers/proxy/tasks.py (new file, 11 lines)
@@ -0,0 +1,11 @@
+ """proxy provider tasks"""
+ from authentik.providers.proxy.models import ProxyProvider
+ from authentik.root.celery import CELERY_APP
+
+
+ @CELERY_APP.task()
+ def proxy_set_defaults():
+ """Ensure correct defaults are set for all providers"""
+ for provider in ProxyProvider.objects.all():
+ provider.set_oauth_defaults()
+ provider.save()
@@ -15,6 +15,7 @@ from authentik.providers.saml.processors.request_parser import AuthNRequestParse
  from authentik.sources.saml.exceptions import MismatchedRequestID
  from authentik.sources.saml.models import SAMLSource
  from authentik.sources.saml.processors.constants import (
+ SAML_BINDING_REDIRECT,
  SAML_NAME_ID_FORMAT_EMAIL,
  SAML_NAME_ID_FORMAT_UNSPECIFIED,
  )
@@ -98,6 +99,9 @@ class TestAuthNRequest(TestCase):

  # First create an AuthNRequest
  request_proc = RequestProcessor(self.source, http_request, "test_state")
+ auth_n = request_proc.get_auth_n()
+ self.assertEqual(auth_n.attrib["ProtocolBinding"], SAML_BINDING_REDIRECT)
+
  request = request_proc.build_auth_n()
  # Now we check the ID and signature
  parsed_request = AuthNRequestParser(self.provider).parse(
@@ -1,14 +1,4 @@
- """
- Django settings for authentik project.
-
- Generated by 'django-admin startproject' using Django 2.1.3.
-
- For more information on this file, see
- https://docs.djangoproject.com/en/2.1/topics/settings/
-
- For the full list of settings and their values, see
- https://docs.djangoproject.com/en/2.1/ref/settings/
- """
+ """root settings for authentik"""

  import importlib
  import logging
@@ -16,26 +6,23 @@ import os
  import sys
  from hashlib import sha512
  from json import dumps
- from tempfile import gettempdir
  from time import time
- from urllib.parse import quote
+ from urllib.parse import quote_plus

  import structlog
  from celery.schedules import crontab
  from sentry_sdk import init as sentry_init
  from sentry_sdk.api import set_tag
- from sentry_sdk.integrations.boto3 import Boto3Integration
  from sentry_sdk.integrations.celery import CeleryIntegration
  from sentry_sdk.integrations.django import DjangoIntegration
  from sentry_sdk.integrations.redis import RedisIntegration
  from sentry_sdk.integrations.threading import ThreadingIntegration

- from authentik import ENV_GIT_HASH_KEY, __version__, get_build_hash, get_full_version
+ from authentik import ENV_GIT_HASH_KEY, __version__, get_build_hash
  from authentik.core.middleware import structlog_add_request_id
  from authentik.lib.config import CONFIG
  from authentik.lib.logging import add_process_id
  from authentik.lib.sentry import before_send
- from authentik.lib.utils.http import get_http_session
  from authentik.lib.utils.reflection import get_env
  from authentik.stages.password import BACKEND_APP_PASSWORD, BACKEND_INBUILT, BACKEND_LDAP
|
|||||||
"guardian",
|
"guardian",
|
||||||
"django_prometheus",
|
"django_prometheus",
|
||||||
"channels",
|
"channels",
|
||||||
"dbbackup",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
GUARDIAN_MONKEY_PATCH = False
|
GUARDIAN_MONKEY_PATCH = False
|
||||||
@ -220,7 +206,7 @@ if CONFIG.y_bool("redis.tls", False):
|
|||||||
REDIS_CELERY_TLS_REQUIREMENTS = f"?ssl_cert_reqs={CONFIG.y('redis.tls_reqs')}"
|
REDIS_CELERY_TLS_REQUIREMENTS = f"?ssl_cert_reqs={CONFIG.y('redis.tls_reqs')}"
|
||||||
_redis_url = (
|
_redis_url = (
|
||||||
f"{REDIS_PROTOCOL_PREFIX}:"
|
f"{REDIS_PROTOCOL_PREFIX}:"
|
||||||
f"{quote(CONFIG.y('redis.password'))}@{quote(CONFIG.y('redis.host'))}:"
|
f"{quote_plus(CONFIG.y('redis.password'))}@{quote_plus(CONFIG.y('redis.host'))}:"
|
||||||
f"{int(CONFIG.y('redis.port'))}"
|
f"{int(CONFIG.y('redis.port'))}"
|
||||||
)
|
)
|
||||||
|
|
||||||
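Side note (not part of the diff): quote() leaves "/" unescaped by default, so a Redis password or host containing "/" would corrupt the composed redis:// URL, while quote_plus() escapes it. A quick illustration:

```python
from urllib.parse import quote, quote_plus

quote("p@ss/word")       # 'p%40ss/word'  -> the "/" survives and breaks the URL
quote_plus("p@ss/word")  # 'p%40ss%2Fword'
```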
@@ -347,6 +333,7 @@ LOCALE_PATHS = ["./locale"]
  # Celery settings
  # Add a 10 minute timeout to all Celery tasks.
  CELERY_TASK_SOFT_TIME_LIMIT = 600
+ CELERY_WORKER_MAX_TASKS_PER_CHILD = 50
  CELERY_BEAT_SCHEDULE = {
  "clean_expired_models": {
  "task": "authentik.core.tasks.clean_expired_models",
@@ -368,32 +355,6 @@ CELERY_RESULT_BACKEND = (
  f"{_redis_url}/{CONFIG.y('redis.message_queue_db')}{REDIS_CELERY_TLS_REQUIREMENTS}"
  )

- # Database backup
- DBBACKUP_STORAGE = "django.core.files.storage.FileSystemStorage"
- DBBACKUP_STORAGE_OPTIONS = {"location": "./backups" if DEBUG else "/backups"}
- DBBACKUP_FILENAME_TEMPLATE = f"authentik-backup-{__version__}-{{datetime}}.sql"
- DBBACKUP_CONNECTOR_MAPPING = {
- "django_prometheus.db.backends.postgresql": "dbbackup.db.postgresql.PgDumpConnector",
- }
- DBBACKUP_TMP_DIR = gettempdir() if DEBUG else "/tmp" # nosec
- DBBACKUP_CLEANUP_KEEP = 10
- if CONFIG.y("postgresql.s3_backup.bucket", "") != "":
- DBBACKUP_STORAGE = "storages.backends.s3boto3.S3Boto3Storage"
- DBBACKUP_STORAGE_OPTIONS = {
- "access_key": CONFIG.y("postgresql.s3_backup.access_key"),
- "secret_key": CONFIG.y("postgresql.s3_backup.secret_key"),
- "bucket_name": CONFIG.y("postgresql.s3_backup.bucket"),
- "region_name": CONFIG.y("postgresql.s3_backup.region", "eu-central-1"),
- "default_acl": "private",
- "endpoint_url": CONFIG.y("postgresql.s3_backup.host"),
- "location": CONFIG.y("postgresql.s3_backup.location", ""),
- "verify": not CONFIG.y_bool("postgresql.s3_backup.insecure_skip_verify", False),
- }
- j_print(
- "Database backup to S3 is configured",
- host=CONFIG.y("postgresql.s3_backup.host"),
- )
-
  # Sentry integration
  SENTRY_DSN = "https://a579bb09306d4f8b8d8847c052d3a1d3@sentry.beryju.org/8"
|
|||||||
DjangoIntegration(transaction_style="function_name"),
|
DjangoIntegration(transaction_style="function_name"),
|
||||||
CeleryIntegration(),
|
CeleryIntegration(),
|
||||||
RedisIntegration(),
|
RedisIntegration(),
|
||||||
Boto3Integration(),
|
|
||||||
ThreadingIntegration(propagate_hub=True),
|
ThreadingIntegration(propagate_hub=True),
|
||||||
],
|
],
|
||||||
before_send=before_send,
|
before_send=before_send,
|
||||||
@ -424,29 +384,6 @@ if _ERROR_REPORTING:
|
|||||||
"Error reporting is enabled",
|
"Error reporting is enabled",
|
||||||
env=CONFIG.y("error_reporting.environment", "customer"),
|
env=CONFIG.y("error_reporting.environment", "customer"),
|
||||||
)
|
)
|
||||||
if not CONFIG.y_bool("disable_startup_analytics", False):
|
|
||||||
should_send = env not in ["dev", "ci"]
|
|
||||||
if should_send:
|
|
||||||
try:
|
|
||||||
get_http_session().post(
|
|
||||||
"https://goauthentik.io/api/event",
|
|
||||||
json={
|
|
||||||
"domain": "authentik",
|
|
||||||
"name": "pageview",
|
|
||||||
"referrer": get_full_version(),
|
|
||||||
"url": (
|
|
||||||
f"http://localhost/{env}?utm_source={get_full_version()}&utm_medium={env}"
|
|
||||||
),
|
|
||||||
},
|
|
||||||
headers={
|
|
||||||
"User-Agent": sha512(str(SECRET_KEY).encode("ascii")).hexdigest()[:16],
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
},
|
|
||||||
timeout=5,
|
|
||||||
)
|
|
||||||
# pylint: disable=bare-except
|
|
||||||
except: # nosec
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Static files (CSS, JavaScript, Images)
|
# Static files (CSS, JavaScript, Images)
|
||||||
# https://docs.djangoproject.com/en/2.1/howto/static-files/
|
# https://docs.djangoproject.com/en/2.1/howto/static-files/
|
||||||
@@ -528,12 +465,9 @@ _LOGGING_HANDLER_MAP = {
  "urllib3": "WARNING",
  "websockets": "WARNING",
  "daphne": "WARNING",
- "dbbackup": "ERROR",
  "kubernetes": "INFO",
  "asyncio": "WARNING",
  "aioredis": "WARNING",
- "s3transfer": "WARNING",
- "botocore": "WARNING",
  }
  for handler_name, level in _LOGGING_HANDLER_MAP.items():
  # pyright: reportGeneralTypeIssues=false
@@ -35,21 +35,21 @@ class LDAPProviderManager(ObjectManager):
  "goauthentik.io/sources/ldap/ms-userprincipalname",
  name="authentik default Active Directory Mapping: userPrincipalName",
  object_field="attributes.upn",
- expression="return ldap.get('userPrincipalName')",
+ expression="return list_flatten(ldap.get('userPrincipalName'))",
  ),
  EnsureExists(
  LDAPPropertyMapping,
  "goauthentik.io/sources/ldap/ms-givenName",
  name="authentik default Active Directory Mapping: givenName",
  object_field="attributes.givenName",
- expression="return ldap.get('givenName')",
+ expression="return list_flatten(ldap.get('givenName'))",
  ),
  EnsureExists(
  LDAPPropertyMapping,
  "goauthentik.io/sources/ldap/ms-sn",
  name="authentik default Active Directory Mapping: sn",
  object_field="attributes.sn",
- expression="return ldap.get('sn')",
+ expression="return list_flatten(ldap.get('sn'))",
  ),
  # OpenLDAP specific mappings
  EnsureExists(
@@ -1,13 +1,13 @@
  """Sync LDAP Users and groups into authentik"""
  from typing import Any

- from deepmerge import always_merger
  from django.db.models.base import Model
  from django.db.models.query import QuerySet
  from structlog.stdlib import BoundLogger, get_logger

  from authentik.core.exceptions import PropertyMappingExpressionException
  from authentik.events.models import Event, EventAction
+ from authentik.lib.merge import MERGE_LIST_UNIQUE
  from authentik.sources.ldap.auth import LDAP_DISTINGUISHED_NAME
  from authentik.sources.ldap.models import LDAPPropertyMapping, LDAPSource
@@ -123,8 +123,8 @@ class BaseLDAPSynchronizer:
  continue
  setattr(instance, key, value)
  final_atttributes = {}
- always_merger.merge(final_atttributes, instance.attributes)
- always_merger.merge(final_atttributes, data.get("attributes", {}))
+ MERGE_LIST_UNIQUE.merge(final_atttributes, instance.attributes)
+ MERGE_LIST_UNIQUE.merge(final_atttributes, data.get("attributes", {}))
  instance.attributes = final_atttributes
  instance.save()
  return (instance, False)
@@ -37,6 +37,7 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer):
  uniq = self._flatten(attributes[self._source.object_uniqueness_field])
  try:
  defaults = self.build_group_properties(group_dn, **attributes)
+ defaults["parent"] = self._source.sync_parent_group
  self._logger.debug("Creating group with attributes", **defaults)
  if "name" not in defaults:
  raise IntegrityError("Name was not set by propertymappings")
@@ -47,7 +48,6 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer):
  Group,
  {
  f"attributes__{LDAP_UNIQUENESS}": uniq,
- "parent": self._source.sync_parent_group,
  },
  defaults,
  )
@@ -3,6 +3,7 @@ from ldap3.core.exceptions import LDAPException
  from structlog.stdlib import get_logger

  from authentik.events.monitored_tasks import MonitoredTask, TaskResult, TaskResultStatus
+ from authentik.lib.utils.errors import exception_to_string
  from authentik.lib.utils.reflection import class_to_path, path_to_class
  from authentik.root.celery import CELERY_APP
  from authentik.sources.ldap.models import LDAPSource
@@ -52,5 +53,5 @@ def ldap_sync(self: MonitoredTask, source_pk: str, sync_class: str):
  )
  except LDAPException as exc:
  # No explicit event is created here as .set_status with an error will do that
- LOGGER.debug(exc)
+ LOGGER.warning(exception_to_string(exc))
  self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
@@ -5,6 +5,7 @@ from django.db.models import Q
  from django.test import TestCase

  from authentik.core.models import Group, User
+ from authentik.core.tests.utils import create_test_admin_user
  from authentik.events.models import Event, EventAction
  from authentik.lib.generators import generate_key
  from authentik.managed.manager import ObjectManager
@@ -24,7 +25,7 @@ class LDAPSyncTests(TestCase):

  def setUp(self):
  ObjectManager().run()
- self.source = LDAPSource.objects.create(
+ self.source: LDAPSource = LDAPSource.objects.create(
  name="ldap",
  slug="ldap",
  base_dn="dc=goauthentik,dc=io",
@@ -120,6 +121,9 @@ class LDAPSyncTests(TestCase):
  self.source.property_mappings_group.set(
  LDAPPropertyMapping.objects.filter(managed="goauthentik.io/sources/ldap/default-name")
  )
+ _user = create_test_admin_user()
+ parent_group = Group.objects.get(name=_user.username)
+ self.source.sync_parent_group = parent_group
  connection = PropertyMock(return_value=mock_ad_connection(LDAP_PASSWORD))
  with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
  self.source.save()
@@ -127,8 +131,9 @@ class LDAPSyncTests(TestCase):
  group_sync.sync()
  membership_sync = MembershipLDAPSynchronizer(self.source)
  membership_sync.sync()
- group = Group.objects.filter(name="test-group")
- self.assertTrue(group.exists())
+ group: Group = Group.objects.filter(name="test-group").first()
+ self.assertIsNotNone(group)
+ self.assertEqual(group.parent, parent_group)

  def test_sync_groups_openldap(self):
  """Test group sync"""
@@ -17,6 +17,7 @@ AUTHENTIK_SOURCES_OAUTH_TYPES = [
  "authentik.sources.oauth.types.okta",
  "authentik.sources.oauth.types.reddit",
  "authentik.sources.oauth.types.twitter",
+ "authentik.sources.oauth.types.mailcow",
  ]

@@ -44,7 +44,7 @@ class BaseOAuthClient:
  response = self.do_request("get", profile_url, token=token)
  response.raise_for_status()
  except RequestException as exc:
- LOGGER.warning("Unable to fetch user profile", exc=exc)
+ LOGGER.warning("Unable to fetch user profile", exc=exc, body=response.text)
  return None
  else:
  return response.json()
@@ -11,7 +11,7 @@ def update_empty_urls(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):

  for source in OAuthSource.objects.using(db_alias).all():
  changed = False
- if source.access_token_url == "":
+ if source.access_token_url == "": # nosec
  source.access_token_url = None
  changed = True
  if source.authorization_url == "":
@@ -20,7 +20,7 @@ def update_empty_urls(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
  if source.profile_url == "":
  source.profile_url = None
  changed = True
- if source.request_token_url == "":
+ if source.request_token_url == "": # nosec
  source.request_token_url = None
  changed = True
@@ -111,6 +111,16 @@ class GitHubOAuthSource(OAuthSource):
  verbose_name_plural = _("GitHub OAuth Sources")


+ class MailcowOAuthSource(OAuthSource):
+ """Social Login using Mailcow."""
+
+ class Meta:
+
+ abstract = True
+ verbose_name = _("Mailcow OAuth Source")
+ verbose_name_plural = _("Mailcow OAuth Sources")
+
+
  class TwitterOAuthSource(OAuthSource):
  """Social Login using Twitter.com"""

authentik/sources/oauth/tests/test_type_mailcow.py (new file, 38 lines)
@@ -0,0 +1,38 @@
+ """Mailcow Type tests"""
+ from django.test import TestCase
+
+ from authentik.sources.oauth.models import OAuthSource
+ from authentik.sources.oauth.types.mailcow import MailcowOAuth2Callback
+
+ # https://community.mailcow.email/d/13-mailcow-oauth-json-format/2
+ MAILCOW_USER = {
+ "success": True,
+ "username": "email@example.com",
+ "identifier": "email@example.com",
+ "email": "email@example.com",
+ "full_name": "Example User",
+ "displayName": "Example User",
+ "created": "2020-05-15 11:33:08",
+ "modified": "2020-05-15 12:23:31",
+ "active": 1,
+ }
+
+
+ class TestTypeMailcow(TestCase):
+ """OAuth Source tests"""
+
+ def setUp(self):
+ self.source = OAuthSource.objects.create(
+ name="test",
+ slug="test",
+ provider_type="mailcow",
+ authorization_url="",
+ profile_url="",
+ consumer_key="",
+ )
+
+ def test_enroll_context(self):
+ """Test mailcow Enrollment context"""
+ ak_context = MailcowOAuth2Callback().get_user_enroll_context(MAILCOW_USER)
+ self.assertEqual(ak_context["email"], MAILCOW_USER["email"])
+ self.assertEqual(ak_context["name"], MAILCOW_USER["full_name"])
@@ -37,7 +37,7 @@ class AzureADClient(OAuth2Client):
  )
  response.raise_for_status()
  except RequestException as exc:
- LOGGER.warning("Unable to fetch user profile", exc=exc)
+ LOGGER.warning("Unable to fetch user profile", exc=exc, body=response.text)
  return None
  else:
  return response.json()

authentik/sources/oauth/types/mailcow.py (new file, 69 lines)
@@ -0,0 +1,69 @@
+ """Mailcow OAuth Views"""
+ from typing import Any, Optional
+
+ from requests.exceptions import RequestException
+ from structlog.stdlib import get_logger
+
+ from authentik.sources.oauth.clients.oauth2 import OAuth2Client
+ from authentik.sources.oauth.types.manager import MANAGER, SourceType
+ from authentik.sources.oauth.views.callback import OAuthCallback
+ from authentik.sources.oauth.views.redirect import OAuthRedirect
+
+ LOGGER = get_logger()
+
+
+ class MailcowOAuthRedirect(OAuthRedirect):
+ """Mailcow OAuth2 Redirect"""
+
+ def get_additional_parameters(self, source): # pragma: no cover
+ return {
+ "scope": ["profile"],
+ }
+
+
+ class MailcowOAuth2Client(OAuth2Client):
+ """MailcowOAuth2Client, for some reason, mailcow does not like the default headers"""
+
+ def get_profile_info(self, token: dict[str, str]) -> Optional[dict[str, Any]]:
+ "Fetch user profile information."
+ profile_url = self.source.type.profile_url or ""
+ if self.source.type.urls_customizable and self.source.profile_url:
+ profile_url = self.source.profile_url
+ try:
+ response = self.session.request(
+ "get",
+ f"{profile_url}?access_token={token['access_token']}",
+ )
+ response.raise_for_status()
+ except RequestException as exc:
+ LOGGER.warning("Unable to fetch user profile", exc=exc, body=response.text)
+ return None
+ else:
+ return response.json()
+
+
+ class MailcowOAuth2Callback(OAuthCallback):
+ """Mailcow OAuth2 Callback"""
+
+ client_class = MailcowOAuth2Client
+
+ def get_user_enroll_context(
+ self,
+ info: dict[str, Any],
+ ) -> dict[str, Any]:
+ return {
+ "email": info.get("email"),
+ "name": info.get("full_name"),
+ }
+
+
+ @MANAGER.type()
+ class MailcowType(SourceType):
+ """Mailcow Type definition"""
+
+ callback_view = MailcowOAuth2Callback
+ redirect_view = MailcowOAuthRedirect
+ name = "Mailcow"
+ slug = "mailcow"
+
+ urls_customizable = True
@@ -18,6 +18,8 @@ from authentik.sources.saml.processors.constants import (
  RSA_SHA256,
  RSA_SHA384,
  RSA_SHA512,
+ SAML_BINDING_POST,
+ SAML_BINDING_REDIRECT,
  SAML_NAME_ID_FORMAT_EMAIL,
  SAML_NAME_ID_FORMAT_PERSISTENT,
  SAML_NAME_ID_FORMAT_TRANSIENT,
@@ -37,6 +39,15 @@ class SAMLBindingTypes(models.TextChoices):
  POST = "POST", _("POST Binding")
  POST_AUTO = "POST_AUTO", _("POST Binding with auto-confirmation")
+
+ @property
+ def uri(self) -> str:
+ """Convert database field to URI"""
+ return {
+ SAMLBindingTypes.POST: SAML_BINDING_POST,
+ SAMLBindingTypes.POST_AUTO: SAML_BINDING_POST,
+ SAMLBindingTypes.REDIRECT: SAML_BINDING_REDIRECT,
+ }[self]


  class SAMLNameIDPolicy(models.TextChoices):
  """SAML NameID Policies"""
@@ -10,7 +10,7 @@ from lxml.etree import Element # nosec
  from authentik.providers.saml.utils import get_random_id
  from authentik.providers.saml.utils.encoding import deflate_and_base64_encode
  from authentik.providers.saml.utils.time import get_time_string
- from authentik.sources.saml.models import SAMLSource
+ from authentik.sources.saml.models import SAMLBindingTypes, SAMLSource
  from authentik.sources.saml.processors.constants import (
  DIGEST_ALGORITHM_TRANSLATION_MAP,
  NS_MAP,
@@ -62,7 +62,7 @@ class RequestProcessor:
  auth_n_request.attrib["Destination"] = self.source.sso_url
  auth_n_request.attrib["ID"] = self.request_id
  auth_n_request.attrib["IssueInstant"] = self.issue_instant
- auth_n_request.attrib["ProtocolBinding"] = self.source.binding_type
+ auth_n_request.attrib["ProtocolBinding"] = SAMLBindingTypes(self.source.binding_type).uri
  auth_n_request.attrib["Version"] = "2.0"
  # Create issuer object
  auth_n_request.append(self.get_issuer())
@@ -61,7 +61,7 @@ class StaticDeviceViewSet(
  ):
  """Viewset for static authenticator devices"""

- queryset = StaticDevice.objects.all()
+ queryset = StaticDevice.objects.filter(confirmed=True)
  serializer_class = StaticDeviceSerializer
  permission_classes = [OwnerPermissions]
  filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
@@ -55,7 +55,7 @@ class AuthenticatorStaticStageView(ChallengeStageView):
  stage: AuthenticatorStaticStage = self.executor.current_stage

  if SESSION_STATIC_DEVICE not in self.request.session:
- device = StaticDevice(user=user, confirmed=True, name="Static Token")
+ device = StaticDevice(user=user, confirmed=False, name="Static Token")
  tokens = []
  for _ in range(0, stage.token_count):
  tokens.append(StaticToken(device=device, token=StaticToken.random_token()))
@@ -66,6 +66,7 @@ class AuthenticatorStaticStageView(ChallengeStageView):
  def challenge_valid(self, response: ChallengeResponse) -> HttpResponse:
  """Verify OTP Token"""
  device: StaticDevice = self.request.session[SESSION_STATIC_DEVICE]
+ device.confirmed = True
  device.save()
  for token in self.request.session[SESSION_STATIC_TOKENS]:
  token.save()
@@ -54,7 +54,7 @@ class TOTPDeviceViewSet(
  ):
  """Viewset for totp authenticator devices"""

- queryset = TOTPDevice.objects.all()
+ queryset = TOTPDevice.objects.filter(confirmed=True)
  serializer_class = TOTPDeviceSerializer
  permission_classes = [OwnerPermissions]
  filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
@@ -42,6 +42,7 @@ class AuthenticatorTOTPChallengeResponse(ChallengeResponse):
  """Validate totp code"""
  if self.device is not None:
  if not self.device.verify_token(code):
+ self.device.confirmed = False
  raise ValidationError(_("Code does not match"))
  return code

@@ -82,7 +83,7 @@ class AuthenticatorTOTPStageView(ChallengeStageView):

  if SESSION_TOTP_DEVICE not in self.request.session:
  device = TOTPDevice(
- user=user, confirmed=True, digits=stage.digits, name="TOTP Authenticator"
+ user=user, confirmed=False, digits=stage.digits, name="TOTP Authenticator"
  )

  self.request.session[SESSION_TOTP_DEVICE] = device
@@ -91,6 +92,7 @@ class AuthenticatorTOTPStageView(ChallengeStageView):
  def challenge_valid(self, response: ChallengeResponse) -> HttpResponse:
  """TOTP Token is validated by challenge"""
  device: TOTPDevice = self.request.session[SESSION_TOTP_DEVICE]
+ device.confirmed = True
  device.save()
  del self.request.session[SESSION_TOTP_DEVICE]
  return self.executor.stage_ok()
@@ -13,8 +13,8 @@ class AuthenticatorValidateStageSerializer(StageSerializer):

  def validate_not_configured_action(self, value):
  """Ensure that a configuration stage is set when not_configured_action is configure"""
- configuration_stage = self.initial_data.get("configuration_stage")
- if value == NotConfiguredAction.CONFIGURE and configuration_stage is None:
+ configuration_stages = self.initial_data.get("configuration_stages")
+ if value == NotConfiguredAction.CONFIGURE and configuration_stages is None:
  raise ValidationError(
  (
  'When "Not configured action" is set to "Configure", '
@@ -29,7 +29,7 @@ class AuthenticatorValidateStageSerializer(StageSerializer):
  fields = StageSerializer.Meta.fields + [
  "not_configured_action",
  "device_classes",
- "configuration_stage",
+ "configuration_stages",
  ]

@@ -38,5 +38,5 @@ class AuthenticatorValidateStageViewSet(UsedByMixin, ModelViewSet):

  queryset = AuthenticatorValidateStage.objects.all()
  serializer_class = AuthenticatorValidateStageSerializer
- filterset_fields = ["name", "not_configured_action", "configuration_stage"]
+ filterset_fields = ["name", "not_configured_action", "configuration_stages"]
  ordering = ["name"]
@@ -0,0 +1,44 @@
+ # Generated by Django 4.0.1 on 2022-01-05 22:09
+
+ from django.apps.registry import Apps
+ from django.db import migrations, models
+ from django.db.backends.base.schema import BaseDatabaseSchemaEditor
+
+
+ def migrate_configuration_stage(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
+ db_alias = schema_editor.connection.alias
+ AuthenticatorValidateStage = apps.get_model(
+ "authentik_stages_authenticator_validate", "AuthenticatorValidateStage"
+ )
+
+ for stage in AuthenticatorValidateStage.objects.using(db_alias).all():
+ if stage.configuration_stage:
+ stage.configuration_stages.set([stage.configuration_stage])
+ stage.save()
+
+
+ class Migration(migrations.Migration):
+
+ dependencies = [
+ ("authentik_flows", "0021_auto_20211227_2103"),
+ ("authentik_stages_authenticator_validate", "0009_default_stage"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="authenticatorvalidatestage",
+ name="configuration_stages",
+ field=models.ManyToManyField(
+ blank=True,
+ default=None,
+ help_text="Stages used to configure Authenticator when user doesn't have any compatible devices. After this configuration Stage passes, the user is not prompted again.",
+ related_name="+",
+ to="authentik_flows.Stage",
+ ),
+ ),
+ migrations.RunPython(migrate_configuration_stage),
+ migrations.RemoveField(
+ model_name="authenticatorvalidatestage",
+ name="configuration_stage",
+ ),
+ ]
@@ -38,16 +38,14 @@ class AuthenticatorValidateStage(Stage):
  choices=NotConfiguredAction.choices, default=NotConfiguredAction.SKIP
  )

- configuration_stage = models.ForeignKey(
+ configuration_stages = models.ManyToManyField(
  Stage,
- null=True,
  blank=True,
  default=None,
- on_delete=models.SET_DEFAULT,
  related_name="+",
  help_text=_(
  (
- "Stage used to configure Authenticator when user doesn't have any compatible "
+ "Stages used to configure Authenticator when user doesn't have any compatible "
  "devices. After this configuration Stage passes, the user is not prompted again."
  )
  ),
@@ -1,10 +1,12 @@
  """Authenticator Validation"""
  from django.http import HttpRequest, HttpResponse
  from django_otp import devices_for_user
- from rest_framework.fields import CharField, IntegerField, JSONField, ListField
+ from rest_framework.fields import CharField, IntegerField, JSONField, ListField, UUIDField
  from rest_framework.serializers import ValidationError
  from structlog.stdlib import get_logger

+ from authentik.core.api.utils import PassiveSerializer
+ from authentik.core.models import User
  from authentik.events.models import Event, EventAction
  from authentik.events.utils import cleanse_dict, sanitize_dict
  from authentik.flows.challenge import ChallengeResponse, ChallengeTypes, WithUserInfoChallenge
@@ -26,6 +28,18 @@ from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
  from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS

  LOGGER = get_logger()
+ SESSION_STAGES = "goauthentik.io/stages/authenticator_validate/stages"
+ SESSION_SELECTED_STAGE = "goauthentik.io/stages/authenticator_validate/selected_stage"
+ SESSION_DEVICE_CHALLENGES = "goauthentik.io/stages/authenticator_validate/device_challenges"
+
+
+ class SelectableStageSerializer(PassiveSerializer):
+ """Serializer for stages which can be selected by users"""
+
+ pk = UUIDField()
+ name = CharField()
+ verbose_name = CharField()
+ meta_model_name = CharField()


  class AuthenticatorValidationChallenge(WithUserInfoChallenge):
@@ -33,12 +47,14 @@ class AuthenticatorValidationChallenge(WithUserInfoChallenge):

  device_challenges = ListField(child=DeviceChallenge())
  component = CharField(default="ak-stage-authenticator-validate")
+ configuration_stages = ListField(child=SelectableStageSerializer())


  class AuthenticatorValidationChallengeResponse(ChallengeResponse):
  """Challenge used for Code-based and WebAuthn authenticators"""

  selected_challenge = DeviceChallenge(required=False)
+ selected_stage = CharField(required=False)
+
  code = CharField(required=False)
  webauthn = JSONField(required=False)
@@ -46,7 +62,7 @@ class AuthenticatorValidationChallengeResponse(ChallengeResponse):
  component = CharField(default="ak-stage-authenticator-validate")

  def _challenge_allowed(self, classes: list):
- device_challenges: list[dict] = self.stage.request.session.get("device_challenges")
+ device_challenges: list[dict] = self.stage.request.session.get(SESSION_DEVICE_CHALLENGES)
  if not any(x["device_class"] in classes for x in device_challenges):
  raise ValidationError("No compatible device class allowed")
@@ -71,19 +87,32 @@ class AuthenticatorValidationChallengeResponse(ChallengeResponse):
  def validate_selected_challenge(self, challenge: dict) -> dict:
  """Check which challenge the user has selected. Actual logic only used for SMS stage."""
  # First check if the challenge is valid
- for device_challenge in self.stage.request.session.get("device_challenges"):
- if device_challenge.get("device_class", "") != challenge.get("device_class", ""):
- raise ValidationError("invalid challenge selected")
- if device_challenge.get("device_uid", "") != challenge.get("device_uid", ""):
- raise ValidationError("invalid challenge selected")
+ allowed = False
+ for device_challenge in self.stage.request.session.get(SESSION_DEVICE_CHALLENGES):
+ if device_challenge.get("device_class", "") == challenge.get(
+ "device_class", ""
+ ) and device_challenge.get("device_uid", "") == challenge.get("device_uid", ""):
+ allowed = True
+ if not allowed:
+ raise ValidationError("invalid challenge selected")
+
  if challenge.get("device_class", "") != "sms":
  return challenge
  devices = SMSDevice.objects.filter(pk=int(challenge.get("device_uid", "0")))
  if not devices.exists():
- raise ValidationError("device does not exist")
+ raise ValidationError("invalid challenge selected")
  select_challenge(self.stage.request, devices.first())
  return challenge
+
+ def validate_selected_stage(self, stage_pk: str) -> str:
+ """Check that the selected stage is valid"""
+ stages = self.stage.request.session.get(SESSION_STAGES, [])
+ if not any(str(stage.pk) == stage_pk for stage in stages):
+ raise ValidationError("Selected stage is invalid")
+ LOGGER.debug("Setting selected stage to ", stage=stage_pk)
+ self.stage.request.session[SESSION_SELECTED_STAGE] = stage_pk
+ return stage_pk

  def validate(self, attrs: dict):
  # Checking if the given data is from a valid device class is done above
  # Here we only check if the any data was sent at all
@@ -164,7 +193,7 @@ class AuthenticatorValidateStageView(ChallengeStageView):
  else:
  LOGGER.debug("No pending user, continuing")
  return self.executor.stage_ok()
- self.request.session["device_challenges"] = challenges
+ self.request.session[SESSION_DEVICE_CHALLENGES] = challenges

  # No allowed devices
  if len(challenges) < 1:
@ -175,32 +204,74 @@ class AuthenticatorValidateStageView(ChallengeStageView):
|
|||||||
LOGGER.debug("Authenticator not configured, denying")
|
LOGGER.debug("Authenticator not configured, denying")
|
||||||
return self.executor.stage_invalid()
|
return self.executor.stage_invalid()
|
||||||
if stage.not_configured_action == NotConfiguredAction.CONFIGURE:
|
if stage.not_configured_action == NotConfiguredAction.CONFIGURE:
|
||||||
if not stage.configuration_stage:
|
LOGGER.debug("Authenticator not configured, forcing configure")
|
||||||
Event.new(
|
return self.prepare_stages(user)
|
||||||
EventAction.CONFIGURATION_ERROR,
|
|
||||||
message=(
|
|
||||||
"Authenticator validation stage is set to configure user "
|
|
||||||
"but no configuration flow is set."
|
|
||||||
),
|
|
||||||
stage=self,
|
|
||||||
).from_http(self.request).set_user(user).save()
|
|
||||||
return self.executor.stage_invalid()
|
|
||||||
LOGGER.debug("Authenticator not configured, sending user to configure")
|
|
||||||
# Because the foreign key to stage.configuration_stage points to
|
|
||||||
# a base stage class, we need to do another lookup
|
|
||||||
stage = Stage.objects.get_subclass(pk=stage.configuration_stage.pk)
|
|
||||||
# plan.insert inserts at 1 index, so when stage_ok pops 0,
|
|
||||||
# the configuration stage is next
|
|
||||||
self.executor.plan.insert_stage(stage)
|
|
||||||
return self.executor.stage_ok()
|
|
||||||
return super().get(request, *args, **kwargs)
|
return super().get(request, *args, **kwargs)
|
||||||
|
|
||||||
|
def prepare_stages(self, user: User, *args, **kwargs) -> HttpResponse:
|
||||||
|
"""Check how the user can configure themselves. If no stages are set, return an error.
|
||||||
|
If a single stage is set, insert that stage directly. If multiple are selected, include
|
||||||
|
them in the challenge."""
|
||||||
|
stage: AuthenticatorValidateStage = self.executor.current_stage
|
||||||
|
if not stage.configuration_stages.exists():
|
||||||
|
Event.new(
|
||||||
|
EventAction.CONFIGURATION_ERROR,
|
||||||
|
message=(
|
||||||
|
"Authenticator validation stage is set to configure user "
|
||||||
|
"but no configuration flow is set."
|
||||||
|
),
|
||||||
|
stage=self,
|
||||||
|
).from_http(self.request).set_user(user).save()
|
||||||
|
return self.executor.stage_invalid()
|
||||||
|
if stage.configuration_stages.count() == 1:
|
||||||
|
next_stage = Stage.objects.get_subclass(pk=stage.configuration_stages.first().pk)
|
||||||
|
LOGGER.debug("Single stage configured, auto-selecting", stage=next_stage)
|
||||||
|
self.request.session[SESSION_SELECTED_STAGE] = next_stage
|
||||||
|
# Because that normal insetion only happens on post, we directly inject it here and
|
||||||
|
# return it
|
||||||
|
self.executor.plan.insert_stage(next_stage)
|
||||||
|
return self.executor.stage_ok()
|
||||||
|
stages = Stage.objects.filter(pk__in=stage.configuration_stages.all()).select_subclasses()
|
||||||
|
self.request.session[SESSION_STAGES] = stages
|
||||||
|
return super().get(self.request, *args, **kwargs)
|
||||||
|
|
||||||
|
def post(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
|
||||||
|
res = super().post(request, *args, **kwargs)
|
||||||
|
if (
|
||||||
|
SESSION_SELECTED_STAGE in self.request.session
|
||||||
|
and self.executor.current_stage.not_configured_action == NotConfiguredAction.CONFIGURE
|
||||||
|
):
|
||||||
|
LOGGER.debug("Got selected stage in session, running that")
|
||||||
|
stage_pk = self.request.session.get(SESSION_SELECTED_STAGE)
|
||||||
|
# Because the foreign key to stage.configuration_stage points to
|
||||||
|
# a base stage class, we need to do another lookup
|
||||||
|
stage = Stage.objects.get_subclass(pk=stage_pk)
|
||||||
|
# plan.insert inserts at 1 index, so when stage_ok pops 0,
|
||||||
|
# the configuration stage is next
|
||||||
|
self.executor.plan.insert_stage(stage)
|
||||||
|
return self.executor.stage_ok()
|
||||||
|
return res
|
||||||
|
|
||||||
def get_challenge(self) -> AuthenticatorValidationChallenge:
|
def get_challenge(self) -> AuthenticatorValidationChallenge:
|
||||||
challenges = self.request.session["device_challenges"]
|
challenges = self.request.session.get(SESSION_DEVICE_CHALLENGES, [])
|
||||||
|
stages = self.request.session.get(SESSION_STAGES, [])
|
||||||
|
stage_challenges = []
|
||||||
|
for stage in stages:
|
||||||
|
serializer = SelectableStageSerializer(
|
||||||
|
data={
|
||||||
|
"pk": stage.pk,
|
||||||
|
"name": stage.name,
|
||||||
|
"verbose_name": str(stage._meta.verbose_name),
|
||||||
|
"meta_model_name": f"{stage._meta.app_label}.{stage._meta.model_name}",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
serializer.is_valid()
|
||||||
|
stage_challenges.append(serializer.data)
|
||||||
return AuthenticatorValidationChallenge(
|
return AuthenticatorValidationChallenge(
|
||||||
data={
|
data={
|
||||||
"type": ChallengeTypes.NATIVE.value,
|
"type": ChallengeTypes.NATIVE.value,
|
||||||
"device_challenges": challenges,
|
"device_challenges": challenges,
|
||||||
|
"configuration_stages": stage_challenges,
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
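Note on the plan insertion used above: the comments state that the configuration stage is inserted at index 1 so that it runs once stage_ok() pops index 0. The snippet below is a minimal sketch of that ordering, not authentik's actual FlowPlan or executor; the Plan class and stage names are illustrative only.

# Minimal sketch (illustrative, not authentik's FlowPlan): index 0 is the
# currently-running validation stage, which stage_ok() pops, so whatever is
# inserted at index 1 runs next.
from dataclasses import dataclass, field


@dataclass
class Plan:
    stages: list[str] = field(default_factory=list)

    def insert_stage(self, stage: str) -> None:
        # insert right after the currently-running stage (index 0)
        self.stages.insert(1, stage)

    def stage_ok(self) -> str:
        # the executor pops the current stage and continues with the next one
        self.stages.pop(0)
        return self.stages[0]


plan = Plan(stages=["authenticator-validate", "user-login"])
plan.insert_stage("authenticator-totp-setup")
assert plan.stage_ok() == "authenticator-totp-setup"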
@@ -1,6 +1,7 @@
 """Test validator stage"""
 from unittest.mock import MagicMock, patch

+from django.contrib.sessions.middleware import SessionMiddleware
 from django.test.client import RequestFactory
 from django.urls.base import reverse
 from django_otp.plugins.otp_totp.models import TOTPDevice
@@ -9,9 +10,11 @@ from webauthn.helpers import bytes_to_base64url

 from authentik.core.tests.utils import create_test_admin_user
 from authentik.flows.models import Flow, FlowStageBinding, NotConfiguredAction
+from authentik.flows.stage import StageView
 from authentik.flows.tests import FlowTestCase
+from authentik.flows.views.executor import FlowExecutorView
 from authentik.lib.generators import generate_id, generate_key
-from authentik.lib.tests.utils import get_request
+from authentik.lib.tests.utils import dummy_get_response, get_request
 from authentik.stages.authenticator_duo.models import AuthenticatorDuoStage, DuoDevice
 from authentik.stages.authenticator_validate.api import AuthenticatorValidateStageSerializer
 from authentik.stages.authenticator_validate.challenge import (
@@ -21,6 +24,10 @@ from authentik.stages.authenticator_validate.challenge import (
     validate_challenge_webauthn,
 )
 from authentik.stages.authenticator_validate.models import AuthenticatorValidateStage
+from authentik.stages.authenticator_validate.stage import (
+    SESSION_DEVICE_CHALLENGES,
+    AuthenticatorValidationChallengeResponse,
+)
 from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
 from authentik.stages.identification.models import IdentificationStage, UserFields

@@ -43,8 +50,8 @@ class AuthenticatorValidateStageTests(FlowTestCase):
         stage = AuthenticatorValidateStage.objects.create(
             name="foo",
             not_configured_action=NotConfiguredAction.CONFIGURE,
-            configuration_stage=conf_stage,
         )
+        stage.configuration_stages.set([conf_stage])
         flow = Flow.objects.create(name="test", slug="test", title="test")
         FlowStageBinding.objects.create(target=flow, stage=conf_stage, order=0)
         FlowStageBinding.objects.create(target=flow, stage=stage, order=1)
@@ -159,3 +166,39 @@ class AuthenticatorValidateStageTests(FlowTestCase):
         ):
             with self.assertRaises(ValidationError):
                 validate_challenge_duo(duo_device.pk, request, self.user)
+
+    def test_validate_selected_challenge(self):
+        """Test validate_selected_challenge"""
+        # Prepare request with session
+        request = self.request_factory.get("/")
+
+        middleware = SessionMiddleware(dummy_get_response)
+        middleware.process_request(request)
+        request.session[SESSION_DEVICE_CHALLENGES] = [
+            {
+                "device_class": "static",
+                "device_uid": "1",
+            },
+            {
+                "device_class": "totp",
+                "device_uid": "2",
+            },
+        ]
+        request.session.save()
+
+        res = AuthenticatorValidationChallengeResponse()
+        res.stage = StageView(FlowExecutorView())
+        res.stage.request = request
+        with self.assertRaises(ValidationError):
+            res.validate_selected_challenge(
+                {
+                    "device_class": "baz",
+                    "device_uid": "quox",
+                }
+            )
+        res.validate_selected_challenge(
+            {
+                "device_class": "static",
+                "device_uid": "1",
+            }
+        )
@@ -49,6 +49,7 @@ class PromptSerializer(ModelSerializer):
             "order",
             "promptstage_set",
             "sub_text",
+            "placeholder_expression",
         ]

@@ -0,0 +1,49 @@
+# Generated by Django 4.0.2 on 2022-02-27 19:19
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_stages_prompt", "0006_alter_prompt_type"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="prompt",
+            name="placeholder_expression",
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AlterField(
+            model_name="prompt",
+            name="type",
+            field=models.CharField(
+                choices=[
+                    ("text", "Text: Simple Text input"),
+                    (
+                        "text_read_only",
+                        "Text (read-only): Simple Text input, but cannot be edited.",
+                    ),
+                    (
+                        "username",
+                        "Username: Same as Text input, but checks for and prevents duplicate usernames.",
+                    ),
+                    ("email", "Email: Text field with Email type."),
+                    (
+                        "password",
+                        "Password: Masked input, password is validated against sources. Policies still have to be applied to this Stage. If two of these are used in the same stage, they are ensured to be identical.",
+                    ),
+                    ("number", "Number"),
+                    ("checkbox", "Checkbox"),
+                    ("date", "Date"),
+                    ("date-time", "Date Time"),
+                    ("separator", "Separator: Static Separator Line"),
+                    ("hidden", "Hidden: Hidden field, can be used to insert data into form."),
+                    ("static", "Static: Static value, displayed as-is."),
+                    ("ak-locale", "authentik: Selection of locales authentik supports"),
+                ],
+                max_length=100,
+            ),
+        ),
+    ]
@@ -3,6 +3,7 @@ from typing import Any, Optional
 from uuid import uuid4

 from django.db import models
+from django.http import HttpRequest
 from django.utils.translation import gettext_lazy as _
 from django.views import View
 from rest_framework.fields import (
@@ -16,15 +17,23 @@ from rest_framework.fields import (
     ReadOnlyField,
 )
 from rest_framework.serializers import BaseSerializer
+from structlog.stdlib import get_logger
+
+from authentik.core.exceptions import PropertyMappingExpressionException
+from authentik.core.expression import PropertyMappingEvaluator
+from authentik.core.models import User
 from authentik.flows.models import Stage
 from authentik.lib.models import SerializerModel
 from authentik.policies.models import Policy

+LOGGER = get_logger()
+
+
 class FieldTypes(models.TextChoices):
     """Field types an Prompt can be"""

+    # update website/docs/flow/stages/prompt.index.md
+
     # Simple text field
     TEXT = "text", _("Text: Simple Text input")
     # Simple text field
@@ -56,6 +65,8 @@ class FieldTypes(models.TextChoices):
     HIDDEN = "hidden", _("Hidden: Hidden field, can be used to insert data into form.")
     STATIC = "static", _("Static: Static value, displayed as-is.")

+    AK_LOCALE = "ak-locale", _("authentik: Selection of locales authentik supports")
+

 class Prompt(SerializerModel):
     """Single Prompt, part of a prompt stage."""
@@ -73,12 +84,33 @@ class Prompt(SerializerModel):

     order = models.IntegerField(default=0)

+    placeholder_expression = models.BooleanField(default=False)
+
     @property
     def serializer(self) -> BaseSerializer:
         from authentik.stages.prompt.api import PromptSerializer

         return PromptSerializer

+    def get_placeholder(self, prompt_context: dict, user: User, request: HttpRequest) -> str:
+        """Get fully interpolated placeholder"""
+        if self.field_key in prompt_context:
+            # We don't want to parse this as an expression since a user will
+            # be able to control the input
+            return prompt_context[self.field_key]
+
+        if self.placeholder_expression:
+            evaluator = PropertyMappingEvaluator()
+            evaluator.set_context(user, request, self, prompt_context=prompt_context)
+            try:
+                return evaluator.evaluate(self.placeholder)
+            except Exception as exc:  # pylint:disable=broad-except
+                LOGGER.warning(
+                    "failed to evaluate prompt placeholder",
+                    exc=PropertyMappingExpressionException(str(exc)),
+                )
+        return self.placeholder
+
     def field(self, default: Optional[Any]) -> CharField:
         """Get field type for Challenge and response"""
         field_class = CharField
@@ -93,10 +125,6 @@ class Prompt(SerializerModel):
             field_class = EmailField
         if self.type == FieldTypes.NUMBER:
             field_class = IntegerField
-        if self.type == FieldTypes.HIDDEN:
-            field_class = HiddenField
-            kwargs["required"] = False
-            kwargs["default"] = self.placeholder
         if self.type == FieldTypes.CHECKBOX:
             field_class = BooleanField
             kwargs["required"] = False
@@ -104,13 +132,22 @@ class Prompt(SerializerModel):
             field_class = DateField
         if self.type == FieldTypes.DATE_TIME:
             field_class = DateTimeField
+
+        if self.type == FieldTypes.SEPARATOR:
+            kwargs["required"] = False
+            kwargs["label"] = ""
+        if self.type == FieldTypes.HIDDEN:
+            field_class = HiddenField
+            kwargs["required"] = False
+            kwargs["default"] = self.placeholder
         if self.type == FieldTypes.STATIC:
             kwargs["default"] = self.placeholder
             kwargs["required"] = False
             kwargs["label"] = ""
-        if self.type == FieldTypes.SEPARATOR:
-            kwargs["required"] = False
+        if self.type == FieldTypes.AK_LOCALE:
             kwargs["label"] = ""
+            kwargs["allow_blank"] = True
+
         if default:
             kwargs["default"] = default
         # May not set both `required` and `default`
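The new get_placeholder() establishes a fixed resolution order: a value already present in the plan's prompt context always wins (and is never evaluated, since it can be user-controlled), then an expression placeholder is evaluated, and only then does the literal placeholder string apply. Below is a simplified, illustrative sketch of that order; it swaps authentik's PropertyMappingEvaluator for a plain callable and is not the project's actual implementation.

# Simplified sketch of the placeholder resolution order; `evaluate` stands in
# for authentik's PropertyMappingEvaluator and is illustrative only.
from typing import Callable


def resolve_placeholder(
    field_key: str,
    placeholder: str,
    placeholder_expression: bool,
    prompt_context: dict,
    evaluate: Callable[[str], str],
) -> str:
    # 1. Values already in the prompt context always win and are never evaluated.
    if field_key in prompt_context:
        return prompt_context[field_key]
    # 2. If expressions are enabled, try to evaluate the placeholder...
    if placeholder_expression:
        try:
            return evaluate(placeholder)
        except Exception:
            pass
    # 3. ...otherwise (or on evaluation error) fall back to the literal placeholder.
    return placeholder


print(resolve_placeholder("email", "return user.email", True, {}, lambda expr: "admin@example.org"))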
@@ -165,13 +165,14 @@ class PromptStageView(ChallengeStageView):
     response_class = PromptChallengeResponse

     def get_challenge(self, *args, **kwargs) -> Challenge:
-        fields = list(self.executor.current_stage.fields.all().order_by("order"))
+        fields: list[Prompt] = list(self.executor.current_stage.fields.all().order_by("order"))
         serializers = []
         context_prompt = self.executor.plan.context.get(PLAN_CONTEXT_PROMPT, {})
         for field in fields:
             data = StagePromptSerializer(field).data
-            if field.field_key in context_prompt:
-                data["placeholder"] = context_prompt.get(field.field_key)
+            data["placeholder"] = field.get_placeholder(
+                context_prompt, self.get_pending_user(), self.request
+            )
             serializers.append(data)
         challenge = PromptChallenge(
             data={
@@ -1,16 +1,17 @@
 """Prompt tests"""
 from unittest.mock import MagicMock, patch

+from django.test import RequestFactory
 from django.urls import reverse
 from rest_framework.exceptions import ErrorDetail

-from authentik.core.models import User
 from authentik.core.tests.utils import create_test_admin_user
 from authentik.flows.markers import StageMarker
 from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding
 from authentik.flows.planner import FlowPlan
 from authentik.flows.tests import FlowTestCase
 from authentik.flows.views.executor import SESSION_KEY_PLAN
+from authentik.lib.generators import generate_id
 from authentik.policies.expression.models import ExpressionPolicy
 from authentik.stages.prompt.models import FieldTypes, Prompt, PromptStage
 from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT, PromptChallengeResponse
@@ -21,8 +22,8 @@ class TestPromptStage(FlowTestCase):

     def setUp(self):
         super().setUp()
-        self.user = User.objects.create(username="unittest", email="test@beryju.org")
+        self.user = create_test_admin_user()
+        self.factory = RequestFactory()
         self.flow = Flow.objects.create(
             name="test-prompt",
             slug="test-prompt",
@@ -219,3 +220,95 @@ class TestPromptStage(FlowTestCase):
         self.assertNotEqual(challenge_response.validated_data["hidden_prompt"], "foo")
         self.assertEqual(challenge_response.validated_data["hidden_prompt"], "hidden")
         self.assertNotEqual(challenge_response.validated_data["static_prompt"], "foo")
+
+    def test_prompt_placeholder(self):
+        """Test placeholder and expression"""
+        context = {
+            "foo": generate_id(),
+        }
+        prompt: Prompt = Prompt(
+            field_key="text_prompt_expression",
+            label="TEXT_LABEL",
+            type=FieldTypes.TEXT,
+            placeholder="return prompt_context['foo']",
+            placeholder_expression=True,
+        )
+        self.assertEqual(
+            prompt.get_placeholder(context, self.user, self.factory.get("/")), context["foo"]
+        )
+        context["text_prompt_expression"] = generate_id()
+        self.assertEqual(
+            prompt.get_placeholder(context, self.user, self.factory.get("/")),
+            context["text_prompt_expression"],
+        )
+        self.assertNotEqual(
+            prompt.get_placeholder(context, self.user, self.factory.get("/")), context["foo"]
+        )
+
+    def test_prompt_placeholder_error(self):
+        """Test placeholder and expression"""
+        context = {}
+        prompt: Prompt = Prompt(
+            field_key="text_prompt_expression",
+            label="TEXT_LABEL",
+            type=FieldTypes.TEXT,
+            placeholder="something invalid dunno",
+            placeholder_expression=True,
+        )
+        self.assertEqual(
+            prompt.get_placeholder(context, self.user, self.factory.get("/")),
+            "something invalid dunno",
+        )
+
+    def test_prompt_placeholder_disabled(self):
+        """Test placeholder and expression"""
+        context = {}
+        prompt: Prompt = Prompt(
+            field_key="text_prompt_expression",
+            label="TEXT_LABEL",
+            type=FieldTypes.TEXT,
+            placeholder="return prompt_context['foo']",
+            placeholder_expression=False,
+        )
+        self.assertEqual(
+            prompt.get_placeholder(context, self.user, self.factory.get("/")), prompt.placeholder
+        )
+
+    def test_field_types(self):
+        """Ensure all field types can successfully be created"""
+
+    def test_invalid_save(self):
+        """Ensure field can't be saved with invalid type"""
+        prompt: Prompt = Prompt(
+            field_key="text_prompt_expression",
+            label="TEXT_LABEL",
+            type="foo",
+            placeholder="foo",
+            placeholder_expression=False,
+            sub_text="test",
+            order=123,
+        )
+        with self.assertRaises(ValueError):
+            prompt.save()
+
+
+def field_type_tester_factory(field_type: FieldTypes):
+    """Test field for field_type"""
+
+    def tester(self: TestPromptStage):
+        prompt: Prompt = Prompt(
+            field_key="text_prompt_expression",
+            label="TEXT_LABEL",
+            type=field_type,
+            placeholder="foo",
+            placeholder_expression=False,
+            sub_text="test",
+            order=123,
+        )
+        self.assertIsNotNone(prompt.field("foo"))
+
+    return tester
+
+
+for _type in FieldTypes:
+    setattr(TestPromptStage, f"test_field_type_{_type}", field_type_tester_factory(_type))
@@ -25,15 +25,16 @@ LOGGER = get_logger()
 class UserWriteStageView(StageView):
     """Finalise Enrollment flow by creating a user object."""

-    def write_attribute(self, user: User, key: str, value: Any):
+    @staticmethod
+    def write_attribute(user: User, key: str, value: Any):
         """Allow use of attributes.foo.bar when writing to a user, with full
         recursion"""
         parts = key.replace("_", ".").split(".")
         if len(parts) < 1:  # pragma: no cover
             return
-        # Function will always be called with a key like attribute.
+        # Function will always be called with a key like attributes.
         # this is just a sanity check to ensure that is removed
-        if parts[0] == "attribute":
+        if parts[0] == "attributes":
             parts = parts[1:]
         attrs = user.attributes
         for comp in parts[:-1]:
@@ -84,16 +85,20 @@ class UserWriteStageView(StageView):
             setter = getattr(user, setter_name)
             if callable(setter):
                 setter(value)
+        # For exact attributes match, update the dictionary in place
+        elif key == "attributes":
+            user.attributes.update(value)
         # User has this key already
-        elif hasattr(user, key):
+        elif hasattr(user, key) and not key.startswith("attributes."):
             setattr(user, key, value)
         # Otherwise we just save it as custom attribute, but only if the value is prefixed with
         # `attribute_`, to prevent accidentally saving values
         else:
-            if not key.startswith("attribute.") and not key.startswith("attribute_"):
+            if not key.startswith("attributes.") and not key.startswith("attributes_"):
                 LOGGER.debug("discarding key", key=key)
                 continue
-            self.write_attribute(user, key, value)
+            UserWriteStageView.write_attribute(user, key, value)
+        print(user.attributes)
         # Extra check to prevent flows from saving a user with a blank username
         if user.username == "":
             LOGGER.warning("Aborting write to empty username", user=user)
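For reference, the nested-write behaviour of write_attribute() above means a key such as "attributes.foo.bar" walks (and creates) nested dicts on the user's attributes and sets the leaf value. The following is a standalone, simplified sketch of just that walk; the real method additionally rewrites underscores to dots and handles setters and plain model fields.

# Illustrative sketch of the nested attribute write (simplified, not the
# actual UserWriteStageView method).
def write_nested(attributes: dict, key: str, value) -> None:
    parts = key.split(".")
    if parts[0] == "attributes":
        parts = parts[1:]
    node = attributes
    for comp in parts[:-1]:
        # create intermediate dicts as needed
        node = node.setdefault(comp, {})
    node[parts[-1]] = value


attrs = {"foo": "bar"}
write_nested(attrs, "attributes.foob.bar", "baz")
assert attrs == {"foo": "bar", "foob": {"bar": "baz"}}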
@@ -16,6 +16,7 @@ from authentik.flows.tests.test_executor import TO_STAGE_RESPONSE_MOCK
 from authentik.flows.views.executor import SESSION_KEY_PLAN
 from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT
 from authentik.stages.user_write.models import UserWriteStage
+from authentik.stages.user_write.stage import UserWriteStageView


 class TestUserWriteStage(FlowTestCase):
@@ -77,7 +78,7 @@ class TestUserWriteStage(FlowTestCase):
         plan.context[PLAN_CONTEXT_PROMPT] = {
             "username": "test-user-new",
             "password": new_password,
-            "attribute.some.custom-attribute": "test",
+            "attributes.some.custom-attribute": "test",
             "some_ignored_attribute": "bar",
         }
         session = self.client.session
@@ -172,3 +173,43 @@ class TestUserWriteStage(FlowTestCase):
             self.flow,
             component="ak-stage-access-denied",
         )
+
+    def test_write_attribute(self):
+        """Test write_attribute"""
+        user = create_test_admin_user()
+        user.attributes = {
+            "foo": "bar",
+            "baz": {
+                "qwer": [
+                    "quox",
+                ]
+            },
+        }
+        user.save()
+        UserWriteStageView.write_attribute(user, "attributes.foo", "baz")
+        self.assertEqual(
+            user.attributes,
+            {
+                "foo": "baz",
+                "baz": {
+                    "qwer": [
+                        "quox",
+                    ]
+                },
+            },
+        )
+        UserWriteStageView.write_attribute(user, "attributes.foob.bar", "baz")
+        self.assertEqual(
+            user.attributes,
+            {
+                "foo": "baz",
+                "foob": {
+                    "bar": "baz",
+                },
+                "baz": {
+                    "qwer": [
+                        "quox",
+                    ]
+                },
+            },
+        )
@@ -50,6 +50,7 @@ class TenantSerializer(ModelSerializer):
             "flow_invalidation",
             "flow_recovery",
             "flow_unenrollment",
+            "flow_user_settings",
             "event_retention",
             "web_certificate",
         ]
@@ -72,6 +73,7 @@ class CurrentTenantSerializer(PassiveSerializer):
     flow_invalidation = CharField(source="flow_invalidation.slug", required=False)
     flow_recovery = CharField(source="flow_recovery.slug", required=False)
     flow_unenrollment = CharField(source="flow_unenrollment.slug", required=False)
+    flow_user_settings = CharField(source="flow_user_settings.slug", required=False)


 class TenantViewSet(UsedByMixin, ModelViewSet):
authentik/tenants/migrations/0002_tenant_flow_user_settings.py (new file, 181 lines)
@@ -0,0 +1,181 @@
+# Generated by Django 4.0.2 on 2022-02-26 21:14
+
+import django.db.models.deletion
+from django.apps.registry import Apps
+from django.db import migrations, models
+from django.db.backends.base.schema import BaseDatabaseSchemaEditor
+
+from authentik.flows.models import FlowDesignation
+from authentik.stages.identification.models import UserFields
+from authentik.stages.password import BACKEND_APP_PASSWORD, BACKEND_INBUILT, BACKEND_LDAP
+
+AUTHORIZATION_POLICY = """from authentik.lib.config import CONFIG
+from authentik.core.models import (
+    USER_ATTRIBUTE_CHANGE_EMAIL,
+    USER_ATTRIBUTE_CHANGE_NAME,
+    USER_ATTRIBUTE_CHANGE_USERNAME
+)
+prompt_data = request.context.get("prompt_data")
+
+if not request.user.group_attributes().get(
+    USER_ATTRIBUTE_CHANGE_EMAIL, CONFIG.y_bool("default_user_change_email", True)
+):
+    if prompt_data.get("email") != request.user.email:
+        ak_message("Not allowed to change email address.")
+        return False
+
+if not request.user.group_attributes().get(
+    USER_ATTRIBUTE_CHANGE_NAME, CONFIG.y_bool("default_user_change_name", True)
+):
+    if prompt_data.get("name") != request.user.name:
+        ak_message("Not allowed to change name.")
+        return False
+
+if not request.user.group_attributes().get(
+    USER_ATTRIBUTE_CHANGE_USERNAME, CONFIG.y_bool("default_user_change_username", True)
+):
+    if prompt_data.get("username") != request.user.username:
+        ak_message("Not allowed to change username.")
+        return False
+
+return True
+"""
+
+
+def create_default_user_settings_flow(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
+    from authentik.stages.prompt.models import FieldTypes
+
+    db_alias = schema_editor.connection.alias
+
+    Tenant = apps.get_model("authentik_tenants", "Tenant")
+
+    Flow = apps.get_model("authentik_flows", "Flow")
+    FlowStageBinding = apps.get_model("authentik_flows", "FlowStageBinding")
+
+    ExpressionPolicy = apps.get_model("authentik_policies_expression", "ExpressionPolicy")
+
+    UserWriteStage = apps.get_model("authentik_stages_user_write", "UserWriteStage")
+    PromptStage = apps.get_model("authentik_stages_prompt", "PromptStage")
+    Prompt = apps.get_model("authentik_stages_prompt", "Prompt")
+
+    prompt_username, _ = Prompt.objects.using(db_alias).update_or_create(
+        field_key="username",
+        order=200,
+        defaults={
+            "label": "Username",
+            "type": FieldTypes.TEXT,
+            "placeholder": """try:
+    return user.username
+except:
+    return ''""",
+            "placeholder_expression": True,
+        },
+    )
+    prompt_name, _ = Prompt.objects.using(db_alias).update_or_create(
+        field_key="name",
+        order=201,
+        defaults={
+            "label": "Name",
+            "type": FieldTypes.TEXT,
+            "placeholder": """try:
+    return user.name
+except:
+    return ''""",
+            "placeholder_expression": True,
+        },
+    )
+    prompt_email, _ = Prompt.objects.using(db_alias).update_or_create(
+        field_key="email",
+        order=202,
+        defaults={
+            "label": "Email",
+            "type": FieldTypes.EMAIL,
+            "placeholder": """try:
+    return user.email
+except:
+    return ''""",
+            "placeholder_expression": True,
+        },
+    )
+    prompt_locale, _ = Prompt.objects.using(db_alias).update_or_create(
+        field_key="attributes.settings.locale",
+        order=203,
+        defaults={
+            "label": "Locale",
+            "type": FieldTypes.AK_LOCALE,
+            "placeholder": """try:
+    return user.attributes.get("settings", {}).get("locale", "")
+except:
+    return ''""",
+            "placeholder_expression": True,
+            "required": True,
+        },
+    )
+
+    validation_policy, _ = ExpressionPolicy.objects.using(db_alias).update_or_create(
+        name="default-user-settings-authorization",
+        defaults={
+            "expression": AUTHORIZATION_POLICY,
+        },
+    )
+    prompt_stage, _ = PromptStage.objects.using(db_alias).update_or_create(
+        name="default-user-settings",
+    )
+    prompt_stage.validation_policies.set([validation_policy])
+    prompt_stage.fields.set([prompt_username, prompt_name, prompt_email, prompt_locale])
+    prompt_stage.save()
+    user_write, _ = UserWriteStage.objects.using(db_alias).update_or_create(
+        name="default-user-settings-write"
+    )
+
+    flow, _ = Flow.objects.using(db_alias).update_or_create(
+        slug="default-user-settings-flow",
+        designation=FlowDesignation.STAGE_CONFIGURATION,
+        defaults={
+            "name": "Update your info",
+        },
+    )
+    FlowStageBinding.objects.using(db_alias).update_or_create(
+        target=flow,
+        stage=prompt_stage,
+        defaults={
+            "order": 20,
+        },
+    )
+    FlowStageBinding.objects.using(db_alias).update_or_create(
+        target=flow,
+        stage=user_write,
+        defaults={
+            "order": 100,
+        },
+    )
+
+    tenant = Tenant.objects.using(db_alias).filter(default=True).first()
+    if not tenant:
+        return
+    tenant.flow_user_settings = flow
+    tenant.save()
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_policies_expression", "__latest__"),
+        ("authentik_stages_prompt", "0007_prompt_placeholder_expression"),
+        ("authentik_flows", "0021_auto_20211227_2103"),
+        ("authentik_tenants", "0001_squashed_0005_tenant_web_certificate"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="tenant",
+            name="flow_user_settings",
+            field=models.ForeignKey(
+                null=True,
+                on_delete=django.db.models.deletion.SET_NULL,
+                related_name="tenant_user_settings",
+                to="authentik_flows.flow",
+            ),
+        ),
+        migrations.RunPython(create_default_user_settings_flow),
+    ]
@@ -40,6 +40,9 @@ class Tenant(models.Model):
     flow_unenrollment = models.ForeignKey(
         Flow, null=True, on_delete=models.SET_NULL, related_name="tenant_unenrollment"
     )
+    flow_user_settings = models.ForeignKey(
+        Flow, null=True, on_delete=models.SET_NULL, related_name="tenant_user_settings"
+    )

     event_retention = models.TextField(
         default="days=365",
@@ -8,6 +8,7 @@ import (
     log "github.com/sirupsen/logrus"

     "goauthentik.io/internal/common"
+    "goauthentik.io/internal/debug"
     "goauthentik.io/internal/outpost/ak"
     "goauthentik.io/internal/outpost/ldap"
 )
@@ -27,6 +28,7 @@ func main() {
             log.FieldKeyTime: "timestamp",
         },
     })
+    go debug.EnableDebugServer()
     akURL, found := os.LookupEnv("AUTHENTIK_HOST")
     if !found {
         fmt.Println("env AUTHENTIK_HOST not set!")
@@ -9,6 +9,7 @@ import (
     log "github.com/sirupsen/logrus"

     "goauthentik.io/internal/common"
+    "goauthentik.io/internal/debug"
     "goauthentik.io/internal/outpost/ak"
     "goauthentik.io/internal/outpost/proxyv2"
 )
@@ -32,6 +33,7 @@ func main() {
             log.FieldKeyTime: "timestamp",
         },
     })
+    go debug.EnableDebugServer()
     akURL, found := os.LookupEnv("AUTHENTIK_HOST")
     if !found {
         fmt.Println("env AUTHENTIK_HOST not set!")
@@ -11,6 +11,7 @@ import (
     "goauthentik.io/internal/common"
     "goauthentik.io/internal/config"
     "goauthentik.io/internal/constants"
+    "goauthentik.io/internal/debug"
     "goauthentik.io/internal/gounicorn"
     "goauthentik.io/internal/outpost/ak"
     "goauthentik.io/internal/outpost/proxyv2"
@@ -28,6 +29,7 @@ func main() {
             log.FieldKeyTime: "timestamp",
         },
     })
+    go debug.EnableDebugServer()
     l := log.WithField("logger", "authentik.root")
     config.DefaultConfig()
     err := config.LoadConfig("./authentik/lib/default.yml")
|
@ -17,7 +17,7 @@ services:
|
|||||||
image: redis:alpine
|
image: redis:alpine
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
server:
|
server:
|
||||||
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2022.1.2}
|
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2022.3.3}
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
command: server
|
command: server
|
||||||
environment:
|
environment:
|
||||||
@ -38,7 +38,7 @@ services:
|
|||||||
- "0.0.0.0:${AUTHENTIK_PORT_HTTP:-9000}:9000"
|
- "0.0.0.0:${AUTHENTIK_PORT_HTTP:-9000}:9000"
|
||||||
- "0.0.0.0:${AUTHENTIK_PORT_HTTPS:-9443}:9443"
|
- "0.0.0.0:${AUTHENTIK_PORT_HTTPS:-9443}:9443"
|
||||||
worker:
|
worker:
|
||||||
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2022.1.2}
|
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2022.3.3}
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
command: worker
|
command: worker
|
||||||
environment:
|
environment:
|
||||||
@ -49,11 +49,10 @@ services:
|
|||||||
AUTHENTIK_POSTGRESQL__PASSWORD: ${PG_PASS}
|
AUTHENTIK_POSTGRESQL__PASSWORD: ${PG_PASS}
|
||||||
# AUTHENTIK_ERROR_REPORTING__ENABLED: "true"
|
# AUTHENTIK_ERROR_REPORTING__ENABLED: "true"
|
||||||
# This is optional, and can be removed. If you remove this, the following will happen
|
# This is optional, and can be removed. If you remove this, the following will happen
|
||||||
# - The permissions for the /backups and /media folders aren't fixed, so make sure they are 1000:1000
|
# - The permissions for the /media folders aren't fixed, so make sure they are 1000:1000
|
||||||
# - The docker socket can't be accessed anymore
|
# - The docker socket can't be accessed anymore
|
||||||
user: root
|
user: root
|
||||||
volumes:
|
volumes:
|
||||||
- ./backups:/backups
|
|
||||||
- ./media:/media
|
- ./media:/media
|
||||||
- ./certs:/certs
|
- ./certs:/certs
|
||||||
- /var/run/docker.sock:/var/run/docker.sock
|
- /var/run/docker.sock:/var/run/docker.sock
|
||||||
|
go.mod (24 changed lines)
@@ -4,33 +4,35 @@ go 1.16

 require (
     github.com/Netflix/go-env v0.0.0-20210215222557-e437a7e7f9fb
-    github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d // indirect
     github.com/coreos/go-oidc v2.2.1+incompatible
     github.com/garyburd/redigo v1.6.2 // indirect
-    github.com/getsentry/sentry-go v0.12.0
+    github.com/getsentry/sentry-go v0.13.0
-    github.com/go-ldap/ldap/v3 v3.4.1
+    github.com/go-ldap/ldap/v3 v3.4.2
-    github.com/go-openapi/runtime v0.21.1
+    github.com/go-openapi/runtime v0.23.3
-    github.com/go-openapi/strfmt v0.21.1
+    github.com/go-openapi/strfmt v0.21.2
     github.com/golang-jwt/jwt v3.2.2+incompatible
     github.com/google/uuid v1.3.0
     github.com/gorilla/handlers v1.5.1
     github.com/gorilla/mux v1.8.0
     github.com/gorilla/securecookie v1.1.1
     github.com/gorilla/sessions v1.2.1
-    github.com/gorilla/websocket v1.4.2
+    github.com/gorilla/websocket v1.5.0
     github.com/imdario/mergo v0.3.12
-    github.com/mailru/easyjson v0.7.7 // indirect
     github.com/nmcclain/asn1-ber v0.0.0-20170104154839-2661553a0484
     github.com/nmcclain/ldap v0.0.0-20210720162743-7f8d1e44eeba
-    github.com/pires/go-proxyproto v0.6.1
+    github.com/pires/go-proxyproto v0.6.2
     github.com/pkg/errors v0.9.1
     github.com/pquerna/cachecontrol v0.0.0-20201205024021-ac21108117ac // indirect
-    github.com/prometheus/client_golang v1.12.0
+    github.com/prometheus/client_golang v1.12.1
+    github.com/quasoft/memstore v0.0.0-20191010062613-2bce066d2b0b
     github.com/sirupsen/logrus v1.8.1
-    goauthentik.io/api v0.2021125.1
+    github.com/stretchr/testify v1.7.1
+    goauthentik.io/api/v3 v3.2022032.1
+    golang.org/x/net v0.0.0-20220225172249-27dd8689420f // indirect
-    golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c
+    golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b
     golang.org/x/sync v0.0.0-20210220032951-036812b2e83c
     google.golang.org/appengine v1.6.7 // indirect
+    google.golang.org/protobuf v1.27.1 // indirect
     gopkg.in/boj/redistore.v1 v1.0.0-20160128113310-fc113767cd6b
     gopkg.in/square/go-jose.v2 v2.5.1 // indirect
     gopkg.in/yaml.v2 v2.4.0
Some files were not shown because too many files have changed in this diff.