Compare commits
763 Commits
version/20
...
version/20
Commit list: 763 commits, captured as abbreviated SHAs only (the Author, Date, and message columns were empty in this export and are omitted here).
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2021.10.1-rc2
+current_version = 2021.12.2
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)
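
A short aside on the `parse` option above: it is a plain named-group regex, so its behaviour can be checked directly with Python's `re` module. A minimal sketch, using only the two version strings that appear in this diff:

    import re

    # Same pattern as the bump2version "parse" option shown above.
    VERSION = re.compile(r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)")

    for version in ("2021.10.1-rc2", "2021.12.2"):
        # 2021.10.1-rc2 -> major=2021, minor=10, patch=1, release='rc2'
        # 2021.12.2     -> major=2021, minor=12, patch=2, release=''
        print(version, VERSION.match(version).groupdict())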

1  .github/stale.yml  (vendored)
@@ -7,6 +7,7 @@ exemptLabels:
 - pinned
 - security
 - pr_wanted
+- enhancement/confirmed
 # Comment to post when marking an issue as stale. Set to `false` to disable
 markComment: >
 This issue has been automatically marked as stale because it has not had

198  .github/workflows/ci-main.yml  (vendored)
@@ -18,79 +18,17 @@ env:
 POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"

 jobs:
-lint-pylint:
-runs-on: ubuntu-latest
-steps:
-- uses: actions/checkout@v2
-- uses: actions/setup-python@v2
-with:
-python-version: '3.9'
-- id: cache-pipenv
-uses: actions/cache@v2.1.6
-with:
-path: ~/.local/share/virtualenvs
-key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
-- name: prepare
-env:
-INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
-run: scripts/ci_prepare.sh
-- name: run pylint
-run: pipenv run pylint authentik tests lifecycle
-lint-black:
-runs-on: ubuntu-latest
-steps:
-- uses: actions/checkout@v2
-- uses: actions/setup-python@v2
-with:
-python-version: '3.9'
-- id: cache-pipenv
-uses: actions/cache@v2.1.6
-with:
-path: ~/.local/share/virtualenvs
-key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
-- name: prepare
-env:
-INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
-run: scripts/ci_prepare.sh
-- name: run black
-run: pipenv run black --check authentik tests lifecycle
-lint-isort:
-runs-on: ubuntu-latest
-steps:
-- uses: actions/checkout@v2
-- uses: actions/setup-python@v2
-with:
-python-version: '3.9'
-- id: cache-pipenv
-uses: actions/cache@v2.1.6
-with:
-path: ~/.local/share/virtualenvs
-key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
-- name: prepare
-env:
-INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
-run: scripts/ci_prepare.sh
-- name: run isort
-run: pipenv run isort --check authentik tests lifecycle
-lint-bandit:
-runs-on: ubuntu-latest
-steps:
-- uses: actions/checkout@v2
-- uses: actions/setup-python@v2
-with:
-python-version: '3.9'
-- id: cache-pipenv
-uses: actions/cache@v2.1.6
-with:
-path: ~/.local/share/virtualenvs
-key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
-- name: prepare
-env:
-INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
-run: scripts/ci_prepare.sh
-- name: run bandit
-run: pipenv run bandit -r authentik tests lifecycle
-lint-pyright:
+lint:
+strategy:
+fail-fast: false
+matrix:
+job:
+- pylint
+- black
+- isort
+- bandit
+- pyright
+- pending-migrations
 runs-on: ubuntu-latest
 steps:
 - uses: actions/checkout@v2
@@ -100,12 +38,17 @@ jobs:
 - uses: actions/setup-node@v2
 with:
 node-version: '16'
+- id: cache-pipenv
+uses: actions/cache@v2.1.7
+with:
+path: ~/.local/share/virtualenvs
+key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
 - name: prepare
-run: |
-scripts/ci_prepare.sh
-npm install -g pyright@1.1.136
-- name: run bandit
-run: pipenv run pyright e2e lifecycle
+env:
+INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
+run: scripts/ci_prepare.sh
+- name: run pylint
+run: pipenv run make ci-${{ matrix.job }}
 test-migrations:
 runs-on: ubuntu-latest
 steps:
@@ -114,7 +57,7 @@ jobs:
 with:
 python-version: '3.9'
 - id: cache-pipenv
-uses: actions/cache@v2.1.6
+uses: actions/cache@v2.1.7
 with:
 path: ~/.local/share/virtualenvs
 key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
@@ -138,7 +81,7 @@ jobs:
 run: |
 python ./scripts/gh_env.py
 - id: cache-pipenv
-uses: actions/cache@v2.1.6
+uses: actions/cache@v2.1.7
 with:
 path: ~/.local/share/virtualenvs
 key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
@@ -146,7 +89,11 @@ jobs:
 run: |
 # Copy current, latest config to local
 cp authentik/lib/default.yml local.env.yml
+cp -R .github ..
+cp -R scripts ..
 git checkout $(git describe --abbrev=0 --match 'version/*')
+rm -rf .github/ scripts/
+mv ../.github ../scripts .
 - name: prepare
 env:
 INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
@@ -160,7 +107,8 @@ jobs:
 run: |
 set -x
 git fetch
-git checkout ${{ steps.ev.outputs.branchName }}
+git reset --hard HEAD
+git checkout $GITHUB_HEAD_REF
 pipenv sync --dev
 - name: prepare
 env:
@@ -176,7 +124,7 @@ jobs:
 with:
 python-version: '3.9'
 - id: cache-pipenv
-uses: actions/cache@v2.1.6
+uses: actions/cache@v2.1.7
 with:
 path: ~/.local/share/virtualenvs
 key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
@@ -205,7 +153,7 @@ jobs:
 with:
 python-version: '3.9'
 - id: cache-pipenv
-uses: actions/cache@v2.1.6
+uses: actions/cache@v2.1.7
 with:
 path: ~/.local/share/virtualenvs
 key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
@@ -228,7 +176,7 @@ jobs:
 testspace [integration]unittest.xml --link=codecov
 - if: ${{ always() }}
 uses: codecov/codecov-action@v2
-test-e2e:
+test-e2e-provider:
 runs-on: ubuntu-latest
 steps:
 - uses: actions/checkout@v2
@@ -244,7 +192,7 @@ jobs:
 with:
 domain: ${{github.repository_owner}}
 - id: cache-pipenv
-uses: actions/cache@v2.1.6
+uses: actions/cache@v2.1.7
 with:
 path: ~/.local/share/virtualenvs
 key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
@@ -255,7 +203,7 @@ jobs:
 scripts/ci_prepare.sh
 docker-compose -f tests/e2e/docker-compose.yml up -d
 - id: cache-web
-uses: actions/cache@v2.1.6
+uses: actions/cache@v2.1.7
 with:
 path: web/dist
 key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/**') }}
@@ -267,35 +215,92 @@ jobs:
 npm run build
 - name: run e2e
 run: |
-pipenv run make test-e2e
+pipenv run make test-e2e-provider
 pipenv run coverage xml
 - name: run testspace
 if: ${{ always() }}
 run: |
-testspace [e2e]unittest.xml --link=codecov
+testspace [e2e-provider]unittest.xml --link=codecov
 - if: ${{ always() }}
 uses: codecov/codecov-action@v2
-build:
+test-e2e-rest:
+runs-on: ubuntu-latest
+steps:
+- uses: actions/checkout@v2
+- uses: actions/setup-python@v2
+with:
+python-version: '3.9'
+- uses: actions/setup-node@v2
+with:
+node-version: '16'
+cache: 'npm'
+cache-dependency-path: web/package-lock.json
+- uses: testspace-com/setup-testspace@v1
+with:
+domain: ${{github.repository_owner}}
+- id: cache-pipenv
+uses: actions/cache@v2.1.7
+with:
+path: ~/.local/share/virtualenvs
+key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
+- name: prepare
+env:
+INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
+run: |
+scripts/ci_prepare.sh
+docker-compose -f tests/e2e/docker-compose.yml up -d
+- id: cache-web
+uses: actions/cache@v2.1.7
+with:
+path: web/dist
+key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/**') }}
+- name: prepare web ui
+if: steps.cache-web.outputs.cache-hit != 'true'
+run: |
+cd web
+npm i
+npm run build
+- name: run e2e
+run: |
+pipenv run make test-e2e-rest
+pipenv run coverage xml
+- name: run testspace
+if: ${{ always() }}
+run: |
+testspace [e2e-rest]unittest.xml --link=codecov
+- if: ${{ always() }}
+uses: codecov/codecov-action@v2
+ci-core-mark:
 needs:
-- lint-pylint
-- lint-black
-- lint-isort
-- lint-bandit
-- lint-pyright
+- lint
 - test-migrations
 - test-migrations-from-stable
 - test-unittest
 - test-integration
-- test-e2e
+- test-e2e-rest
+- test-e2e-provider
 runs-on: ubuntu-latest
+steps:
+- run: echo mark
+build:
+needs: ci-core-mark
+runs-on: ubuntu-latest
+timeout-minutes: 120
+strategy:
+fail-fast: false
+matrix:
+arch:
+- 'linux/amd64'
 steps:
 - uses: actions/checkout@v2
+- name: Set up QEMU
+uses: docker/setup-qemu-action@v1.2.0
 - name: Set up Docker Buildx
 uses: docker/setup-buildx-action@v1
 - name: prepare variables
 id: ev
 env:
-DOCKER_USERNAME: ${{ secrets.HARBOR_USERNAME }}
+DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
 run: |
 python ./scripts/gh_env.py
 - name: Login to Container Registry
@@ -314,3 +319,4 @@ jobs:
 ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.sha }}
 build-args: |
 GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
+platforms: ${{ matrix.arch }}

57  .github/workflows/ci-outpost.yml  (vendored)
@@ -17,7 +17,7 @@ jobs:
 - uses: actions/checkout@v2
 - uses: actions/setup-go@v2
 with:
-go-version: '^1.16.3'
+go-version: "^1.17"
 - name: Run linter
 run: |
 # Create folder structure for go embeds
@@ -30,23 +30,35 @@ jobs:
 -w /app \
 golangci/golangci-lint:v1.39.0 \
 golangci-lint run -v --timeout 200s
-build:
+ci-outpost-mark:
 needs:
 - lint-golint
+runs-on: ubuntu-latest
+steps:
+- run: echo mark
+build:
+timeout-minutes: 120
+needs:
+- ci-outpost-mark
 strategy:
+fail-fast: false
 matrix:
 type:
 - proxy
 - ldap
+arch:
+- 'linux/amd64'
 runs-on: ubuntu-latest
 steps:
 - uses: actions/checkout@v2
+- name: Set up QEMU
+uses: docker/setup-qemu-action@v1.2.0
 - name: Set up Docker Buildx
 uses: docker/setup-buildx-action@v1
 - name: prepare variables
 id: ev
 env:
-DOCKER_USERNAME: ${{ secrets.HARBOR_USERNAME }}
+DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
 run: |
 python ./scripts/gh_env.py
 - name: Login to Container Registry
@@ -67,3 +79,42 @@ jobs:
 file: ${{ matrix.type }}.Dockerfile
 build-args: |
 GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
+platforms: ${{ matrix.arch }}
+build-outpost-binary:
+timeout-minutes: 120
+needs:
+- ci-outpost-mark
+runs-on: ubuntu-latest
+strategy:
+fail-fast: false
+matrix:
+type:
+- proxy
+- ldap
+goos: [linux]
+goarch: [amd64, arm64]
+steps:
+- uses: actions/checkout@v2
+- uses: actions/setup-go@v2
+with:
+go-version: "^1.17"
+- uses: actions/setup-node@v2
+with:
+node-version: '16'
+cache: 'npm'
+cache-dependency-path: web/package-lock.json
+- name: Build web
+run: |
+cd web
+npm install
+npm run build-proxy
+- name: Build outpost
+run: |
+set -x
+export GOOS=${{ matrix.goos }}
+export GOARCH=${{ matrix.goarch }}
+go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
+- uses: actions/upload-artifact@v2
+with:
+name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
+path: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}

8  .github/workflows/ci-web.yml  (vendored)
@@ -65,12 +65,18 @@ jobs:
 run: |
 cd web
 npm run lit-analyse
-build:
+ci-web-mark:
 needs:
 - lint-eslint
 - lint-prettier
 - lint-lit-analyse
 runs-on: ubuntu-latest
+steps:
+- run: echo mark
+build:
+needs:
+- ci-web-mark
+runs-on: ubuntu-latest
 steps:
 - uses: actions/checkout@v2
 - uses: actions/setup-node@v2

126  .github/workflows/release-publish.yml  (vendored)
@@ -30,14 +30,14 @@ jobs:
 with:
 push: ${{ github.event_name == 'release' }}
 tags: |
-beryju/authentik:2021.10.1-rc2,
+beryju/authentik:2021.12.2,
 beryju/authentik:latest,
-ghcr.io/goauthentik/server:2021.10.1-rc2,
+ghcr.io/goauthentik/server:2021.12.2,
 ghcr.io/goauthentik/server:latest
 platforms: linux/amd64,linux/arm64
 context: .
 - name: Building Docker Image (stable)
-if: ${{ github.event_name == 'release' && !contains('2021.10.1-rc2', 'rc') }}
+if: ${{ github.event_name == 'release' && !contains('2021.12.2', 'rc') }}
 run: |
 docker pull beryju/authentik:latest
 docker tag beryju/authentik:latest beryju/authentik:stable
@@ -45,13 +45,19 @@ jobs:
 docker pull ghcr.io/goauthentik/server:latest
 docker tag ghcr.io/goauthentik/server:latest ghcr.io/goauthentik/server:stable
 docker push ghcr.io/goauthentik/server:stable
-build-proxy:
+build-outpost:
 runs-on: ubuntu-latest
+strategy:
+fail-fast: false
+matrix:
+type:
+- proxy
+- ldap
 steps:
 - uses: actions/checkout@v2
 - uses: actions/setup-go@v2
 with:
-go-version: "^1.15"
+go-version: "^1.17"
 - name: Set up QEMU
 uses: docker/setup-qemu-action@v1.2.0
 - name: Set up Docker Buildx
@@ -72,68 +78,65 @@ jobs:
 with:
 push: ${{ github.event_name == 'release' }}
 tags: |
-beryju/authentik-proxy:2021.10.1-rc2,
-beryju/authentik-proxy:latest,
-ghcr.io/goauthentik/proxy:2021.10.1-rc2,
-ghcr.io/goauthentik/proxy:latest
-file: proxy.Dockerfile
+beryju/authentik-${{ matrix.type }}:2021.12.2,
+beryju/authentik-${{ matrix.type }}:latest,
+ghcr.io/goauthentik/${{ matrix.type }}:2021.12.2,
+ghcr.io/goauthentik/${{ matrix.type }}:latest
+file: ${{ matrix.type }}.Dockerfile
 platforms: linux/amd64,linux/arm64
 - name: Building Docker Image (stable)
-if: ${{ github.event_name == 'release' && !contains('2021.10.1-rc2', 'rc') }}
+if: ${{ github.event_name == 'release' && !contains('2021.12.2', 'rc') }}
 run: |
-docker pull beryju/authentik-proxy:latest
-docker tag beryju/authentik-proxy:latest beryju/authentik-proxy:stable
-docker push beryju/authentik-proxy:stable
-docker pull ghcr.io/goauthentik/proxy:latest
-docker tag ghcr.io/goauthentik/proxy:latest ghcr.io/goauthentik/proxy:stable
-docker push ghcr.io/goauthentik/proxy:stable
-build-ldap:
+docker pull beryju/authentik-${{ matrix.type }}:latest
+docker tag beryju/authentik-${{ matrix.type }}:latest beryju/authentik-${{ matrix.type }}:stable
+docker push beryju/authentik-${{ matrix.type }}:stable
+docker pull ghcr.io/goauthentik/${{ matrix.type }}:latest
+docker tag ghcr.io/goauthentik/${{ matrix.type }}:latest ghcr.io/goauthentik/${{ matrix.type }}:stable
+docker push ghcr.io/goauthentik/${{ matrix.type }}:stable
+build-outpost-binary:
+timeout-minutes: 120
 runs-on: ubuntu-latest
+strategy:
+fail-fast: false
+matrix:
+type:
+- proxy
+- ldap
+goos: [linux, darwin]
+goarch: [amd64, arm64]
 steps:
 - uses: actions/checkout@v2
 - uses: actions/setup-go@v2
 with:
-go-version: "^1.15"
-- name: Set up QEMU
-uses: docker/setup-qemu-action@v1.2.0
-- name: Set up Docker Buildx
-uses: docker/setup-buildx-action@v1
-- name: Docker Login Registry
-uses: docker/login-action@v1
+go-version: "^1.17"
+- uses: actions/setup-node@v2
 with:
-username: ${{ secrets.DOCKER_USERNAME }}
-password: ${{ secrets.DOCKER_PASSWORD }}
-- name: Login to GitHub Container Registry
-uses: docker/login-action@v1
-with:
-registry: ghcr.io
-username: ${{ github.repository_owner }}
-password: ${{ secrets.GITHUB_TOKEN }}
-- name: Building Docker Image
-uses: docker/build-push-action@v2
-with:
-push: ${{ github.event_name == 'release' }}
-tags: |
-beryju/authentik-ldap:2021.10.1-rc2,
-beryju/authentik-ldap:latest,
-ghcr.io/goauthentik/ldap:2021.10.1-rc2,
-ghcr.io/goauthentik/ldap:latest
-file: ldap.Dockerfile
-platforms: linux/amd64,linux/arm64
-- name: Building Docker Image (stable)
-if: ${{ github.event_name == 'release' && !contains('2021.10.1-rc2', 'rc') }}
+node-version: '16'
+cache: 'npm'
+cache-dependency-path: web/package-lock.json
+- name: Build web
 run: |
-docker pull beryju/authentik-ldap:latest
-docker tag beryju/authentik-ldap:latest beryju/authentik-ldap:stable
-docker push beryju/authentik-ldap:stable
-docker pull ghcr.io/goauthentik/ldap:latest
-docker tag ghcr.io/goauthentik/ldap:latest ghcr.io/goauthentik/ldap:stable
-docker push ghcr.io/goauthentik/ldap:stable
+cd web
+npm install
+npm run build-proxy
+- name: Build outpost
+run: |
+set -x
+export GOOS=${{ matrix.goos }}
+export GOARCH=${{ matrix.goarch }}
+go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
+- name: Upload binaries to release
+uses: svenstaro/upload-release-action@v2
+with:
+repo_token: ${{ secrets.GITHUB_TOKEN }}
+file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
+asset_name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
+tag: ${{ github.ref }}
 test-release:
 needs:
 - build-server
-- build-proxy
-- build-ldap
+- build-outpost
+- build-outpost-binary
 runs-on: ubuntu-latest
 steps:
 - uses: actions/checkout@v2
@@ -151,16 +154,11 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - uses: actions/checkout@v2
-- name: Setup Node.js environment
-uses: actions/setup-node@v2
-with:
-node-version: '16'
-- name: Build web api client and web ui
+- name: Get static files from docker image
 run: |
-export NODE_ENV=production
-cd web
-npm i
-npm run build
+docker pull ghcr.io/goauthentik/server:latest
+container=$(docker container create ghcr.io/goauthentik/server:latest)
+docker cp ${container}:web/ .
 - name: Create a Sentry.io release
 uses: getsentry/action-release@v1
 if: ${{ github.event_name == 'release' }}
@@ -170,7 +168,7 @@ jobs:
 SENTRY_PROJECT: authentik
 SENTRY_URL: https://sentry.beryju.org
 with:
-version: authentik@2021.10.1-rc2
+version: authentik@2021.12.2
 environment: beryjuorg-prod
 sourcemaps: './web/dist'
 url_prefix: '~/static/dist'

1  .github/workflows/release-tag.yml  (vendored)
@@ -15,6 +15,7 @@ jobs:
 run: |
 echo "PG_PASS=$(openssl rand -base64 32)" >> .env
 echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
+docker buildx install
 docker build \
 --no-cache \
 -t testing:latest \

14  .github/workflows/translation-compile.yml  (vendored)
@@ -4,6 +4,9 @@ on:
 branches: [ master ]
 paths:
 - '/locale/'
+pull_request:
+paths:
+- '/locale/'
 schedule:
 - cron: "0 */2 * * *"
 workflow_dispatch:
@@ -22,7 +25,7 @@ jobs:
 with:
 python-version: '3.9'
 - id: cache-pipenv
-uses: actions/cache@v2.1.6
+uses: actions/cache@v2.1.7
 with:
 path: ~/.local/share/virtualenvs
 key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
@@ -37,10 +40,19 @@ jobs:
 run: pipenv run ./manage.py compilemessages
 - name: Create Pull Request
 uses: peter-evans/create-pull-request@v3
+id: cpr
 with:
 token: ${{ secrets.GITHUB_TOKEN }}
 branch: compile-backend-translation
 commit-message: "core: compile backend translations"
 title: "core: compile backend translations"
+body: "core: compile backend translations"
 delete-branch: true
 signoff: true
+- name: Enable Pull Request Automerge
+if: steps.cpr.outputs.pull-request-operation == 'created'
+uses: peter-evans/enable-pull-request-automerge@v1
+with:
+token: ${{ secrets.GITHUB_TOKEN }}
+pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
+merge-method: squash

9  .github/workflows/web-api-publish.yml  (vendored)
@@ -30,10 +30,19 @@ jobs:
 npm i @goauthentik/api@$VERSION
 - name: Create Pull Request
 uses: peter-evans/create-pull-request@v3
+id: cpr
 with:
 token: ${{ secrets.GITHUB_TOKEN }}
 branch: update-web-api-client
 commit-message: "web: Update Web API Client version"
 title: "web: Update Web API Client version"
+body: "web: Update Web API Client version"
 delete-branch: true
 signoff: true
+- name: Enable Pull Request Automerge
+if: steps.cpr.outputs.pull-request-operation == 'created'
+uses: peter-evans/enable-pull-request-automerge@v1
+with:
+token: ${{ secrets.GITHUB_TOKEN }}
+pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
+merge-method: squash

4  .gitignore  (vendored)
@@ -66,7 +66,9 @@ coverage.xml
 unittest.xml

 # Translations
-*.mo
+# Have to include binary mo files as they are annoying to compile at build time
+# since a full postgres and redis instance are required
+# *.mo

 # Django stuff:


1  .python-version  (new file)
@@ -0,0 +1 @@
+3.9.7

3  .vscode/settings.json  (vendored)
@@ -10,7 +10,8 @@
 "plex",
 "saml",
 "totp",
-"webauthn"
+"webauthn",
+"traefik"
 ],
 "python.linting.pylintEnabled": true,
 "todo-tree.tree.showCountsInTree": true,

43  Dockerfile
@@ -1,5 +1,5 @@
 # Stage 1: Lock python dependencies
-FROM docker.io/python:3.9-slim-buster as locker
+FROM docker.io/python:3.10.1-slim-bullseye as locker

 COPY ./Pipfile /app/
 COPY ./Pipfile.lock /app/
@@ -11,35 +11,32 @@ RUN pip install pipenv && \
 pipenv lock -r --dev-only > requirements-dev.txt

 # Stage 2: Build website
-FROM docker.io/node as website-builder
+FROM --platform=${BUILDPLATFORM} docker.io/node:16 as website-builder

-COPY ./website /static/
+COPY ./website /work/website/

 ENV NODE_ENV=production
-RUN cd /static && npm i && npm run build-docs-only
+RUN cd /work/website && npm i && npm run build-docs-only

 # Stage 3: Build webui
-FROM docker.io/node as web-builder
+FROM --platform=${BUILDPLATFORM} docker.io/node:16 as web-builder

-COPY ./web /static/
+COPY ./web /work/web/
+COPY ./website /work/website/

 ENV NODE_ENV=production
-RUN cd /static && npm i && npm run build
+RUN cd /work/web && npm i && npm run build

 # Stage 4: Build go proxy
-FROM docker.io/golang:1.17.2 AS builder
+FROM docker.io/golang:1.17.5-bullseye AS builder

 WORKDIR /work

-COPY --from=web-builder /static/robots.txt /work/web/robots.txt
-COPY --from=web-builder /static/security.txt /work/web/security.txt
-COPY --from=web-builder /static/dist/ /work/web/dist/
-COPY --from=web-builder /static/authentik/ /work/web/authentik/
-COPY --from=website-builder /static/help/ /work/website/help/
+COPY --from=web-builder /work/web/robots.txt /work/web/robots.txt
+COPY --from=web-builder /work/web/security.txt /work/web/security.txt

 COPY ./cmd /work/cmd
 COPY ./web/static.go /work/web/static.go
-COPY ./website/static.go /work/website/static.go
 COPY ./internal /work/internal
 COPY ./go.mod /work/go.mod
 COPY ./go.sum /work/go.sum
@@ -47,7 +44,7 @@ COPY ./go.sum /work/go.sum
 RUN go build -o /work/authentik ./cmd/server/main.go

 # Stage 5: Run
-FROM docker.io/python:3.9-slim-buster
+FROM docker.io/python:3.10.1-slim-bullseye

 WORKDIR /
 COPY --from=locker /app/requirements.txt /
@@ -57,19 +54,18 @@ ARG GIT_BUILD_HASH
 ENV GIT_BUILD_HASH=$GIT_BUILD_HASH

 RUN apt-get update && \
-apt-get install -y --no-install-recommends curl ca-certificates gnupg git runit && \
-curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \
-echo "deb http://apt.postgresql.org/pub/repos/apt buster-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
-apt-get update && \
-apt-get install -y --no-install-recommends libpq-dev postgresql-client build-essential libxmlsec1-dev pkg-config libmaxminddb0 && \
+apt-get install -y --no-install-recommends \
+curl ca-certificates gnupg git runit libpq-dev \
+postgresql-client build-essential libxmlsec1-dev \
+pkg-config libmaxminddb0 && \
 pip install -r /requirements.txt --no-cache-dir && \
 apt-get remove --purge -y build-essential git && \
 apt-get autoremove --purge -y && \
 apt-get clean && \
 rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
 adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
-mkdir /backups && \
-chown authentik:authentik /backups
+mkdir -p /backups /certs /media && \
+chown authentik:authentik /backups /certs /media

 COPY ./authentik/ /authentik
 COPY ./pyproject.toml /
@@ -78,6 +74,9 @@ COPY ./tests /tests
 COPY ./manage.py /
 COPY ./lifecycle/ /lifecycle
 COPY --from=builder /work/authentik /authentik-proxy
+COPY --from=web-builder /work/web/dist/ /web/dist/
+COPY --from=web-builder /work/web/authentik/ /web/authentik/
+COPY --from=website-builder /work/website/help/ /website/help/

 USER authentik


69  Makefile
@@ -4,16 +4,19 @@ UID = $(shell id -u)
 GID = $(shell id -g)
 NPM_VERSION = $(shell python -m scripts.npm_version)

-all: lint-fix lint test gen
+all: lint-fix lint test gen web

 test-integration:
-coverage run manage.py test -v 3 tests/integration
+coverage run manage.py test tests/integration

-test-e2e:
-coverage run manage.py test --failfast -v 3 tests/e2e
+test-e2e-provider:
+coverage run manage.py test tests/e2e/test_provider*

+test-e2e-rest:
+coverage run manage.py test tests/e2e/test_flows* tests/e2e/test_source*
+
 test:
-coverage run manage.py test -v 3 authentik
+coverage run manage.py test authentik
 coverage html
 coverage report

@@ -30,13 +33,13 @@ lint-fix:
 website/developer-docs

 lint:
-pyright authentik tests lifecycle
 bandit -r authentik tests lifecycle -x node_modules
 pylint authentik tests lifecycle

-i18n-extract:
+i18n-extract: i18n-extract-core web-extract
+
+i18n-extract-core:
 ./manage.py makemessages --ignore web --ignore internal --ignore web --ignore web-api --ignore website -l en
-cd web && npm run extract

 gen-build:
 ./manage.py spectacular --file schema.yml
@@ -61,18 +64,20 @@ gen-web:
 \cp -rfv web-api/* web/node_modules/@goauthentik/api

 gen-outpost:
+wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O config.yaml
+mkdir -p templates
+wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O templates/README.mustache
+wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O templates/go.mod.mustache
 docker run \
 --rm -v ${PWD}:/local \
 --user ${UID}:${GID} \
-openapitools/openapi-generator-cli generate \
+openapitools/openapi-generator-cli:v5.2.1 generate \
---git-host goauthentik.io \
---git-repo-id outpost \
---git-user-id api \
 -i /local/schema.yml \
 -g go \
 -o /local/api \
---additional-properties=packageName=api,enumClassPrefix=true,useOneOfDiscriminatorLookup=true,disallowAdditionalPropertiesIfNotPresent=false
-rm -f api/go.mod api/go.sum
+-c /local/config.yaml
+go mod edit -replace goauthentik.io/api=./api
+rm -rf config.yaml ./templates/

 gen: gen-build gen-clean gen-web

@@ -81,3 +86,39 @@ migrate:

 run:
 go run -v cmd/server/main.go
+
+web-watch:
+cd web && npm run watch
+
+web: web-lint-fix web-lint web-extract
+
+web-lint-fix:
+cd web && npm run prettier
+
+web-lint:
+cd web && npm run lint
+cd web && npm run lit-analyse
+
+web-extract:
+cd web && npm run extract
+
+# These targets are use by GitHub actions to allow usage of matrix
+# which makes the YAML File a lot smaller
+
+ci-pylint:
+pylint authentik tests lifecycle
+
+ci-black:
+black --check authentik tests lifecycle
+
+ci-isort:
+isort --check authentik tests lifecycle
+
+ci-bandit:
+bandit -r authentik tests lifecycle
+
+ci-pyright:
+pyright e2e lifecycle
+
+ci-pending-migrations:
+./manage.py makemigrations --check

22  Pipfile
@@ -8,7 +8,10 @@ boto3 = "*"
 celery = "*"
 channels = "*"
 channels-redis = "*"
+codespell = "*"
+colorama = "*"
 dacite = "*"
+deepmerge = "*"
 defusedxml = "*"
 django = "*"
 django-dbbackup = { git = 'https://github.com/django-dbbackup/django-dbbackup.git', ref = '9d1909c30a3271c8c9c8450add30d6e0b996e145' }
@@ -23,10 +26,11 @@ djangorestframework = "*"
 djangorestframework-guardian = "*"
 docker = "*"
 drf-spectacular = "*"
+duo-client = "*"
 facebook-sdk = "*"
 geoip2 = "*"
 gunicorn = "*"
-kubernetes = "==v19.15.0b1"
+kubernetes = "==v19.15.0"
 ldap3 = "*"
 lxml = "*"
 packaging = "*"
@@ -35,24 +39,22 @@ pycryptodome = "*"
 pyjwt = "*"
 pyyaml = "*"
 requests-oauthlib = "*"
-sentry-sdk = "*"
+sentry-sdk = { git = 'https://github.com/beryju/sentry-python.git', ref = '379aee28b15d3b87b381317746c4efd24b3d7bc3' }
 service_identity = "*"
 structlog = "*"
 swagger-spec-validator = "*"
 twisted = "==21.7.0"
+ua-parser = "*"
 urllib3 = {extras = ["secure"],version = "*"}
 uvicorn = {extras = ["standard"],version = "*"}
 webauthn = "*"
 xmlsec = "*"
-duo-client = "*"
-ua-parser = "*"
-deepmerge = "*"
-colorama = "*"
-codespell = "*"
+flower = "*"
+wsproto = "*"

 [dev-packages]
 bandit = "*"
-black = "==21.9b0"
+black = "==21.11b1"
 bump2version = "*"
 colorama = "*"
 coverage = {extras = ["toml"],version = "*"}
@@ -60,5 +62,7 @@ pylint = "*"
 pylint-django = "*"
 pytest = "*"
 pytest-django = "*"
-selenium = "*"
+pytest-randomly = "*"
 requests-mock = "*"
+selenium = "*"
+importlib-metadata = "*"
|
Pipfile.lock (generated)
File diff suppressed because it is too large.
README.md
@@ -38,3 +38,23 @@ See [Development Documentation](https://goauthentik.io/developer-docs/?utm_sourc
 ## Security

 See [SECURITY.md](SECURITY.md)

+## Sponsors
+
+This project is proudly sponsored by:
+
+<p>
+    <a href="https://www.digitalocean.com/?utm_medium=opensource&utm_source=goauthentik.io">
+        <img src="https://opensource.nyc3.cdn.digitaloceanspaces.com/attribution/assets/SVG/DO_Logo_horizontal_blue.svg" width="201px">
+    </a>
+</p>
+
+DigitalOcean provides development and testing resources for authentik.
+
+<p>
+    <a href="https://www.netlify.com">
+        <img src="https://www.netlify.com/img/global/badges/netlify-color-accent.svg" alt="Deploys by Netlify" />
+    </a>
+</p>
+
+Netlify hosts the [goauthentik.io](goauthentik.io) site.
@@ -6,8 +6,8 @@

 | Version | Supported |
 | ---------- | ------------------ |
-| 2021.8.x | :white_check_mark: |
-| 2021.9.x | :white_check_mark: |
+| 2021.10.x | :white_check_mark: |
+| 2021.12.x | :white_check_mark: |

 ## Reporting a Vulnerability

@@ -1,3 +1,3 @@
 """authentik"""
-__version__ = "2021.10.1-rc2"
+__version__ = "2021.12.2"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
@@ -1,13 +1,6 @@
 """authentik administration metrics"""
-import time
-from collections import Counter
-from datetime import timedelta
-
-from django.db.models import Count, ExpressionWrapper, F
-from django.db.models.fields import DurationField
-from django.db.models.functions import ExtractHour
-from django.utils.timezone import now
 from drf_spectacular.utils import extend_schema, extend_schema_field
+from guardian.shortcuts import get_objects_for_user
 from rest_framework.fields import IntegerField, SerializerMethodField
 from rest_framework.permissions import IsAdminUser
 from rest_framework.request import Request
@@ -15,31 +8,7 @@ from rest_framework.response import Response
 from rest_framework.views import APIView

 from authentik.core.api.utils import PassiveSerializer
-from authentik.events.models import Event, EventAction
-
-
-def get_events_per_1h(**filter_kwargs) -> list[dict[str, int]]:
-    """Get event count by hour in the last day, fill with zeros"""
-    date_from = now() - timedelta(days=1)
-    result = (
-        Event.objects.filter(created__gte=date_from, **filter_kwargs)
-        .annotate(age=ExpressionWrapper(now() - F("created"), output_field=DurationField()))
-        .annotate(age_hours=ExtractHour("age"))
-        .values("age_hours")
-        .annotate(count=Count("pk"))
-        .order_by("age_hours")
-    )
-    data = Counter({int(d["age_hours"]): d["count"] for d in result})
-    results = []
-    _now = now()
-    for hour in range(0, -24, -1):
-        results.append(
-            {
-                "x_cord": time.mktime((_now + timedelta(hours=hour)).timetuple()) * 1000,
-                "y_cord": data[hour * -1],
-            }
-        )
-    return results
+from authentik.events.models import EventAction


 class CoordinateSerializer(PassiveSerializer):
@@ -58,12 +27,22 @@ class LoginMetricsSerializer(PassiveSerializer):
     @extend_schema_field(CoordinateSerializer(many=True))
     def get_logins_per_1h(self, _):
         """Get successful logins per hour for the last 24 hours"""
-        return get_events_per_1h(action=EventAction.LOGIN)
+        user = self.context["user"]
+        return (
+            get_objects_for_user(user, "authentik_events.view_event")
+            .filter(action=EventAction.LOGIN)
+            .get_events_per_hour()
+        )

     @extend_schema_field(CoordinateSerializer(many=True))
     def get_logins_failed_per_1h(self, _):
         """Get failed logins per hour for the last 24 hours"""
-        return get_events_per_1h(action=EventAction.LOGIN_FAILED)
+        user = self.context["user"]
+        return (
+            get_objects_for_user(user, "authentik_events.view_event")
+            .filter(action=EventAction.LOGIN_FAILED)
+            .get_events_per_hour()
+        )


 class AdministrationMetricsViewSet(APIView):
@@ -75,4 +54,5 @@ class AdministrationMetricsViewSet(APIView):
     def get(self, request: Request) -> Response:
         """Login Metrics per 1h"""
         serializer = LoginMetricsSerializer(True)
+        serializer.context["user"] = request.user
         return Response(serializer.data)
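For readers following the change above: the module-level get_events_per_1h() helper is removed, and metrics are now built only from events the requesting user is allowed to view. A minimal sketch of the new call shape, assuming get_events_per_hour() is a custom method on authentik's Event queryset introduced alongside this change (get_objects_for_user is the real django-guardian API):

    from guardian.shortcuts import get_objects_for_user

    from authentik.events.models import EventAction


    def visible_login_metrics(user):
        """Per-hour login counts, scoped to events `user` may view (illustrative helper)."""
        return (
            get_objects_for_user(user, "authentik_events.view_event")
            .filter(action=EventAction.LOGIN)
            .get_events_per_hour()
        )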
@@ -86,7 +86,7 @@ class SystemSerializer(PassiveSerializer):
     def get_embedded_outpost_host(self, request: Request) -> str:
         """Get the FQDN configured on the embedded outpost"""
         outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST)
-        if not outposts.exists():
+        if not outposts.exists():  # pragma: no cover
             return ""
         return outposts.first().config.authentik_host

@@ -36,7 +36,7 @@ class TaskSerializer(PassiveSerializer):
         are pickled in cache. In that case, just delete the info"""
         try:
             return super().to_representation(instance)
-        except AttributeError:
+        except AttributeError:  # pragma: no cover
             if isinstance(self.instance, list):
                 for inst in self.instance:
                     inst.delete()
@@ -23,6 +23,6 @@ class WorkerView(APIView):
         """Get currently connected worker count."""
         count = len(CELERY_APP.control.ping(timeout=0.5))
         # In debug we run with `CELERY_TASK_ALWAYS_EAGER`, so tasks are ran on the main process
-        if settings.DEBUG:
+        if settings.DEBUG:  # pragma: no cover
             count += 1
         return Response({"count": count})
@@ -27,6 +27,7 @@ VERSION_CACHE_TIMEOUT = 8 * 60 * 60  # 8 hours
 # Chop of the first ^ because we want to search the entire string
 URL_FINDER = URLValidator.regex.pattern[1:]
 PROM_INFO = Info("authentik_version", "Currently running authentik version")
+LOCAL_VERSION = parse(__version__)


 def _set_prom_info():
@@ -48,12 +49,12 @@ def clear_update_notifications():
         if "new_version" not in notification.event.context:
             continue
         notification_version = notification.event.context["new_version"]
-        if notification_version == __version__:
+        if LOCAL_VERSION >= parse(notification_version):
             notification.delete()


 @CELERY_APP.task(bind=True, base=MonitoredTask)
-@prefill_task()
+@prefill_task
 def update_latest_version(self: MonitoredTask):
     """Update latest version info"""
     if CONFIG.y_bool("disable_update_check"):
@@ -74,8 +75,7 @@ def update_latest_version(self: MonitoredTask):
         _set_prom_info()
         # Check if upstream version is newer than what we're running,
         # and if no event exists yet, create one.
-        local_version = parse(__version__)
-        if local_version < parse(upstream_version):
+        if LOCAL_VERSION < parse(upstream_version):
             # Event has already been created, don't create duplicate
             if Event.objects.filter(
                 action=EventAction.UPDATE_AVAILABLE,
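The clear_update_notifications() change above replaces exact string equality with a semantic comparison via packaging, so notifications for the running version or anything older are removed. A small standalone illustration (packaging.version.parse is the real API; the version strings are examples only):

    from packaging.version import parse

    LOCAL_VERSION = parse("2021.12.2")

    for notified in ("2021.10.4", "2021.12.2", "2021.12.5"):
        # Old check: notified == __version__. New check: anything we already run or surpass is stale.
        print(notified, "stale" if LOCAL_VERSION >= parse(notified) else "still newer")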
@@ -8,6 +8,7 @@ from authentik import __version__
 from authentik.core.models import Group, User
 from authentik.core.tasks import clean_expired_models
 from authentik.events.monitored_tasks import TaskResultStatus
+from authentik.managed.tasks import managed_reconcile


 class TestAdminAPI(TestCase):
@@ -94,5 +95,7 @@ class TestAdminAPI(TestCase):

     def test_system(self):
         """Test system API"""
+        # pyright: reportGeneralTypeIssues=false
+        managed_reconcile()  # pylint: disable=no-value-for-parameter
         response = self.client.get(reverse("authentik_api:admin_system"))
         self.assertEqual(response.status_code, 200)
@@ -3,8 +3,13 @@ from django.core.cache import cache
 from django.test import TestCase
 from requests_mock import Mocker

-from authentik.admin.tasks import VERSION_CACHE_KEY, update_latest_version
+from authentik.admin.tasks import (
+    VERSION_CACHE_KEY,
+    clear_update_notifications,
+    update_latest_version,
+)
 from authentik.events.models import Event, EventAction
+from authentik.lib.config import CONFIG

 RESPONSE_VALID = {
     "$schema": "https://version.goauthentik.io/schema.json",
@@ -56,3 +61,23 @@ class TestAdminTasks(TestCase):
                 action=EventAction.UPDATE_AVAILABLE, context__new_version="0.0.0"
             ).exists()
         )
+
+    def test_version_disabled(self):
+        """Test Update checker while its disabled"""
+        with CONFIG.patch("disable_update_check", True):
+            update_latest_version.delay().get()
+            self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0")
+
+    def test_clear_update_notifications(self):
+        """Test clear of previous notification"""
+        Event.objects.create(
+            action=EventAction.UPDATE_AVAILABLE, context={"new_version": "99999999.9999999.9999999"}
+        )
+        Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={"new_version": "1.1.1"})
+        Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={})
+        clear_update_notifications()
+        self.assertFalse(
+            Event.objects.filter(
+                action=EventAction.UPDATE_AVAILABLE, context__new_version="1.1"
+            ).exists()
+        )
@@ -45,7 +45,8 @@ def bearer_auth(raw_header: bytes) -> Optional[User]:
         if not user:
             raise AuthenticationFailed("Token invalid/expired")
         return user
-    LOCAL.authentik[KEY_AUTH_VIA] = "api_token"
+    if hasattr(LOCAL, "authentik"):
+        LOCAL.authentik[KEY_AUTH_VIA] = "api_token"
     return tokens.first().user


@@ -59,7 +60,8 @@ def token_secret_key(value: str) -> Optional[User]:
     outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST)
     if not outposts:
         return None
-    LOCAL.authentik[KEY_AUTH_VIA] = "secret_key"
+    if hasattr(LOCAL, "authentik"):
+        LOCAL.authentik[KEY_AUTH_VIA] = "secret_key"
     outpost = outposts.first()
     return outpost.user

@@ -1,19 +0,0 @@
-"""API tasks"""
-
-from authentik.lib.utils.http import get_http_session
-from authentik.root.celery import CELERY_APP
-
-SENTRY_SESSION = get_http_session()
-
-
-@CELERY_APP.task()
-def sentry_proxy(payload: str):
-    """Relay data to sentry"""
-    SENTRY_SESSION.post(
-        "https://sentry.beryju.org/api/8/envelope/",
-        data=payload,
-        headers={
-            "Content-Type": "application/octet-stream",
-        },
-        timeout=10,
-    )
@@ -1,18 +0,0 @@
-"""Throttling classes"""
-from typing import Type
-
-from django.views import View
-from rest_framework.request import Request
-from rest_framework.throttling import ScopedRateThrottle
-
-
-class SessionThrottle(ScopedRateThrottle):
-    """Throttle based on session key"""
-
-    def allow_request(self, request: Request, view):
-        if request._request.user.is_superuser:
-            return True
-        return super().allow_request(request, view)
-
-    def get_cache_key(self, request: Request, view: Type[View]) -> str:
-        return f"authentik-throttle-session-{request._request.session.session_key}"
@@ -4,7 +4,7 @@ from django.urls import include, path
 from authentik.api.v3.urls import urlpatterns as v3_urls

 urlpatterns = [
-    # Remove in 2022.1
+    # TODO: Remove in 2022.1
     path("v2beta/", include(v3_urls)),
     path("v3/", include(v3_urls)),
 ]
@@ -5,7 +5,14 @@ from django.conf import settings
 from django.db import models
 from drf_spectacular.utils import extend_schema
 from kubernetes.config.incluster_config import SERVICE_HOST_ENV_NAME
-from rest_framework.fields import BooleanField, CharField, ChoiceField, IntegerField, ListField
+from rest_framework.fields import (
+    BooleanField,
+    CharField,
+    ChoiceField,
+    FloatField,
+    IntegerField,
+    ListField,
+)
 from rest_framework.permissions import AllowAny
 from rest_framework.request import Request
 from rest_framework.response import Response
@@ -24,13 +31,19 @@ class Capabilities(models.TextChoices):
     CAN_BACKUP = "can_backup"


+class ErrorReportingConfigSerializer(PassiveSerializer):
+    """Config for error reporting"""
+
+    enabled = BooleanField(read_only=True)
+    environment = CharField(read_only=True)
+    send_pii = BooleanField(read_only=True)
+    traces_sample_rate = FloatField(read_only=True)
+
+
 class ConfigSerializer(PassiveSerializer):
     """Serialize authentik Config into DRF Object"""

-    error_reporting_enabled = BooleanField(read_only=True)
-    error_reporting_environment = CharField(read_only=True)
-    error_reporting_send_pii = BooleanField(read_only=True)
+    error_reporting = ErrorReportingConfigSerializer(required=True)

     capabilities = ListField(child=ChoiceField(choices=Capabilities.choices))

     cache_timeout = IntegerField(required=True)
@@ -66,9 +79,12 @@ class ConfigView(APIView):
         """Retrieve public configuration options"""
         config = ConfigSerializer(
             {
-                "error_reporting_enabled": CONFIG.y("error_reporting.enabled"),
-                "error_reporting_environment": CONFIG.y("error_reporting.environment"),
-                "error_reporting_send_pii": CONFIG.y("error_reporting.send_pii"),
+                "error_reporting": {
+                    "enabled": CONFIG.y("error_reporting.enabled"),
+                    "environment": CONFIG.y("error_reporting.environment"),
+                    "send_pii": CONFIG.y("error_reporting.send_pii"),
+                    "traces_sample_rate": float(CONFIG.y("error_reporting.sample_rate", 0.4)),
+                },
                 "capabilities": self.get_capabilities(),
                 "cache_timeout": int(CONFIG.y("redis.cache_timeout")),
                 "cache_timeout_flows": int(CONFIG.y("redis.cache_timeout_flows")),
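After the ConfigSerializer change above, error-reporting settings are served as one nested error_reporting object instead of flat error_reporting_* keys. A sketch of the resulting payload shape, written as a Python literal; the values are illustrative, not taken from a real deployment:

    config_after = {
        "error_reporting": {
            "enabled": False,
            "environment": "customer",
            "send_pii": False,
            "traces_sample_rate": 0.4,
        },
        "capabilities": ["can_backup"],
        "cache_timeout": 300,
        "cache_timeout_flows": 300,
    }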
@@ -1,65 +0,0 @@
-"""Sentry tunnel"""
-from json import loads
-
-from django.conf import settings
-from django.http.request import HttpRequest
-from django.http.response import HttpResponse
-from rest_framework.authentication import SessionAuthentication
-from rest_framework.parsers import BaseParser
-from rest_framework.permissions import AllowAny
-from rest_framework.request import Request
-from rest_framework.throttling import AnonRateThrottle
-from rest_framework.views import APIView
-from structlog.stdlib import get_logger
-
-from authentik.api.tasks import sentry_proxy
-from authentik.lib.config import CONFIG
-
-LOGGER = get_logger()
-
-
-class PlainTextParser(BaseParser):
-    """Plain text parser."""
-
-    media_type = "text/plain"
-
-    def parse(self, stream, media_type=None, parser_context=None) -> str:
-        """Simply return a string representing the body of the request."""
-        return stream.read()
-
-
-class CsrfExemptSessionAuthentication(SessionAuthentication):
-    """CSRF-exempt Session authentication"""
-
-    def enforce_csrf(self, request: Request):
-        return  # To not perform the csrf check previously happening
-
-
-class SentryTunnelView(APIView):
-    """Sentry tunnel, to prevent ad blockers from blocking sentry"""
-
-    serializer_class = None
-    parser_classes = [PlainTextParser]
-    throttle_classes = [AnonRateThrottle]
-    permission_classes = [AllowAny]
-    authentication_classes = [CsrfExemptSessionAuthentication]
-
-    def post(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
-        """Sentry tunnel, to prevent ad blockers from blocking sentry"""
-        # Only allow usage of this endpoint when error reporting is enabled
-        if not CONFIG.y_bool("error_reporting.enabled", False):
-            LOGGER.debug("error reporting disabled")
-            return HttpResponse(status=400)
-        # Body is 2 json objects separated by \n
-        full_body = request.body
-        lines = full_body.splitlines()
-        if len(lines) < 1:
-            return HttpResponse(status=400)
-        header = loads(lines[0])
-        # Check that the DSN is what we expect
-        dsn = header.get("dsn", "")
-        if dsn != settings.SENTRY_DSN:
-            LOGGER.debug("Invalid dsn", have=dsn, expected=settings.SENTRY_DSN)
-            return HttpResponse(status=400)
-        sentry_proxy.delay(full_body.decode())
-        return HttpResponse(status=204)
@@ -11,14 +11,14 @@ from authentik.admin.api.tasks import TaskViewSet
 from authentik.admin.api.version import VersionView
 from authentik.admin.api.workers import WorkerView
 from authentik.api.v3.config import ConfigView
-from authentik.api.v3.sentry import SentryTunnelView
 from authentik.api.views import APIBrowserView
 from authentik.core.api.applications import ApplicationViewSet
 from authentik.core.api.authenticated_sessions import AuthenticatedSessionViewSet
+from authentik.core.api.devices import DeviceViewSet
 from authentik.core.api.groups import GroupViewSet
 from authentik.core.api.propertymappings import PropertyMappingViewSet
 from authentik.core.api.providers import ProviderViewSet
-from authentik.core.api.sources import SourceViewSet
+from authentik.core.api.sources import SourceViewSet, UserSourceConnectionViewSet
 from authentik.core.api.tokens import TokenViewSet
 from authentik.core.api.users import UserViewSet
 from authentik.crypto.api import CertificateKeyPairViewSet
@@ -136,6 +136,7 @@ router.register("events/transports", NotificationTransportViewSet)
 router.register("events/rules", NotificationRuleViewSet)

 router.register("sources/all", SourceViewSet)
+router.register("sources/user_connections/all", UserSourceConnectionViewSet)
 router.register("sources/user_connections/oauth", UserOAuthSourceConnectionViewSet)
 router.register("sources/user_connections/plex", PlexSourceConnectionViewSet)
 router.register("sources/ldap", LDAPSourceViewSet)
@@ -169,6 +170,7 @@ router.register("propertymappings/saml", SAMLPropertyMappingViewSet)
 router.register("propertymappings/scope", ScopeMappingViewSet)
 router.register("propertymappings/notification", NotificationWebhookMappingViewSet)

+router.register("authenticators/all", DeviceViewSet, basename="device")
 router.register("authenticators/duo", DuoDeviceViewSet)
 router.register("authenticators/sms", SMSDeviceViewSet)
 router.register("authenticators/static", StaticDeviceViewSet)
@@ -246,7 +248,6 @@ urlpatterns = (
         FlowInspectorView.as_view(),
         name="flow-inspector",
     ),
-    path("sentry/", SentryTunnelView.as_view(), name="sentry"),
     path("schema/", cache_page(86400)(SpectacularAPIView.as_view()), name="schema"),
     ]
 )
@@ -5,6 +5,7 @@ from django.http.response import HttpResponseBadRequest
 from django.shortcuts import get_object_or_404
 from drf_spectacular.types import OpenApiTypes
 from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
+from guardian.shortcuts import get_objects_for_user
 from rest_framework.decorators import action
 from rest_framework.fields import ReadOnlyField
 from rest_framework.parsers import MultiPartParser
@@ -15,7 +16,7 @@ from rest_framework.viewsets import ModelViewSet
 from rest_framework_guardian.filters import ObjectPermissionsFilter
 from structlog.stdlib import get_logger

-from authentik.admin.api.metrics import CoordinateSerializer, get_events_per_1h
+from authentik.admin.api.metrics import CoordinateSerializer
 from authentik.api.decorators import permission_required
 from authentik.core.api.providers import ProviderSerializer
 from authentik.core.api.used_by import UsedByMixin
@@ -239,8 +240,10 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
         """Metrics for application logins"""
         app = self.get_object()
         return Response(
-            get_events_per_1h(
+            get_objects_for_user(request.user, "authentik_events.view_event")
+            .filter(
                 action=EventAction.AUTHORIZE_APPLICATION,
                 context__authorized_application__pk=app.pk.hex,
             )
+            .get_events_per_hour()
         )
authentik/core/api/devices.py (new file)
@@ -0,0 +1,36 @@
+"""Authenticator Devices API Views"""
+from django_otp import devices_for_user
+from django_otp.models import Device
+from drf_spectacular.utils import extend_schema
+from rest_framework.fields import CharField, IntegerField, SerializerMethodField
+from rest_framework.permissions import IsAuthenticated
+from rest_framework.request import Request
+from rest_framework.response import Response
+from rest_framework.viewsets import ViewSet
+
+from authentik.core.api.utils import MetaNameSerializer
+
+
+class DeviceSerializer(MetaNameSerializer):
+    """Serializer for Duo authenticator devices"""
+
+    pk = IntegerField()
+    name = CharField()
+    type = SerializerMethodField()
+
+    def get_type(self, instance: Device) -> str:
+        """Get type of device"""
+        return instance._meta.label
+
+
+class DeviceViewSet(ViewSet):
+    """Viewset for authenticator devices"""
+
+    serializer_class = DeviceSerializer
+    permission_classes = [IsAuthenticated]
+
+    @extend_schema(responses={200: DeviceSerializer(many=True)})
+    def list(self, request: Request) -> Response:
+        """Get all devices for current user"""
+        devices = devices_for_user(request.user)
+        return Response(DeviceSerializer(devices, many=True).data)
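The new DeviceViewSet is registered further below as authenticators/all. A hedged usage sketch with the requests library; the host, the token and the /api/v3/ prefix are assumptions for illustration, not taken from this diff:

    import requests

    AUTHENTIK_URL = "https://authentik.example.com"  # hypothetical host
    TOKEN = "<api-token>"  # hypothetical API token

    resp = requests.get(
        f"{AUTHENTIK_URL}/api/v3/authenticators/all/",
        headers={"Authorization": f"Bearer {TOKEN}"},
        timeout=10,
    )
    for device in resp.json():  # list of objects carrying pk, name and type
        print(device["name"], device["type"])  # type is the model label, e.g. "otp_totp.TOTPDevice"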
@@ -1,9 +1,11 @@
 """Groups API Viewset"""
+from json import loads
+
 from django.db.models.query import QuerySet
-from django_filters.filters import ModelMultipleChoiceFilter
+from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
 from django_filters.filterset import FilterSet
 from rest_framework.fields import CharField, JSONField
-from rest_framework.serializers import ListSerializer, ModelSerializer
+from rest_framework.serializers import ListSerializer, ModelSerializer, ValidationError
 from rest_framework.viewsets import ModelViewSet
 from rest_framework_guardian.filters import ObjectPermissionsFilter

@@ -42,6 +44,7 @@ class GroupSerializer(ModelSerializer):
     users_obj = ListSerializer(
         child=GroupMemberSerializer(), read_only=True, source="users", required=False
     )
+    parent_name = CharField(source="parent.name", read_only=True)

     class Meta:

@@ -51,6 +54,7 @@ class GroupSerializer(ModelSerializer):
             "name",
             "is_superuser",
             "parent",
+            "parent_name",
             "users",
             "attributes",
             "users_obj",
@@ -60,6 +64,13 @@ class GroupSerializer(ModelSerializer):
 class GroupFilter(FilterSet):
     """Filter for groups"""

+    attributes = CharFilter(
+        field_name="attributes",
+        lookup_expr="",
+        label="Attributes",
+        method="filter_attributes",
+    )
+
     members_by_username = ModelMultipleChoiceFilter(
         field_name="users__username",
         to_field_name="username",
@@ -70,10 +81,28 @@ class GroupFilter(FilterSet):
         queryset=User.objects.all(),
     )

+    # pylint: disable=unused-argument
+    def filter_attributes(self, queryset, name, value):
+        """Filter attributes by query args"""
+        try:
+            value = loads(value)
+        except ValueError:
+            raise ValidationError(detail="filter: failed to parse JSON")
+        if not isinstance(value, dict):
+            raise ValidationError(detail="filter: value must be key:value mapping")
+        qs = {}
+        for key, _value in value.items():
+            qs[f"attributes__{key}"] = _value
+        try:
+            _ = len(queryset.filter(**qs))
+            return queryset.filter(**qs)
+        except ValueError:
+            return queryset
+
     class Meta:

         model = Group
-        fields = ["name", "is_superuser", "members_by_pk", "members_by_username"]
+        fields = ["name", "is_superuser", "members_by_pk", "attributes", "members_by_username"]


 class GroupViewSet(UsedByMixin, ModelViewSet):
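The new attributes filter on GroupFilter parses a JSON object from the query string and turns every key into an attributes__<key> lookup on the JSONField. A sketch of building such a request; the core/groups/ path is assumed from authentik's usual API layout and not shown in this diff:

    from json import dumps
    from urllib.parse import urlencode

    # Find groups whose attributes contain {"department": "engineering"}.
    query = urlencode({"attributes": dumps({"department": "engineering"})})
    url = f"/api/v3/core/groups/?{query}"
    # Server-side this becomes roughly:
    # Group.objects.filter(attributes__department="engineering")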
@@ -56,6 +56,7 @@ class PropertyMappingSerializer(ManagedSerializer, ModelSerializer, MetaNameSerializer):
             "component",
             "verbose_name",
             "verbose_name_plural",
+            "meta_model_name",
         ]

@@ -43,6 +43,7 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
             "assigned_application_name",
             "verbose_name",
             "verbose_name_plural",
+            "meta_model_name",
         ]

@@ -1,18 +1,21 @@
 """Source API Views"""
 from typing import Iterable

+from django_filters.rest_framework import DjangoFilterBackend
 from drf_spectacular.utils import extend_schema
 from rest_framework import mixins
 from rest_framework.decorators import action
+from rest_framework.filters import OrderingFilter, SearchFilter
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.serializers import ModelSerializer, SerializerMethodField
 from rest_framework.viewsets import GenericViewSet
 from structlog.stdlib import get_logger

+from authentik.api.authorization import OwnerFilter, OwnerPermissions
 from authentik.core.api.used_by import UsedByMixin
 from authentik.core.api.utils import MetaNameSerializer, TypeCreateSerializer
-from authentik.core.models import Source
+from authentik.core.models import Source, UserSourceConnection
 from authentik.core.types import UserSettingSerializer
 from authentik.lib.utils.reflection import all_subclasses
 from authentik.policies.engine import PolicyEngine
@@ -45,6 +48,7 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):
             "component",
             "verbose_name",
             "verbose_name_plural",
+            "meta_model_name",
             "policy_engine_mode",
             "user_matching_mode",
         ]
@@ -100,16 +104,52 @@ class SourceViewSet(
         )
         matching_sources: list[UserSettingSerializer] = []
         for source in _all_sources:
-            user_settings = source.ui_user_settings
+            user_settings = source.ui_user_settings()
             if not user_settings:
                 continue
             policy_engine = PolicyEngine(source, request.user, request)
             policy_engine.build()
             if not policy_engine.passing:
                 continue
-            source_settings = source.ui_user_settings
+            source_settings = source.ui_user_settings()
             source_settings.initial_data["object_uid"] = source.slug
             if not source_settings.is_valid():
                 LOGGER.warning(source_settings.errors)
             matching_sources.append(source_settings.validated_data)
         return Response(matching_sources)
+
+
+class UserSourceConnectionSerializer(SourceSerializer):
+    """OAuth Source Serializer"""
+
+    source = SourceSerializer(read_only=True)
+
+    class Meta:
+        model = UserSourceConnection
+        fields = [
+            "pk",
+            "user",
+            "source",
+            "created",
+        ]
+        extra_kwargs = {
+            "user": {"read_only": True},
+            "created": {"read_only": True},
+        }
+
+
+class UserSourceConnectionViewSet(
+    mixins.RetrieveModelMixin,
+    mixins.UpdateModelMixin,
+    mixins.DestroyModelMixin,
+    UsedByMixin,
+    mixins.ListModelMixin,
+    GenericViewSet,
+):
+    """User-source connection Viewset"""
+
+    queryset = UserSourceConnection.objects.all()
+    serializer_class = UserSourceConnectionSerializer
+    permission_classes = [OwnerPermissions]
+    filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
+    ordering = ["pk"]
@@ -38,13 +38,14 @@ from rest_framework.viewsets import ModelViewSet
 from rest_framework_guardian.filters import ObjectPermissionsFilter
 from structlog.stdlib import get_logger

-from authentik.admin.api.metrics import CoordinateSerializer, get_events_per_1h
+from authentik.admin.api.metrics import CoordinateSerializer
 from authentik.api.decorators import permission_required
 from authentik.core.api.groups import GroupSerializer
 from authentik.core.api.used_by import UsedByMixin
 from authentik.core.api.utils import LinkSerializer, PassiveSerializer, is_dict
 from authentik.core.middleware import SESSION_IMPERSONATE_ORIGINAL_USER, SESSION_IMPERSONATE_USER
 from authentik.core.models import (
+    USER_ATTRIBUTE_CHANGE_EMAIL,
     USER_ATTRIBUTE_CHANGE_USERNAME,
     USER_ATTRIBUTE_SA,
     USER_ATTRIBUTE_TOKEN_EXPIRING,
@@ -54,6 +55,7 @@ from authentik.core.models import (
     User,
 )
 from authentik.events.models import EventAction
+from authentik.lib.config import CONFIG
 from authentik.stages.email.models import EmailStage
 from authentik.stages.email.tasks import send_mails
 from authentik.stages.email.utils import TemplateEmailMessage
@@ -122,9 +124,21 @@ class UserSelfSerializer(ModelSerializer):
                 "pk": group.pk,
             }

+    def validate_email(self, email: str):
+        """Check if the user is allowed to change their email"""
+        if self.instance.group_attributes().get(
+            USER_ATTRIBUTE_CHANGE_EMAIL, CONFIG.y_bool("default_user_change_email", True)
+        ):
+            return email
+        if email != self.instance.email:
+            raise ValidationError("Not allowed to change email.")
+        return email
+
     def validate_username(self, username: str):
         """Check if the user is allowed to change their username"""
-        if self.instance.group_attributes().get(USER_ATTRIBUTE_CHANGE_USERNAME, True):
+        if self.instance.group_attributes().get(
+            USER_ATTRIBUTE_CHANGE_USERNAME, CONFIG.y_bool("default_user_change_username", True)
+        ):
             return username
         if username != self.instance.username:
             raise ValidationError("Not allowed to change username.")
@@ -170,19 +184,31 @@ class UserMetricsSerializer(PassiveSerializer):
     def get_logins_per_1h(self, _):
         """Get successful logins per hour for the last 24 hours"""
         user = self.context["user"]
-        return get_events_per_1h(action=EventAction.LOGIN, user__pk=user.pk)
+        return (
+            get_objects_for_user(user, "authentik_events.view_event")
+            .filter(action=EventAction.LOGIN, user__pk=user.pk)
+            .get_events_per_hour()
+        )

     @extend_schema_field(CoordinateSerializer(many=True))
     def get_logins_failed_per_1h(self, _):
         """Get failed logins per hour for the last 24 hours"""
         user = self.context["user"]
-        return get_events_per_1h(action=EventAction.LOGIN_FAILED, context__username=user.username)
+        return (
+            get_objects_for_user(user, "authentik_events.view_event")
+            .filter(action=EventAction.LOGIN_FAILED, context__username=user.username)
+            .get_events_per_hour()
+        )

     @extend_schema_field(CoordinateSerializer(many=True))
     def get_authorizations_per_1h(self, _):
         """Get failed logins per hour for the last 24 hours"""
         user = self.context["user"]
-        return get_events_per_1h(action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk)
+        return (
+            get_objects_for_user(user, "authentik_events.view_event")
+            .filter(action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk)
+            .get_events_per_hour()
+        )


 class UsersFilter(FilterSet):
@@ -219,7 +245,11 @@ class UsersFilter(FilterSet):
         qs = {}
         for key, _value in value.items():
             qs[f"attributes__{key}"] = _value
-        return queryset.filter(**qs)
+        try:
+            _ = len(queryset.filter(**qs))
+            return queryset.filter(**qs)
+        except ValueError:
+            return queryset

     class Meta:
         model = User
@@ -300,7 +330,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
             name=username,
             attributes={USER_ATTRIBUTE_SA: True, USER_ATTRIBUTE_TOKEN_EXPIRING: False},
         )
-        if create_group:
+        if create_group and self.request.user.has_perm("authentik_core.add_group"):
             group = Group.objects.create(
                 name=username,
             )
@@ -320,13 +350,14 @@ class UserViewSet(UsedByMixin, ModelViewSet):
     # pylint: disable=invalid-name
     def me(self, request: Request) -> Response:
         """Get information about current user"""
-        serializer = SessionUserSerializer(data={"user": UserSelfSerializer(request.user).data})
+        serializer = SessionUserSerializer(
+            data={"user": UserSelfSerializer(instance=request.user).data}
+        )
         if SESSION_IMPERSONATE_USER in request._request.session:
             serializer.initial_data["original"] = UserSelfSerializer(
-                request._request.session[SESSION_IMPERSONATE_ORIGINAL_USER]
+                instance=request._request.session[SESSION_IMPERSONATE_ORIGINAL_USER]
             ).data
-        serializer.is_valid()
-        return Response(serializer.data)
+        return Response(serializer.initial_data)

     @extend_schema(request=UserSelfSerializer, responses={200: SessionUserSerializer(many=False)})
     @action(
@@ -346,9 +377,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
         # since it caches the full object
         if SESSION_IMPERSONATE_USER in request.session:
             request.session[SESSION_IMPERSONATE_USER] = new_user
-        serializer = SessionUserSerializer(data={"user": data.data})
-        serializer.is_valid()
-        return Response(serializer.data)
+        return Response({"user": data.data})

     @permission_required("authentik_core.view_user", ["authentik_events.view_event"])
     @extend_schema(responses={200: UserMetricsSerializer(many=False)})
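The new validate_email hook mirrors validate_username: a group attribute (falling back to the new default_user_change_email config option) decides whether users may change the field themselves. A hedged sketch of disabling email changes for one group; the group name is made up:

    from authentik.core.models import USER_ATTRIBUTE_CHANGE_EMAIL, Group

    group = Group.objects.get(name="employees")  # hypothetical group
    group.attributes[USER_ATTRIBUTE_CHANGE_EMAIL] = False
    group.save()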
@@ -41,6 +41,7 @@ class MetaNameSerializer(PassiveSerializer):

     verbose_name = SerializerMethodField()
     verbose_name_plural = SerializerMethodField()
+    meta_model_name = SerializerMethodField()

     def get_verbose_name(self, obj: Model) -> str:
         """Return object's verbose_name"""
@@ -50,6 +51,10 @@ class MetaNameSerializer(PassiveSerializer):
         """Return object's plural verbose_name"""
         return obj._meta.verbose_name_plural

+    def get_meta_model_name(self, obj: Model) -> str:
+        """Return internal model name"""
+        return f"{obj._meta.app_label}.{obj._meta.model_name}"
+

 class TypeCreateSerializer(PassiveSerializer):
     """Types of an object that can be created"""
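meta_model_name gives API clients a stable app_label.model_name identifier for any serialized object. For a Group instance, for example:

    from authentik.core.models import Group

    group = Group(name="example")
    print(f"{group._meta.app_label}.{group._meta.model_name}")  # -> "authentik_core.group"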
@@ -55,5 +55,5 @@ class TokenBackend(InbuiltBackend):
         if not tokens.exists():
             return None
         token = tokens.first()
-        self.set_method("password", request, token=token)
+        self.set_method("token", request, token=token)
         return token.user

@@ -5,6 +5,7 @@ from typing import Callable
 from uuid import uuid4

 from django.http import HttpRequest, HttpResponse
+from sentry_sdk.api import set_tag

 SESSION_IMPERSONATE_USER = "authentik_impersonate_user"
 SESSION_IMPERSONATE_ORIGINAL_USER = "authentik_impersonate_original_user"
@@ -12,7 +13,6 @@ LOCAL = local()
 RESPONSE_HEADER_ID = "X-authentik-id"
 KEY_AUTH_VIA = "auth_via"
 KEY_USER = "user"
-INTERNAL_HEADER_PREFIX = "X-authentik-internal-"


 class ImpersonateMiddleware:
@@ -51,11 +51,12 @@ class RequestIDMiddleware:
             "request_id": request_id,
             "host": request.get_host(),
         }
+        set_tag("authentik.request_id", request_id)
         response = self.get_response(request)
         response[RESPONSE_HEADER_ID] = request.request_id
-        if auth_via := LOCAL.authentik.get(KEY_AUTH_VIA, None):
-            response[INTERNAL_HEADER_PREFIX + KEY_AUTH_VIA] = auth_via
-            response[INTERNAL_HEADER_PREFIX + KEY_USER] = request.user.username
+        setattr(response, "ak_context", {})
+        response.ak_context.update(LOCAL.authentik)
+        response.ak_context[KEY_USER] = request.user.username
         for key in list(LOCAL.authentik.keys()):
             del LOCAL.authentik[key]
         return response
@@ -66,4 +67,6 @@ def structlog_add_request_id(logger: Logger, method_name: str, event_dict: dict):
     """If threadlocal has authentik defined, add request_id to log"""
     if hasattr(LOCAL, "authentik"):
         event_dict.update(LOCAL.authentik)
+    if hasattr(LOCAL, "authentik_task"):
+        event_dict.update(LOCAL.authentik_task)
     return event_dict

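set_tag comes from the Sentry SDK and attaches a key/value tag to events reported from the current scope, so server-side errors can be matched against the X-authentik-id response header. Minimal illustration; the DSN and request id are placeholders:

    import sentry_sdk
    from sentry_sdk.api import set_tag

    sentry_sdk.init(dsn="https://publickey@sentry.example.com/1")  # placeholder DSN
    set_tag("authentik.request_id", "placeholder-request-id")
    # Any exception captured after this call carries the tag.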
@@ -3,7 +3,6 @@
 import uuid
 from os import environ

-import django.core.validators
 import django.db.models.deletion
 from django.apps.registry import Apps
 from django.conf import settings
@@ -12,6 +11,7 @@ from django.db.backends.base.schema import BaseDatabaseSchemaEditor
 from django.db.models import Count

 import authentik.core.models
+import authentik.lib.models


 def migrate_sessions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
@@ -161,7 +161,7 @@ class Migration(migrations.Migration):
             model_name="application",
             name="meta_launch_url",
             field=models.TextField(
-                blank=True, default="", validators=[django.core.validators.URLValidator()]
+                blank=True, default="", validators=[authentik.lib.models.DomainlessURLValidator()]
             ),
         ),
         migrations.RunPython(
@@ -1,8 +1,9 @@
 # Generated by Django 3.2.3 on 2021-06-02 21:51

-import django.core.validators
 from django.db import migrations, models

+import authentik.lib.models
+

 class Migration(migrations.Migration):

@@ -17,7 +18,7 @@ class Migration(migrations.Migration):
             field=models.TextField(
                 blank=True,
                 default="",
-                validators=[django.core.validators.URLValidator()],
+                validators=[authentik.lib.models.DomainlessURLValidator()],
             ),
         ),
     ]
|
@@ -9,7 +9,6 @@ from deepmerge import always_merger
 from django.conf import settings
 from django.contrib.auth.models import AbstractUser
 from django.contrib.auth.models import UserManager as DjangoUserManager
-from django.core import validators
 from django.db import models
 from django.db.models import Q, QuerySet, options
 from django.http import HttpRequest
@@ -26,10 +25,9 @@ from structlog.stdlib import get_logger
 from authentik.core.exceptions import PropertyMappingExpressionException
 from authentik.core.signals import password_changed
 from authentik.core.types import UILoginButton, UserSettingSerializer
-from authentik.flows.models import Flow
 from authentik.lib.config import CONFIG
 from authentik.lib.generators import generate_id
-from authentik.lib.models import CreatedUpdatedModel, SerializerModel
+from authentik.lib.models import CreatedUpdatedModel, DomainlessURLValidator, SerializerModel
 from authentik.lib.utils.http import get_client_ip
 from authentik.managed.models import ManagedModel
 from authentik.policies.models import PolicyBindingModel
@@ -39,7 +37,8 @@ USER_ATTRIBUTE_DEBUG = "goauthentik.io/user/debug"
 USER_ATTRIBUTE_SA = "goauthentik.io/user/service-account"
 USER_ATTRIBUTE_SOURCES = "goauthentik.io/user/sources"
 USER_ATTRIBUTE_TOKEN_EXPIRING = "goauthentik.io/user/token-expires"  # nosec
-USER_ATTRIBUTE_CHANGE_USERNAME = "goauthentik.io/user/can-change-username"  # nosec
+USER_ATTRIBUTE_CHANGE_USERNAME = "goauthentik.io/user/can-change-username"
+USER_ATTRIBUTE_CHANGE_EMAIL = "goauthentik.io/user/can-change-email"
 USER_ATTRIBUTE_CAN_OVERRIDE_IP = "goauthentik.io/user/override-ips"
 
 GRAVATAR_URL = "https://secure.gravatar.com"
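The new `USER_ATTRIBUTE_CHANGE_EMAIL` flag follows the same pattern as the existing username flag: a per-user attribute that self-service updates consult before allowing a change (a later test in this diff sets it to `False` and expects a 400). A minimal standalone sketch of that gating idea, using a hypothetical helper rather than the actual authentik view code:

```python
USER_ATTRIBUTE_CHANGE_USERNAME = "goauthentik.io/user/can-change-username"
USER_ATTRIBUTE_CHANGE_EMAIL = "goauthentik.io/user/can-change-email"


def can_change(attributes: dict, flag: str) -> bool:
    """Hypothetical helper: a change is allowed unless the flag is explicitly disabled."""
    return bool(attributes.get(flag, True))


attrs = {USER_ATTRIBUTE_CHANGE_EMAIL: False}
assert can_change(attrs, USER_ATTRIBUTE_CHANGE_USERNAME)
assert not can_change(attrs, USER_ATTRIBUTE_CHANGE_EMAIL)
```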
@@ -80,6 +79,27 @@ class Group(models.Model):
     )
     attributes = models.JSONField(default=dict, blank=True)
 
+    def is_member(self, user: "User") -> bool:
+        """Recursively check if `user` is member of us, or any parent."""
+        query = """
+        WITH RECURSIVE parents AS (
+            SELECT authentik_core_group.*, 0 AS relative_depth
+            FROM authentik_core_group
+            WHERE authentik_core_group.group_uuid = %s
+
+            UNION ALL
+
+            SELECT authentik_core_group.*, parents.relative_depth - 1
+            FROM authentik_core_group,parents
+            WHERE authentik_core_group.parent_id = parents.group_uuid
+        )
+        SELECT group_uuid
+        FROM parents
+        GROUP BY group_uuid;
+        """
+        groups = Group.objects.raw(query, [self.group_uuid])
+        return user.ak_groups.filter(pk__in=[group.pk for group in groups]).exists()
+
     def __str__(self):
         return f"Group {self.name}"
 
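For readers unfamiliar with recursive CTEs: `is_member` collects the group itself plus every group that transitively has it as a parent, then checks whether the user belongs to any of them. A standalone sketch of the same query shape on a toy sqlite3 schema (table and column names are invented for illustration, not authentik's schema):

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript(
    """
    CREATE TABLE grp (id INTEGER PRIMARY KEY, name TEXT, parent_id INTEGER);
    INSERT INTO grp VALUES (1, 'first', NULL), (2, 'second', 1), (3, 'third', 2);
    """
)

# Starting from 'first', gather it and every group nested underneath it;
# a user attached to any of those groups counts as a member of 'first'.
rows = conn.execute(
    """
    WITH RECURSIVE members AS (
        SELECT * FROM grp WHERE id = ?
        UNION ALL
        SELECT grp.* FROM grp, members WHERE grp.parent_id = members.id
    )
    SELECT name FROM members;
    """,
    (1,),
).fetchall()
print(sorted(name for (name,) in rows))  # ['first', 'second', 'third']
```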
@@ -152,7 +172,7 @@ class User(GuardianUserMixin, AbstractUser):
         if mode == "none":
             return DEFAULT_AVATAR
         # gravatar uses md5 for their URLs, so md5 can't be avoided
-        mail_hash = md5(self.email.encode("utf-8")).hexdigest()  # nosec
+        mail_hash = md5(self.email.lower().encode("utf-8")).hexdigest()  # nosec
         if mode == "gravatar":
             parameters = [
                 ("s", "158"),
@@ -182,7 +202,7 @@ class Provider(SerializerModel):
     name = models.TextField()
 
     authorization_flow = models.ForeignKey(
-        Flow,
+        "authentik_flows.Flow",
         on_delete=models.CASCADE,
         help_text=_("Flow used when authorizing this provider."),
         related_name="provider_authorization",
@@ -224,7 +244,7 @@ class Application(PolicyBindingModel):
     )
 
     meta_launch_url = models.TextField(
-        default="", blank=True, validators=[validators.URLValidator()]
+        default="", blank=True, validators=[DomainlessURLValidator()]
     )
     # For template applications, this can be set to /static/authentik/applications/*
     meta_icon = models.FileField(
@@ -242,7 +262,7 @@ class Application(PolicyBindingModel):
         it is returned as-is"""
         if not self.meta_icon:
             return None
-        if self.meta_icon.name.startswith("http") or self.meta_icon.name.startswith("/static"):
+        if "://" in self.meta_icon.name or self.meta_icon.name.startswith("/static"):
             return self.meta_icon.name
         return self.meta_icon.url
 
@@ -258,7 +278,13 @@ class Application(PolicyBindingModel):
         """Get casted provider instance"""
         if not self.provider:
             return None
-        return Provider.objects.get_subclass(pk=self.provider.pk)
+        # if the Application class has been cached, self.provider is set,
+        # but doing a direct query lookup will fail.
+        # In that case, just return None
+        try:
+            return Provider.objects.get_subclass(pk=self.provider.pk)
+        except Provider.DoesNotExist:
+            return None
 
     def __str__(self):
         return self.name
@@ -303,7 +329,7 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
     property_mappings = models.ManyToManyField("PropertyMapping", default=None, blank=True)
 
     authentication_flow = models.ForeignKey(
-        Flow,
+        "authentik_flows.Flow",
         blank=True,
         null=True,
         default=None,
@@ -312,7 +338,7 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
         related_name="source_authentication",
     )
     enrollment_flow = models.ForeignKey(
-        Flow,
+        "authentik_flows.Flow",
         blank=True,
         null=True,
         default=None,
@@ -339,13 +365,11 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
         """Return component used to edit this object"""
         raise NotImplementedError
 
-    @property
-    def ui_login_button(self) -> Optional[UILoginButton]:
+    def ui_login_button(self, request: HttpRequest) -> Optional[UILoginButton]:
         """If source uses a http-based flow, return UI Information about the login
         button. If source doesn't use http-based flow, return None."""
         return None
 
-    @property
     def ui_user_settings(self) -> Optional[UserSettingSerializer]:
         """Entrypoint to integrate with User settings. Can either return None if no
         user settings are available, or UserSettingSerializer."""
@@ -432,6 +456,14 @@ class Token(ManagedModel, ExpiringModel):
         """Handler which is called when this object is expired."""
         from authentik.events.models import Event, EventAction
 
+        if self.intent in [
+            TokenIntents.INTENT_RECOVERY,
+            TokenIntents.INTENT_VERIFICATION,
+            TokenIntents.INTENT_APP_PASSWORD,
+        ]:
+            super().expire_action(*args, **kwargs)
+            return
+
         self.key = default_token_key()
         self.expires = default_token_duration()
         self.save(*args, **kwargs)
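The effect of this branch is that recovery, verification, and app-password tokens fall back to the parent class's default expiry handling, while all other tokens are rotated in place with a fresh key and expiry. A rough standalone sketch of that split, using invented names rather than the authentik models:

```python
import secrets
from dataclasses import dataclass, field
from datetime import datetime, timedelta
from enum import Enum


class Intent(Enum):
    API = "api"
    RECOVERY = "recovery"
    VERIFICATION = "verification"
    APP_PASSWORD = "app_password"


@dataclass
class Token:
    intent: Intent
    key: str = field(default_factory=lambda: secrets.token_hex(32))
    expires: datetime = field(default_factory=datetime.utcnow)
    deleted: bool = False

    def expire_action(self) -> None:
        # Single-use intents fall through to the default handling; everything else rotates.
        if self.intent in (Intent.RECOVERY, Intent.VERIFICATION, Intent.APP_PASSWORD):
            self.deleted = True  # stand-in for whatever the parent class does on expiry
            return
        self.key = secrets.token_hex(32)
        self.expires = datetime.utcnow() + timedelta(days=30)


api = Token(intent=Intent.API)
old_key = api.key
api.expire_action()
assert api.key != old_key and not api.deleted

recovery = Token(intent=Intent.RECOVERY)
recovery.expire_action()
assert recovery.deleted
```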
@@ -6,6 +6,7 @@ from os import environ
 from boto3.exceptions import Boto3Error
 from botocore.exceptions import BotoCoreError, ClientError
 from dbbackup.db.exceptions import CommandConnectorError
+from django.conf import settings
 from django.contrib.humanize.templatetags.humanize import naturaltime
 from django.contrib.sessions.backends.cache import KEY_PREFIX
 from django.core import management
@@ -28,7 +29,7 @@ LOGGER = get_logger()
 
 
 @CELERY_APP.task(bind=True, base=MonitoredTask)
-@prefill_task()
+@prefill_task
 def clean_expired_models(self: MonitoredTask):
     """Remove expired objects"""
     messages = []
@@ -55,24 +56,25 @@ def clean_expired_models(self: MonitoredTask):
     self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL, messages))
 
 
+def should_backup() -> bool:
+    """Check if we should be doing backups"""
+    if SERVICE_HOST_ENV_NAME in environ and not CONFIG.y("postgresql.s3_backup.bucket"):
+        LOGGER.info("Running in k8s and s3 backups are not configured, skipping")
+        return False
+    if not CONFIG.y_bool("postgresql.backup.enabled"):
+        return False
+    if settings.DEBUG:
+        return False
+    return True
+
+
 @CELERY_APP.task(bind=True, base=MonitoredTask)
-@prefill_task()
+@prefill_task
 def backup_database(self: MonitoredTask):  # pragma: no cover
     """Database backup"""
     self.result_timeout_hours = 25
-    if SERVICE_HOST_ENV_NAME in environ and not CONFIG.y("postgresql.s3_backup.bucket"):
-        LOGGER.info("Running in k8s and s3 backups are not configured, skipping")
-        self.set_status(
-            TaskResult(
-                TaskResultStatus.WARNING,
-                [
-                    (
-                        "Skipping backup as authentik is running in Kubernetes "
-                        "without S3 backups configured."
-                    ),
-                ],
-            )
-        )
+    if not should_backup():
+        self.set_status(TaskResult(TaskResultStatus.UNKNOWN, ["Backups are not configured."]))
         return
     try:
         start = datetime.now()
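Note the decorator spelling also changes from `@prefill_task()` to `@prefill_task`. A decorator can be written to tolerate both forms; the following is a generic illustration of that pattern only, not authentik's actual `prefill_task` implementation:

```python
import functools


def prefill_task(func=None):
    """Toy decorator usable both as @prefill_task and @prefill_task()."""
    if func is None:
        # Called with parentheses: return the decorator itself.
        return prefill_task

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # A real implementation would register `func` for result prefilling here.
        return func(*args, **kwargs)

    return wrapper


@prefill_task
def task_a():
    return "a"


@prefill_task()
def task_b():
    return "b"


assert task_a() == "a" and task_b() == "b"
```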
@@ -19,6 +19,7 @@
         <script src="{% static 'dist/poly.js' %}" type="module"></script>
         {% block head %}
         {% endblock %}
+        <meta name="sentry-trace" content="{{ sentry_trace }}" />
     </head>
     <body>
         {% block body %}
@@ -4,7 +4,7 @@
 {% load i18n %}
 
 {% block head %}
-<script src="{% static 'dist/AdminInterface.js' %}" type="module"></script>
+<script src="{% static 'dist/admin/AdminInterface.js' %}" type="module"></script>
 {% endblock %}
 
 {% block body %}
@@ -11,7 +11,7 @@
 {% endblock %}
 
 {% block head %}
-<script src="{% static 'dist/FlowInterface.js' %}" type="module"></script>
+<script src="{% static 'dist/flow/FlowInterface.js' %}" type="module"></script>
 <style>
 .pf-c-background-image::before {
     --ak-flow-background: url("{{ flow.background_url }}");
@@ -4,7 +4,7 @@
 {% load i18n %}
 
 {% block head %}
-<script src="{% static 'dist/UserInterface.js' %}" type="module"></script>
+<script src="{% static 'dist/user/UserInterface.js' %}" type="module"></script>
 {% endblock %}
 
 {% block body %}
@ -3,7 +3,8 @@ from django.urls import reverse
|
|||||||
from django.utils.encoding import force_str
|
from django.utils.encoding import force_str
|
||||||
from rest_framework.test import APITestCase
|
from rest_framework.test import APITestCase
|
||||||
|
|
||||||
from authentik.core.models import Application, User
|
from authentik.core.models import Application
|
||||||
|
from authentik.core.tests.utils import create_test_admin_user
|
||||||
from authentik.policies.dummy.models import DummyPolicy
|
from authentik.policies.dummy.models import DummyPolicy
|
||||||
from authentik.policies.models import PolicyBinding
|
from authentik.policies.models import PolicyBinding
|
||||||
|
|
||||||
@ -12,7 +13,7 @@ class TestApplicationsAPI(APITestCase):
|
|||||||
"""Test applications API"""
|
"""Test applications API"""
|
||||||
|
|
||||||
def setUp(self) -> None:
|
def setUp(self) -> None:
|
||||||
self.user = User.objects.get(username="akadmin")
|
self.user = create_test_admin_user()
|
||||||
self.allowed = Application.objects.create(name="allowed", slug="allowed")
|
self.allowed = Application.objects.create(name="allowed", slug="allowed")
|
||||||
self.denied = Application.objects.create(name="denied", slug="denied")
|
self.denied = Application.objects.create(name="denied", slug="denied")
|
||||||
PolicyBinding.objects.create(
|
PolicyBinding.objects.create(
|
||||||
|
@ -6,6 +6,7 @@ from django.utils.encoding import force_str
|
|||||||
from rest_framework.test import APITestCase
|
from rest_framework.test import APITestCase
|
||||||
|
|
||||||
from authentik.core.models import User
|
from authentik.core.models import User
|
||||||
|
from authentik.core.tests.utils import create_test_admin_user
|
||||||
|
|
||||||
|
|
||||||
class TestAuthenticatedSessionsAPI(APITestCase):
|
class TestAuthenticatedSessionsAPI(APITestCase):
|
||||||
@ -13,7 +14,7 @@ class TestAuthenticatedSessionsAPI(APITestCase):
|
|||||||
|
|
||||||
def setUp(self) -> None:
|
def setUp(self) -> None:
|
||||||
super().setUp()
|
super().setUp()
|
||||||
self.user = User.objects.get(username="akadmin")
|
self.user = create_test_admin_user()
|
||||||
self.other_user = User.objects.create(username="normal-user")
|
self.other_user = User.objects.create(username="normal-user")
|
||||||
|
|
||||||
def test_list(self):
|
def test_list(self):
|
||||||
|
authentik/core/tests/test_groups.py (new file, 40 lines)
@@ -0,0 +1,40 @@
+"""group tests"""
+from django.test.testcases import TestCase
+
+from authentik.core.models import Group, User
+
+
+class TestGroups(TestCase):
+    """Test group membership"""
+
+    def test_group_membership_simple(self):
+        """Test simple membership"""
+        user = User.objects.create(username="user")
+        user2 = User.objects.create(username="user2")
+        group = Group.objects.create(name="group")
+        group.users.add(user)
+        self.assertTrue(group.is_member(user))
+        self.assertFalse(group.is_member(user2))
+
+    def test_group_membership_parent(self):
+        """Test parent membership"""
+        user = User.objects.create(username="user")
+        user2 = User.objects.create(username="user2")
+        first = Group.objects.create(name="first")
+        second = Group.objects.create(name="second", parent=first)
+        second.users.add(user)
+        self.assertTrue(first.is_member(user))
+        self.assertFalse(first.is_member(user2))
+
+    def test_group_membership_parent_extra(self):
+        """Test parent membership"""
+        user = User.objects.create(username="user")
+        user2 = User.objects.create(username="user2")
+        first = Group.objects.create(name="first")
+        second = Group.objects.create(name="second", parent=first)
+        third = Group.objects.create(name="third", parent=second)
+        second.users.add(user)
+        self.assertTrue(first.is_member(user))
+        self.assertFalse(first.is_member(user2))
+        self.assertFalse(third.is_member(user))
+        self.assertFalse(third.is_member(user2))
|
@ -5,6 +5,7 @@ from django.test.testcases import TestCase
|
|||||||
from django.urls import reverse
|
from django.urls import reverse
|
||||||
|
|
||||||
from authentik.core.models import User
|
from authentik.core.models import User
|
||||||
|
from authentik.core.tests.utils import create_test_admin_user
|
||||||
|
|
||||||
|
|
||||||
class TestImpersonation(TestCase):
|
class TestImpersonation(TestCase):
|
||||||
@ -13,14 +14,14 @@ class TestImpersonation(TestCase):
|
|||||||
def setUp(self) -> None:
|
def setUp(self) -> None:
|
||||||
super().setUp()
|
super().setUp()
|
||||||
self.other_user = User.objects.create(username="to-impersonate")
|
self.other_user = User.objects.create(username="to-impersonate")
|
||||||
self.akadmin = User.objects.get(username="akadmin")
|
self.user = create_test_admin_user()
|
||||||
|
|
||||||
def test_impersonate_simple(self):
|
def test_impersonate_simple(self):
|
||||||
"""test simple impersonation and un-impersonation"""
|
"""test simple impersonation and un-impersonation"""
|
||||||
# test with an inactive user to ensure that still works
|
# test with an inactive user to ensure that still works
|
||||||
self.other_user.is_active = False
|
self.other_user.is_active = False
|
||||||
self.other_user.save()
|
self.other_user.save()
|
||||||
self.client.force_login(self.akadmin)
|
self.client.force_login(self.user)
|
||||||
|
|
||||||
self.client.get(
|
self.client.get(
|
||||||
reverse(
|
reverse(
|
||||||
@ -32,13 +33,13 @@ class TestImpersonation(TestCase):
|
|||||||
response = self.client.get(reverse("authentik_api:user-me"))
|
response = self.client.get(reverse("authentik_api:user-me"))
|
||||||
response_body = loads(response.content.decode())
|
response_body = loads(response.content.decode())
|
||||||
self.assertEqual(response_body["user"]["username"], self.other_user.username)
|
self.assertEqual(response_body["user"]["username"], self.other_user.username)
|
||||||
self.assertEqual(response_body["original"]["username"], self.akadmin.username)
|
self.assertEqual(response_body["original"]["username"], self.user.username)
|
||||||
|
|
||||||
self.client.get(reverse("authentik_core:impersonate-end"))
|
self.client.get(reverse("authentik_core:impersonate-end"))
|
||||||
|
|
||||||
response = self.client.get(reverse("authentik_api:user-me"))
|
response = self.client.get(reverse("authentik_api:user-me"))
|
||||||
response_body = loads(response.content.decode())
|
response_body = loads(response.content.decode())
|
||||||
self.assertEqual(response_body["user"]["username"], self.akadmin.username)
|
self.assertEqual(response_body["user"]["username"], self.user.username)
|
||||||
self.assertNotIn("original", response_body)
|
self.assertNotIn("original", response_body)
|
||||||
|
|
||||||
def test_impersonate_denied(self):
|
def test_impersonate_denied(self):
|
||||||
@ -46,7 +47,7 @@ class TestImpersonation(TestCase):
|
|||||||
self.client.force_login(self.other_user)
|
self.client.force_login(self.other_user)
|
||||||
|
|
||||||
self.client.get(
|
self.client.get(
|
||||||
reverse("authentik_core:impersonate-init", kwargs={"user_id": self.akadmin.pk})
|
reverse("authentik_core:impersonate-init", kwargs={"user_id": self.user.pk})
|
||||||
)
|
)
|
||||||
|
|
||||||
response = self.client.get(reverse("authentik_api:user-me"))
|
response = self.client.get(reverse("authentik_api:user-me"))
|
||||||
|
@ -2,7 +2,7 @@
|
|||||||
from time import sleep
|
from time import sleep
|
||||||
from typing import Callable, Type
|
from typing import Callable, Type
|
||||||
|
|
||||||
from django.test import TestCase
|
from django.test import RequestFactory, TestCase
|
||||||
from django.utils.timezone import now
|
from django.utils.timezone import now
|
||||||
from guardian.shortcuts import get_anonymous_user
|
from guardian.shortcuts import get_anonymous_user
|
||||||
|
|
||||||
@ -30,6 +30,9 @@ class TestModels(TestCase):
|
|||||||
def source_tester_factory(test_model: Type[Stage]) -> Callable:
|
def source_tester_factory(test_model: Type[Stage]) -> Callable:
|
||||||
"""Test source"""
|
"""Test source"""
|
||||||
|
|
||||||
|
factory = RequestFactory()
|
||||||
|
request = factory.get("/")
|
||||||
|
|
||||||
def tester(self: TestModels):
|
def tester(self: TestModels):
|
||||||
model_class = None
|
model_class = None
|
||||||
if test_model._meta.abstract:
|
if test_model._meta.abstract:
|
||||||
@ -38,8 +41,8 @@ def source_tester_factory(test_model: Type[Stage]) -> Callable:
|
|||||||
model_class = test_model()
|
model_class = test_model()
|
||||||
model_class.slug = "test"
|
model_class.slug = "test"
|
||||||
self.assertIsNotNone(model_class.component)
|
self.assertIsNotNone(model_class.component)
|
||||||
_ = model_class.ui_login_button
|
_ = model_class.ui_login_button(request)
|
||||||
_ = model_class.ui_user_settings
|
_ = model_class.ui_user_settings()
|
||||||
|
|
||||||
return tester
|
return tester
|
||||||
|
|
||||||
@ -49,7 +52,7 @@ def provider_tester_factory(test_model: Type[Stage]) -> Callable:
|
|||||||
|
|
||||||
def tester(self: TestModels):
|
def tester(self: TestModels):
|
||||||
model_class = None
|
model_class = None
|
||||||
if test_model._meta.abstract:
|
if test_model._meta.abstract: # pragma: no cover
|
||||||
model_class = test_model.__bases__[0]()
|
model_class = test_model.__bases__[0]()
|
||||||
else:
|
else:
|
||||||
model_class = test_model()
|
model_class = test_model()
|
||||||
@ -59,6 +62,6 @@ def provider_tester_factory(test_model: Type[Stage]) -> Callable:
|
|||||||
|
|
||||||
|
|
||||||
for model in all_subclasses(Source):
|
for model in all_subclasses(Source):
|
||||||
setattr(TestModels, f"test_model_{model.__name__}", source_tester_factory(model))
|
setattr(TestModels, f"test_source_{model.__name__}", source_tester_factory(model))
|
||||||
for model in all_subclasses(Provider):
|
for model in all_subclasses(Provider):
|
||||||
setattr(TestModels, f"test_model_{model.__name__}", provider_tester_factory(model))
|
setattr(TestModels, f"test_provider_{model.__name__}", provider_tester_factory(model))
|
||||||
|
@ -6,7 +6,8 @@ from rest_framework.serializers import ValidationError
|
|||||||
from rest_framework.test import APITestCase
|
from rest_framework.test import APITestCase
|
||||||
|
|
||||||
from authentik.core.api.propertymappings import PropertyMappingSerializer
|
from authentik.core.api.propertymappings import PropertyMappingSerializer
|
||||||
from authentik.core.models import PropertyMapping, User
|
from authentik.core.models import PropertyMapping
|
||||||
|
from authentik.core.tests.utils import create_test_admin_user
|
||||||
|
|
||||||
|
|
||||||
class TestPropertyMappingAPI(APITestCase):
|
class TestPropertyMappingAPI(APITestCase):
|
||||||
@ -17,7 +18,7 @@ class TestPropertyMappingAPI(APITestCase):
|
|||||||
self.mapping = PropertyMapping.objects.create(
|
self.mapping = PropertyMapping.objects.create(
|
||||||
name="dummy", expression="""return {'foo': 'bar'}"""
|
name="dummy", expression="""return {'foo': 'bar'}"""
|
||||||
)
|
)
|
||||||
self.user = User.objects.get(username="akadmin")
|
self.user = create_test_admin_user()
|
||||||
self.client.force_login(self.user)
|
self.client.force_login(self.user)
|
||||||
|
|
||||||
def test_test_call(self):
|
def test_test_call(self):
|
||||||
@ -40,7 +41,7 @@ class TestPropertyMappingAPI(APITestCase):
|
|||||||
expr = "return True"
|
expr = "return True"
|
||||||
self.assertEqual(PropertyMappingSerializer().validate_expression(expr), expr)
|
self.assertEqual(PropertyMappingSerializer().validate_expression(expr), expr)
|
||||||
with self.assertRaises(ValidationError):
|
with self.assertRaises(ValidationError):
|
||||||
print(PropertyMappingSerializer().validate_expression("/"))
|
PropertyMappingSerializer().validate_expression("/")
|
||||||
|
|
||||||
def test_types(self):
|
def test_types(self):
|
||||||
"""Test PropertyMappigns's types endpoint"""
|
"""Test PropertyMappigns's types endpoint"""
|
||||||
|
@ -2,7 +2,8 @@
|
|||||||
from django.urls import reverse
|
from django.urls import reverse
|
||||||
from rest_framework.test import APITestCase
|
from rest_framework.test import APITestCase
|
||||||
|
|
||||||
from authentik.core.models import PropertyMapping, User
|
from authentik.core.models import PropertyMapping
|
||||||
|
from authentik.core.tests.utils import create_test_admin_user
|
||||||
|
|
||||||
|
|
||||||
class TestProvidersAPI(APITestCase):
|
class TestProvidersAPI(APITestCase):
|
||||||
@ -13,7 +14,7 @@ class TestProvidersAPI(APITestCase):
|
|||||||
self.mapping = PropertyMapping.objects.create(
|
self.mapping = PropertyMapping.objects.create(
|
||||||
name="dummy", expression="""return {'foo': 'bar'}"""
|
name="dummy", expression="""return {'foo': 'bar'}"""
|
||||||
)
|
)
|
||||||
self.user = User.objects.get(username="akadmin")
|
self.user = create_test_admin_user()
|
||||||
self.client.force_login(self.user)
|
self.client.force_login(self.user)
|
||||||
|
|
||||||
def test_types(self):
|
def test_types(self):
|
||||||
|
@ -8,6 +8,7 @@ from rest_framework.test import APITestCase
|
|||||||
|
|
||||||
from authentik.core.models import USER_ATTRIBUTE_TOKEN_EXPIRING, Token, TokenIntents, User
|
from authentik.core.models import USER_ATTRIBUTE_TOKEN_EXPIRING, Token, TokenIntents, User
|
||||||
from authentik.core.tasks import clean_expired_models
|
from authentik.core.tasks import clean_expired_models
|
||||||
|
from authentik.core.tests.utils import create_test_admin_user
|
||||||
|
|
||||||
|
|
||||||
class TestTokenAPI(APITestCase):
|
class TestTokenAPI(APITestCase):
|
||||||
@ -16,7 +17,7 @@ class TestTokenAPI(APITestCase):
|
|||||||
def setUp(self) -> None:
|
def setUp(self) -> None:
|
||||||
super().setUp()
|
super().setUp()
|
||||||
self.user = User.objects.create(username="testuser")
|
self.user = User.objects.create(username="testuser")
|
||||||
self.admin = User.objects.get(username="akadmin")
|
self.admin = create_test_admin_user()
|
||||||
self.client.force_login(self.user)
|
self.client.force_login(self.user)
|
||||||
|
|
||||||
def test_token_create(self):
|
def test_token_create(self):
|
||||||
@ -53,7 +54,9 @@ class TestTokenAPI(APITestCase):
|
|||||||
|
|
||||||
def test_token_expire(self):
|
def test_token_expire(self):
|
||||||
"""Test Token expire task"""
|
"""Test Token expire task"""
|
||||||
token: Token = Token.objects.create(expires=now(), user=get_anonymous_user())
|
token: Token = Token.objects.create(
|
||||||
|
expires=now(), user=get_anonymous_user(), intent=TokenIntents.INTENT_API
|
||||||
|
)
|
||||||
key = token.key
|
key = token.key
|
||||||
clean_expired_models.delay().get()
|
clean_expired_models.delay().get()
|
||||||
token.refresh_from_db()
|
token.refresh_from_db()
|
||||||
|
@ -2,8 +2,9 @@
|
|||||||
from django.urls.base import reverse
|
from django.urls.base import reverse
|
||||||
from rest_framework.test import APITestCase
|
from rest_framework.test import APITestCase
|
||||||
|
|
||||||
from authentik.core.models import USER_ATTRIBUTE_CHANGE_USERNAME, User
|
from authentik.core.models import USER_ATTRIBUTE_CHANGE_EMAIL, USER_ATTRIBUTE_CHANGE_USERNAME, User
|
||||||
from authentik.flows.models import Flow, FlowDesignation
|
from authentik.core.tests.utils import create_test_admin_user, create_test_flow, create_test_tenant
|
||||||
|
from authentik.flows.models import FlowDesignation
|
||||||
from authentik.stages.email.models import EmailStage
|
from authentik.stages.email.models import EmailStage
|
||||||
from authentik.tenants.models import Tenant
|
from authentik.tenants.models import Tenant
|
||||||
|
|
||||||
@ -12,7 +13,7 @@ class TestUsersAPI(APITestCase):
|
|||||||
"""Test Users API"""
|
"""Test Users API"""
|
||||||
|
|
||||||
def setUp(self) -> None:
|
def setUp(self) -> None:
|
||||||
self.admin = User.objects.get(username="akadmin")
|
self.admin = create_test_admin_user()
|
||||||
self.user = User.objects.create(username="test-user")
|
self.user = User.objects.create(username="test-user")
|
||||||
|
|
||||||
def test_update_self(self):
|
def test_update_self(self):
|
||||||
@ -33,6 +34,16 @@ class TestUsersAPI(APITestCase):
|
|||||||
)
|
)
|
||||||
self.assertEqual(response.status_code, 400)
|
self.assertEqual(response.status_code, 400)
|
||||||
|
|
||||||
|
def test_update_self_email_denied(self):
|
||||||
|
"""Test update_self"""
|
||||||
|
self.admin.attributes[USER_ATTRIBUTE_CHANGE_EMAIL] = False
|
||||||
|
self.admin.save()
|
||||||
|
self.client.force_login(self.admin)
|
||||||
|
response = self.client.put(
|
||||||
|
reverse("authentik_api:user-update-self"), data={"email": "foo", "name": "foo"}
|
||||||
|
)
|
||||||
|
self.assertEqual(response.status_code, 400)
|
||||||
|
|
||||||
def test_metrics(self):
|
def test_metrics(self):
|
||||||
"""Test user's metrics"""
|
"""Test user's metrics"""
|
||||||
self.client.force_login(self.admin)
|
self.client.force_login(self.admin)
|
||||||
@ -59,10 +70,8 @@ class TestUsersAPI(APITestCase):
|
|||||||
|
|
||||||
def test_recovery(self):
|
def test_recovery(self):
|
||||||
"""Test user recovery link (no recovery flow set)"""
|
"""Test user recovery link (no recovery flow set)"""
|
||||||
flow = Flow.objects.create(
|
flow = create_test_flow(FlowDesignation.RECOVERY)
|
||||||
name="test", title="test", slug="test", designation=FlowDesignation.RECOVERY
|
tenant: Tenant = create_test_tenant()
|
||||||
)
|
|
||||||
tenant: Tenant = Tenant.objects.first()
|
|
||||||
tenant.flow_recovery = flow
|
tenant.flow_recovery = flow
|
||||||
tenant.save()
|
tenant.save()
|
||||||
self.client.force_login(self.admin)
|
self.client.force_login(self.admin)
|
||||||
@ -89,10 +98,8 @@ class TestUsersAPI(APITestCase):
|
|||||||
"""Test user recovery link (no email stage)"""
|
"""Test user recovery link (no email stage)"""
|
||||||
self.user.email = "foo@bar.baz"
|
self.user.email = "foo@bar.baz"
|
||||||
self.user.save()
|
self.user.save()
|
||||||
flow = Flow.objects.create(
|
flow = create_test_flow(designation=FlowDesignation.RECOVERY)
|
||||||
name="test", title="test", slug="test", designation=FlowDesignation.RECOVERY
|
tenant: Tenant = create_test_tenant()
|
||||||
)
|
|
||||||
tenant: Tenant = Tenant.objects.first()
|
|
||||||
tenant.flow_recovery = flow
|
tenant.flow_recovery = flow
|
||||||
tenant.save()
|
tenant.save()
|
||||||
self.client.force_login(self.admin)
|
self.client.force_login(self.admin)
|
||||||
@ -105,10 +112,8 @@ class TestUsersAPI(APITestCase):
|
|||||||
"""Test user recovery link"""
|
"""Test user recovery link"""
|
||||||
self.user.email = "foo@bar.baz"
|
self.user.email = "foo@bar.baz"
|
||||||
self.user.save()
|
self.user.save()
|
||||||
flow = Flow.objects.create(
|
flow = create_test_flow(FlowDesignation.RECOVERY)
|
||||||
name="test", title="test", slug="test", designation=FlowDesignation.RECOVERY
|
tenant: Tenant = create_test_tenant()
|
||||||
)
|
|
||||||
tenant: Tenant = Tenant.objects.first()
|
|
||||||
tenant.flow_recovery = flow
|
tenant.flow_recovery = flow
|
||||||
tenant.save()
|
tenant.save()
|
||||||
|
|
||||||
|
57
authentik/core/tests/utils.py
Normal file
57
authentik/core/tests/utils.py
Normal file
@ -0,0 +1,57 @@
|
|||||||
|
"""Test Utils"""
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from django.utils.text import slugify
|
||||||
|
|
||||||
|
from authentik.core.models import Group, User
|
||||||
|
from authentik.crypto.builder import CertificateBuilder
|
||||||
|
from authentik.crypto.models import CertificateKeyPair
|
||||||
|
from authentik.flows.models import Flow, FlowDesignation
|
||||||
|
from authentik.lib.generators import generate_id
|
||||||
|
from authentik.tenants.models import Tenant
|
||||||
|
|
||||||
|
|
||||||
|
def create_test_flow(designation: FlowDesignation = FlowDesignation.STAGE_CONFIGURATION) -> Flow:
|
||||||
|
"""Generate a flow that can be used for testing"""
|
||||||
|
uid = generate_id(10)
|
||||||
|
return Flow.objects.create(
|
||||||
|
name=uid,
|
||||||
|
title=uid,
|
||||||
|
slug=slugify(uid),
|
||||||
|
designation=designation,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def create_test_admin_user(name: Optional[str] = None) -> User:
|
||||||
|
"""Generate a test-admin user"""
|
||||||
|
uid = generate_id(20) if not name else name
|
||||||
|
group = Group.objects.create(name=uid, is_superuser=True)
|
||||||
|
user: User = User.objects.create(
|
||||||
|
username=uid,
|
||||||
|
name=uid,
|
||||||
|
email=f"{uid}@goauthentik.io",
|
||||||
|
)
|
||||||
|
user.set_password(uid)
|
||||||
|
user.save()
|
||||||
|
group.users.add(user)
|
||||||
|
return user
|
||||||
|
|
||||||
|
|
||||||
|
def create_test_tenant() -> Tenant:
|
||||||
|
"""Generate a test tenant, removing all other tenants to make sure this one
|
||||||
|
matches."""
|
||||||
|
uid = generate_id(20)
|
||||||
|
Tenant.objects.all().delete()
|
||||||
|
return Tenant.objects.create(domain=uid, default=True)
|
||||||
|
|
||||||
|
|
||||||
|
def create_test_cert() -> CertificateKeyPair:
|
||||||
|
"""Generate a certificate for testing"""
|
||||||
|
CertificateKeyPair.objects.filter(name="goauthentik.io").delete()
|
||||||
|
builder = CertificateBuilder()
|
||||||
|
builder.common_name = "goauthentik.io"
|
||||||
|
builder.build(
|
||||||
|
subject_alt_names=["goauthentik.io"],
|
||||||
|
validity_days=360,
|
||||||
|
)
|
||||||
|
return builder.save()
|
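Because these helpers create objects with random identifiers instead of relying on a bootstrapped `akadmin` account, tests can set themselves up without fixture assumptions. An illustrative sketch (not taken from the repository) of how a test case might use them:

```python
from rest_framework.test import APITestCase

from authentik.core.tests.utils import create_test_admin_user, create_test_flow


class ExampleTestCase(APITestCase):
    """Illustrative only: shows the intended usage of the new test helpers."""

    def setUp(self) -> None:
        # Fresh superuser with a random username; no dependency on akadmin.
        self.user = create_test_admin_user()
        self.client.force_login(self.user)
        # Throwaway flow with a random slug for anything that needs a Flow FK.
        self.flow = create_test_flow()
```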
@ -20,6 +20,7 @@ from authentik.api.decorators import permission_required
|
|||||||
from authentik.core.api.used_by import UsedByMixin
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
from authentik.core.api.utils import PassiveSerializer
|
from authentik.core.api.utils import PassiveSerializer
|
||||||
from authentik.crypto.builder import CertificateBuilder
|
from authentik.crypto.builder import CertificateBuilder
|
||||||
|
from authentik.crypto.managed import MANAGED_KEY
|
||||||
from authentik.crypto.models import CertificateKeyPair
|
from authentik.crypto.models import CertificateKeyPair
|
||||||
from authentik.events.models import Event, EventAction
|
from authentik.events.models import Event, EventAction
|
||||||
|
|
||||||
@ -141,9 +142,11 @@ class CertificateKeyPairFilter(FilterSet):
|
|||||||
class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
|
class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
|
||||||
"""CertificateKeyPair Viewset"""
|
"""CertificateKeyPair Viewset"""
|
||||||
|
|
||||||
queryset = CertificateKeyPair.objects.all()
|
queryset = CertificateKeyPair.objects.exclude(managed=MANAGED_KEY)
|
||||||
serializer_class = CertificateKeyPairSerializer
|
serializer_class = CertificateKeyPairSerializer
|
||||||
filterset_class = CertificateKeyPairFilter
|
filterset_class = CertificateKeyPairFilter
|
||||||
|
ordering = ["name"]
|
||||||
|
search_fields = ["name"]
|
||||||
|
|
||||||
@permission_required(None, ["authentik_crypto.add_certificatekeypair"])
|
@permission_required(None, ["authentik_crypto.add_certificatekeypair"])
|
||||||
@extend_schema(
|
@extend_schema(
|
||||||
@ -189,7 +192,7 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
|
|||||||
secret=certificate,
|
secret=certificate,
|
||||||
type="certificate",
|
type="certificate",
|
||||||
).from_http(request)
|
).from_http(request)
|
||||||
if "download" in request._request.GET:
|
if "download" in request.query_params:
|
||||||
# Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html
|
# Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html
|
||||||
response = HttpResponse(
|
response = HttpResponse(
|
||||||
certificate.certificate_data, content_type="application/x-pem-file"
|
certificate.certificate_data, content_type="application/x-pem-file"
|
||||||
@ -220,7 +223,7 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
|
|||||||
secret=certificate,
|
secret=certificate,
|
||||||
type="private_key",
|
type="private_key",
|
||||||
).from_http(request)
|
).from_http(request)
|
||||||
if "download" in request._request.GET:
|
if "download" in request.query_params:
|
||||||
# Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html
|
# Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html
|
||||||
response = HttpResponse(certificate.key_data, content_type="application/x-pem-file")
|
response = HttpResponse(certificate.key_data, content_type="application/x-pem-file")
|
||||||
response[
|
response[
|
||||||
|
@ -13,3 +13,4 @@ class AuthentikCryptoConfig(AppConfig):
|
|||||||
|
|
||||||
def ready(self):
|
def ready(self):
|
||||||
import_module("authentik.crypto.managed")
|
import_module("authentik.crypto.managed")
|
||||||
|
import_module("authentik.crypto.tasks")
|
||||||
|
@ -11,10 +11,13 @@ from cryptography.hazmat.primitives.serialization import load_pem_private_key
|
|||||||
from cryptography.x509 import Certificate, load_pem_x509_certificate
|
from cryptography.x509 import Certificate, load_pem_x509_certificate
|
||||||
from django.db import models
|
from django.db import models
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
from structlog.stdlib import get_logger
|
||||||
|
|
||||||
from authentik.lib.models import CreatedUpdatedModel
|
from authentik.lib.models import CreatedUpdatedModel
|
||||||
from authentik.managed.models import ManagedModel
|
from authentik.managed.models import ManagedModel
|
||||||
|
|
||||||
|
LOGGER = get_logger()
|
||||||
|
|
||||||
|
|
||||||
class CertificateKeyPair(ManagedModel, CreatedUpdatedModel):
|
class CertificateKeyPair(ManagedModel, CreatedUpdatedModel):
|
||||||
"""CertificateKeyPair that can be used for signing or encrypting if `key_data`
|
"""CertificateKeyPair that can be used for signing or encrypting if `key_data`
|
||||||
@ -55,14 +58,15 @@ class CertificateKeyPair(ManagedModel, CreatedUpdatedModel):
|
|||||||
@property
|
@property
|
||||||
def private_key(self) -> Optional[RSAPrivateKey]:
|
def private_key(self) -> Optional[RSAPrivateKey]:
|
||||||
"""Get python cryptography PrivateKey instance"""
|
"""Get python cryptography PrivateKey instance"""
|
||||||
if not self._private_key and self._private_key != "":
|
if not self._private_key and self.key_data != "":
|
||||||
try:
|
try:
|
||||||
self._private_key = load_pem_private_key(
|
self._private_key = load_pem_private_key(
|
||||||
str.encode("\n".join([x.strip() for x in self.key_data.split("\n")])),
|
str.encode("\n".join([x.strip() for x in self.key_data.split("\n")])),
|
||||||
password=None,
|
password=None,
|
||||||
backend=default_backend(),
|
backend=default_backend(),
|
||||||
)
|
)
|
||||||
except ValueError:
|
except ValueError as exc:
|
||||||
|
LOGGER.warning(exc)
|
||||||
return None
|
return None
|
||||||
return self._private_key
|
return self._private_key
|
||||||
|
|
||||||
|
authentik/crypto/settings.py (new file, 10 lines)
@@ -0,0 +1,10 @@
+"""Crypto task Settings"""
+from celery.schedules import crontab
+
+CELERY_BEAT_SCHEDULE = {
+    "crypto_certificate_discovery": {
+        "task": "authentik.crypto.tasks.certificate_discovery",
+        "schedule": crontab(minute="*/5"),
+        "options": {"queue": "authentik_scheduled"},
+    },
+}
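The `certificate_discovery` task scheduled here (its source follows below) derives a certificate name from the file layout: certbot-style `fullchain.pem`/`privkey.pem` pairs take the name of their directory, any other file uses its own name without the suffix. A small standalone sketch of just that naming rule:

```python
from pathlib import Path


def discovered_name(path: Path) -> str:
    """Mirror the discovery naming rule: certbot layout uses the directory name,
    everything else uses the file name without its suffix."""
    if path.name in ("fullchain.pem", "privkey.pem"):
        return path.parent.name
    return path.name.replace(path.suffix, "")


assert discovered_name(Path("/certs/foo.bar/fullchain.pem")) == "foo.bar"
assert discovered_name(Path("/certs/foo.pem")) == "foo"
```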
92
authentik/crypto/tasks.py
Normal file
92
authentik/crypto/tasks.py
Normal file
@ -0,0 +1,92 @@
|
|||||||
|
"""Crypto tasks"""
|
||||||
|
from glob import glob
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from cryptography.hazmat.backends import default_backend
|
||||||
|
from cryptography.hazmat.primitives.serialization import load_pem_private_key
|
||||||
|
from cryptography.x509.base import load_pem_x509_certificate
|
||||||
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
from structlog.stdlib import get_logger
|
||||||
|
|
||||||
|
from authentik.crypto.models import CertificateKeyPair
|
||||||
|
from authentik.events.monitored_tasks import (
|
||||||
|
MonitoredTask,
|
||||||
|
TaskResult,
|
||||||
|
TaskResultStatus,
|
||||||
|
prefill_task,
|
||||||
|
)
|
||||||
|
from authentik.lib.config import CONFIG
|
||||||
|
from authentik.root.celery import CELERY_APP
|
||||||
|
|
||||||
|
LOGGER = get_logger()
|
||||||
|
|
||||||
|
MANAGED_DISCOVERED = "goauthentik.io/crypto/discovered/%s"
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_private_key_valid(body: str):
|
||||||
|
"""Attempt loading of an RSA Private key without password"""
|
||||||
|
load_pem_private_key(
|
||||||
|
str.encode("\n".join([x.strip() for x in body.split("\n")])),
|
||||||
|
password=None,
|
||||||
|
backend=default_backend(),
|
||||||
|
)
|
||||||
|
return body
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_certificate_valid(body: str):
|
||||||
|
"""Attempt loading of a PEM-encoded certificate"""
|
||||||
|
load_pem_x509_certificate(body.encode("utf-8"), default_backend())
|
||||||
|
return body
|
||||||
|
|
||||||
|
|
||||||
|
@CELERY_APP.task(bind=True, base=MonitoredTask)
|
||||||
|
@prefill_task
|
||||||
|
def certificate_discovery(self: MonitoredTask):
|
||||||
|
"""Discover and update certificates form the filesystem"""
|
||||||
|
certs = {}
|
||||||
|
private_keys = {}
|
||||||
|
discovered = 0
|
||||||
|
for file in glob(CONFIG.y("cert_discovery_dir") + "/**", recursive=True):
|
||||||
|
path = Path(file)
|
||||||
|
if not path.exists():
|
||||||
|
continue
|
||||||
|
if path.is_dir():
|
||||||
|
continue
|
||||||
|
# Support certbot's directory structure
|
||||||
|
if path.name in ["fullchain.pem", "privkey.pem"]:
|
||||||
|
cert_name = path.parent.name
|
||||||
|
else:
|
||||||
|
cert_name = path.name.replace(path.suffix, "")
|
||||||
|
try:
|
||||||
|
with open(path, "r+", encoding="utf-8") as _file:
|
||||||
|
body = _file.read()
|
||||||
|
if "BEGIN RSA PRIVATE KEY" in body:
|
||||||
|
private_keys[cert_name] = ensure_private_key_valid(body)
|
||||||
|
else:
|
||||||
|
certs[cert_name] = ensure_certificate_valid(body)
|
||||||
|
except (OSError, ValueError) as exc:
|
||||||
|
LOGGER.warning("Failed to open file or invalid format", exc=exc, file=path)
|
||||||
|
discovered += 1
|
||||||
|
for name, cert_data in certs.items():
|
||||||
|
cert = CertificateKeyPair.objects.filter(managed=MANAGED_DISCOVERED % name).first()
|
||||||
|
if not cert:
|
||||||
|
cert = CertificateKeyPair(
|
||||||
|
name=name,
|
||||||
|
managed=MANAGED_DISCOVERED % name,
|
||||||
|
)
|
||||||
|
dirty = False
|
||||||
|
if cert.certificate_data != cert_data:
|
||||||
|
cert.certificate_data = cert_data
|
||||||
|
dirty = True
|
||||||
|
if name in private_keys:
|
||||||
|
if cert.key_data != private_keys[name]:
|
||||||
|
cert.key_data = private_keys[name]
|
||||||
|
dirty = True
|
||||||
|
if dirty:
|
||||||
|
cert.save()
|
||||||
|
self.set_status(
|
||||||
|
TaskResult(
|
||||||
|
TaskResultStatus.SUCCESSFUL,
|
||||||
|
messages=[_("Successfully imported %(count)d files." % {"count": discovered})],
|
||||||
|
)
|
||||||
|
)
|
@ -1,25 +1,37 @@
|
|||||||
"""Crypto tests"""
|
"""Crypto tests"""
|
||||||
import datetime
|
import datetime
|
||||||
|
from os import makedirs
|
||||||
|
from tempfile import TemporaryDirectory
|
||||||
|
|
||||||
from django.test import TestCase
|
|
||||||
from django.urls import reverse
|
from django.urls import reverse
|
||||||
|
from rest_framework.test import APITestCase
|
||||||
|
|
||||||
from authentik.core.api.used_by import DeleteAction
|
from authentik.core.api.used_by import DeleteAction
|
||||||
from authentik.core.models import User
|
from authentik.core.tests.utils import create_test_admin_user, create_test_cert, create_test_flow
|
||||||
from authentik.crypto.api import CertificateKeyPairSerializer
|
from authentik.crypto.api import CertificateKeyPairSerializer
|
||||||
from authentik.crypto.builder import CertificateBuilder
|
from authentik.crypto.builder import CertificateBuilder
|
||||||
from authentik.crypto.models import CertificateKeyPair
|
from authentik.crypto.models import CertificateKeyPair
|
||||||
from authentik.flows.models import Flow
|
from authentik.crypto.tasks import MANAGED_DISCOVERED, certificate_discovery
|
||||||
|
from authentik.lib.config import CONFIG
|
||||||
from authentik.lib.generators import generate_key
|
from authentik.lib.generators import generate_key
|
||||||
from authentik.providers.oauth2.models import OAuth2Provider
|
from authentik.providers.oauth2.models import OAuth2Provider
|
||||||
|
|
||||||
|
|
||||||
class TestCrypto(TestCase):
|
class TestCrypto(APITestCase):
|
||||||
"""Test Crypto validation"""
|
"""Test Crypto validation"""
|
||||||
|
|
||||||
|
def test_model_private(self):
|
||||||
|
"""Test model private key"""
|
||||||
|
cert = CertificateKeyPair.objects.create(
|
||||||
|
name="test",
|
||||||
|
certificate_data="foo",
|
||||||
|
key_data="foo",
|
||||||
|
)
|
||||||
|
self.assertIsNone(cert.private_key)
|
||||||
|
|
||||||
def test_serializer(self):
|
def test_serializer(self):
|
||||||
"""Test API Validation"""
|
"""Test API Validation"""
|
||||||
keypair = CertificateKeyPair.objects.first()
|
keypair = create_test_cert()
|
||||||
self.assertTrue(
|
self.assertTrue(
|
||||||
CertificateKeyPairSerializer(
|
CertificateKeyPairSerializer(
|
||||||
data={
|
data={
|
||||||
@ -54,10 +66,38 @@ class TestCrypto(TestCase):
|
|||||||
self.assertEqual(instance.name, "test-cert")
|
self.assertEqual(instance.name, "test-cert")
|
||||||
self.assertEqual((instance.certificate.not_valid_after - now).days, 2)
|
self.assertEqual((instance.certificate.not_valid_after - now).days, 2)
|
||||||
|
|
||||||
|
def test_builder_api(self):
|
||||||
|
"""Test Builder (via API)"""
|
||||||
|
self.client.force_login(create_test_admin_user())
|
||||||
|
self.client.post(
|
||||||
|
reverse("authentik_api:certificatekeypair-generate"),
|
||||||
|
data={"common_name": "foo", "subject_alt_name": "bar,baz", "validity_days": 3},
|
||||||
|
)
|
||||||
|
self.assertTrue(CertificateKeyPair.objects.filter(name="foo").exists())
|
||||||
|
|
||||||
|
def test_builder_api_invalid(self):
|
||||||
|
"""Test Builder (via API) (invalid)"""
|
||||||
|
self.client.force_login(create_test_admin_user())
|
||||||
|
response = self.client.post(
|
||||||
|
reverse("authentik_api:certificatekeypair-generate"),
|
||||||
|
data={},
|
||||||
|
)
|
||||||
|
self.assertEqual(response.status_code, 400)
|
||||||
|
|
||||||
|
def test_list(self):
|
||||||
|
"""Test API List"""
|
||||||
|
self.client.force_login(create_test_admin_user())
|
||||||
|
response = self.client.get(
|
||||||
|
reverse(
|
||||||
|
"authentik_api:certificatekeypair-list",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
self.assertEqual(200, response.status_code)
|
||||||
|
|
||||||
def test_certificate_download(self):
|
def test_certificate_download(self):
|
||||||
"""Test certificate export (download)"""
|
"""Test certificate export (download)"""
|
||||||
self.client.force_login(User.objects.get(username="akadmin"))
|
self.client.force_login(create_test_admin_user())
|
||||||
keypair = CertificateKeyPair.objects.first()
|
keypair = create_test_cert()
|
||||||
response = self.client.get(
|
response = self.client.get(
|
||||||
reverse(
|
reverse(
|
||||||
"authentik_api:certificatekeypair-view-certificate",
|
"authentik_api:certificatekeypair-view-certificate",
|
||||||
@ -77,8 +117,8 @@ class TestCrypto(TestCase):
|
|||||||
|
|
||||||
def test_private_key_download(self):
|
def test_private_key_download(self):
|
||||||
"""Test private_key export (download)"""
|
"""Test private_key export (download)"""
|
||||||
self.client.force_login(User.objects.get(username="akadmin"))
|
self.client.force_login(create_test_admin_user())
|
||||||
keypair = CertificateKeyPair.objects.first()
|
keypair = create_test_cert()
|
||||||
response = self.client.get(
|
response = self.client.get(
|
||||||
reverse(
|
reverse(
|
||||||
"authentik_api:certificatekeypair-view-private-key",
|
"authentik_api:certificatekeypair-view-private-key",
|
||||||
@ -98,15 +138,15 @@ class TestCrypto(TestCase):
|
|||||||
|
|
||||||
def test_used_by(self):
|
def test_used_by(self):
|
||||||
"""Test used_by endpoint"""
|
"""Test used_by endpoint"""
|
||||||
self.client.force_login(User.objects.get(username="akadmin"))
|
self.client.force_login(create_test_admin_user())
|
||||||
keypair = CertificateKeyPair.objects.first()
|
keypair = create_test_cert()
|
||||||
provider = OAuth2Provider.objects.create(
|
provider = OAuth2Provider.objects.create(
|
||||||
name="test",
|
name="test",
|
||||||
client_id="test",
|
client_id="test",
|
||||||
client_secret=generate_key(),
|
client_secret=generate_key(),
|
||||||
authorization_flow=Flow.objects.first(),
|
authorization_flow=create_test_flow(),
|
||||||
redirect_uris="http://localhost",
|
redirect_uris="http://localhost",
|
||||||
rsa_key=CertificateKeyPair.objects.first(),
|
rsa_key=keypair,
|
||||||
)
|
)
|
||||||
response = self.client.get(
|
response = self.client.get(
|
||||||
reverse(
|
reverse(
|
||||||
@ -127,3 +167,36 @@ class TestCrypto(TestCase):
|
|||||||
}
|
}
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def test_discovery(self):
|
||||||
|
"""Test certificate discovery"""
|
||||||
|
builder = CertificateBuilder()
|
||||||
|
builder.common_name = "test-cert"
|
||||||
|
with self.assertRaises(ValueError):
|
||||||
|
builder.save()
|
||||||
|
builder.build(
|
||||||
|
subject_alt_names=[],
|
||||||
|
validity_days=3,
|
||||||
|
)
|
||||||
|
with TemporaryDirectory() as temp_dir:
|
||||||
|
with open(f"{temp_dir}/foo.pem", "w+", encoding="utf-8") as _cert:
|
||||||
|
_cert.write(builder.certificate)
|
||||||
|
with open(f"{temp_dir}/foo.key", "w+", encoding="utf-8") as _key:
|
||||||
|
_key.write(builder.private_key)
|
||||||
|
makedirs(f"{temp_dir}/foo.bar", exist_ok=True)
|
||||||
|
with open(f"{temp_dir}/foo.bar/fullchain.pem", "w+", encoding="utf-8") as _cert:
|
||||||
|
_cert.write(builder.certificate)
|
||||||
|
with open(f"{temp_dir}/foo.bar/privkey.pem", "w+", encoding="utf-8") as _key:
|
||||||
|
_key.write(builder.private_key)
|
||||||
|
with CONFIG.patch("cert_discovery_dir", temp_dir):
|
||||||
|
# pyright: reportGeneralTypeIssues=false
|
||||||
|
certificate_discovery() # pylint: disable=no-value-for-parameter
|
||||||
|
keypair: CertificateKeyPair = CertificateKeyPair.objects.filter(
|
||||||
|
managed=MANAGED_DISCOVERED % "foo"
|
||||||
|
).first()
|
||||||
|
self.assertIsNotNone(keypair)
|
||||||
|
self.assertIsNotNone(keypair.certificate)
|
||||||
|
self.assertIsNotNone(keypair.private_key)
|
||||||
|
self.assertTrue(
|
||||||
|
CertificateKeyPair.objects.filter(managed=MANAGED_DISCOVERED % "foo.bar").exists()
|
||||||
|
)
|
||||||
|
@ -1,4 +1,6 @@
|
|||||||
"""Events API Views"""
|
"""Events API Views"""
|
||||||
|
from json import loads
|
||||||
|
|
||||||
import django_filters
|
import django_filters
|
||||||
from django.db.models.aggregates import Count
|
from django.db.models.aggregates import Count
|
||||||
from django.db.models.fields.json import KeyTextTransform
|
from django.db.models.fields.json import KeyTextTransform
|
||||||
@ -12,6 +14,7 @@ from rest_framework.response import Response
|
|||||||
from rest_framework.serializers import ModelSerializer
|
from rest_framework.serializers import ModelSerializer
|
||||||
from rest_framework.viewsets import ModelViewSet
|
from rest_framework.viewsets import ModelViewSet
|
||||||
|
|
||||||
|
from authentik.admin.api.metrics import CoordinateSerializer
|
||||||
from authentik.core.api.utils import PassiveSerializer, TypeCreateSerializer
|
from authentik.core.api.utils import PassiveSerializer, TypeCreateSerializer
|
||||||
from authentik.events.models import Event, EventAction
|
from authentik.events.models import Event, EventAction
|
||||||
|
|
||||||
@ -110,13 +113,20 @@ class EventViewSet(ModelViewSet):
|
|||||||
@extend_schema(
|
@extend_schema(
|
||||||
methods=["GET"],
|
methods=["GET"],
|
||||||
responses={200: EventTopPerUserSerializer(many=True)},
|
responses={200: EventTopPerUserSerializer(many=True)},
|
||||||
|
filters=[],
|
||||||
parameters=[
|
parameters=[
|
||||||
|
OpenApiParameter(
|
||||||
|
"action",
|
||||||
|
type=OpenApiTypes.STR,
|
||||||
|
location=OpenApiParameter.QUERY,
|
||||||
|
required=False,
|
||||||
|
),
|
||||||
OpenApiParameter(
|
OpenApiParameter(
|
||||||
"top_n",
|
"top_n",
|
||||||
type=OpenApiTypes.INT,
|
type=OpenApiTypes.INT,
|
||||||
location=OpenApiParameter.QUERY,
|
location=OpenApiParameter.QUERY,
|
||||||
required=False,
|
required=False,
|
||||||
)
|
),
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
@action(detail=False, methods=["GET"], pagination_class=None)
|
@action(detail=False, methods=["GET"], pagination_class=None)
|
||||||
@ -137,6 +147,40 @@ class EventViewSet(ModelViewSet):
|
|||||||
.order_by("-counted_events")[:top_n]
|
.order_by("-counted_events")[:top_n]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
methods=["GET"],
|
||||||
|
responses={200: CoordinateSerializer(many=True)},
|
||||||
|
filters=[],
|
||||||
|
parameters=[
|
||||||
|
OpenApiParameter(
|
||||||
|
"action",
|
||||||
|
type=OpenApiTypes.STR,
|
||||||
|
location=OpenApiParameter.QUERY,
|
||||||
|
required=False,
|
||||||
|
),
|
||||||
|
OpenApiParameter(
|
||||||
|
"query",
|
||||||
|
type=OpenApiTypes.STR,
|
||||||
|
location=OpenApiParameter.QUERY,
|
||||||
|
required=False,
|
||||||
|
),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
@action(detail=False, methods=["GET"], pagination_class=None)
|
||||||
|
def per_month(self, request: Request):
|
||||||
|
"""Get the count of events per month"""
|
||||||
|
filtered_action = request.query_params.get("action", EventAction.LOGIN)
|
||||||
|
try:
|
||||||
|
query = loads(request.query_params.get("query", "{}"))
|
||||||
|
except ValueError:
|
||||||
|
return Response(status=400)
|
||||||
|
return Response(
|
||||||
|
get_objects_for_user(request.user, "authentik_events.view_event")
|
||||||
|
.filter(action=filtered_action)
|
||||||
|
.filter(**query)
|
||||||
|
.get_events_per_day()
|
||||||
|
)
|
||||||
|
|
||||||
@extend_schema(responses={200: TypeCreateSerializer(many=True)})
|
@extend_schema(responses={200: TypeCreateSerializer(many=True)})
|
||||||
@action(detail=False, pagination_class=None, filter_backends=[])
|
@action(detail=False, pagination_class=None, filter_backends=[])
|
||||||
def actions(self, request: Request) -> Response:
|
def actions(self, request: Request) -> Response:
|
||||||
|
@@ -7,6 +7,7 @@ from typing import Optional, TypedDict
 from geoip2.database import Reader
 from geoip2.errors import GeoIP2Error
 from geoip2.models import City
+from sentry_sdk.hub import Hub
 from structlog.stdlib import get_logger
 
 from authentik.lib.config import CONFIG
@@ -62,13 +63,17 @@ class GeoIPReader:
 
     def city(self, ip_address: str) -> Optional[City]:
         """Wrapper for Reader.city"""
-        if not self.enabled:
-            return None
-        self.__check_expired()
-        try:
-            return self.__reader.city(ip_address)
-        except (GeoIP2Error, ValueError):
-            return None
+        with Hub.current.start_span(
+            op="authentik.events.geo.city",
+            description=ip_address,
+        ):
+            if not self.enabled:
+                return None
+            self.__check_expired()
+            try:
+                return self.__reader.city(ip_address)
+            except (GeoIP2Error, ValueError):
+                return None
 
     def city_dict(self, ip_address: str) -> Optional[GeoIPDict]:
         """Wrapper for self.city that returns a dict"""
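The GeoIP lookup above is now wrapped in a Sentry performance span so each city() call shows up in tracing. A minimal sketch of the same instrumentation pattern, assuming sentry_sdk is already initialised elsewhere; the op/description strings and the function body are illustrative:

from sentry_sdk.hub import Hub

def expensive_lookup(key: str) -> str:
    with Hub.current.start_span(
        op="example.cache.lookup",  # grouping key in the trace view
        description=key,            # per-call detail shown on the span
    ):
        # work happens inside the span so its duration is recorded
        return key.upper()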
@@ -7,16 +7,25 @@ from django.core.exceptions import SuspiciousOperation
 from django.db.models import Model
 from django.db.models.signals import post_save, pre_delete
 from django.http import HttpRequest, HttpResponse
+from django_otp.plugins.otp_static.models import StaticToken
 from guardian.models import UserObjectPermission
 
 from authentik.core.middleware import LOCAL
-from authentik.core.models import User
+from authentik.core.models import AuthenticatedSession, User
 from authentik.events.models import Event, EventAction, Notification
 from authentik.events.signals import EventNewThread
 from authentik.events.utils import model_to_dict
 from authentik.lib.sentry import before_send
 from authentik.lib.utils.errors import exception_to_string
 
+IGNORED_MODELS = (
+    Event,
+    Notification,
+    UserObjectPermission,
+    AuthenticatedSession,
+    StaticToken,
+)
+
 
 class AuditMiddleware:
     """Register handlers for duration of request-response that log creation/update/deletion
@@ -82,7 +91,7 @@ class AuditMiddleware:
         user: User, request: HttpRequest, sender, instance: Model, created: bool, **_
     ):
         """Signal handler for all object's post_save"""
-        if isinstance(instance, (Event, Notification, UserObjectPermission)):
+        if isinstance(instance, IGNORED_MODELS):
             return
 
         action = EventAction.MODEL_CREATED if created else EventAction.MODEL_UPDATED
@@ -92,7 +101,7 @@ class AuditMiddleware:
     # pylint: disable=unused-argument
    def pre_delete_handler(user: User, request: HttpRequest, sender, instance: Model, **_):
        """Signal handler for all object's pre_delete"""
-        if isinstance(instance, (Event, Notification, UserObjectPermission)): # pragma: no cover
+        if isinstance(instance, IGNORED_MODELS): # pragma: no cover
            return
 
        EventNewThread(
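Both handlers above now share the module-level IGNORED_MODELS tuple instead of repeating an inline tuple. A framework-free sketch of the same idea (all names below are hypothetical, not authentik code):

class SessionRecord:  # hypothetical bookkeeping model
    pass

class CachedToken:  # hypothetical bookkeeping model
    pass

# one shared tuple keeps every signal handler's exclusion list in sync
AUDIT_EXEMPT = (SessionRecord, CachedToken)

def should_audit(instance) -> bool:
    """Return False for models that only hold bookkeeping state."""
    return not isinstance(instance, AUDIT_EXEMPT)

assert should_audit(object()) is True
assert should_audit(SessionRecord()) is False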
@@ -4,7 +4,6 @@ import uuid
 from datetime import timedelta
 from typing import Iterable
 
-import django.core.validators
 import django.db.models.deletion
 from django.apps.registry import Apps
 from django.conf import settings
@@ -12,6 +11,7 @@ from django.db import migrations, models
 from django.db.backends.base.schema import BaseDatabaseSchemaEditor
 
 import authentik.events.models
+import authentik.lib.models
 from authentik.events.models import EventAction, NotificationSeverity, TransportMode
 
 
@ -314,169 +314,10 @@ class Migration(migrations.Migration):
|
|||||||
old_name="user_json",
|
old_name="user_json",
|
||||||
new_name="user",
|
new_name="user",
|
||||||
),
|
),
|
||||||
migrations.AlterField(
|
|
||||||
model_name="event",
|
|
||||||
name="action",
|
|
||||||
field=models.TextField(
|
|
||||||
choices=[
|
|
||||||
("login", "Login"),
|
|
||||||
("login_failed", "Login Failed"),
|
|
||||||
("logout", "Logout"),
|
|
||||||
("sign_up", "Sign Up"),
|
|
||||||
("authorize_application", "Authorize Application"),
|
|
||||||
("suspicious_request", "Suspicious Request"),
|
|
||||||
("password_set", "Password Set"),
|
|
||||||
("invitation_created", "Invite Created"),
|
|
||||||
("invitation_used", "Invite Used"),
|
|
||||||
("source_linked", "Source Linked"),
|
|
||||||
("impersonation_started", "Impersonation Started"),
|
|
||||||
("impersonation_ended", "Impersonation Ended"),
|
|
||||||
("model_created", "Model Created"),
|
|
||||||
("model_updated", "Model Updated"),
|
|
||||||
("model_deleted", "Model Deleted"),
|
|
||||||
("custom_", "Custom Prefix"),
|
|
||||||
]
|
|
||||||
),
|
|
||||||
),
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name="event",
|
|
||||||
name="action",
|
|
||||||
field=models.TextField(
|
|
||||||
choices=[
|
|
||||||
("login", "Login"),
|
|
||||||
("login_failed", "Login Failed"),
|
|
||||||
("logout", "Logout"),
|
|
||||||
("user_write", "User Write"),
|
|
||||||
("suspicious_request", "Suspicious Request"),
|
|
||||||
("password_set", "Password Set"),
|
|
||||||
("invitation_created", "Invite Created"),
|
|
||||||
("invitation_used", "Invite Used"),
|
|
||||||
("authorize_application", "Authorize Application"),
|
|
||||||
("source_linked", "Source Linked"),
|
|
||||||
("impersonation_started", "Impersonation Started"),
|
|
||||||
("impersonation_ended", "Impersonation Ended"),
|
|
||||||
("model_created", "Model Created"),
|
|
||||||
("model_updated", "Model Updated"),
|
|
||||||
("model_deleted", "Model Deleted"),
|
|
||||||
("custom_", "Custom Prefix"),
|
|
||||||
]
|
|
||||||
),
|
|
||||||
),
|
|
||||||
migrations.RemoveField(
|
migrations.RemoveField(
|
||||||
model_name="event",
|
model_name="event",
|
||||||
name="date",
|
name="date",
|
||||||
),
|
),
|
||||||
migrations.AlterField(
|
|
||||||
model_name="event",
|
|
||||||
name="action",
|
|
||||||
field=models.TextField(
|
|
||||||
choices=[
|
|
||||||
("login", "Login"),
|
|
||||||
("login_failed", "Login Failed"),
|
|
||||||
("logout", "Logout"),
|
|
||||||
("user_write", "User Write"),
|
|
||||||
("suspicious_request", "Suspicious Request"),
|
|
||||||
("password_set", "Password Set"),
|
|
||||||
("token_view", "Token View"),
|
|
||||||
("invitation_created", "Invite Created"),
|
|
||||||
("invitation_used", "Invite Used"),
|
|
||||||
("authorize_application", "Authorize Application"),
|
|
||||||
("source_linked", "Source Linked"),
|
|
||||||
("impersonation_started", "Impersonation Started"),
|
|
||||||
("impersonation_ended", "Impersonation Ended"),
|
|
||||||
("model_created", "Model Created"),
|
|
||||||
("model_updated", "Model Updated"),
|
|
||||||
("model_deleted", "Model Deleted"),
|
|
||||||
("custom_", "Custom Prefix"),
|
|
||||||
]
|
|
||||||
),
|
|
||||||
),
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name="event",
|
|
||||||
name="action",
|
|
||||||
field=models.TextField(
|
|
||||||
choices=[
|
|
||||||
("login", "Login"),
|
|
||||||
("login_failed", "Login Failed"),
|
|
||||||
("logout", "Logout"),
|
|
||||||
("user_write", "User Write"),
|
|
||||||
("suspicious_request", "Suspicious Request"),
|
|
||||||
("password_set", "Password Set"),
|
|
||||||
("token_view", "Token View"),
|
|
||||||
("invitation_created", "Invite Created"),
|
|
||||||
("invitation_used", "Invite Used"),
|
|
||||||
("authorize_application", "Authorize Application"),
|
|
||||||
("source_linked", "Source Linked"),
|
|
||||||
("impersonation_started", "Impersonation Started"),
|
|
||||||
("impersonation_ended", "Impersonation Ended"),
|
|
||||||
("policy_execution", "Policy Execution"),
|
|
||||||
("policy_exception", "Policy Exception"),
|
|
||||||
("property_mapping_exception", "Property Mapping Exception"),
|
|
||||||
("model_created", "Model Created"),
|
|
||||||
("model_updated", "Model Updated"),
|
|
||||||
("model_deleted", "Model Deleted"),
|
|
||||||
("custom_", "Custom Prefix"),
|
|
||||||
]
|
|
||||||
),
|
|
||||||
),
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name="event",
|
|
||||||
name="action",
|
|
||||||
field=models.TextField(
|
|
||||||
choices=[
|
|
||||||
("login", "Login"),
|
|
||||||
("login_failed", "Login Failed"),
|
|
||||||
("logout", "Logout"),
|
|
||||||
("user_write", "User Write"),
|
|
||||||
("suspicious_request", "Suspicious Request"),
|
|
||||||
("password_set", "Password Set"),
|
|
||||||
("token_view", "Token View"),
|
|
||||||
("invitation_created", "Invite Created"),
|
|
||||||
("invitation_used", "Invite Used"),
|
|
||||||
("authorize_application", "Authorize Application"),
|
|
||||||
("source_linked", "Source Linked"),
|
|
||||||
("impersonation_started", "Impersonation Started"),
|
|
||||||
("impersonation_ended", "Impersonation Ended"),
|
|
||||||
("policy_execution", "Policy Execution"),
|
|
||||||
("policy_exception", "Policy Exception"),
|
|
||||||
("property_mapping_exception", "Property Mapping Exception"),
|
|
||||||
("model_created", "Model Created"),
|
|
||||||
("model_updated", "Model Updated"),
|
|
||||||
("model_deleted", "Model Deleted"),
|
|
||||||
("update_available", "Update Available"),
|
|
||||||
("custom_", "Custom Prefix"),
|
|
||||||
]
|
|
||||||
),
|
|
||||||
),
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name="event",
|
|
||||||
name="action",
|
|
||||||
field=models.TextField(
|
|
||||||
choices=[
|
|
||||||
("login", "Login"),
|
|
||||||
("login_failed", "Login Failed"),
|
|
||||||
("logout", "Logout"),
|
|
||||||
("user_write", "User Write"),
|
|
||||||
("suspicious_request", "Suspicious Request"),
|
|
||||||
("password_set", "Password Set"),
|
|
||||||
("token_view", "Token View"),
|
|
||||||
("invitation_used", "Invite Used"),
|
|
||||||
("authorize_application", "Authorize Application"),
|
|
||||||
("source_linked", "Source Linked"),
|
|
||||||
("impersonation_started", "Impersonation Started"),
|
|
||||||
("impersonation_ended", "Impersonation Ended"),
|
|
||||||
("policy_execution", "Policy Execution"),
|
|
||||||
("policy_exception", "Policy Exception"),
|
|
||||||
("property_mapping_exception", "Property Mapping Exception"),
|
|
||||||
("configuration_error", "Configuration Error"),
|
|
||||||
("model_created", "Model Created"),
|
|
||||||
("model_updated", "Model Updated"),
|
|
||||||
("model_deleted", "Model Deleted"),
|
|
||||||
("update_available", "Update Available"),
|
|
||||||
("custom_", "Custom Prefix"),
|
|
||||||
]
|
|
||||||
),
|
|
||||||
),
|
|
||||||
migrations.CreateModel(
|
migrations.CreateModel(
|
||||||
name="NotificationTransport",
|
name="NotificationTransport",
|
||||||
fields=[
|
fields=[
|
||||||
@ -610,68 +451,6 @@ class Migration(migrations.Migration):
|
|||||||
help_text="Only send notification once, for example when sending a webhook into a chat channel.",
|
help_text="Only send notification once, for example when sending a webhook into a chat channel.",
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
migrations.AlterField(
|
|
||||||
model_name="event",
|
|
||||||
name="action",
|
|
||||||
field=models.TextField(
|
|
||||||
choices=[
|
|
||||||
("login", "Login"),
|
|
||||||
("login_failed", "Login Failed"),
|
|
||||||
("logout", "Logout"),
|
|
||||||
("user_write", "User Write"),
|
|
||||||
("suspicious_request", "Suspicious Request"),
|
|
||||||
("password_set", "Password Set"),
|
|
||||||
("token_view", "Token View"),
|
|
||||||
("invitation_used", "Invite Used"),
|
|
||||||
("authorize_application", "Authorize Application"),
|
|
||||||
("source_linked", "Source Linked"),
|
|
||||||
("impersonation_started", "Impersonation Started"),
|
|
||||||
("impersonation_ended", "Impersonation Ended"),
|
|
||||||
("policy_execution", "Policy Execution"),
|
|
||||||
("policy_exception", "Policy Exception"),
|
|
||||||
("property_mapping_exception", "Property Mapping Exception"),
|
|
||||||
("system_task_execution", "System Task Execution"),
|
|
||||||
("system_task_exception", "System Task Exception"),
|
|
||||||
("configuration_error", "Configuration Error"),
|
|
||||||
("model_created", "Model Created"),
|
|
||||||
("model_updated", "Model Updated"),
|
|
||||||
("model_deleted", "Model Deleted"),
|
|
||||||
("update_available", "Update Available"),
|
|
||||||
("custom_", "Custom Prefix"),
|
|
||||||
]
|
|
||||||
),
|
|
||||||
),
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name="event",
|
|
||||||
name="action",
|
|
||||||
field=models.TextField(
|
|
||||||
choices=[
|
|
||||||
("login", "Login"),
|
|
||||||
("login_failed", "Login Failed"),
|
|
||||||
("logout", "Logout"),
|
|
||||||
("user_write", "User Write"),
|
|
||||||
("suspicious_request", "Suspicious Request"),
|
|
||||||
("password_set", "Password Set"),
|
|
||||||
("secret_view", "Secret View"),
|
|
||||||
("invitation_used", "Invite Used"),
|
|
||||||
("authorize_application", "Authorize Application"),
|
|
||||||
("source_linked", "Source Linked"),
|
|
||||||
("impersonation_started", "Impersonation Started"),
|
|
||||||
("impersonation_ended", "Impersonation Ended"),
|
|
||||||
("policy_execution", "Policy Execution"),
|
|
||||||
("policy_exception", "Policy Exception"),
|
|
||||||
("property_mapping_exception", "Property Mapping Exception"),
|
|
||||||
("system_task_execution", "System Task Execution"),
|
|
||||||
("system_task_exception", "System Task Exception"),
|
|
||||||
("configuration_error", "Configuration Error"),
|
|
||||||
("model_created", "Model Created"),
|
|
||||||
("model_updated", "Model Updated"),
|
|
||||||
("model_deleted", "Model Deleted"),
|
|
||||||
("update_available", "Update Available"),
|
|
||||||
("custom_", "Custom Prefix"),
|
|
||||||
]
|
|
||||||
),
|
|
||||||
),
|
|
||||||
migrations.RunPython(
|
migrations.RunPython(
|
||||||
code=token_view_to_secret_view,
|
code=token_view_to_secret_view,
|
||||||
),
|
),
|
||||||
@ -688,76 +467,11 @@ class Migration(migrations.Migration):
|
|||||||
migrations.RunPython(
|
migrations.RunPython(
|
||||||
code=update_expires,
|
code=update_expires,
|
||||||
),
|
),
|
||||||
migrations.AlterField(
|
|
||||||
model_name="event",
|
|
||||||
name="action",
|
|
||||||
field=models.TextField(
|
|
||||||
choices=[
|
|
||||||
("login", "Login"),
|
|
||||||
("login_failed", "Login Failed"),
|
|
||||||
("logout", "Logout"),
|
|
||||||
("user_write", "User Write"),
|
|
||||||
("suspicious_request", "Suspicious Request"),
|
|
||||||
("password_set", "Password Set"),
|
|
||||||
("secret_view", "Secret View"),
|
|
||||||
("invitation_used", "Invite Used"),
|
|
||||||
("authorize_application", "Authorize Application"),
|
|
||||||
("source_linked", "Source Linked"),
|
|
||||||
("impersonation_started", "Impersonation Started"),
|
|
||||||
("impersonation_ended", "Impersonation Ended"),
|
|
||||||
("policy_execution", "Policy Execution"),
|
|
||||||
("policy_exception", "Policy Exception"),
|
|
||||||
("property_mapping_exception", "Property Mapping Exception"),
|
|
||||||
("system_task_execution", "System Task Execution"),
|
|
||||||
("system_task_exception", "System Task Exception"),
|
|
||||||
("configuration_error", "Configuration Error"),
|
|
||||||
("model_created", "Model Created"),
|
|
||||||
("model_updated", "Model Updated"),
|
|
||||||
("model_deleted", "Model Deleted"),
|
|
||||||
("email_sent", "Email Sent"),
|
|
||||||
("update_available", "Update Available"),
|
|
||||||
("custom_", "Custom Prefix"),
|
|
||||||
]
|
|
||||||
),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name="event",
|
model_name="event",
|
||||||
name="tenant",
|
name="tenant",
|
||||||
field=models.JSONField(blank=True, default=authentik.events.models.default_tenant),
|
field=models.JSONField(blank=True, default=authentik.events.models.default_tenant),
|
||||||
),
|
),
|
||||||
migrations.AlterField(
|
|
||||||
model_name="event",
|
|
||||||
name="action",
|
|
||||||
field=models.TextField(
|
|
||||||
choices=[
|
|
||||||
("login", "Login"),
|
|
||||||
("login_failed", "Login Failed"),
|
|
||||||
("logout", "Logout"),
|
|
||||||
("user_write", "User Write"),
|
|
||||||
("suspicious_request", "Suspicious Request"),
|
|
||||||
("password_set", "Password Set"),
|
|
||||||
("secret_view", "Secret View"),
|
|
||||||
("invitation_used", "Invite Used"),
|
|
||||||
("authorize_application", "Authorize Application"),
|
|
||||||
("source_linked", "Source Linked"),
|
|
||||||
("impersonation_started", "Impersonation Started"),
|
|
||||||
("impersonation_ended", "Impersonation Ended"),
|
|
||||||
("policy_execution", "Policy Execution"),
|
|
||||||
("policy_exception", "Policy Exception"),
|
|
||||||
("property_mapping_exception", "Property Mapping Exception"),
|
|
||||||
("system_task_execution", "System Task Execution"),
|
|
||||||
("system_task_exception", "System Task Exception"),
|
|
||||||
("system_exception", "System Exception"),
|
|
||||||
("configuration_error", "Configuration Error"),
|
|
||||||
("model_created", "Model Created"),
|
|
||||||
("model_updated", "Model Updated"),
|
|
||||||
("model_deleted", "Model Deleted"),
|
|
||||||
("email_sent", "Email Sent"),
|
|
||||||
("update_available", "Update Available"),
|
|
||||||
("custom_", "Custom Prefix"),
|
|
||||||
]
|
|
||||||
),
|
|
||||||
),
|
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
model_name="event",
|
model_name="event",
|
||||||
name="action",
|
name="action",
|
||||||
@@ -776,6 +490,7 @@ class Migration(migrations.Migration):
                     ("source_linked", "Source Linked"),
                     ("impersonation_started", "Impersonation Started"),
                     ("impersonation_ended", "Impersonation Ended"),
+                    ("flow_execution", "Flow Execution"),
                     ("policy_execution", "Policy Execution"),
                     ("policy_exception", "Policy Exception"),
                     ("property_mapping_exception", "Property Mapping Exception"),
@@ -826,6 +541,8 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name="notificationtransport",
             name="webhook_url",
-            field=models.TextField(blank=True, validators=[django.core.validators.URLValidator()]),
+            field=models.TextField(
+                blank=True, validators=[authentik.lib.models.DomainlessURLValidator()]
+            ),
         ),
     ]
@@ -1,8 +1,9 @@
 # Generated by Django 3.2.7 on 2021-10-04 15:31
 
-import django.core.validators
 from django.db import migrations, models
 
+import authentik.lib.models
+
 
 class Migration(migrations.Migration):
 
@@ -14,6 +15,8 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name="notificationtransport",
             name="webhook_url",
-            field=models.TextField(blank=True, validators=[django.core.validators.URLValidator()]),
+            field=models.TextField(
+                blank=True, validators=[authentik.lib.models.DomainlessURLValidator()]
+            ),
         ),
     ]
@@ -1,13 +1,20 @@
 """authentik events models"""
+import time
+from collections import Counter
 from datetime import timedelta
-from inspect import getmodule, stack
+from inspect import currentframe
 from smtplib import SMTPException
 from typing import TYPE_CHECKING, Optional, Type, Union
 from uuid import uuid4
 
 from django.conf import settings
-from django.core.validators import URLValidator
 from django.db import models
+from django.db.models import Count, ExpressionWrapper, F
+from django.db.models.fields import DurationField
+from django.db.models.functions import ExtractHour
+from django.db.models.functions.datetime import ExtractDay
+from django.db.models.manager import Manager
+from django.db.models.query import QuerySet
 from django.http import HttpRequest
 from django.http.request import QueryDict
 from django.utils.timezone import now
@@ -20,6 +27,7 @@ from authentik.core.middleware import SESSION_IMPERSONATE_ORIGINAL_USER, SESSION
 from authentik.core.models import ExpiringModel, Group, PropertyMapping, User
 from authentik.events.geo import GEOIP_READER
 from authentik.events.utils import cleanse_dict, get_user, model_to_dict, sanitize_dict
+from authentik.lib.models import DomainlessURLValidator
 from authentik.lib.sentry import SentryIgnoredException
 from authentik.lib.utils.http import get_client_ip, get_http_session
 from authentik.lib.utils.time import timedelta_from_string
@@ -70,6 +78,7 @@ class EventAction(models.TextChoices):
     IMPERSONATION_STARTED = "impersonation_started"
     IMPERSONATION_ENDED = "impersonation_ended"
 
+    FLOW_EXECUTION = "flow_execution"
     POLICY_EXECUTION = "policy_execution"
     POLICY_EXCEPTION = "policy_exception"
     PROPERTY_MAPPING_EXCEPTION = "property_mapping_exception"
@@ -90,6 +99,72 @@ class EventAction(models.TextChoices):
     CUSTOM_PREFIX = "custom_"
 
 
+class EventQuerySet(QuerySet):
+    """Custom events query set with helper functions"""
+
+    def get_events_per_hour(self) -> list[dict[str, int]]:
+        """Get event count by hour in the last day, fill with zeros"""
+        date_from = now() - timedelta(days=1)
+        result = (
+            self.filter(created__gte=date_from)
+            .annotate(age=ExpressionWrapper(now() - F("created"), output_field=DurationField()))
+            .annotate(age_hours=ExtractHour("age"))
+            .values("age_hours")
+            .annotate(count=Count("pk"))
+            .order_by("age_hours")
+        )
+        data = Counter({int(d["age_hours"]): d["count"] for d in result})
+        results = []
+        _now = now()
+        for hour in range(0, -24, -1):
+            results.append(
+                {
+                    "x_cord": time.mktime((_now + timedelta(hours=hour)).timetuple()) * 1000,
+                    "y_cord": data[hour * -1],
+                }
+            )
+        return results
+
+    def get_events_per_day(self) -> list[dict[str, int]]:
+        """Get event count by hour in the last day, fill with zeros"""
+        date_from = now() - timedelta(weeks=4)
+        result = (
+            self.filter(created__gte=date_from)
+            .annotate(age=ExpressionWrapper(now() - F("created"), output_field=DurationField()))
+            .annotate(age_days=ExtractDay("age"))
+            .values("age_days")
+            .annotate(count=Count("pk"))
+            .order_by("age_days")
+        )
+        data = Counter({int(d["age_days"]): d["count"] for d in result})
+        results = []
+        _now = now()
+        for day in range(0, -30, -1):
+            results.append(
+                {
+                    "x_cord": time.mktime((_now + timedelta(days=day)).timetuple()) * 1000,
+                    "y_cord": data[day * -1],
+                }
+            )
+        return results
+
+
+class EventManager(Manager):
+    """Custom helper methods for Events"""
+
+    def get_queryset(self) -> QuerySet:
+        """use custom queryset"""
+        return EventQuerySet(self.model, using=self._db)
+
+    def get_events_per_hour(self) -> list[dict[str, int]]:
+        """Wrap method from queryset"""
+        return self.get_queryset().get_events_per_hour()
+
+    def get_events_per_day(self) -> list[dict[str, int]]:
+        """Wrap method from queryset"""
+        return self.get_queryset().get_events_per_day()
+
+
 class Event(ExpiringModel):
     """An individual Audit/Metrics/Notification/Error Event"""
 
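The queryset helpers bucket events by age with ExtractHour/ExtractDay and then zero-fill the series through a Counter so the chart always has a fixed number of points. A standalone sketch of just the zero-fill step, with made-up bucket data (not authentik code):

import time
from collections import Counter
from datetime import datetime, timedelta

sparse_buckets = {0: 12, 3: 4, 7: 1}   # hypothetical {hours_ago: event_count}
data = Counter(sparse_buckets)          # Counter returns 0 for missing keys

now = datetime.now()
series = [
    {
        "x_cord": time.mktime((now - timedelta(hours=hours_ago)).timetuple()) * 1000,
        "y_cord": data[hours_ago],      # 0 when no events fell into that hour
    }
    for hours_ago in range(24)
]
assert len(series) == 24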
@@ -105,6 +180,8 @@ class Event(ExpiringModel):
     # Shadow the expires attribute from ExpiringModel to override the default duration
     expires = models.DateTimeField(default=default_event_duration)
 
+    objects = EventManager()
+
     @staticmethod
     def _get_app_from_request(request: HttpRequest) -> str:
         if not isinstance(request, HttpRequest):
@@ -115,14 +192,15 @@ class Event(ExpiringModel):
     def new(
         action: Union[str, EventAction],
         app: Optional[str] = None,
-        _inspect_offset: int = 1,
         **kwargs,
     ) -> "Event":
         """Create new Event instance from arguments. Instance is NOT saved."""
         if not isinstance(action, EventAction):
             action = EventAction.CUSTOM_PREFIX + action
         if not app:
-            app = getmodule(stack()[_inspect_offset][0]).__name__
+            current = currentframe()
+            parent = current.f_back
+            app = parent.f_globals["__name__"]
         cleaned_kwargs = cleanse_dict(sanitize_dict(kwargs))
         event = Event(action=action, app=app, context=cleaned_kwargs)
         return event
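Event.new() now resolves the calling module via currentframe().f_back instead of building a full inspect.stack(), which avoids walking and resolving every frame on each event. A standalone sketch of that trick (illustrative, not authentik code):

from inspect import currentframe

def caller_module() -> str:
    frame = currentframe()
    parent = frame.f_back              # the frame that called caller_module()
    return parent.f_globals["__name__"]

print(caller_module())                 # "__main__" when run as a script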
@@ -224,7 +302,7 @@ class NotificationTransport(models.Model):
     name = models.TextField(unique=True)
     mode = models.TextField(choices=TransportMode.choices)
 
-    webhook_url = models.TextField(blank=True, validators=[URLValidator()])
+    webhook_url = models.TextField(blank=True, validators=[DomainlessURLValidator()])
     webhook_mapping = models.ForeignKey(
         "NotificationWebhookMapping", on_delete=models.SET_DEFAULT, null=True, default=None
     )
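Swapping Django's stock URLValidator for DomainlessURLValidator is presumably about accepting single-label hosts (for example http://mattermost:8065/hooks/...), which URLValidator rejects. A hedged, minimal check along those lines; this is not authentik's actual DomainlessURLValidator implementation:

from urllib.parse import urlparse

from django.core.exceptions import ValidationError

def validate_webhook_url(value: str) -> None:
    """Accept any http(s) URL with a host, including dot-less internal hostnames."""
    parsed = urlparse(value)
    if parsed.scheme not in ("http", "https") or not parsed.netloc:
        raise ValidationError("Enter a valid http(s) URL")

validate_webhook_url("http://mattermost:8065/hooks/abc")  # passes, unlike URLValidator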
@@ -46,7 +46,7 @@ class TaskResult:
 
     def with_error(self, exc: Exception) -> "TaskResult":
         """Since errors might not always be pickle-able, set the traceback"""
-        self.messages.extend(exception_to_string(exc).splitlines())
+        self.messages.append(str(exc))
         return self
 
 
@@ -112,30 +112,6 @@ class TaskInfo:
         cache.set(key, self, timeout=timeout_hours * 60 * 60)
 
 
-def prefill_task():
-    """Ensure a task's details are always in cache, so it can always be triggered via API"""
-
-    def inner_wrap(func):
-        status = TaskInfo.by_name(func.__name__)
-        if status:
-            return func
-        TaskInfo(
-            task_name=func.__name__,
-            task_description=func.__doc__,
-            result=TaskResult(TaskResultStatus.UNKNOWN, messages=[_("Task has not been run yet.")]),
-            task_call_module=func.__module__,
-            task_call_func=func.__name__,
-            # We don't have real values for these attributes but they cannot be null
-            start_timestamp=default_timer(),
-            finish_timestamp=default_timer(),
-            finish_time=datetime.now(),
-        ).save(86400)
-        LOGGER.debug("prefilled task", task_name=func.__name__)
-        return func
-
-    return inner_wrap
-
-
 class MonitoredTask(Task):
     """Task which can save its state to the cache"""
 
@@ -210,5 +186,21 @@ class MonitoredTask(Task):
         raise NotImplementedError
 
 
-for task in TaskInfo.all().values():
-    task.set_prom_metrics()
+def prefill_task(func):
+    """Ensure a task's details are always in cache, so it can always be triggered via API"""
+    status = TaskInfo.by_name(func.__name__)
+    if status:
+        return func
+    TaskInfo(
+        task_name=func.__name__,
+        task_description=func.__doc__,
+        result=TaskResult(TaskResultStatus.UNKNOWN, messages=[_("Task has not been run yet.")]),
+        task_call_module=func.__module__,
+        task_call_func=func.__name__,
+        # We don't have real values for these attributes but they cannot be null
+        start_timestamp=default_timer(),
+        finish_timestamp=default_timer(),
+        finish_time=datetime.now(),
+    ).save(86400)
+    LOGGER.debug("prefilled task", task_name=func.__name__)
+    return func
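prefill_task is now applied directly as a decorator (the inner_wrap factory removed above is gone), so a task registers a placeholder TaskInfo at import time. A hedged usage sketch; the task name and body are made up, and the imports assume authentik's module layout at this revision:

from authentik.events.monitored_tasks import (
    MonitoredTask,
    TaskResult,
    TaskResultStatus,
    prefill_task,
)
from authentik.root.celery import CELERY_APP


@CELERY_APP.task(bind=True, base=MonitoredTask)
@prefill_task
def nightly_example_cleanup(self: MonitoredTask):
    """Hypothetical task body: report success so it shows as healthy in the UI"""
    self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL))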
@@ -3,14 +3,14 @@ from threading import Thread
 from typing import Any, Optional
 
 from django.contrib.auth.signals import user_logged_in, user_logged_out, user_login_failed
-from django.db.models.signals import post_save
+from django.db.models.signals import post_save, pre_delete
 from django.dispatch import receiver
 from django.http import HttpRequest
 
 from authentik.core.models import User
 from authentik.core.signals import password_changed
 from authentik.events.models import Event, EventAction
-from authentik.events.tasks import event_notification_handler
+from authentik.events.tasks import event_notification_handler, gdpr_cleanup
 from authentik.flows.planner import PLAN_CONTEXT_SOURCE, FlowPlan
 from authentik.flows.views.executor import SESSION_KEY_PLAN
 from authentik.stages.invitation.models import Invitation
@@ -108,3 +108,10 @@ def on_password_changed(sender, user: User, password: str, **_):
 def event_post_save_notification(sender, instance: Event, **_):
     """Start task to check if any policies trigger an notification on this event"""
     event_notification_handler.delay(instance.event_uuid.hex)
+
+
+@receiver(pre_delete, sender=User)
+# pylint: disable=unused-argument
+def event_user_pre_delete_cleanup(sender, instance: User, **_):
+    """If gdpr_compliance is enabled, remove all the user's events"""
+    gdpr_cleanup.delay(instance.pk)
@@ -98,9 +98,19 @@ def notification_transport(self: MonitoredTask, notification_pk: int, transport_
         notification: Notification = Notification.objects.filter(pk=notification_pk).first()
         if not notification:
             return
-        transport: NotificationTransport = NotificationTransport.objects.get(pk=transport_pk)
+        transport = NotificationTransport.objects.filter(pk=transport_pk).first()
+        if not transport:
+            return
         transport.send(notification)
         self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL))
     except NotificationTransportError as exc:
         self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
         raise exc
+
+
+@CELERY_APP.task()
+def gdpr_cleanup(user_pk: int):
+    """cleanup events from gdpr_compliance"""
+    events = Event.objects.filter(user__pk=user_pk)
+    LOGGER.debug("GDPR cleanup, removing events from user", events=events.count())
+    events.delete()
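Using .filter(pk=...).first() instead of .objects.get() lets the notification task return quietly when the transport row has disappeared, rather than failing with DoesNotExist. A framework-free illustration of the difference (names hypothetical, not authentik code):

class DoesNotExist(Exception):
    pass

store = {1: "webhook-transport"}   # hypothetical pk -> transport mapping

def get(pk):                        # old behaviour, like .objects.get(pk=pk)
    if pk not in store:
        raise DoesNotExist(pk)
    return store[pk]

def first(pk):                      # new behaviour, like .filter(pk=pk).first()
    return store.get(pk)

try:
    get(2)
except DoesNotExist:
    pass                            # old code path: the task would error out here
assert first(2) is None             # new code path: the task just returns early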
@@ -3,7 +3,7 @@
 from django.urls import reverse
 from rest_framework.test import APITestCase
 
-from authentik.core.models import User
+from authentik.core.tests.utils import create_test_admin_user
 from authentik.events.models import (
     Event,
     EventAction,
@@ -17,7 +17,7 @@ class TestEventsAPI(APITestCase):
     """Test Event API"""
 
     def setUp(self) -> None:
-        self.user = User.objects.get(username="akadmin")
+        self.user = create_test_admin_user()
         self.client.force_login(self.user)
 
     def test_top_n(self):
@@ -3,7 +3,8 @@
 from django.urls import reverse
 from rest_framework.test import APITestCase
 
-from authentik.core.models import Application, User
+from authentik.core.models import Application
+from authentik.core.tests.utils import create_test_admin_user
 from authentik.events.models import Event, EventAction
 
 
@@ -12,7 +13,7 @@ class TestEventsMiddleware(APITestCase):
 
     def setUp(self) -> None:
         super().setUp()
-        self.user = User.objects.get(username="akadmin")
+        self.user = create_test_admin_user()
         self.client.force_login(self.user)
 
     def test_create(self):
@@ -1,6 +1,4 @@
 """Flow Stage API Views"""
-from typing import Iterable
-
 from django.urls.base import reverse
 from drf_spectacular.utils import extend_schema
 from rest_framework import mixins
@@ -15,7 +13,7 @@ from authentik.core.api.used_by import UsedByMixin
 from authentik.core.api.utils import MetaNameSerializer, TypeCreateSerializer
 from authentik.core.types import UserSettingSerializer
 from authentik.flows.api.flows import FlowSerializer
-from authentik.flows.models import Stage
+from authentik.flows.models import ConfigurableStage, Stage
 from authentik.lib.utils.reflection import all_subclasses
 
 LOGGER = get_logger()
@@ -43,6 +41,7 @@ class StageSerializer(ModelSerializer, MetaNameSerializer):
             "component",
             "verbose_name",
             "verbose_name_plural",
+            "meta_model_name",
             "flow_set",
         ]
 
@@ -86,10 +85,12 @@ class StageViewSet(
     @action(detail=False, pagination_class=None, filter_backends=[])
     def user_settings(self, request: Request) -> Response:
         """Get all stages the user can configure"""
-        _all_stages: Iterable[Stage] = Stage.objects.all().select_subclasses().order_by("name")
+        stages = []
+        for configurable_stage in all_subclasses(ConfigurableStage):
+            stages += list(configurable_stage.objects.all().order_by("name"))
         matching_stages: list[dict] = []
-        for stage in _all_stages:
-            user_settings = stage.ui_user_settings
+        for stage in stages:
+            user_settings = stage.ui_user_settings()
             if not user_settings:
                 continue
             user_settings.initial_data["object_uid"] = str(stage.pk)
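user_settings() now iterates all_subclasses(ConfigurableStage) instead of select_subclasses() over every Stage. A simplified, framework-free stand-in for that reflection idea; authentik's real all_subclasses in lib.utils.reflection differs in details:

def all_subclasses(cls):
    """Recursively yield every subclass, so new implementations are picked up automatically."""
    for sub in cls.__subclasses__():
        yield sub
        yield from all_subclasses(sub)

class ConfigurableThing:                    # stand-in for ConfigurableStage
    pass

class AuthenticatorThing(ConfigurableThing):
    pass

class WebAuthnThing(AuthenticatorThing):
    pass

assert {c.__name__ for c in all_subclasses(ConfigurableThing)} == {
    "AuthenticatorThing",
    "WebAuthnThing",
}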
@@ -10,7 +10,7 @@ from django.test import RequestFactory
 from structlog.stdlib import get_logger
 
 from authentik import __version__
-from authentik.core.models import User
+from authentik.core.tests.utils import create_test_admin_user
 from authentik.flows.models import Flow
 from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner
 
@@ -68,7 +68,7 @@ class Command(BaseCommand): # pragma: no cover
     def benchmark_flows(self, proc_count):
         """Get full recovery link"""
         flow = Flow.objects.get(slug="default-authentication-flow")
-        user = User.objects.get(username="akadmin")
+        user = create_test_admin_user()
         manager = Manager()
         return_dict = manager.dict()
 
authentik/flows/migrations/0020_flowtoken.py (new file, 46 lines)
@@ -0,0 +1,46 @@
+# Generated by Django 3.2.9 on 2021-12-05 13:50
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_core", "0018_auto_20210330_1345_squashed_0028_alter_token_intent"),
+        (
+            "authentik_flows",
+            "0019_alter_flow_background_squashed_0024_alter_flow_compatibility_mode",
+        ),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name="FlowToken",
+            fields=[
+                (
+                    "token_ptr",
+                    models.OneToOneField(
+                        auto_created=True,
+                        on_delete=django.db.models.deletion.CASCADE,
+                        parent_link=True,
+                        primary_key=True,
+                        serialize=False,
+                        to="authentik_core.token",
+                    ),
+                ),
+                ("_plan", models.TextField()),
+                (
+                    "flow",
+                    models.ForeignKey(
+                        on_delete=django.db.models.deletion.CASCADE, to="authentik_flows.flow"
+                    ),
+                ),
+            ],
+            options={
+                "verbose_name": "Flow Token",
+                "verbose_name_plural": "Flow Tokens",
+            },
+            bases=("authentik_core.token",),
+        ),
+    ]
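For context, a hedged sketch of the Django model shape that generates a migration like the one above: subclassing a concrete model (multi-table inheritance) is what produces the parent-link token_ptr OneToOneField. This assumes a configured Django project; the app label and field names here are made up, not authentik code:

from django.db import models

class Token(models.Model):
    key = models.TextField()

    class Meta:
        app_label = "example"

class FlowToken(Token):  # multi-table inheritance: implicit token_ptr plus its own columns
    _plan = models.TextField()

    class Meta:
        app_label = "example"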
@@ -1,4 +1,6 @@
 """Flow models"""
+from base64 import b64decode, b64encode
+from pickle import dumps, loads # nosec
 from typing import TYPE_CHECKING, Optional, Type
 from uuid import uuid4
 
@@ -9,11 +11,13 @@ from model_utils.managers import InheritanceManager
 from rest_framework.serializers import BaseSerializer
 from structlog.stdlib import get_logger
 
+from authentik.core.models import Token
 from authentik.core.types import UserSettingSerializer
 from authentik.lib.models import InheritanceForeignKey, SerializerModel
 from authentik.policies.models import PolicyBindingModel
 
 if TYPE_CHECKING:
+    from authentik.flows.planner import FlowPlan
     from authentik.flows.stage import StageView
 
 LOGGER = get_logger()
@@ -71,7 +75,6 @@ class Stage(SerializerModel):
         """Return component used to edit this object"""
         raise NotImplementedError
 
-    @property
     def ui_user_settings(self) -> Optional[UserSettingSerializer]:
         """Entrypoint to integrate with User settings. Can either return None if no
         user settings are available, or a challenge."""
@@ -260,3 +263,30 @@ class ConfigurableStage(models.Model):
     class Meta:
 
         abstract = True
+
+
+class FlowToken(Token):
+    """Subclass of a standard Token, stores the currently active flow plan upon creation.
+    Can be used to later resume a flow."""
+
+    flow = models.ForeignKey(Flow, on_delete=models.CASCADE)
+    _plan = models.TextField()
+
+    @staticmethod
+    def pickle(plan) -> str:
+        """Pickle into string"""
+        data = dumps(plan)
+        return b64encode(data).decode()
+
+    @property
+    def plan(self) -> "FlowPlan":
+        """Load Flow plan from pickled version"""
+        return loads(b64decode(self._plan.encode())) # nosec
+
+    def __str__(self) -> str:
+        return f"Flow Token {super().__str__()}"
+
+    class Meta:
+
+        verbose_name = _("Flow Token")
+        verbose_name_plural = _("Flow Tokens")
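FlowToken stores the plan as base64-encoded pickle text. A standalone sketch of that round trip, using a plain dict as a stand-in for a FlowPlan (illustrative, not authentik code):

from base64 import b64decode, b64encode
from pickle import dumps, loads  # nosec - trusted, server-generated data only

plan = {"stages": ["identification", "password"], "context": {}}  # stand-in for a FlowPlan

encoded = b64encode(dumps(plan)).decode()        # what gets written to _plan
restored = loads(b64decode(encoded.encode()))    # what the .plan property returns
assert restored == plan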
@@ -24,6 +24,9 @@ PLAN_CONTEXT_SSO = "is_sso"
 PLAN_CONTEXT_REDIRECT = "redirect"
 PLAN_CONTEXT_APPLICATION = "application"
 PLAN_CONTEXT_SOURCE = "source"
+# Is set by the Flow Planner when a FlowToken was used, and the currently active flow plan
+# was restored.
+PLAN_CONTEXT_IS_RESTORED = "is_restored"
 GAUGE_FLOWS_CACHED = UpdatingGauge(
     "authentik_flows_cached",
     "Cached flows",
@@ -123,7 +126,9 @@ class FlowPlanner:
     ) -> FlowPlan:
         """Check each of the flows' policies, check policies for each stage with PolicyBinding
         and return ordered list"""
-        with Hub.current.start_span(op="flow.planner.plan") as span:
+        with Hub.current.start_span(
+            op="authentik.flow.planner.plan", description=self.flow.slug
+        ) as span:
             span: Span
             span.set_data("flow", self.flow)
             span.set_data("request", request)
@@ -178,7 +183,8 @@ class FlowPlanner:
         """Build flow plan by checking each stage in their respective
         order and checking the applied policies"""
         with Hub.current.start_span(
-            op="flow.planner.build_plan"
+            op="authentik.flow.planner.build_plan",
+            description=self.flow.slug,
         ) as span, HIST_FLOWS_PLAN_TIME.labels(flow_slug=self.flow.slug).time():
             span: Span
             span.set_data("flow", self.flow)
@@ -6,6 +6,7 @@ from django.http.response import HttpResponse
 from django.urls import reverse
 from django.views.generic.base import View
 from rest_framework.request import Request
+from sentry_sdk.hub import Hub
 from structlog.stdlib import get_logger
 
 from authentik.core.models import DEFAULT_AVATAR, User
@@ -94,8 +95,16 @@ class ChallengeStageView(StageView):
                     keep_context=keep_context,
                 )
                 return self.executor.restart_flow(keep_context)
-            return self.challenge_invalid(challenge)
-        return self.challenge_valid(challenge)
+            with Hub.current.start_span(
+                op="authentik.flow.stage.challenge_invalid",
+                description=self.__class__.__name__,
+            ):
+                return self.challenge_invalid(challenge)
+        with Hub.current.start_span(
+            op="authentik.flow.stage.challenge_valid",
+            description=self.__class__.__name__,
+        ):
+            return self.challenge_valid(challenge)
 
     def format_title(self) -> str:
         """Allow usage of placeholder in flow title."""
@@ -104,7 +113,11 @@ class ChallengeStageView(StageView):
         }
 
     def _get_challenge(self, *args, **kwargs) -> Challenge:
-        challenge = self.get_challenge(*args, **kwargs)
+        with Hub.current.start_span(
+            op="authentik.flow.stage.get_challenge",
+            description=self.__class__.__name__,
+        ):
+            challenge = self.get_challenge(*args, **kwargs)
         if "flow_info" not in challenge.initial_data:
             flow_info = ContextualFlowInfo(
                 data={
@@ -149,7 +162,7 @@ class ChallengeStageView(StageView):
             )
         challenge_response.initial_data["response_errors"] = full_errors
         if not challenge_response.is_valid():
-            LOGGER.warning(
+            LOGGER.error(
                 "f(ch): invalid challenge response",
                 binding=self.executor.current_binding,
                 errors=challenge_response.errors,
@@ -2,7 +2,7 @@
 from django.urls import reverse
 from rest_framework.test import APITestCase
 
-from authentik.core.models import User
+from authentik.core.tests.utils import create_test_admin_user
 from authentik.flows.api.stages import StageSerializer, StageViewSet
 from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding, Stage
 from authentik.policies.dummy.models import DummyPolicy
@@ -32,7 +32,7 @@ class TestFlowsAPI(APITestCase):
 
     def test_models(self):
         """Test that ui_user_settings returns none"""
-        self.assertIsNone(Stage().ui_user_settings)
+        self.assertIsNone(Stage().ui_user_settings())
 
     def test_api_serializer(self):
         """Test that stage serializer returns the correct type"""
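The assertion change reflects ui_user_settings turning from a property into a plain method in flows/models.py. A tiny runnable illustration of what that means for call sites (generic names, not authentik code):

class Stage:
    def ui_user_settings(self):
        return None

stage = Stage()
assert stage.ui_user_settings() is None   # new style: explicit call
assert callable(stage.ui_user_settings)   # attribute access now yields the bound method itself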
@ -47,7 +47,7 @@ class TestFlowsAPI(APITestCase):
|
|||||||
|
|
||||||
def test_api_diagram(self):
|
def test_api_diagram(self):
|
||||||
"""Test flow diagram."""
|
"""Test flow diagram."""
|
||||||
user = User.objects.get(username="akadmin")
|
user = create_test_admin_user()
|
||||||
self.client.force_login(user)
|
self.client.force_login(user)
|
||||||
|
|
||||||
flow = Flow.objects.create(
|
flow = Flow.objects.create(
|
||||||
@ -77,7 +77,7 @@ class TestFlowsAPI(APITestCase):
|
|||||||
|
|
||||||
def test_api_diagram_no_stages(self):
|
def test_api_diagram_no_stages(self):
|
||||||
"""Test flow diagram with no stages."""
|
"""Test flow diagram with no stages."""
|
||||||
user = User.objects.get(username="akadmin")
|
user = create_test_admin_user()
|
||||||
self.client.force_login(user)
|
self.client.force_login(user)
|
||||||
|
|
||||||
flow = Flow.objects.create(
|
flow = Flow.objects.create(
|
||||||
@ -93,7 +93,7 @@ class TestFlowsAPI(APITestCase):
|
|||||||
|
|
||||||
def test_types(self):
|
def test_types(self):
|
||||||
"""Test Stage's types endpoint"""
|
"""Test Stage's types endpoint"""
|
||||||
user = User.objects.get(username="akadmin")
|
user = create_test_admin_user()
|
||||||
self.client.force_login(user)
|
self.client.force_login(user)
|
||||||
|
|
||||||
response = self.client.get(
|
response = self.client.get(
|
||||||
|
@ -545,6 +545,7 @@ class TestFlowExecutor(APITestCase):
|
|||||||
"password_fields": False,
|
"password_fields": False,
|
||||||
"primary_action": "Log in",
|
"primary_action": "Log in",
|
||||||
"sources": [],
|
"sources": [],
|
||||||
|
"show_source_labels": False,
|
||||||
"user_fields": [UserFields.E_MAIL],
|
"user_fields": [UserFields.E_MAIL],
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
@@ -6,7 +6,7 @@ from django.test.client import RequestFactory
 from django.urls.base import reverse
 from rest_framework.test import APITestCase

-from authentik.core.models import User
+from authentik.core.tests.utils import create_test_admin_user
 from authentik.flows.challenge import ChallengeTypes
 from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding, InvalidResponseAction
 from authentik.stages.dummy.models import DummyStage
@@ -18,7 +18,7 @@ class TestFlowInspector(APITestCase):

     def setUp(self):
         self.request_factory = RequestFactory()
-        self.admin = User.objects.get(username="akadmin")
+        self.admin = create_test_admin_user()
         self.client.force_login(self.admin)

     def test(self):
@@ -60,6 +60,7 @@ class TestFlowInspector(APITestCase):
                 "password_fields": False,
                 "primary_action": "Log in",
                 "sources": [],
+                "show_source_labels": False,
                 "user_fields": ["username"],
             },
         )
@@ -76,7 +77,7 @@ class TestFlowInspector(APITestCase):

         self.client.post(
             reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
-            {"uid_field": "akadmin"},
+            {"uid_field": self.admin.username},
             follow=True,
         )

@@ -88,5 +89,5 @@ class TestFlowInspector(APITestCase):
         self.assertEqual(content["plans"][0]["current_stage"]["stage_obj"]["name"], "ident")
         self.assertEqual(content["current_plan"]["current_stage"]["stage_obj"]["name"], "dummy2")
         self.assertEqual(
-            content["current_plan"]["plan_context"]["pending_user"]["username"], "akadmin"
+            content["current_plan"]["plan_context"]["pending_user"]["username"], self.admin.username
         )
@@ -17,13 +17,13 @@ def model_tester_factory(test_model: Type[Stage]) -> Callable:

     def tester(self: TestModels):
         model_class = None
-        if test_model._meta.abstract:
+        if test_model._meta.abstract:  # pragma: no cover
             model_class = test_model.__bases__[0]()
         else:
             model_class = test_model()
         self.assertTrue(issubclass(model_class.type, StageView))
         self.assertIsNotNone(test_model.component)
-        _ = test_model.ui_user_settings
+        _ = model_class.ui_user_settings()

     return tester

@@ -2,6 +2,7 @@
 from django.test import TestCase
 from django.urls import reverse

+from authentik.core.tests.utils import create_test_flow
 from authentik.flows.models import Flow, FlowDesignation
 from authentik.flows.planner import FlowPlan
 from authentik.flows.views.executor import SESSION_KEY_PLAN
@@ -12,9 +13,8 @@ class TestHelperView(TestCase):

     def test_default_view(self):
         """Test that ToDefaultFlow returns the expected URL"""
-        flow = Flow.objects.filter(
-            designation=FlowDesignation.INVALIDATION,
-        ).first()
+        Flow.objects.filter(designation=FlowDesignation.INVALIDATION).delete()
+        flow = create_test_flow(FlowDesignation.INVALIDATION)
         response = self.client.get(
             reverse("authentik_flows:default-invalidation"),
         )
@@ -24,9 +24,8 @@ class TestHelperView(TestCase):

     def test_default_view_invalid_plan(self):
         """Test that ToDefaultFlow returns the expected URL (with an invalid plan)"""
-        flow = Flow.objects.filter(
-            designation=FlowDesignation.INVALIDATION,
-        ).first()
+        Flow.objects.filter(designation=FlowDesignation.INVALIDATION).delete()
+        flow = create_test_flow(FlowDesignation.INVALIDATION)
         plan = FlowPlan(flow_pk=flow.pk.hex + "aa")
         session = self.client.session
         session[SESSION_KEY_PLAN] = plan
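
`create_test_flow`, imported above from `authentik.core.tests.utils`, is likewise only referenced in this diff, never defined. A plausible minimal sketch, assuming it simply creates a flow with a unique slug for the requested designation:

from uuid import uuid4

from authentik.flows.models import Flow, FlowDesignation


def create_test_flow(designation: str = FlowDesignation.STAGE_CONFIGURATION) -> Flow:
    """Create a throw-away flow for a single test."""
    uid = str(uuid4())
    return Flow.objects.create(name=uid, slug=uid, title=uid, designation=designation)
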
@@ -19,6 +19,8 @@ from drf_spectacular.utils import OpenApiParameter, PolymorphicProxySerializer,
 from rest_framework.permissions import AllowAny
 from rest_framework.views import APIView
 from sentry_sdk import capture_exception
+from sentry_sdk.api import set_tag
+from sentry_sdk.hub import Hub
 from structlog.stdlib import BoundLogger, get_logger

 from authentik.core.models import USER_ATTRIBUTE_DEBUG
@@ -34,8 +36,16 @@ from authentik.flows.challenge import (
     WithUserInfoChallenge,
 )
 from authentik.flows.exceptions import EmptyFlowException, FlowNonApplicableException
-from authentik.flows.models import ConfigurableStage, Flow, FlowDesignation, FlowStageBinding, Stage
+from authentik.flows.models import (
+    ConfigurableStage,
+    Flow,
+    FlowDesignation,
+    FlowStageBinding,
+    FlowToken,
+    Stage,
+)
 from authentik.flows.planner import (
+    PLAN_CONTEXT_IS_RESTORED,
     PLAN_CONTEXT_PENDING_USER,
     PLAN_CONTEXT_REDIRECT,
     FlowPlan,
@@ -53,7 +63,9 @@ NEXT_ARG_NAME = "next"
 SESSION_KEY_PLAN = "authentik_flows_plan"
 SESSION_KEY_APPLICATION_PRE = "authentik_flows_application_pre"
 SESSION_KEY_GET = "authentik_flows_get"
+SESSION_KEY_POST = "authentik_flows_post"
 SESSION_KEY_HISTORY = "authentik_flows_history"
+QS_KEY_TOKEN = "flow_token"  # nosec


 def challenge_types():
@@ -116,6 +128,7 @@ class FlowExecutorView(APIView):
         super().setup(request, flow_slug=flow_slug)
         self.flow = get_object_or_404(Flow.objects.select_related(), slug=flow_slug)
         self._logger = get_logger().bind(flow_slug=flow_slug)
+        set_tag("authentik.flow", self.flow.slug)

     def handle_invalid_flow(self, exc: BaseException) -> HttpResponse:
         """When a flow is non-applicable check if user is on the correct domain"""
@@ -126,71 +139,100 @@ class FlowExecutorView(APIView):
         message = exc.__doc__ if exc.__doc__ else str(exc)
         return self.stage_invalid(error_message=message)

+    def _check_flow_token(self, get_params: QueryDict):
+        """Check if the user is using a flow token to restore a plan"""
+        tokens = FlowToken.filter_not_expired(key=get_params[QS_KEY_TOKEN])
+        if not tokens.exists():
+            return False
+        token: FlowToken = tokens.first()
+        try:
+            plan = token.plan
+        except (AttributeError, EOFError, ImportError, IndexError) as exc:
+            LOGGER.warning("f(exec): Failed to restore token plan", exc=exc)
+        finally:
+            token.delete()
+        if not isinstance(plan, FlowPlan):
+            return None
+        plan.context[PLAN_CONTEXT_IS_RESTORED] = True
+        self._logger.debug("f(exec): restored flow plan from token", plan=plan)
+        return plan
+
     # pylint: disable=unused-argument, too-many-return-statements
     def dispatch(self, request: HttpRequest, flow_slug: str) -> HttpResponse:
-        # Early check if there's an active Plan for the current session
-        if SESSION_KEY_PLAN in self.request.session:
-            self.plan = self.request.session[SESSION_KEY_PLAN]
-            if self.plan.flow_pk != self.flow.pk.hex:
-                self._logger.warning(
-                    "f(exec): Found existing plan for other flow, deleting plan",
-                )
-                # Existing plan is deleted from session and instance
-                self.plan = None
-                self.cancel()
-            self._logger.debug("f(exec): Continuing existing plan")
-
-        # Don't check session again as we've either already loaded the plan or we need to plan
-        if not self.plan:
-            request.session[SESSION_KEY_HISTORY] = []
-            self._logger.debug("f(exec): No active Plan found, initiating planner")
-            try:
-                self.plan = self._initiate_plan()
-            except FlowNonApplicableException as exc:
-                self._logger.warning("f(exec): Flow not applicable to current user", exc=exc)
-                return to_stage_response(self.request, self.handle_invalid_flow(exc))
-            except EmptyFlowException as exc:
-                self._logger.warning("f(exec): Flow is empty", exc=exc)
-                # To match behaviour with loading an empty flow plan from cache,
-                # we don't show an error message here, but rather call _flow_done()
-                return self._flow_done()
-            # Initial flow request, check if we have an upstream query string passed in
-            request.session[SESSION_KEY_GET] = QueryDict(request.GET.get("query", ""))
-        # We don't save the Plan after getting the next stage
-        # as it hasn't been successfully passed yet
-        try:
-            # This is the first time we actually access any attribute on the selected plan
-            # if the cached plan is from an older version, it might have different attributes
-            # in which case we just delete the plan and invalidate everything
-            next_binding = self.plan.next(self.request)
-        except Exception as exc:  # pylint: disable=broad-except
-            self._logger.warning("f(exec): found incompatible flow plan, invalidating run", exc=exc)
-            keys = cache.keys("flow_*")
-            cache.delete_many(keys)
-            return self.stage_invalid()
-        if not next_binding:
-            self._logger.debug("f(exec): no more stages, flow is done.")
-            return self._flow_done()
-        self.current_binding = next_binding
-        self.current_stage = next_binding.stage
-        self._logger.debug(
-            "f(exec): Current stage",
-            current_stage=self.current_stage,
-            flow_slug=self.flow.slug,
-        )
-        try:
-            stage_cls = self.current_stage.type
-        except NotImplementedError as exc:
-            self._logger.debug("Error getting stage type", exc=exc)
-            return self.stage_invalid()
-        self.current_stage_view = stage_cls(self)
-        self.current_stage_view.args = self.args
-        self.current_stage_view.kwargs = self.kwargs
-        self.current_stage_view.request = request
-        try:
-            return super().dispatch(request)
-        except InvalidStageError as exc:
-            return self.stage_invalid(str(exc))
+        with Hub.current.start_span(
+            op="authentik.flow.executor.dispatch", description=self.flow.slug
+        ) as span:
+            span.set_data("authentik Flow", self.flow.slug)
+            get_params = QueryDict(request.GET.get("query", ""))
+            if QS_KEY_TOKEN in get_params:
+                plan = self._check_flow_token(get_params)
+                if plan:
+                    self.request.session[SESSION_KEY_PLAN] = plan
+            # Early check if there's an active Plan for the current session
+            if SESSION_KEY_PLAN in self.request.session:
+                self.plan = self.request.session[SESSION_KEY_PLAN]
+                if self.plan.flow_pk != self.flow.pk.hex:
+                    self._logger.warning(
+                        "f(exec): Found existing plan for other flow, deleting plan",
+                    )
+                    # Existing plan is deleted from session and instance
+                    self.plan = None
+                    self.cancel()
+                self._logger.debug("f(exec): Continuing existing plan")
+
+            # Don't check session again as we've either already loaded the plan or we need to plan
+            if not self.plan:
+                request.session[SESSION_KEY_HISTORY] = []
+                self._logger.debug("f(exec): No active Plan found, initiating planner")
+                try:
+                    self.plan = self._initiate_plan()
+                except FlowNonApplicableException as exc:
+                    self._logger.warning("f(exec): Flow not applicable to current user", exc=exc)
+                    return to_stage_response(self.request, self.handle_invalid_flow(exc))
+                except EmptyFlowException as exc:
+                    self._logger.warning("f(exec): Flow is empty", exc=exc)
+                    # To match behaviour with loading an empty flow plan from cache,
+                    # we don't show an error message here, but rather call _flow_done()
+                    return self._flow_done()
+                # Initial flow request, check if we have an upstream query string passed in
+                request.session[SESSION_KEY_GET] = get_params
+            # We don't save the Plan after getting the next stage
+            # as it hasn't been successfully passed yet
+            try:
+                # This is the first time we actually access any attribute on the selected plan
+                # if the cached plan is from an older version, it might have different attributes
+                # in which case we just delete the plan and invalidate everything
+                next_binding = self.plan.next(self.request)
+            except Exception as exc:  # pylint: disable=broad-except
+                self._logger.warning(
+                    "f(exec): found incompatible flow plan, invalidating run", exc=exc
+                )
+                keys = cache.keys("flow_*")
+                cache.delete_many(keys)
+                return self.stage_invalid()
+            if not next_binding:
+                self._logger.debug("f(exec): no more stages, flow is done.")
+                return self._flow_done()
+            self.current_binding = next_binding
+            self.current_stage = next_binding.stage
+            self._logger.debug(
+                "f(exec): Current stage",
+                current_stage=self.current_stage,
+                flow_slug=self.flow.slug,
+            )
+            try:
+                stage_cls = self.current_stage.type
+            except NotImplementedError as exc:
+                self._logger.debug("Error getting stage type", exc=exc)
+                return self.stage_invalid()
+            self.current_stage_view = stage_cls(self)
+            self.current_stage_view.args = self.args
+            self.current_stage_view.kwargs = self.kwargs
+            self.current_stage_view.request = request
+            try:
+                return super().dispatch(request)
+            except InvalidStageError as exc:
+                return self.stage_invalid(str(exc))

     def handle_exception(self, exc: Exception) -> HttpResponse:
         """Handle exception in stage execution"""
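
The new `_check_flow_token()` path above lets a flow run be resumed later: a `FlowToken` key passed under `QS_KEY_TOKEN` ("flow_token") restores the stored plan and marks it with `PLAN_CONTEXT_IS_RESTORED`. A rough sketch of how a caller might build such a resume link; the URL name and the `flow`/`key` attributes on the token are assumptions, not shown in this diff:

from urllib.parse import urlencode

from django.urls import reverse

from authentik.flows.models import FlowToken
from authentik.flows.views.executor import QS_KEY_TOKEN


def flow_resume_url(token: FlowToken) -> str:
    """Build a link that restores the flow plan stored on the token."""
    base = reverse("authentik_core:if-flow", kwargs={"flow_slug": token.flow.slug})
    return f"{base}?{urlencode({QS_KEY_TOKEN: token.key})}"
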
@@ -232,8 +274,15 @@ class FlowExecutorView(APIView):
             stage=self.current_stage,
         )
         try:
-            stage_response = self.current_stage_view.get(request, *args, **kwargs)
-            return to_stage_response(request, stage_response)
+            with Hub.current.start_span(
+                op="authentik.flow.executor.stage",
+                description=class_to_path(self.current_stage_view.__class__),
+            ) as span:
+                span.set_data("Method", "GET")
+                span.set_data("authentik Stage", self.current_stage_view)
+                span.set_data("authentik Flow", self.flow.slug)
+                stage_response = self.current_stage_view.get(request, *args, **kwargs)
+                return to_stage_response(request, stage_response)
         except Exception as exc:  # pylint: disable=broad-except
             return self.handle_exception(exc)

@@ -269,8 +318,15 @@ class FlowExecutorView(APIView):
             stage=self.current_stage,
         )
         try:
-            stage_response = self.current_stage_view.post(request, *args, **kwargs)
-            return to_stage_response(request, stage_response)
+            with Hub.current.start_span(
+                op="authentik.flow.executor.stage",
+                description=class_to_path(self.current_stage_view.__class__),
+            ) as span:
+                span.set_data("Method", "POST")
+                span.set_data("authentik Stage", self.current_stage_view)
+                span.set_data("authentik Flow", self.flow.slug)
+                stage_response = self.current_stage_view.post(request, *args, **kwargs)
+                return to_stage_response(request, stage_response)
         except Exception as exc:  # pylint: disable=broad-except
             return self.handle_exception(exc)

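
Both the GET and POST handlers above wrap the stage call in the same sentry-sdk tracing idiom: open a child span, attach context with `set_data`, and run the actual work inside the `with` block. A minimal standalone sketch of that pattern (the `op` string and data keys here are placeholders):

from sentry_sdk.hub import Hub


def run_traced(func, *args, **kwargs):
    """Run func inside a Sentry span and record a little context about the call."""
    with Hub.current.start_span(op="authentik.example.operation", description=func.__name__) as span:
        span.set_data("args_count", len(args))
        return func(*args, **kwargs)
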
@@ -315,6 +371,12 @@ class FlowExecutorView(APIView):
             NEXT_ARG_NAME, "authentik_core:root-redirect"
         )
         self.cancel()
+        Event.new(
+            action=EventAction.FLOW_EXECUTION,
+            flow=self.flow,
+            designation=self.flow.designation,
+            successful=True,
+        ).from_http(self.request)
         return to_stage_response(self.request, redirect_with_qs(next_param))

     def stage_ok(self) -> HttpResponse:
@@ -87,9 +87,7 @@ class FlowInspectorView(APIView):
     @extend_schema(
         responses={
             200: FlowInspectionSerializer(),
-            400: OpenApiResponse(
-                description="No flow plan in session."
-            ),  # This error can be raised by the email stage
+            400: OpenApiResponse(description="No flow plan in session."),
         },
         request=OpenApiTypes.NONE,
         operation_id="flows_inspector_get",
@@ -106,7 +104,10 @@ class FlowInspectorView(APIView):
         if SESSION_KEY_PLAN in request.session:
             current_plan: FlowPlan = request.session[SESSION_KEY_PLAN]
         else:
-            current_plan = request.session[SESSION_KEY_HISTORY][-1]
+            try:
+                current_plan = request.session[SESSION_KEY_HISTORY][-1]
+            except IndexError:
+                return Response(status=400)
         is_completed = True
         current_serializer = FlowInspectorPlanSerializer(
             instance=current_plan, context={"request": request}
@@ -3,7 +3,9 @@ import os
 from collections.abc import Mapping
 from contextlib import contextmanager
 from glob import glob
-from json import dumps
+from json import dumps, loads
+from json.decoder import JSONDecodeError
+from sys import argv, stderr
 from time import time
 from typing import Any
 from urllib.parse import urlparse
@@ -59,7 +61,7 @@ class ConfigLoader:
             "timestamp": time(),
         }
         output.update(kwargs)
-        print(dumps(output))
+        print(dumps(output), file=stderr)

     def update(self, root: dict[str, Any], updatee: dict[str, Any]) -> dict[str, Any]:
         """Recursively update dictionary"""
@@ -81,8 +83,8 @@ class ConfigLoader:
             try:
                 with open(url.path, "r", encoding="utf8") as _file:
                     value = _file.read()
-            except OSError:
-                self._log("error", f"Failed to read config value from {url.path}")
+            except OSError as exc:
+                self._log("error", f"Failed to read config value from {url.path}: {exc}")
                 value = url.query
         return value

@@ -123,6 +125,11 @@ class ConfigLoader:
                 if dot_part not in current_obj:
                     current_obj[dot_part] = {}
                 current_obj = current_obj[dot_part]
+            # Check if the value is json, and try to load it
+            try:
+                value = loads(value)
+            except JSONDecodeError:
+                pass
             current_obj[dot_parts[-1]] = value
             idx += 1
         if idx > 0:
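
With the `loads()` call added above, an environment variable that contains valid JSON is stored as a structured value in the config tree, while anything that fails to parse keeps its raw string form. An illustrative example; the variable name and the `update_from_env()`/`y()` accessors are assumed from the surrounding class rather than shown in this excerpt:

import os

from authentik.lib.config import ConfigLoader

# A JSON list in the environment now becomes a Python list in the config tree.
os.environ["AUTHENTIK_FOOTER_LINKS"] = '[{"name": "Docs", "href": "https://goauthentik.io/docs/"}]'

config = ConfigLoader()
config.update_from_env()
links = config.y("footer_links")
assert isinstance(links, list) and links[0]["name"] == "Docs"
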
@@ -174,3 +181,9 @@ class ConfigLoader:


 CONFIG = ConfigLoader()
+
+if __name__ == "__main__":
+    if len(argv) < 2:
+        print(dumps(CONFIG.raw, indent=4))
+    else:
+        print(CONFIG.y(argv[1]))
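
With this `__main__` block the module can be invoked directly: running it without arguments dumps the raw configuration as indented JSON, while passing a dotted path such as postgresql.port prints that single resolved value.
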
@@ -5,12 +5,21 @@ postgresql:
   user: authentik
   port: 5432
   password: 'env://POSTGRES_PASSWORD'
+  backup:
+    enabled: true
+  s3_backup:
+    access_key: ""
+    secret_key: ""
+    bucket: ""
+    region: eu-central-1
+    host: ""
+    location: ""
+    insecure_skip_verify: false

 web:
   listen: 0.0.0.0:9000
   listen_tls: 0.0.0.0:9443
   listen_metrics: 0.0.0.0:9300
-  load_local_files: false
   outpost_port_offset: 0

 redis:
@@ -37,6 +46,7 @@ error_reporting:
   enabled: false
   environment: customer
   send_pii: false
+  sample_rate: 0.5

 # Global email settings
 email:
@@ -54,16 +64,22 @@ outposts:
   # %(type)s: Outpost type; proxy, ldap, etc
   # %(version)s: Current version; 2021.4.1
   # %(build_hash)s: Build hash if you're running a beta version
-  container_image_base: env://AUTHENTIK_OUTPOSTS__DOCKER_IMAGE_BASE?goauthentik.io/%(type)s:%(version)s
+  container_image_base: goauthentik.io/%(type)s:%(version)s

 cookie_domain: null
 disable_update_check: false
+disable_startup_analytics: false
 avatars: env://AUTHENTIK_AUTHENTIK__AVATARS?gravatar
 geoip: "./GeoLite2-City.mmdb"

-# Can't currently be configured via environment variables, only yaml
 footer_links:
   - name: Documentation
     href: https://goauthentik.io/docs/?utm_source=authentik
   - name: authentik Website
     href: https://goauthentik.io/?utm_source=authentik

+default_user_change_email: true
+default_user_change_username: true
+
+gdpr_compliance: true
+cert_discovery_dir: /certs
@@ -80,8 +80,9 @@ class BaseEvaluator:
         """Parse and evaluate expression. If the syntax is incorrect, a SyntaxError is raised.
         If any exception is raised during execution, it is raised.
         The result is returned without any type-checking."""
-        with Hub.current.start_span(op="lib.evaluator.evaluate") as span:
+        with Hub.current.start_span(op="authentik.lib.evaluator.evaluate") as span:
             span: Span
+            span.description = self._filename
             span.set_data("expression", expression_source)
             param_keys = self._context.keys()
             try:
|
@ -66,3 +66,11 @@ class DomainlessURLValidator(URLValidator):
|
|||||||
r"\Z",
|
r"\Z",
|
||||||
re.IGNORECASE,
|
re.IGNORECASE,
|
||||||
)
|
)
|
||||||
|
self.schemes = ["http", "https", "blank"] + list(self.schemes)
|
||||||
|
|
||||||
|
def __call__(self, value: str):
|
||||||
|
# Check if the scheme is valid.
|
||||||
|
scheme = value.split("://")[0].lower()
|
||||||
|
if scheme not in self.schemes:
|
||||||
|
value = "default" + value
|
||||||
|
super().__call__(value)
|
||||||
|
@@ -8,11 +8,13 @@ from botocore.exceptions import BotoCoreError
 from celery.exceptions import CeleryError
 from channels.middleware import BaseMiddleware
 from channels_redis.core import ChannelFull
+from django.conf import settings
 from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation, ValidationError
 from django.db import InternalError, OperationalError, ProgrammingError
 from django.http.response import Http404
 from django_redis.exceptions import ConnectionInterrupted
 from docker.errors import DockerException
+from h11 import LocalProtocolError
 from ldap3.core.exceptions import LDAPException
 from redis.exceptions import ConnectionError as RedisConnectionError
 from redis.exceptions import RedisError, ResponseError
@@ -72,6 +74,7 @@ def before_send(event: dict, hint: dict) -> Optional[dict]:
         # websocket errors
         ChannelFull,
         WebSocketException,
+        LocalProtocolError,
         # rest_framework error
         APIException,
         # celery errors
@@ -90,6 +93,7 @@ def before_send(event: dict, hint: dict) -> Optional[dict]:
         # End-user errors
         Http404,
     )
+    exc_value = None
     if "exc_info" in hint:
         _, exc_value, _ = hint["exc_info"]
         if isinstance(exc_value, ignored_classes):
@@ -103,6 +107,10 @@ def before_send(event: dict, hint: dict) -> Optional[dict]:
         "asyncio",
         "multiprocessing",
         "django_redis",
+        "django.security.DisallowedHost",
     ]:
         return None
+    LOGGER.debug("sending event to sentry", exc=exc_value, source_logger=event.get("logger", None))
+    if settings.DEBUG:
+        return None
     return event
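
For context, a `before_send` hook like the one extended above is attached when the Sentry SDK is initialised. Roughly, with a placeholder DSN and options rather than authentik's actual settings:

import sentry_sdk

from authentik.lib.sentry import before_send

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    before_send=before_send,  # drop ignored exception classes and noisy loggers
    traces_sample_rate=0.5,  # illustrative sampling rate
)
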
@@ -1,7 +1,7 @@
 """Test Evaluator base functions"""
 from django.test import TestCase

-from authentik.core.models import User
+from authentik.core.tests.utils import create_test_admin_user
 from authentik.lib.expression.evaluator import BaseEvaluator


@@ -19,12 +19,11 @@ class TestEvaluator(TestCase):

     def test_user_by(self):
         """Test expr_user_by"""
-        self.assertIsNotNone(BaseEvaluator.expr_user_by(username="akadmin"))
+        user = create_test_admin_user()
+        self.assertIsNotNone(BaseEvaluator.expr_user_by(username=user.username))
         self.assertIsNone(BaseEvaluator.expr_user_by(username="bar"))
         self.assertIsNone(BaseEvaluator.expr_user_by(foo="bar"))

     def test_is_group_member(self):
         """Test expr_is_group_member"""
-        self.assertFalse(
-            BaseEvaluator.expr_is_group_member(User.objects.get(username="akadmin"), name="test")
-        )
+        self.assertFalse(BaseEvaluator.expr_is_group_member(create_test_admin_user(), name="test"))
@@ -1,17 +1,24 @@
 """Test HTTP Helpers"""
 from django.test import RequestFactory, TestCase

-from authentik.core.models import USER_ATTRIBUTE_CAN_OVERRIDE_IP, Token, TokenIntents, User
+from authentik.core.models import USER_ATTRIBUTE_CAN_OVERRIDE_IP, Token, TokenIntents
+from authentik.core.tests.utils import create_test_admin_user
 from authentik.lib.utils.http import OUTPOST_REMOTE_IP_HEADER, OUTPOST_TOKEN_HEADER, get_client_ip
+from authentik.lib.views import bad_request_message


 class TestHTTP(TestCase):
     """Test HTTP Helpers"""

     def setUp(self) -> None:
-        self.user = User.objects.get(username="akadmin")
+        self.user = create_test_admin_user()
         self.factory = RequestFactory()

+    def test_bad_request_message(self):
+        """test bad_request_message"""
+        request = self.factory.get("/")
+        self.assertEqual(bad_request_message(request, "foo").status_code, 400)
+
     def test_normal(self):
         """Test normal request"""
         request = self.factory.get("/")
@@ -4,6 +4,7 @@ from typing import Any, Optional

 from django.http import HttpRequest
 from requests.sessions import Session
+from sentry_sdk.hub import Hub
 from structlog.stdlib import get_logger

 from authentik import ENV_GIT_HASH_KEY, __version__
@@ -52,6 +53,12 @@ def _get_outpost_override_ip(request: HttpRequest) -> Optional[str]:
             fake_ip=fake_ip,
         )
         return None
+    # Update sentry scope to include correct IP
+    user = Hub.current.scope._user
+    if not user:
+        user = {}
+    user["ip_address"] = fake_ip
+    Hub.current.scope.set_user(user)
     return fake_ip


@@ -1,8 +1,13 @@
 """authentik lib reflection utilities"""
+import os
 from importlib import import_module
+from pathlib import Path
 from typing import Union

 from django.conf import settings
+from kubernetes.config.incluster_config import SERVICE_HOST_ENV_NAME
+
+from authentik.lib.config import CONFIG


 def all_subclasses(cls, sort=True):
@@ -42,3 +47,16 @@ def get_apps():
     for _app in apps.get_app_configs():
         if _app.name.startswith("authentik"):
             yield _app
+
+
+def get_env() -> str:
+    """Get environment in which authentik is currently running"""
+    if SERVICE_HOST_ENV_NAME in os.environ:
+        return "kubernetes"
+    if "CI" in os.environ:
+        return "ci"
+    if Path("/tmp/authentik-mode").exists():  # nosec
+        return "compose"
+    if CONFIG.y_bool("debug"):
+        return "dev"
+    return "custom"
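
The new `get_env()` helper classifies the runtime as kubernetes, ci, compose, dev or custom. A hedged example of how such a value could be consumed, for instance as context on an error report or analytics ping; the consuming function is hypothetical:

from authentik import __version__
from authentik.lib.utils.reflection import get_env


def runtime_context() -> dict[str, str]:
    """Hypothetical helper: context that could be attached to a report."""
    return {
        "version": __version__,
        "environment": get_env(),  # "kubernetes", "ci", "compose", "dev" or "custom"
    }
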
Some files were not shown because too many files have changed in this diff.